From 1840416b9e8892a685202f30b4079fd04607151f Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Thu, 2 Jun 2011 16:21:08 -0500 Subject: Add a PGP key field on the dev profile Signed-off-by: Dan McGee --- devel/views.py | 7 + .../0049_auto__add_field_userprofile_pgp_key.py | 160 +++++++++++++++++++++ main/models.py | 13 ++ main/templatetags/pgp.py | 19 +++ settings.py | 2 + templates/public/developer_list.html | 9 +- 6 files changed, 207 insertions(+), 3 deletions(-) create mode 100644 main/migrations/0049_auto__add_field_userprofile_pgp_key.py create mode 100644 main/templatetags/pgp.py diff --git a/devel/views.py b/devel/views.py index 475376ad..25ad2ccf 100644 --- a/devel/views.py +++ b/devel/views.py @@ -102,6 +102,13 @@ def clean(self): return self.cleaned_data class UserProfileForm(forms.ModelForm): + def clean_pgp_key(self): + data = self.cleaned_data['pgp_key'] + # strip 0x prefix if provided; store uppercase + if data.startswith('0x'): + data = data[2:] + return data.upper() + class Meta: model = UserProfile exclude = ['allowed_repos', 'user'] diff --git a/main/migrations/0049_auto__add_field_userprofile_pgp_key.py b/main/migrations/0049_auto__add_field_userprofile_pgp_key.py new file mode 100644 index 00000000..10fb9390 --- /dev/null +++ b/main/migrations/0049_auto__add_field_userprofile_pgp_key.py @@ -0,0 +1,160 @@ +# encoding: utf-8 +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +class Migration(SchemaMigration): + + def forwards(self, orm): + db.add_column('user_profiles', 'pgp_key', self.gf('django.db.models.fields.CharField')(max_length=40, null=True, blank=True), keep_default=False) + + def backwards(self, orm): + db.delete_column('user_profiles', 'pgp_key') + + + models = { + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': 
('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'main.arch': { + 'Meta': {'ordering': "['name']", 'object_name': 'Arch', 'db_table': "'arches'"}, + 'agnostic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) + }, + 'main.donor': { + 'Meta': {'ordering': "['name']", 'object_name': 'Donor', 'db_table': "'donors'"}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'}) + }, + 'main.package': { + 'Meta': {'ordering': "('pkgname',)", 'object_name': 'Package', 'db_table': "'packages'"}, + 'arch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Arch']"}), + 'build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'compressed_size': ('django.db.models.fields.BigIntegerField', [], {'null': 'True'}), + 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), + 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'files_last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'flag_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'installed_size': ('django.db.models.fields.BigIntegerField', [], {'null': 'True'}), + 'last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}), + 'packager_str': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgdesc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), + 'pkgname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'repo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Repo']"}), + 'url': ('django.db.models.fields.CharField', [], 
{'max_length': '255', 'null': 'True'}) + }, + 'main.packagedepend': { + 'Meta': {'object_name': 'PackageDepend', 'db_table': "'package_depends'"}, + 'depname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'depvcmp': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}), + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'optional': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.packagefile': { + 'Meta': {'object_name': 'PackageFile', 'db_table': "'package_files'"}, + 'directory': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_directory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.repo': { + 'Meta': {'ordering': "['name']", 'object_name': 'Repo', 'db_table': "'repos'"}, + 'bugs_category': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), + 'bugs_project': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'staging': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'svn_root': ('django.db.models.fields.CharField', [], {'max_length': '64'}), + 'testing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) + }, + 'main.signoff': { + 'Meta': {'object_name': 'Signoff'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}), + 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'main.todolist': { + 'Meta': {'object_name': 'Todolist', 'db_table': "'todolists'"}, + 'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), + 'date_added': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}), + 'description': ('django.db.models.fields.TextField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'main.todolistpkg': { + 'Meta': {'unique_together': "(('list', 'pkg'),)", 'object_name': 'TodolistPkg', 'db_table': "'todolist_pkgs'"}, + 'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'list': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Todolist']"}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.userprofile': { + 'Meta': {'object_name': 'UserProfile', 'db_table': "'user_profiles'"}, + 'alias': ('django.db.models.fields.CharField', [], {'max_length': '50'}), + 
'allowed_repos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Repo']", 'symmetrical': 'False', 'blank': 'True'}), + 'favorite_distros': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'interests': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'languages': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'location': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'notify': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'occupation': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'other_contact': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), + 'pgp_key': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'picture': ('django.db.models.fields.files.FileField', [], {'default': "'devs/silhouette.png'", 'max_length': '100'}), + 'public_email': ('django.db.models.fields.CharField', [], {'max_length': '50'}), + 'roles': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'time_zone': ('django.db.models.fields.CharField', [], {'default': "'UTC'", 'max_length': '100'}), + 'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'userprofile'", 'unique': 'True', 'to': "orm['auth.User']"}), + 'website': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), + 'yob': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}) + } + } + + complete_apps = ['main'] diff --git a/main/models.py b/main/models.py index 59dc154b..926e7377 100644 --- a/main/models.py +++ b/main/models.py @@ -1,6 +1,8 @@ from django.db import models +from django.core.validators import RegexValidator from django.contrib.auth.models import User from django.contrib.sites.models import Site +from django.forms import ValidationError from main.utils import cache_function, make_choice from packages.models import PackageRelation @@ -9,6 +11,12 @@ from itertools import groupby import pytz +def validate_pgp_key_length(value): + if len(value) not in (8, 16, 40): + raise ValidationError( + u'Ensure this value has 8, 16, or 40 characters (it has %d).' 
% len(value),
+            'pgp_key_value')
+
 class UserProfile(models.Model):
     notify = models.BooleanField(
         "Send notifications",
@@ -26,6 +34,11 @@ class UserProfile(models.Model):
         max_length=50,
         help_text="Required field")
     other_contact = models.CharField(max_length=100, null=True, blank=True)
+    pgp_key = models.CharField(max_length=40, null=True, blank=True,
+            verbose_name="PGP key", validators=[RegexValidator(r'^[0-9A-F]+$',
+                "Ensure this value consists of only hex characters.", 'hex_char'),
+                validate_pgp_key_length],
+            help_text="PGP Key ID or fingerprint (8, 16, or 40 hex digits)")
     website = models.CharField(max_length=200, null=True, blank=True)
     yob = models.IntegerField("Year of birth", null=True, blank=True)
     location = models.CharField(max_length=50, null=True, blank=True)
diff --git a/main/templatetags/pgp.py b/main/templatetags/pgp.py
new file mode 100644
index 00000000..d9126dbd
--- /dev/null
+++ b/main/templatetags/pgp.py
@@ -0,0 +1,19 @@
+from django import template
+from django.conf import settings
+
+register = template.Library()
+
+@register.simple_tag
+def pgp_key_link(key_id):
+    if not key_id:
+        return "Unknown"
+    # Something like 'pgp.mit.edu:11371'
+    pgp_server = getattr(settings, 'PGP_SERVER', None)
+    if not pgp_server:
+        return "0x%s" % key_id
+    url = 'http://%s/pks/lookup?op=vindex&fingerprint=on&exact=on&search=0x%s' % \
+            (pgp_server, key_id)
+    values = (url, key_id, key_id)
+    return '<a href="%s" title="PGP key search for 0x%s">0x%s</a>' % values
+
+# vim: set ts=4 sw=4 et:
diff --git a/settings.py b/settings.py
index db1c93d3..e0d1b1a2 100644
--- a/settings.py
+++ b/settings.py
@@ -107,6 +107,8 @@
     'releng',
 )
 
+PGP_SERVER = 'pgp.mit.edu:11371'
+
 ## Import local settings
 from local_settings import *
diff --git a/templates/public/developer_list.html b/templates/public/developer_list.html
index bb03f439..2abbbfe4 100644
--- a/templates/public/developer_list.html
+++ b/templates/public/developer_list.html
@@ -1,12 +1,12 @@
-
+{% load pgp %} +

{% for dev in dev_list %} {{ dev.first_name }}{{ dev.last_name.0|capfirst}}    {% endfor %}

-
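(An illustrative aside, not part of the patch itself: the validation pipeline
this commit sets up — clean_pgp_key() strips an optional "0x" prefix and
uppercases the value, then the hex RegexValidator and validate_pgp_key_length
run against the stored form. The standalone helper below is a made-up name
that condenses those three steps into one acceptance check.)

    import re

    def is_valid_pgp_key(data):
        # clean_pgp_key() normalization: drop "0x" prefix, store uppercase
        if data.startswith('0x'):
            data = data[2:]
        data = data.upper()
        # RegexValidator(r'^[0-9A-F]+$', ...): hex characters only
        if not re.match(r'^[0-9A-F]+$', data):
            return False
        # validate_pgp_key_length: short key ID, long key ID, or fingerprint
        return len(data) in (8, 16, 40)

    assert is_valid_pgp_key('0xdeadbeef')   # 8-digit short key ID
    assert is_valid_pgp_key('A' * 40)       # 40-digit full fingerprint
    assert not is_valid_pgp_key('xyz99')    # non-hex input is rejected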
@@ -31,13 +31,16 @@ + + + -
-- cgit v1.2.3-54-g00ecf


From 5fe626c6cc8444603cf7ae5199271b69d38ff255 Mon Sep 17 00:00:00 2001
From: Dan McGee
Date: Thu, 9 Jun 2011 16:17:42 -0500
Subject: Management command cleanup

Now that we aren't seeing odd segfaults and hung tasks, we can remove
the traceback stuff from the scripts. Also use the 'io' module only; it
has been long enough.

Signed-off-by: Dan McGee
---
 devel/management/commands/reporead.py      | 27 +++++----------------------
 mirrors/management/commands/mirrorcheck.py |  5 -----
 2 files changed, 5 insertions(+), 27 deletions(-)

diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py
index a8875c7e..1adc359e 100644
--- a/devel/management/commands/reporead.py
+++ b/devel/management/commands/reporead.py
@@ -18,7 +18,7 @@
 from django.db import transaction
 from django.db.models import Q
 
-import codecs
+import io
 import os
 import re
 import sys
@@ -27,14 +27,6 @@
 from datetime import datetime
 from optparse import make_option
 
-# New in 2.6, but fast (C implementation) in 2.7. We will use it over codecs if
-# available. Eventually remove the codecs import completely.
-io = None
-try:
-    import io
-except ImportError:
-    pass
-
 from main.models import Arch, Package, PackageDepend, PackageFile, Repo
 from packages.models import Conflict, Provision, Replacement
@@ -74,11 +66,6 @@ def handle(self, arch=None, filename=None, **options):
         elif v == 2:
             logger.level = logging.DEBUG
 
-        import signal, traceback
-        handler = lambda sig, stack: traceback.print_stack(stack)
-        signal.signal(signal.SIGQUIT, handler)
-        signal.signal(signal.SIGUSR1, handler)
-
         return read_repo(arch, filename, options)
@@ -101,8 +88,7 @@ def __init__(self, repo):
             setattr(self, k, None)
         for k in self.collections:
             setattr(self, k, ())
-        # So we can tell the diffence between a package with no files, and a DB
-        # without files entries
+
         self.files = None
         self.has_files = False
 
     def populate(self, values):
@@ -461,16 +447,13 @@ def parse_repo(repopath):
             if fname not in dbfiles:
                 continue
             data_file = repodb.extractfile(tarinfo)
-            if io is None:
-                data_file = codecs.EncodedFile(data_file, 'utf-8')
-            else:
-                data_file = io.TextIOWrapper(io.BytesIO(data_file.read()),
-                        encoding='utf=8')
+            data_file = io.TextIOWrapper(io.BytesIO(data_file.read()),
+                    encoding='utf-8')
             try:
                 data = parse_info(data_file)
                 p = pkgs.setdefault(pkgid, Pkg(reponame))
                 p.populate(data)
-            except UnicodeDecodeError, e:
+            except UnicodeDecodeError:
                 logger.warn("Could not correctly decode %s, skipping file",
                         tarinfo.name)
             data_file.close()
diff --git a/mirrors/management/commands/mirrorcheck.py b/mirrors/management/commands/mirrorcheck.py
index ea43d558..7bd79c83 100644
--- a/mirrors/management/commands/mirrorcheck.py
+++ b/mirrors/management/commands/mirrorcheck.py
@@ -45,11 +45,6 @@ def handle_noargs(self, **options):
         elif v == 2:
             logger.level = logging.DEBUG
 
-        import signal, traceback
-        handler = lambda sig, stack: traceback.print_stack(stack)
-        signal.signal(signal.SIGQUIT, handler)
-        signal.signal(signal.SIGUSR1, handler)
-
         return check_current_mirrors()
 
 def check_mirror_url(mirror_url):
-- cgit v1.2.3-54-g00ecf


From 895f8a20d35a18f3a0cc6e1530eb40292270fc7c Mon Sep 17 00:00:00 2001
From: Dan McGee
Date: Thu, 9 Jun 2011 17:25:32 -0500
Subject: reporead: allow batching of package updates

The real reason I originally added transactions to this code was to
prevent half-updates; e.g. a package gets in without the matching
depends values.
We can safely commit between packages and resume processing the database at a later time. Take advantage of this fact and commit every so often in batch fashion if we have a lot of updates piling up. In the case of updating the files DB, this can really cut down on the need to hold open a long-running, statement heavy transaction and get the information public faster. Signed-off-by: Dan McGee --- devel/management/commands/reporead.py | 58 +++++++++++++++++++++++++++++------ 1 file changed, 49 insertions(+), 9 deletions(-) diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py index 1adc359e..e9878c93 100644 --- a/devel/management/commands/reporead.py +++ b/devel/management/commands/reporead.py @@ -221,6 +221,8 @@ def create_multivalued(dbpkg, repopkg, db_attr, repo_attr): collection.create(name=name) def populate_pkg(dbpkg, repopkg, force=False, timestamp=None): + db_score = 1 + if repopkg.base: dbpkg.pkgbase = repopkg.base else: @@ -251,7 +253,7 @@ def populate_pkg(dbpkg, repopkg, force=False, timestamp=None): dbpkg.last_update = timestamp dbpkg.save() - populate_files(dbpkg, repopkg, force=force) + db_score += populate_files(dbpkg, repopkg, force=force) dbpkg.packagedepend_set.all().delete() for y in repopkg.depends: @@ -272,6 +274,15 @@ def populate_pkg(dbpkg, repopkg, force=False, timestamp=None): create_multivalued(dbpkg, repopkg, 'groups', 'groups') create_multivalued(dbpkg, repopkg, 'licenses', 'license') + related_score = (len(repopkg.depends) + len(repopkg.optdepends) + + len(repopkg.conflicts) + len(repopkg.provides) + + len(repopkg.replaces) + len(repopkg.groups) + + len(repopkg.license)) + if related_score: + db_score += (related_score / 20) + 1 + + return db_score + def populate_files(dbpkg, repopkg, force=False): if not force: @@ -280,11 +291,11 @@ def populate_files(dbpkg, repopkg, force=False): logger.info("DB version (%s) didn't match repo version " "(%s) for package %s, skipping file list addition", dbpkg.full_version, repopkg.full_version, dbpkg.pkgname) - return + return 0 if not dbpkg.files_last_update or not dbpkg.last_update: pass elif dbpkg.files_last_update > dbpkg.last_update: - return + return 0 # only delete files if we are reading a DB that contains them if repopkg.has_files: dbpkg.packagefile_set.all().delete() @@ -303,6 +314,28 @@ def populate_files(dbpkg, repopkg, force=False): pkgfile.save(force_insert=True) dbpkg.files_last_update = datetime.utcnow() dbpkg.save() + return (len(repopkg.files) / 50) + 1 + return 0 + + +class Batcher(object): + def __init__(self, threshold, start=0): + self.threshold = threshold + self.meter = start + + def batch_commit(self, score): + """ + Track updates to the database and perform a commit if the batch + becomes sufficiently large. "Large" is defined by waiting for the + sum of scores to exceed the arbitrary threshold value; once it is + hit a commit is issued. 
+ """ + self.meter += score + if self.meter > self.threshold: + logger.debug("Committing transaction, batch threshold hit") + transaction.commit() + self.meter = 0 + @transaction.commit_on_success def db_update(archname, reponame, pkgs, options): @@ -355,19 +388,23 @@ def db_update(archname, reponame, pkgs, options): elif dbpercent < 75.0: logger.warning(msg) + batcher = Batcher(100) + if not filesonly: # packages in syncdb and not in database (add to database) for p in [x for x in pkgs if x.name in in_sync_not_db]: logger.info("Adding package %s", p.name) pkg = Package(pkgname = p.name, arch = architecture, repo = repository) - populate_pkg(pkg, p, timestamp=datetime.utcnow()) + score = populate_pkg(pkg, p, timestamp=datetime.utcnow()) + batcher.batch_commit(score) # packages in database and not in syncdb (remove from database) in_db_not_sync = dbset - syncset for p in in_db_not_sync: - logger.info("Removing package %s from database", p) + logger.info("Removing package %s", p) dbp = dbdict[p] dbp.delete() + batcher.batch_commit(score) # packages in both database and in syncdb (update in database) pkg_in_both = syncset & dbset @@ -385,12 +422,15 @@ def db_update(archname, reponame, pkgs, options): continue else: timestamp = datetime.utcnow() + if filesonly: - logger.debug("Checking files for package %s in database", p.name) - populate_files(dbp, p, force=force) + logger.debug("Checking files for package %s", p.name) + score = populate_files(dbp, p, force=force) else: - logger.info("Updating package %s in database", p.name) - populate_pkg(dbp, p, force=force, timestamp=timestamp) + logger.info("Updating package %s", p.name) + score = populate_pkg(dbp, p, force=force, timestamp=timestamp) + + batcher.batch_commit(score) logger.info('Finished updating Arch: %s', archname) -- cgit v1.2.3-54-g00ecf From 01b07b5b07cd152949c9f01fec91408945273583 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Thu, 9 Jun 2011 17:30:50 -0500 Subject: Fix busted batch score on package removal Signed-off-by: Dan McGee --- devel/management/commands/reporead.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py index e9878c93..0bd5587a 100644 --- a/devel/management/commands/reporead.py +++ b/devel/management/commands/reporead.py @@ -404,7 +404,7 @@ def db_update(archname, reponame, pkgs, options): logger.info("Removing package %s", p) dbp = dbdict[p] dbp.delete() - batcher.batch_commit(score) + batcher.batch_commit(1) # packages in both database and in syncdb (update in database) pkg_in_both = syncset & dbset -- cgit v1.2.3-54-g00ecf From 3284b302550dbd255762b8606343288d49402056 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Mon, 13 Jun 2011 13:18:28 -0500 Subject: Add a long out-of-date developer report This shows packages that have been marked out of date for more than 90 days in the repos. 
Signed-off-by: Dan McGee --- devel/views.py | 7 ++++++- templates/devel/index.html | 3 +++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/devel/views.py b/devel/views.py index 25ad2ccf..2b8bd43e 100644 --- a/devel/views.py +++ b/devel/views.py @@ -140,9 +140,14 @@ def report(request, report, username=None): if report == 'old': title = 'Packages last built more than two years ago' - cutoff = datetime.now() - timedelta(days=730) + cutoff = datetime.now() - timedelta(days=365 * 2) packages = packages.filter( build_date__lt=cutoff).order_by('build_date') + elif report == 'long-out-of-date': + title = 'Packages marked out-of-date more than 90 days ago' + cutoff = datetime.now() - timedelta(days=90) + packages = packages.filter( + flag_date__lt=cutoff).order_by('flag_date') elif report == 'big': title = 'Packages with compressed size > 50 MiB' cutoff = 50 * 1024 * 1024 diff --git a/templates/devel/index.html b/templates/devel/index.html index 015ae1b2..eb73ac2d 100644 --- a/templates/devel/index.html +++ b/templates/devel/index.html @@ -106,6 +106,9 @@

Developer Reports

  • Old: Packages last built more than two years ago (yours only)
  • +
  • Long Out-of-date: + Packages marked out-of-date more than 90 days ago + (yours only)
  • Uncompressed Manpages: Self-explanatory (yours only)
  • -- cgit v1.2.3-54-g00ecf From 92dbad587ab77b84130b86153464647d583b677e Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Fri, 10 Jun 2011 10:43:57 -0500 Subject: reporead: two small cleanups * Parse builddate when reading from repo database file * Use defaultdict where it comes in handy Signed-off-by: Dan McGee --- devel/management/commands/reporead.py | 32 +++++++++++++++++--------------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py index 0bd5587a..4d30388e 100644 --- a/devel/management/commands/reporead.py +++ b/devel/management/commands/reporead.py @@ -18,6 +18,7 @@ from django.db import transaction from django.db.models import Q +from collections import defaultdict import io import os import re @@ -72,7 +73,7 @@ def handle(self, arch=None, filename=None, **options): class Pkg(object): """An interim 'container' object for holding Arch package data.""" bare = ( 'name', 'base', 'arch', 'desc', 'filename', - 'md5sum', 'url', 'builddate', 'packager' ) + 'md5sum', 'url', 'packager' ) number = ( 'csize', 'isize' ) collections = ( 'depends', 'optdepends', 'conflicts', 'provides', 'replaces', 'groups', 'license', 'files' ) @@ -104,6 +105,16 @@ def populate(self, values): self.rel = match.group(4) if match.group(2): self.epoch = int(match.group(2)) + elif k == 'builddate': + try: + self.builddate = datetime.utcfromtimestamp(int(v[0])) + except ValueError: + try: + self.builddate = datetime.strptime(v[0], + '%a %b %d %H:%M:%S %Y') + except ValueError: + logger.warning('Package %s had unparsable build date %s', + self.name, v[0]) elif k == 'files': self.files = v self.has_files = True @@ -235,15 +246,7 @@ def populate_pkg(dbpkg, repopkg, force=False, timestamp=None): dbpkg.filename = repopkg.filename dbpkg.compressed_size = repopkg.csize dbpkg.installed_size = repopkg.isize - try: - dbpkg.build_date = datetime.utcfromtimestamp(int(repopkg.builddate)) - except ValueError: - try: - dbpkg.build_date = datetime.strptime(repopkg.builddate, - '%a %b %d %H:%M:%S %Y') - except ValueError: - logger.warning('Package %s had unparsable build date %s', - repopkg.name, repopkg.builddate) + dbpkg.build_date = repopkg.builddate dbpkg.packager_str = repopkg.packager # attempt to find the corresponding django user for this string dbpkg.packager = find_user(repopkg.packager) @@ -394,7 +397,7 @@ def db_update(archname, reponame, pkgs, options): # packages in syncdb and not in database (add to database) for p in [x for x in pkgs if x.name in in_sync_not_db]: logger.info("Adding package %s", p.name) - pkg = Package(pkgname = p.name, arch = architecture, repo = repository) + pkg = Package(pkgname=p.name, arch=architecture, repo=repository) score = populate_pkg(pkg, p, timestamp=datetime.utcnow()) batcher.batch_commit(score) @@ -480,7 +483,8 @@ def parse_repo(repopath): repodb = tarfile.open(repopath, "r") logger.debug("Starting package parsing") dbfiles = ('desc', 'depends', 'files') - pkgs = {} + newpkg = lambda: Pkg(reponame) + pkgs = defaultdict(newpkg) for tarinfo in repodb.getmembers(): if tarinfo.isreg(): pkgid, fname = os.path.split(tarinfo.name) @@ -490,9 +494,7 @@ def parse_repo(repopath): data_file = io.TextIOWrapper(io.BytesIO(data_file.read()), encoding='utf=8') try: - data = parse_info(data_file) - p = pkgs.setdefault(pkgid, Pkg(reponame)) - p.populate(data) + pkgs[pkgid].populate(parse_info(data_file)) except UnicodeDecodeError: logger.warn("Could not correctly decode %s, skipping file", tarinfo.name) -- cgit 
v1.2.3-54-g00ecf From b336dd15598132d1c501a9d44bc4d5a0e64bfb2e Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Fri, 10 Jun 2011 10:46:06 -0500 Subject: reporead: small memory/perf improvements Signed-off-by: Dan McGee --- devel/management/commands/reporead.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py index 4d30388e..baf7fee1 100644 --- a/devel/management/commands/reporead.py +++ b/devel/management/commands/reporead.py @@ -116,11 +116,11 @@ def populate(self, values): logger.warning('Package %s had unparsable build date %s', self.name, v[0]) elif k == 'files': - self.files = v + self.files = tuple(v) self.has_files = True else: # anything left in collections - setattr(self, k, v) + setattr(self, k, tuple(v)) @property def full_version(self): @@ -528,7 +528,9 @@ def read_repo(primary_arch, repo_file, options): else: # we don't include mis-arched packages logger.warning("Package %s arch = %s", - package.name,package.arch) + package.name, package.arch) + del packages + logger.info('Starting database updates.') for arch in sorted(packages_arches.keys()): db_update(arch, repo, packages_arches[arch], options) -- cgit v1.2.3-54-g00ecf From 2fd78dfa0019d1b1d25f89a7729b5ebb0f341a93 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Tue, 14 Jun 2011 18:11:09 -0500 Subject: Allow mirror country field to be persisted to DB as NULL You need a custom field type in Django to allow this. Signed-off-by: Dan McGee --- .../0009_auto__chg_field_mirrorurl_country.py | 66 ++++++++++++++++++++++ mirrors/models.py | 15 ++++- 2 files changed, 78 insertions(+), 3 deletions(-) create mode 100644 mirrors/migrations/0009_auto__chg_field_mirrorurl_country.py diff --git a/mirrors/migrations/0009_auto__chg_field_mirrorurl_country.py b/mirrors/migrations/0009_auto__chg_field_mirrorurl_country.py new file mode 100644 index 00000000..941ecfbb --- /dev/null +++ b/mirrors/migrations/0009_auto__chg_field_mirrorurl_country.py @@ -0,0 +1,66 @@ +# encoding: utf-8 +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +class Migration(SchemaMigration): + + def forwards(self, orm): + db.alter_column('mirrors_mirrorurl', 'country', self.gf('mirrors.models.NullCharField')(max_length=255, null=True)) + + + def backwards(self, orm): + db.alter_column('mirrors_mirrorurl', 'country', self.gf('django.db.models.fields.CharField')(max_length=255, null=True)) + + models = { + 'mirrors.mirror': { + 'Meta': {'ordering': "('country', 'name')", 'object_name': 'Mirror'}, + 'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'admin_email': ('django.db.models.fields.EmailField', [], {'max_length': '255', 'blank': 'True'}), + 'country': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'isos': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}), + 'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'rsync_password': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'blank': 'True'}), + 'rsync_user': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'blank': 'True'}), + 'tier': 
('django.db.models.fields.SmallIntegerField', [], {'default': '2'}), + 'upstream': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mirrors.Mirror']", 'null': 'True'}) + }, + 'mirrors.mirrorlog': { + 'Meta': {'object_name': 'MirrorLog'}, + 'check_time': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}), + 'duration': ('django.db.models.fields.FloatField', [], {'null': 'True'}), + 'error': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_success': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'last_sync': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'url': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'logs'", 'to': "orm['mirrors.MirrorUrl']"}) + }, + 'mirrors.mirrorprotocol': { + 'Meta': {'ordering': "('protocol',)", 'object_name': 'MirrorProtocol'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_download': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'protocol': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '10'}) + }, + 'mirrors.mirrorrsync': { + 'Meta': {'object_name': 'MirrorRsync'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'ip': ('django.db.models.fields.CharField', [], {'max_length': '24'}), + 'mirror': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'rsync_ips'", 'to': "orm['mirrors.Mirror']"}) + }, + 'mirrors.mirrorurl': { + 'Meta': {'object_name': 'MirrorUrl'}, + 'country': ('mirrors.models.NullCharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'has_ipv4': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'has_ipv6': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'mirror': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'urls'", 'to': "orm['mirrors.Mirror']"}), + 'protocol': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'urls'", 'to': "orm['mirrors.MirrorProtocol']"}), + 'url': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) + } + } + + complete_apps = ['mirrors'] diff --git a/mirrors/models.py b/mirrors/models.py index bcde210c..4f70e5a9 100644 --- a/mirrors/models.py +++ b/mirrors/models.py @@ -1,8 +1,17 @@ +import socket +from urlparse import urlparse + from django.db import models from django.core.exceptions import ValidationError -import socket -from urlparse import urlparse +class NullCharField(models.CharField): + description = "String (up to %(max_length)s), NULL if value is empty" + _south_introspects = True + + def get_prep_value(self, value): + if value == '': + return None + return self.to_python(value) TIER_CHOICES = ( (0, 'Tier 0'), @@ -58,7 +67,7 @@ class MirrorUrl(models.Model): protocol = models.ForeignKey(MirrorProtocol, related_name="urls", editable=False, on_delete=models.PROTECT) mirror = models.ForeignKey(Mirror, related_name="urls") - country = models.CharField(max_length=255, blank=True, null=True, + country = NullCharField(max_length=255, null=True, blank=True, db_index=True) has_ipv4 = models.BooleanField("IPv4 capable", default=True, editable=False) -- cgit v1.2.3-54-g00ecf From f465a6afb71573bac0876dd8cc56aaae4d67035c Mon Sep 17 00:00:00 2001 
From: Dan McGee Date: Tue, 14 Jun 2011 18:16:00 -0500 Subject: Mark several package columns as non-NULL These have been around for a long time now so they don't need to be NULL-able anymore. We can also add a custom field type for our numbers to at least get a check constraint at the Django level. Signed-off-by: Dan McGee --- ...mpressed_size__chg_field_package_installed_s.py | 163 +++++++++++++++++++++ main/models.py | 18 ++- 2 files changed, 177 insertions(+), 4 deletions(-) create mode 100644 main/migrations/0050_auto__chg_field_package_compressed_size__chg_field_package_installed_s.py diff --git a/main/migrations/0050_auto__chg_field_package_compressed_size__chg_field_package_installed_s.py b/main/migrations/0050_auto__chg_field_package_compressed_size__chg_field_package_installed_s.py new file mode 100644 index 00000000..7aa09596 --- /dev/null +++ b/main/migrations/0050_auto__chg_field_package_compressed_size__chg_field_package_installed_s.py @@ -0,0 +1,163 @@ +# encoding: utf-8 +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +class Migration(SchemaMigration): + + def forwards(self, orm): + db.alter_column('packages', 'compressed_size', self.gf('main.models.PositiveBigIntegerField')(default=0)) + db.alter_column('packages', 'installed_size', self.gf('main.models.PositiveBigIntegerField')(default=0)) + db.alter_column('packages', 'last_update', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2000, 1, 1))) + + def backwards(self, orm): + db.alter_column('packages', 'compressed_size', self.gf('django.db.models.fields.BigIntegerField')(null=True)) + db.alter_column('packages', 'installed_size', self.gf('django.db.models.fields.BigIntegerField')(null=True)) + db.alter_column('packages', 'last_update', self.gf('django.db.models.fields.DateTimeField')(null=True)) + + models = { + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'main.arch': { + 'Meta': {'ordering': "['name']", 'object_name': 'Arch', 'db_table': "'arches'"}, + 'agnostic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) + }, + 'main.donor': { + 'Meta': {'ordering': "['name']", 'object_name': 'Donor', 'db_table': "'donors'"}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'}) + }, + 'main.package': { + 'Meta': {'ordering': "('pkgname',)", 'object_name': 'Package', 'db_table': "'packages'"}, + 'arch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Arch']"}), + 'build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'compressed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), + 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'files_last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'flag_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'installed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'last_update': ('django.db.models.fields.DateTimeField', [], {}), + 'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}), + 'packager_str': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgdesc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), + 'pkgname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'repo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Repo']"}), + 'url': 
('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}) + }, + 'main.packagedepend': { + 'Meta': {'object_name': 'PackageDepend', 'db_table': "'package_depends'"}, + 'depname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'depvcmp': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}), + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'optional': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.packagefile': { + 'Meta': {'object_name': 'PackageFile', 'db_table': "'package_files'"}, + 'directory': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_directory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.repo': { + 'Meta': {'ordering': "['name']", 'object_name': 'Repo', 'db_table': "'repos'"}, + 'bugs_category': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), + 'bugs_project': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'staging': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'svn_root': ('django.db.models.fields.CharField', [], {'max_length': '64'}), + 'testing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) + }, + 'main.signoff': { + 'Meta': {'object_name': 'Signoff'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}), + 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'main.todolist': { + 'Meta': {'object_name': 'Todolist', 'db_table': "'todolists'"}, + 'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), + 'date_added': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}), + 'description': ('django.db.models.fields.TextField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'main.todolistpkg': { + 'Meta': {'unique_together': "(('list', 'pkg'),)", 'object_name': 'TodolistPkg', 'db_table': "'todolist_pkgs'"}, + 'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'list': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Todolist']"}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.userprofile': { + 'Meta': {'object_name': 'UserProfile', 'db_table': "'user_profiles'"}, + 'alias': 
('django.db.models.fields.CharField', [], {'max_length': '50'}), + 'allowed_repos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Repo']", 'symmetrical': 'False', 'blank': 'True'}), + 'favorite_distros': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'interests': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'languages': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'location': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'notify': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'occupation': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'other_contact': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), + 'pgp_key': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'picture': ('django.db.models.fields.files.FileField', [], {'default': "'devs/silhouette.png'", 'max_length': '100'}), + 'public_email': ('django.db.models.fields.CharField', [], {'max_length': '50'}), + 'roles': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'time_zone': ('django.db.models.fields.CharField', [], {'default': "'UTC'", 'max_length': '100'}), + 'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'userprofile'", 'unique': 'True', 'to': "orm['auth.User']"}), + 'website': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), + 'yob': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}) + } + } + + complete_apps = ['main'] diff --git a/main/models.py b/main/models.py index 926e7377..57e9650e 100644 --- a/main/models.py +++ b/main/models.py @@ -11,6 +11,17 @@ from itertools import groupby import pytz +class PositiveBigIntegerField(models.BigIntegerField): + _south_introspects = True + + def get_internal_type(self): + return "PositiveBigIntegerField" + + def formfield(self, **kwargs): + defaults = {'min_value': 0} + defaults.update(kwargs) + return super(PositiveBigIntegerField, self).formfield(**defaults) + def validate_pgp_key_length(value): if len(value) not in (8, 16, 40): raise ValidationError( @@ -131,11 +142,10 @@ class Package(models.Model): pkgdesc = models.CharField(max_length=255, null=True) url = models.CharField(max_length=255, null=True) filename = models.CharField(max_length=255) - # TODO: it would be nice to have the >0 check constraint back here - compressed_size = models.BigIntegerField(null=True) - installed_size = models.BigIntegerField(null=True) + compressed_size = PositiveBigIntegerField() + installed_size = PositiveBigIntegerField() build_date = models.DateTimeField(null=True) - last_update = models.DateTimeField(null=True, blank=True) + last_update = models.DateTimeField() files_last_update = models.DateTimeField(null=True, blank=True) packager_str = models.CharField(max_length=255) packager = models.ForeignKey(User, null=True, -- cgit v1.2.3-54-g00ecf From 7b2c16014837c0b134a7b902a0d2cc8db77f92b8 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Tue, 14 Jun 2011 18:27:00 -0500 Subject: Add some more developer links This should take care of FS#24022 as well. 
Signed-off-by: Dan McGee --- templates/base.html | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/templates/base.html b/templates/base.html index 88bb3414..1a217505 100644 --- a/templates/base.html +++ b/templates/base.html @@ -32,17 +32,21 @@
    {% if user.is_authenticated %} -- cgit v1.2.3-54-g00ecf From 037f67bb3c0b63239614ece9884b33c2d265b8ae Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Tue, 14 Jun 2011 18:35:26 -0500 Subject: Link to stale relations page Signed-off-by: Dan McGee --- templates/devel/index.html | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/templates/devel/index.html b/templates/devel/index.html index eb73ac2d..2a0058d3 100644 --- a/templates/devel/index.html +++ b/templates/devel/index.html @@ -187,6 +187,10 @@

    Stats by Repository

    Stats by Maintainer

    + {% if perms.main.change_package %} +

    Look for stale relations

    + {% endif %} +
    Other Contact: {{ prof.other_contact }}
    PGP Key:{% pgp_key_link prof.pgp_key %}
    Roles: {{ prof.roles }}
    Website:{% if prof %}{% if prof.website %} {{ prof.website }}{% endif %}
    -- cgit v1.2.3-54-g00ecf From dc9dedaeb075630fa284036fb2f39de3c69004f9 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Tue, 14 Jun 2011 19:10:16 -0500 Subject: Add JSON package details and files views These are retrieved by adding 'json/' to the normal package details or files view. Signed-off-by: Dan McGee --- mirrors/views.py | 5 ++--- packages/urls.py | 3 ++- packages/views.py | 54 ++++++++++++++++++++++++++++++++++++++++++++---------- 3 files changed, 48 insertions(+), 14 deletions(-) diff --git a/mirrors/views.py b/mirrors/views.py index f03a2e8a..6135cee3 100644 --- a/mirrors/views.py +++ b/mirrors/views.py @@ -155,9 +155,8 @@ def default(self, obj): # mainly for queryset serialization return list(obj) if isinstance(obj, MirrorUrl): - data = {} - for attr in self.url_attributes: - data[attr] = getattr(obj, attr) + data = dict((attr, getattr(obj, attr)) + for attr in self.url_attributes) # separate because it isn't on the URL directly data['country'] = obj.real_country return data diff --git a/packages/urls.py b/packages/urls.py index e0362fa2..3c2c87b0 100644 --- a/packages/urls.py +++ b/packages/urls.py @@ -2,8 +2,9 @@ package_patterns = patterns('packages.views', (r'^$', 'details'), + (r'^json/$', 'details_json'), (r'^files/$', 'files'), - (r'^maintainer/$', 'getmaintainer'), + (r'^files/json/$', 'files_json'), (r'^flag/$', 'flag'), (r'^flag/done/$', 'flag_confirmed', {}, 'package-flag-confirmed'), (r'^unflag/$', 'unflag'), diff --git a/packages/views.py b/packages/views.py index 23797f71..44a1db96 100644 --- a/packages/views.py +++ b/packages/views.py @@ -5,6 +5,7 @@ from django.contrib.auth.decorators import permission_required from django.conf import settings from django.core.mail import send_mail +from django.core.serializers.json import DjangoJSONEncoder from django.db.models import Q from django.http import HttpResponse, Http404 from django.shortcuts import get_object_or_404, redirect @@ -24,9 +25,30 @@ from main.models import Arch, Repo, Signoff from main.utils import make_choice from mirrors.models import MirrorUrl -from .models import PackageRelation +from .models import PackageRelation, PackageGroup from .utils import get_group_info, get_differences_info, get_wrong_permissions +class PackageJSONEncoder(DjangoJSONEncoder): + pkg_attributes = [ 'pkgname', 'pkgbase', 'repo', 'arch', 'pkgver', + 'pkgrel', 'epoch', 'pkgdesc', 'url', 'filename', 'compressed_size', + 'installed_size', 'build_date', 'last_update', 'flag_date' ] + + def default(self, obj): + if hasattr(obj, '__iter__'): + # mainly for queryset serialization + return list(obj) + if isinstance(obj, Package): + data = dict((attr, getattr(obj, attr)) + for attr in self.pkg_attributes) + data['groups'] = obj.groups.all() + return data + if isinstance(obj, PackageFile): + filename = obj.filename or '' + return obj.directory + filename + if isinstance(obj, (Repo, Arch, PackageGroup)): + return obj.name.lower() + return super(PackageJSONEncoder, self).default(obj) + def opensearch(request): if request.is_secure(): domain = "https://%s" % request.META['HTTP_HOST'] @@ -146,15 +168,6 @@ def group_details(request, arch, name): } return direct_to_template(request, 'packages/packages_list.html', context) -def getmaintainer(request, name, repo, arch): - "Returns the maintainers as plaintext." 
-
-    pkg = get_object_or_404(Package,
-        pkgname=name, repo__name__iexact=repo, arch__name=arch)
-    names = [m.username for m in pkg.maintainers]
-
-    return HttpResponse(str('\n'.join(names)), mimetype='text/plain')
-
 def coerce_limit_value(value):
     if not value:
         return None
@@ -287,6 +300,27 @@ def files(request, name, repo, arch):
     return direct_to_template(request, template,
             {'pkg':pkg, 'files':fileslist})
 
+def details_json(request, name, repo, arch):
+    pkg = get_object_or_404(Package,
+        pkgname=name, repo__name__iexact=repo, arch__name=arch)
+    to_json = simplejson.dumps(pkg, ensure_ascii=False,
+            cls=PackageJSONEncoder)
+    return HttpResponse(to_json, mimetype='application/json')
+
+def files_json(request, name, repo, arch):
+    pkg = get_object_or_404(Package,
+        pkgname=name, repo__name__iexact=repo, arch__name=arch)
+    fileslist = PackageFile.objects.filter(pkg=pkg).order_by('directory', 'filename')
+    data = {
+        'pkgname': pkg.pkgname,
+        'repo': pkg.repo.name.lower(),
+        'arch': pkg.arch.name.lower(),
+        'files': fileslist,
+    }
+    to_json = simplejson.dumps(data, ensure_ascii=False,
+            cls=PackageJSONEncoder)
+    return HttpResponse(to_json, mimetype='application/json')
+
 @permission_required('main.change_package')
 def unflag(request, name, repo, arch):
     pkg = get_object_or_404(Package,
-- cgit v1.2.3-54-g00ecf


From f252aede11319c694f3a81626b996403eb14444f Mon Sep 17 00:00:00 2001
From: Dan McGee
Date: Tue, 14 Jun 2011 19:18:14 -0500
Subject: Add named devel URLs

Signed-off-by: Dan McGee
---
 devel/urls.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/devel/urls.py b/devel/urls.py
index 8759562e..bc3bcace 100644
--- a/devel/urls.py
+++ b/devel/urls.py
@@ -3,8 +3,8 @@
 urlpatterns = patterns('devel.views',
     (r'^admin_log/$','admin_log'),
     (r'^admin_log/(?P<username>.*)/$','admin_log'),
-    (r'^clock/$', 'clock'),
-    (r'^$', 'index'),
+    (r'^clock/$', 'clock', {}, 'devel-clocks'),
+    (r'^$', 'index', {}, 'devel-index'),
     (r'^newuser/$', 'new_user_form'),
     (r'^profile/$', 'change_profile'),
     (r'^reports/(?P<report>.*)/(?P<username>.*)/$', 'report'),
-- cgit v1.2.3-54-g00ecf


From ef61eacd08b678b55eeaacfd5cc29ada37662e10 Mon Sep 17 00:00:00 2001
From: Dan McGee
Date: Tue, 14 Jun 2011 19:20:23 -0500
Subject: Simplify jQuery CDN tag

Signed-off-by: Dan McGee
---
 main/templatetags/cdn.py | 25 ++++++++++---------------
 1 file changed, 10 insertions(+), 15 deletions(-)

diff --git a/main/templatetags/cdn.py b/main/templatetags/cdn.py
index 83ee61fb..1923af80 100644
--- a/main/templatetags/cdn.py
+++ b/main/templatetags/cdn.py
@@ -3,21 +3,16 @@
 
 register = template.Library()
 
-@register.tag
-def jquery(parser, token):
-    return JQueryNode()
-
-class JQueryNode(template.Node):
-    def render(self, context):
-        prefixes = { False: 'http', True: 'https' }
-        version = '1.4.3'
-        oncdn = getattr(settings, 'CDN_ENABLED', True)
-        if oncdn:
-            jquery = 'https://ajax.googleapis.com/ajax/libs/jquery/' \
-                '%s/jquery.min.js' % version
-        else:
-            jquery = '/media/jquery-%s.min.js' % version
-        return '<script type="text/javascript" src="%s"></script>' % jquery
+@register.simple_tag
+def jquery():
+    version = '1.4.3'
+    oncdn = getattr(settings, 'CDN_ENABLED', True)
+    if oncdn:
+        link = 'https://ajax.googleapis.com/ajax/libs/jquery/' \
+            '%s/jquery.min.js' % version
+    else:
+        link = '/media/jquery-%s.min.js' % version
+    return '<script type="text/javascript" src="%s"></script>' % link
 
 @register.tag
 def cdnprefix(parser, token):
-- cgit v1.2.3-54-g00ecf


From 199cd423d19d1ba42ff4550ed252ee3f8f254e14 Mon Sep 17 00:00:00 2001
From: Dan McGee
Date: Wed, 15 Jun 2011 15:01:59 -0500
Subject: Update to jQuery 1.4.4

Signed-off-by: Dan McGee
---
From ef61eacd08b678b55eeaacfd5cc29ada37662e10 Mon Sep 17 00:00:00 2001
From: Dan McGee
Date: Tue, 14 Jun 2011 19:20:23 -0500
Subject: Simplify jQuery CDN tag

Signed-off-by: Dan McGee
---
 main/templatetags/cdn.py | 25 ++++++++++---------------
 1 file changed, 10 insertions(+), 15 deletions(-)

diff --git a/main/templatetags/cdn.py b/main/templatetags/cdn.py
index 83ee61fb..1923af80 100644
--- a/main/templatetags/cdn.py
+++ b/main/templatetags/cdn.py
@@ -3,21 +3,16 @@

 register = template.Library()

-@register.tag
-def jquery(parser, token):
-    return JQueryNode()
-
-class JQueryNode(template.Node):
-    def render(self, context):
-        prefixes = { False: 'http', True: 'https' }
-        version = '1.4.3'
-        oncdn = getattr(settings, 'CDN_ENABLED', True)
-        if oncdn:
-            jquery = 'https://ajax.googleapis.com/ajax/libs/jquery/' \
-                    '%s/jquery.min.js' % version
-        else:
-            jquery = '/media/jquery-%s.min.js' % version
-        return '<script type="text/javascript" src="%s"></script>' % jquery
+@register.simple_tag
+def jquery():
+    version = '1.4.3'
+    oncdn = getattr(settings, 'CDN_ENABLED', True)
+    if oncdn:
+        link = 'https://ajax.googleapis.com/ajax/libs/jquery/' \
+                '%s/jquery.min.js' % version
+    else:
+        link = '/media/jquery-%s.min.js' % version
+    return '<script type="text/javascript" src="%s"></script>' % link

 @register.tag
 def cdnprefix(parser, token):
-- cgit v1.2.3-54-g00ecf
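Template usage is unchanged by the rewrite; only the registration moved
from a custom Node class to simple_tag. A rough check of the rendered
output, assuming Django settings are already configured so that
{% load cdn %} resolves:

    from django.template import Template, Context
    # With CDN_ENABLED unset it defaults to True, so the tag should
    # emit a script element pointing at the Google-hosted jquery.min.js.
    html = Template('{% load cdn %}{% jquery %}').render(Context())
    assert 'ajax.googleapis.com/ajax/libs/jquery/1.4.3' in html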
-e(a,""):c.each(b,function(f,h){ca(a+"["+f+"]",h,d,e)});else e(a,b)}function S(a,b){var d={};c.each(na.concat.apply([],na.slice(0,b)),function(){d[this]=a});return d}function oa(a){if(!da[a]){var b=c("<"+a+">").appendTo("body"),d=b.css("display");b.remove();if(d==="none"||d==="")d="block";da[a]=d}return da[a]}function ea(a){return c.isWindow(a)?a:a.nodeType===9?a.defaultView||a.parentWindow:false}var u=E.document,c=function(){function a(){if(!b.isReady){try{u.documentElement.doScroll("left")}catch(i){setTimeout(a, -1);return}b.ready()}}var b=function(i,r){return new b.fn.init(i,r)},d=E.jQuery,e=E.$,f,h=/^(?:[^<]*(<[\w\W]+>)[^>]*$|#([\w\-]+)$)/,k=/\S/,l=/^\s+/,n=/\s+$/,s=/\W/,v=/\d/,B=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,D=/^[\],:{}\s]*$/,H=/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,w=/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,G=/(?:^|:|,)(?:\s*\[)+/g,M=/(webkit)[ \/]([\w.]+)/,g=/(opera)(?:.*version)?[ \/]([\w.]+)/,j=/(msie) ([\w.]+)/,o=/(mozilla)(?:.*? rv:([\w.]+))?/,m=navigator.userAgent,p=false, -q=[],t,x=Object.prototype.toString,C=Object.prototype.hasOwnProperty,P=Array.prototype.push,N=Array.prototype.slice,R=String.prototype.trim,Q=Array.prototype.indexOf,L={};b.fn=b.prototype={init:function(i,r){var y,z,F;if(!i)return this;if(i.nodeType){this.context=this[0]=i;this.length=1;return this}if(i==="body"&&!r&&u.body){this.context=u;this[0]=u.body;this.selector="body";this.length=1;return this}if(typeof i==="string")if((y=h.exec(i))&&(y[1]||!r))if(y[1]){F=r?r.ownerDocument||r:u;if(z=B.exec(i))if(b.isPlainObject(r)){i= -[u.createElement(z[1])];b.fn.attr.call(i,r,true)}else i=[F.createElement(z[1])];else{z=b.buildFragment([y[1]],[F]);i=(z.cacheable?z.fragment.cloneNode(true):z.fragment).childNodes}return b.merge(this,i)}else{if((z=u.getElementById(y[2]))&&z.parentNode){if(z.id!==y[2])return f.find(i);this.length=1;this[0]=z}this.context=u;this.selector=i;return this}else if(!r&&!s.test(i)){this.selector=i;this.context=u;i=u.getElementsByTagName(i);return b.merge(this,i)}else return!r||r.jquery?(r||f).find(i):b(r).find(i); -else if(b.isFunction(i))return f.ready(i);if(i.selector!==A){this.selector=i.selector;this.context=i.context}return b.makeArray(i,this)},selector:"",jquery:"1.4.3",length:0,size:function(){return this.length},toArray:function(){return N.call(this,0)},get:function(i){return i==null?this.toArray():i<0?this.slice(i)[0]:this[i]},pushStack:function(i,r,y){var z=b();b.isArray(i)?P.apply(z,i):b.merge(z,i);z.prevObject=this;z.context=this.context;if(r==="find")z.selector=this.selector+(this.selector?" 
": -"")+y;else if(r)z.selector=this.selector+"."+r+"("+y+")";return z},each:function(i,r){return b.each(this,i,r)},ready:function(i){b.bindReady();if(b.isReady)i.call(u,b);else q&&q.push(i);return this},eq:function(i){return i===-1?this.slice(i):this.slice(i,+i+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(N.apply(this,arguments),"slice",N.call(arguments).join(","))},map:function(i){return this.pushStack(b.map(this,function(r,y){return i.call(r, -y,r)}))},end:function(){return this.prevObject||b(null)},push:P,sort:[].sort,splice:[].splice};b.fn.init.prototype=b.fn;b.extend=b.fn.extend=function(){var i=arguments[0]||{},r=1,y=arguments.length,z=false,F,I,K,J,fa;if(typeof i==="boolean"){z=i;i=arguments[1]||{};r=2}if(typeof i!=="object"&&!b.isFunction(i))i={};if(y===r){i=this;--r}for(;r0)){if(q){for(var r=0;i=q[r++];)i.call(u,b);q=null}b.fn.triggerHandler&&b(u).triggerHandler("ready")}}},bindReady:function(){if(!p){p=true;if(u.readyState==="complete")return setTimeout(b.ready, -1);if(u.addEventListener){u.addEventListener("DOMContentLoaded",t,false);E.addEventListener("load",b.ready,false)}else if(u.attachEvent){u.attachEvent("onreadystatechange",t);E.attachEvent("onload",b.ready);var i=false;try{i=E.frameElement==null}catch(r){}u.documentElement.doScroll&&i&&a()}}},isFunction:function(i){return b.type(i)==="function"},isArray:Array.isArray||function(i){return b.type(i)==="array"},isWindow:function(i){return i&&typeof i==="object"&&"setInterval"in i},isNaN:function(i){return i== -null||!v.test(i)||isNaN(i)},type:function(i){return i==null?String(i):L[x.call(i)]||"object"},isPlainObject:function(i){if(!i||b.type(i)!=="object"||i.nodeType||b.isWindow(i))return false;if(i.constructor&&!C.call(i,"constructor")&&!C.call(i.constructor.prototype,"isPrototypeOf"))return false;for(var r in i);return r===A||C.call(i,r)},isEmptyObject:function(i){for(var r in i)return false;return true},error:function(i){throw i;},parseJSON:function(i){if(typeof i!=="string"||!i)return null;i=b.trim(i); -if(D.test(i.replace(H,"@").replace(w,"]").replace(G,"")))return E.JSON&&E.JSON.parse?E.JSON.parse(i):(new Function("return "+i))();else b.error("Invalid JSON: "+i)},noop:function(){},globalEval:function(i){if(i&&k.test(i)){var r=u.getElementsByTagName("head")[0]||u.documentElement,y=u.createElement("script");y.type="text/javascript";if(b.support.scriptEval)y.appendChild(u.createTextNode(i));else y.text=i;r.insertBefore(y,r.firstChild);r.removeChild(y)}},nodeName:function(i,r){return i.nodeName&&i.nodeName.toUpperCase()=== -r.toUpperCase()},each:function(i,r,y){var z,F=0,I=i.length,K=I===A||b.isFunction(i);if(y)if(K)for(z in i){if(r.apply(i[z],y)===false)break}else for(;F";a=u.createDocumentFragment();a.appendChild(d.firstChild);c.support.checkClone=a.cloneNode(true).cloneNode(true).lastChild.checked;c(function(){var s=u.createElement("div"); -s.style.width=s.style.paddingLeft="1px";u.body.appendChild(s);c.boxModel=c.support.boxModel=s.offsetWidth===2;if("zoom"in s.style){s.style.display="inline";s.style.zoom=1;c.support.inlineBlockNeedsLayout=s.offsetWidth===2;s.style.display="";s.innerHTML="
    ";c.support.shrinkWrapBlocks=s.offsetWidth!==2}s.innerHTML="
    t
    ";var v=s.getElementsByTagName("td");c.support.reliableHiddenOffsets=v[0].offsetHeight=== -0;v[0].style.display="";v[1].style.display="none";c.support.reliableHiddenOffsets=c.support.reliableHiddenOffsets&&v[0].offsetHeight===0;s.innerHTML="";u.body.removeChild(s).style.display="none"});a=function(s){var v=u.createElement("div");s="on"+s;var B=s in v;if(!B){v.setAttribute(s,"return;");B=typeof v[s]==="function"}return B};c.support.submitBubbles=a("submit");c.support.changeBubbles=a("change");a=b=d=f=h=null}})();c.props={"for":"htmlFor","class":"className",readonly:"readOnly",maxlength:"maxLength", -cellspacing:"cellSpacing",rowspan:"rowSpan",colspan:"colSpan",tabindex:"tabIndex",usemap:"useMap",frameborder:"frameBorder"};var pa={},Oa=/^(?:\{.*\}|\[.*\])$/;c.extend({cache:{},uuid:0,expando:"jQuery"+c.now(),noData:{embed:true,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:true},data:function(a,b,d){if(c.acceptData(a)){a=a==E?pa:a;var e=a.nodeType,f=e?a[c.expando]:null,h=c.cache;if(!(e&&!f&&typeof b==="string"&&d===A)){if(e)f||(a[c.expando]=f=++c.uuid);else h=a;if(typeof b==="object")if(e)h[f]= -c.extend(h[f],b);else c.extend(h,b);else if(e&&!h[f])h[f]={};a=e?h[f]:h;if(d!==A)a[b]=d;return typeof b==="string"?a[b]:a}}},removeData:function(a,b){if(c.acceptData(a)){a=a==E?pa:a;var d=a.nodeType,e=d?a[c.expando]:a,f=c.cache,h=d?f[e]:e;if(b){if(h){delete h[b];d&&c.isEmptyObject(h)&&c.removeData(a)}}else if(d&&c.support.deleteExpando)delete a[c.expando];else if(a.removeAttribute)a.removeAttribute(c.expando);else if(d)delete f[e];else for(var k in a)delete a[k]}},acceptData:function(a){if(a.nodeName){var b= -c.noData[a.nodeName.toLowerCase()];if(b)return!(b===true||a.getAttribute("classid")!==b)}return true}});c.fn.extend({data:function(a,b){if(typeof a==="undefined")return this.length?c.data(this[0]):null;else if(typeof a==="object")return this.each(function(){c.data(this,a)});var d=a.split(".");d[1]=d[1]?"."+d[1]:"";if(b===A){var e=this.triggerHandler("getData"+d[1]+"!",[d[0]]);if(e===A&&this.length){e=c.data(this[0],a);if(e===A&&this[0].nodeType===1){e=this[0].getAttribute("data-"+a);if(typeof e=== -"string")try{e=e==="true"?true:e==="false"?false:e==="null"?null:!c.isNaN(e)?parseFloat(e):Oa.test(e)?c.parseJSON(e):e}catch(f){}else e=A}}return e===A&&d[1]?this.data(d[0]):e}else return this.each(function(){var h=c(this),k=[d[0],b];h.triggerHandler("setData"+d[1]+"!",k);c.data(this,a,b);h.triggerHandler("changeData"+d[1]+"!",k)})},removeData:function(a){return this.each(function(){c.removeData(this,a)})}});c.extend({queue:function(a,b,d){if(a){b=(b||"fx")+"queue";var e=c.data(a,b);if(!d)return e|| -[];if(!e||c.isArray(d))e=c.data(a,b,c.makeArray(d));else e.push(d);return e}},dequeue:function(a,b){b=b||"fx";var d=c.queue(a,b),e=d.shift();if(e==="inprogress")e=d.shift();if(e){b==="fx"&&d.unshift("inprogress");e.call(a,function(){c.dequeue(a,b)})}}});c.fn.extend({queue:function(a,b){if(typeof a!=="string"){b=a;a="fx"}if(b===A)return c.queue(this[0],a);return this.each(function(){var d=c.queue(this,a,b);a==="fx"&&d[0]!=="inprogress"&&c.dequeue(this,a)})},dequeue:function(a){return this.each(function(){c.dequeue(this, -a)})},delay:function(a,b){a=c.fx?c.fx.speeds[a]||a:a;b=b||"fx";return this.queue(b,function(){var d=this;setTimeout(function(){c.dequeue(d,b)},a)})},clearQueue:function(a){return this.queue(a||"fx",[])}});var 
qa=/[\n\t]/g,ga=/\s+/,Pa=/\r/g,Qa=/^(?:href|src|style)$/,Ra=/^(?:button|input)$/i,Sa=/^(?:button|input|object|select|textarea)$/i,Ta=/^a(?:rea)?$/i,ra=/^(?:radio|checkbox)$/i;c.fn.extend({attr:function(a,b){return c.access(this,a,b,true,c.attr)},removeAttr:function(a){return this.each(function(){c.attr(this, -a,"");this.nodeType===1&&this.removeAttribute(a)})},addClass:function(a){if(c.isFunction(a))return this.each(function(s){var v=c(this);v.addClass(a.call(this,s,v.attr("class")))});if(a&&typeof a==="string")for(var b=(a||"").split(ga),d=0,e=this.length;d-1)return true;return false}, -val:function(a){if(!arguments.length){var b=this[0];if(b){if(c.nodeName(b,"option")){var d=b.attributes.value;return!d||d.specified?b.value:b.text}if(c.nodeName(b,"select")){var e=b.selectedIndex;d=[];var f=b.options;b=b.type==="select-one";if(e<0)return null;var h=b?e:0;for(e=b?e+1:f.length;h=0;else if(c.nodeName(this,"select")){var B=c.makeArray(v);c("option",this).each(function(){this.selected= -c.inArray(c(this).val(),B)>=0});if(!B.length)this.selectedIndex=-1}else this.value=v}})}});c.extend({attrFn:{val:true,css:true,html:true,text:true,data:true,width:true,height:true,offset:true},attr:function(a,b,d,e){if(!a||a.nodeType===3||a.nodeType===8)return A;if(e&&b in c.attrFn)return c(a)[b](d);e=a.nodeType!==1||!c.isXMLDoc(a);var f=d!==A;b=e&&c.props[b]||b;if(a.nodeType===1){var h=Qa.test(b);if((b in a||a[b]!==A)&&e&&!h){if(f){b==="type"&&Ra.test(a.nodeName)&&a.parentNode&&c.error("type property can't be changed"); -if(d===null)a.nodeType===1&&a.removeAttribute(b);else a[b]=d}if(c.nodeName(a,"form")&&a.getAttributeNode(b))return a.getAttributeNode(b).nodeValue;if(b==="tabIndex")return(b=a.getAttributeNode("tabIndex"))&&b.specified?b.value:Sa.test(a.nodeName)||Ta.test(a.nodeName)&&a.href?0:A;return a[b]}if(!c.support.style&&e&&b==="style"){if(f)a.style.cssText=""+d;return a.style.cssText}f&&a.setAttribute(b,""+d);if(!a.attributes[b]&&a.hasAttribute&&!a.hasAttribute(b))return A;a=!c.support.hrefNormalized&&e&& -h?a.getAttribute(b,2):a.getAttribute(b);return a===null?A:a}}});var X=/\.(.*)$/,ha=/^(?:textarea|input|select)$/i,Ha=/\./g,Ia=/ /g,Ua=/[^\w\s.|`]/g,Va=function(a){return a.replace(Ua,"\\$&")},sa={focusin:0,focusout:0};c.event={add:function(a,b,d,e){if(!(a.nodeType===3||a.nodeType===8)){if(c.isWindow(a)&&a!==E&&!a.frameElement)a=E;if(d===false)d=U;var f,h;if(d.handler){f=d;d=f.handler}if(!d.guid)d.guid=c.guid++;if(h=c.data(a)){var k=a.nodeType?"events":"__events__",l=h[k],n=h.handle;if(typeof l=== -"function"){n=l.handle;l=l.events}else if(!l){a.nodeType||(h[k]=h=function(){});h.events=l={}}if(!n)h.handle=n=function(){return typeof c!=="undefined"&&!c.event.triggered?c.event.handle.apply(n.elem,arguments):A};n.elem=a;b=b.split(" ");for(var s=0,v;k=b[s++];){h=f?c.extend({},f):{handler:d,data:e};if(k.indexOf(".")>-1){v=k.split(".");k=v.shift();h.namespace=v.slice(0).sort().join(".")}else{v=[];h.namespace=""}h.type=k;if(!h.guid)h.guid=d.guid;var B=l[k],D=c.event.special[k]||{};if(!B){B=l[k]=[]; -if(!D.setup||D.setup.call(a,e,v,n)===false)if(a.addEventListener)a.addEventListener(k,n,false);else a.attachEvent&&a.attachEvent("on"+k,n)}if(D.add){D.add.call(a,h);if(!h.handler.guid)h.handler.guid=d.guid}B.push(h);c.event.global[k]=true}a=null}}},global:{},remove:function(a,b,d,e){if(!(a.nodeType===3||a.nodeType===8)){if(d===false)d=U;var f,h,k=0,l,n,s,v,B,D,H=a.nodeType?"events":"__events__",w=c.data(a),G=w&&w[H];if(w&&G){if(typeof G==="function"){w=G;G=G.events}if(b&&b.type){d=b.handler;b=b.type}if(!b|| 
-typeof b==="string"&&b.charAt(0)==="."){b=b||"";for(f in G)c.event.remove(a,f+b)}else{for(b=b.split(" ");f=b[k++];){v=f;l=f.indexOf(".")<0;n=[];if(!l){n=f.split(".");f=n.shift();s=RegExp("(^|\\.)"+c.map(n.slice(0).sort(),Va).join("\\.(?:.*\\.)?")+"(\\.|$)")}if(B=G[f])if(d){v=c.event.special[f]||{};for(h=e||0;h=0){a.type= -f=f.slice(0,-1);a.exclusive=true}if(!d){a.stopPropagation();c.event.global[f]&&c.each(c.cache,function(){this.events&&this.events[f]&&c.event.trigger(a,b,this.handle.elem)})}if(!d||d.nodeType===3||d.nodeType===8)return A;a.result=A;a.target=d;b=c.makeArray(b);b.unshift(a)}a.currentTarget=d;(e=d.nodeType?c.data(d,"handle"):(c.data(d,"__events__")||{}).handle)&&e.apply(d,b);e=d.parentNode||d.ownerDocument;try{if(!(d&&d.nodeName&&c.noData[d.nodeName.toLowerCase()]))if(d["on"+f]&&d["on"+f].apply(d,b)=== -false){a.result=false;a.preventDefault()}}catch(h){}if(!a.isPropagationStopped()&&e)c.event.trigger(a,b,e,true);else if(!a.isDefaultPrevented()){e=a.target;var k,l=f.replace(X,""),n=c.nodeName(e,"a")&&l==="click",s=c.event.special[l]||{};if((!s._default||s._default.call(d,a)===false)&&!n&&!(e&&e.nodeName&&c.noData[e.nodeName.toLowerCase()])){try{if(e[l]){if(k=e["on"+l])e["on"+l]=null;c.event.triggered=true;e[l]()}}catch(v){}if(k)e["on"+l]=k;c.event.triggered=false}}},handle:function(a){var b,d,e; -d=[];var f,h=c.makeArray(arguments);a=h[0]=c.event.fix(a||E.event);a.currentTarget=this;b=a.type.indexOf(".")<0&&!a.exclusive;if(!b){e=a.type.split(".");a.type=e.shift();d=e.slice(0).sort();e=RegExp("(^|\\.)"+d.join("\\.(?:.*\\.)?")+"(\\.|$)")}a.namespace=a.namespace||d.join(".");f=c.data(this,this.nodeType?"events":"__events__");if(typeof f==="function")f=f.events;d=(f||{})[a.type];if(f&&d){d=d.slice(0);f=0;for(var k=d.length;f-1?c.map(a.options,function(e){return e.selected}).join("-"):"";else if(a.nodeName.toLowerCase()==="select")d=a.selectedIndex;return d},Z=function(a,b){var d=a.target,e,f;if(!(!ha.test(d.nodeName)||d.readOnly)){e=c.data(d,"_change_data");f=va(d);if(a.type!=="focusout"||d.type!=="radio")c.data(d,"_change_data",f);if(!(e===A||f===e))if(e!=null||f){a.type="change";a.liveFired= -A;return c.event.trigger(a,b,d)}}};c.event.special.change={filters:{focusout:Z,beforedeactivate:Z,click:function(a){var b=a.target,d=b.type;if(d==="radio"||d==="checkbox"||b.nodeName.toLowerCase()==="select")return Z.call(this,a)},keydown:function(a){var b=a.target,d=b.type;if(a.keyCode===13&&b.nodeName.toLowerCase()!=="textarea"||a.keyCode===32&&(d==="checkbox"||d==="radio")||d==="select-multiple")return Z.call(this,a)},beforeactivate:function(a){a=a.target;c.data(a,"_change_data",va(a))}},setup:function(){if(this.type=== -"file")return false;for(var a in V)c.event.add(this,a+".specialChange",V[a]);return ha.test(this.nodeName)},teardown:function(){c.event.remove(this,".specialChange");return ha.test(this.nodeName)}};V=c.event.special.change.filters;V.focus=V.beforeactivate}u.addEventListener&&c.each({focus:"focusin",blur:"focusout"},function(a,b){function d(e){e=c.event.fix(e);e.type=b;return c.event.trigger(e,null,e.target)}c.event.special[b]={setup:function(){sa[b]++===0&&u.addEventListener(a,d,true)},teardown:function(){--sa[b]=== -0&&u.removeEventListener(a,d,true)}}});c.each(["bind","one"],function(a,b){c.fn[b]=function(d,e,f){if(typeof d==="object"){for(var h in d)this[b](h,e,d[h],f);return this}if(c.isFunction(e)||e===false){f=e;e=A}var k=b==="one"?c.proxy(f,function(n){c(this).unbind(n,k);return 
f.apply(this,arguments)}):f;if(d==="unload"&&b!=="one")this.one(d,e,f);else{h=0;for(var l=this.length;h0?this.bind(b,d,e):this.trigger(b)};if(c.attrFn)c.attrFn[b]=true});E.attachEvent&&!E.addEventListener&&c(E).bind("unload",function(){for(var a in c.cache)if(c.cache[a].handle)try{c.event.remove(c.cache[a].handle.elem)}catch(b){}}); -(function(){function a(g,j,o,m,p,q){p=0;for(var t=m.length;p0){C=x;break}}x=x[g]}m[p]=C}}}var d=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^\[\]]*\]|['"][^'"]*['"]|[^\[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,e=0,f=Object.prototype.toString,h=false,k=true;[0,0].sort(function(){k=false;return 0});var l=function(g,j,o,m){o=o||[];var p=j=j||u;if(j.nodeType!==1&&j.nodeType!==9)return[];if(!g||typeof g!=="string")return o;var q=[],t,x,C,P,N=true,R=l.isXML(j),Q=g,L;do{d.exec("");if(t=d.exec(Q)){Q=t[3];q.push(t[1]);if(t[2]){P=t[3]; -break}}}while(t);if(q.length>1&&s.exec(g))if(q.length===2&&n.relative[q[0]])x=M(q[0]+q[1],j);else for(x=n.relative[q[0]]?[j]:l(q.shift(),j);q.length;){g=q.shift();if(n.relative[g])g+=q.shift();x=M(g,x)}else{if(!m&&q.length>1&&j.nodeType===9&&!R&&n.match.ID.test(q[0])&&!n.match.ID.test(q[q.length-1])){t=l.find(q.shift(),j,R);j=t.expr?l.filter(t.expr,t.set)[0]:t.set[0]}if(j){t=m?{expr:q.pop(),set:D(m)}:l.find(q.pop(),q.length===1&&(q[0]==="~"||q[0]==="+")&&j.parentNode?j.parentNode:j,R);x=t.expr?l.filter(t.expr, -t.set):t.set;if(q.length>0)C=D(x);else N=false;for(;q.length;){t=L=q.pop();if(n.relative[L])t=q.pop();else L="";if(t==null)t=j;n.relative[L](C,t,R)}}else C=[]}C||(C=x);C||l.error(L||g);if(f.call(C)==="[object Array]")if(N)if(j&&j.nodeType===1)for(g=0;C[g]!=null;g++){if(C[g]&&(C[g]===true||C[g].nodeType===1&&l.contains(j,C[g])))o.push(x[g])}else for(g=0;C[g]!=null;g++)C[g]&&C[g].nodeType===1&&o.push(x[g]);else o.push.apply(o,C);else D(C,o);if(P){l(P,p,o,m);l.uniqueSort(o)}return o};l.uniqueSort=function(g){if(w){h= -k;g.sort(w);if(h)for(var j=1;j0};l.find=function(g,j,o){var m;if(!g)return[];for(var p=0,q=n.order.length;p":function(g,j){var o=typeof j==="string",m,p=0,q=g.length;if(o&&!/\W/.test(j))for(j=j.toLowerCase();p=0))o||m.push(t);else if(o)j[q]=false;return false},ID:function(g){return g[1].replace(/\\/g,"")},TAG:function(g){return g[1].toLowerCase()},CHILD:function(g){if(g[1]==="nth"){var j=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(g[2]==="even"&&"2n"||g[2]==="odd"&&"2n+1"||!/\D/.test(g[2])&&"0n+"+g[2]||g[2]);g[2]=j[1]+(j[2]||1)-0;g[3]=j[3]-0}g[0]=e++;return g},ATTR:function(g,j,o, -m,p,q){j=g[1].replace(/\\/g,"");if(!q&&n.attrMap[j])g[1]=n.attrMap[j];if(g[2]==="~=")g[4]=" "+g[4]+" ";return g},PSEUDO:function(g,j,o,m,p){if(g[1]==="not")if((d.exec(g[3])||"").length>1||/^\w/.test(g[3]))g[3]=l(g[3],null,null,j);else{g=l.filter(g[3],j,o,true^p);o||m.push.apply(m,g);return false}else if(n.match.POS.test(g[0])||n.match.CHILD.test(g[0]))return true;return g},POS:function(g){g.unshift(true);return g}},filters:{enabled:function(g){return g.disabled===false&&g.type!=="hidden"},disabled:function(g){return g.disabled=== -true},checked:function(g){return g.checked===true},selected:function(g){return 
g.selected===true},parent:function(g){return!!g.firstChild},empty:function(g){return!g.firstChild},has:function(g,j,o){return!!l(o[3],g).length},header:function(g){return/h\d/i.test(g.nodeName)},text:function(g){return"text"===g.type},radio:function(g){return"radio"===g.type},checkbox:function(g){return"checkbox"===g.type},file:function(g){return"file"===g.type},password:function(g){return"password"===g.type},submit:function(g){return"submit"=== -g.type},image:function(g){return"image"===g.type},reset:function(g){return"reset"===g.type},button:function(g){return"button"===g.type||g.nodeName.toLowerCase()==="button"},input:function(g){return/input|select|textarea|button/i.test(g.nodeName)}},setFilters:{first:function(g,j){return j===0},last:function(g,j,o,m){return j===m.length-1},even:function(g,j){return j%2===0},odd:function(g,j){return j%2===1},lt:function(g,j,o){return jo[3]-0},nth:function(g,j,o){return o[3]- -0===j},eq:function(g,j,o){return o[3]-0===j}},filter:{PSEUDO:function(g,j,o,m){var p=j[1],q=n.filters[p];if(q)return q(g,o,j,m);else if(p==="contains")return(g.textContent||g.innerText||l.getText([g])||"").indexOf(j[3])>=0;else if(p==="not"){j=j[3];o=0;for(m=j.length;o=0}},ID:function(g,j){return g.nodeType===1&&g.getAttribute("id")===j},TAG:function(g,j){return j==="*"&&g.nodeType===1||g.nodeName.toLowerCase()=== -j},CLASS:function(g,j){return(" "+(g.className||g.getAttribute("class"))+" ").indexOf(j)>-1},ATTR:function(g,j){var o=j[1];o=n.attrHandle[o]?n.attrHandle[o](g):g[o]!=null?g[o]:g.getAttribute(o);var m=o+"",p=j[2],q=j[4];return o==null?p==="!=":p==="="?m===q:p==="*="?m.indexOf(q)>=0:p==="~="?(" "+m+" ").indexOf(q)>=0:!q?m&&o!==false:p==="!="?m!==q:p==="^="?m.indexOf(q)===0:p==="$="?m.substr(m.length-q.length)===q:p==="|="?m===q||m.substr(0,q.length+1)===q+"-":false},POS:function(g,j,o,m){var p=n.setFilters[j[2]]; -if(p)return p(g,o,j,m)}}},s=n.match.POS,v=function(g,j){return"\\"+(j-0+1)},B;for(B in n.match){n.match[B]=RegExp(n.match[B].source+/(?![^\[]*\])(?![^\(]*\))/.source);n.leftMatch[B]=RegExp(/(^(?:.|\r|\n)*?)/.source+n.match[B].source.replace(/\\(\d+)/g,v))}var D=function(g,j){g=Array.prototype.slice.call(g,0);if(j){j.push.apply(j,g);return j}return g};try{Array.prototype.slice.call(u.documentElement.childNodes,0)}catch(H){D=function(g,j){var o=j||[],m=0;if(f.call(g)==="[object Array]")Array.prototype.push.apply(o, -g);else if(typeof g.length==="number")for(var p=g.length;m";var o=u.documentElement;o.insertBefore(g,o.firstChild);if(u.getElementById(j)){n.find.ID=function(m,p,q){if(typeof p.getElementById!=="undefined"&&!q)return(p=p.getElementById(m[1]))?p.id===m[1]||typeof p.getAttributeNode!=="undefined"&&p.getAttributeNode("id").nodeValue===m[1]?[p]:A:[]};n.filter.ID=function(m,p){var q=typeof m.getAttributeNode!=="undefined"&&m.getAttributeNode("id");return m.nodeType===1&&q&&q.nodeValue===p}}o.removeChild(g); -o=g=null})();(function(){var g=u.createElement("div");g.appendChild(u.createComment(""));if(g.getElementsByTagName("*").length>0)n.find.TAG=function(j,o){var m=o.getElementsByTagName(j[1]);if(j[1]==="*"){for(var p=[],q=0;m[q];q++)m[q].nodeType===1&&p.push(m[q]);m=p}return m};g.innerHTML="";if(g.firstChild&&typeof g.firstChild.getAttribute!=="undefined"&&g.firstChild.getAttribute("href")!=="#")n.attrHandle.href=function(j){return j.getAttribute("href",2)};g=null})();u.querySelectorAll&& -function(){var g=l,j=u.createElement("div");j.innerHTML="

    ";if(!(j.querySelectorAll&&j.querySelectorAll(".TEST").length===0)){l=function(m,p,q,t){p=p||u;if(!t&&!l.isXML(p))if(p.nodeType===9)try{return D(p.querySelectorAll(m),q)}catch(x){}else if(p.nodeType===1&&p.nodeName.toLowerCase()!=="object"){var C=p.id,P=p.id="__sizzle__";try{return D(p.querySelectorAll("#"+P+" "+m),q)}catch(N){}finally{if(C)p.id=C;else p.removeAttribute("id")}}return g(m,p,q,t)};for(var o in g)l[o]=g[o]; -j=null}}();(function(){var g=u.documentElement,j=g.matchesSelector||g.mozMatchesSelector||g.webkitMatchesSelector||g.msMatchesSelector,o=false;try{j.call(u.documentElement,":sizzle")}catch(m){o=true}if(j)l.matchesSelector=function(p,q){try{if(o||!n.match.PSEUDO.test(q))return j.call(p,q)}catch(t){}return l(q,null,null,[p]).length>0}})();(function(){var g=u.createElement("div");g.innerHTML="
    ";if(!(!g.getElementsByClassName||g.getElementsByClassName("e").length=== -0)){g.lastChild.className="e";if(g.getElementsByClassName("e").length!==1){n.order.splice(1,0,"CLASS");n.find.CLASS=function(j,o,m){if(typeof o.getElementsByClassName!=="undefined"&&!m)return o.getElementsByClassName(j[1])};g=null}}})();l.contains=u.documentElement.contains?function(g,j){return g!==j&&(g.contains?g.contains(j):true)}:function(g,j){return!!(g.compareDocumentPosition(j)&16)};l.isXML=function(g){return(g=(g?g.ownerDocument||g:0).documentElement)?g.nodeName!=="HTML":false};var M=function(g, -j){for(var o=[],m="",p,q=j.nodeType?[j]:j;p=n.match.PSEUDO.exec(g);){m+=p[0];g=g.replace(n.match.PSEUDO,"")}g=n.relative[g]?g+"*":g;p=0;for(var t=q.length;p0)for(var h=d;h0},closest:function(a, -b){var d=[],e,f,h=this[0];if(c.isArray(a)){var k={},l,n=1;if(h&&a.length){e=0;for(f=a.length;e-1:c(h).is(e))d.push({selector:l,elem:h,level:n})}h=h.parentNode;n++}}return d}k=$a.test(a)?c(a,b||this.context):null;e=0;for(f=this.length;e-1:c.find.matchesSelector(h,a)){d.push(h);break}else{h=h.parentNode;if(!h|| -!h.ownerDocument||h===b)break}d=d.length>1?c.unique(d):d;return this.pushStack(d,"closest",a)},index:function(a){if(!a||typeof a==="string")return c.inArray(this[0],a?c(a):this.parent().children());return c.inArray(a.jquery?a[0]:a,this)},add:function(a,b){var d=typeof a==="string"?c(a,b||this.context):c.makeArray(a),e=c.merge(this.get(),d);return this.pushStack(!d[0]||!d[0].parentNode||d[0].parentNode.nodeType===11||!e[0]||!e[0].parentNode||e[0].parentNode.nodeType===11?e:c.unique(e))},andSelf:function(){return this.add(this.prevObject)}}); -c.each({parent:function(a){return(a=a.parentNode)&&a.nodeType!==11?a:null},parents:function(a){return c.dir(a,"parentNode")},parentsUntil:function(a,b,d){return c.dir(a,"parentNode",d)},next:function(a){return c.nth(a,2,"nextSibling")},prev:function(a){return c.nth(a,2,"previousSibling")},nextAll:function(a){return c.dir(a,"nextSibling")},prevAll:function(a){return c.dir(a,"previousSibling")},nextUntil:function(a,b,d){return c.dir(a,"nextSibling",d)},prevUntil:function(a,b,d){return c.dir(a,"previousSibling", -d)},siblings:function(a){return c.sibling(a.parentNode.firstChild,a)},children:function(a){return c.sibling(a.firstChild)},contents:function(a){return c.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:c.makeArray(a.childNodes)}},function(a,b){c.fn[a]=function(d,e){var f=c.map(this,b,d);Wa.test(a)||(e=d);if(e&&typeof e==="string")f=c.filter(e,f);f=this.length>1?c.unique(f):f;if((this.length>1||Ya.test(e))&&Xa.test(a))f=f.reverse();return this.pushStack(f,a,Za.call(arguments).join(","))}}); -c.extend({filter:function(a,b,d){if(d)a=":not("+a+")";return b.length===1?c.find.matchesSelector(b[0],a)?[b[0]]:[]:c.find.matches(a,b)},dir:function(a,b,d){var e=[];for(a=a[b];a&&a.nodeType!==9&&(d===A||a.nodeType!==1||!c(a).is(d));){a.nodeType===1&&e.push(a);a=a[b]}return e},nth:function(a,b,d){b=b||1;for(var e=0;a;a=a[d])if(a.nodeType===1&&++e===b)break;return a},sibling:function(a,b){for(var d=[];a;a=a.nextSibling)a.nodeType===1&&a!==b&&d.push(a);return d}});var xa=/ jQuery\d+="(?:\d+|null)"/g, -$=/^\s+/,ya=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/ig,za=/<([\w:]+)/,ab=/\s]+\/)>/g,O={option:[1,""],legend:[1,"
    ","
    "],thead:[1,"","
    "],tr:[2,"","
    "],td:[3,"","
    "],col:[2,"","
    "], -area:[1,"",""],_default:[0,"",""]};O.optgroup=O.option;O.tbody=O.tfoot=O.colgroup=O.caption=O.thead;O.th=O.td;if(!c.support.htmlSerialize)O._default=[1,"div
    ","
    "];c.fn.extend({text:function(a){if(c.isFunction(a))return this.each(function(b){var d=c(this);d.text(a.call(this,b,d.text()))});if(typeof a!=="object"&&a!==A)return this.empty().append((this[0]&&this[0].ownerDocument||u).createTextNode(a));return c.text(this)},wrapAll:function(a){if(c.isFunction(a))return this.each(function(d){c(this).wrapAll(a.call(this, -d))});if(this[0]){var b=c(a,this[0].ownerDocument).eq(0).clone(true);this[0].parentNode&&b.insertBefore(this[0]);b.map(function(){for(var d=this;d.firstChild&&d.firstChild.nodeType===1;)d=d.firstChild;return d}).append(this)}return this},wrapInner:function(a){if(c.isFunction(a))return this.each(function(b){c(this).wrapInner(a.call(this,b))});return this.each(function(){var b=c(this),d=b.contents();d.length?d.wrapAll(a):b.append(a)})},wrap:function(a){return this.each(function(){c(this).wrapAll(a)})}, -unwrap:function(){return this.parent().each(function(){c.nodeName(this,"body")||c(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.appendChild(a)})},prepend:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this)});else if(arguments.length){var a= -c(arguments[0]);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this.nextSibling)});else if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,c(arguments[0]).toArray());return a}},remove:function(a,b){for(var d=0,e;(e=this[d])!=null;d++)if(!a||c.filter(a,[e]).length){if(!b&&e.nodeType===1){c.cleanData(e.getElementsByTagName("*")); -c.cleanData([e])}e.parentNode&&e.parentNode.removeChild(e)}return this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++)for(b.nodeType===1&&c.cleanData(b.getElementsByTagName("*"));b.firstChild;)b.removeChild(b.firstChild);return this},clone:function(a){var b=this.map(function(){if(!c.support.noCloneEvent&&!c.isXMLDoc(this)){var d=this.outerHTML,e=this.ownerDocument;if(!d){d=e.createElement("div");d.appendChild(this.cloneNode(true));d=d.innerHTML}return c.clean([d.replace(xa,"").replace(cb,'="$1">').replace($, -"")],e)[0]}else return this.cloneNode(true)});if(a===true){la(this,b);la(this.find("*"),b.find("*"))}return b},html:function(a){if(a===A)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(xa,""):null;else if(typeof a==="string"&&!Aa.test(a)&&(c.support.leadingWhitespace||!$.test(a))&&!O[(za.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(ya,"<$1>");try{for(var b=0,d=this.length;b0||e.cacheable||this.length>1?l.cloneNode(true):l)}k.length&&c.each(k,Ka)}return this}});c.buildFragment=function(a,b,d){var e,f,h;b=b&&b[0]?b[0].ownerDocument||b[0]:u;if(a.length===1&&typeof a[0]==="string"&&a[0].length<512&&b===u&&!Aa.test(a[0])&&(c.support.checkClone|| -!Ba.test(a[0]))){f=true;if(h=c.fragments[a[0]])if(h!==1)e=h}if(!e){e=b.createDocumentFragment();c.clean(a,b,e,d)}if(f)c.fragments[a[0]]=h?e:1;return{fragment:e,cacheable:f}};c.fragments={};c.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){c.fn[a]=function(d){var e=[];d=c(d);var 
f=this.length===1&&this[0].parentNode;if(f&&f.nodeType===11&&f.childNodes.length===1&&d.length===1){d[b](this[0]);return this}else{f=0;for(var h= -d.length;f0?this.clone(true):this).get();c(d[f])[b](k);e=e.concat(k)}return this.pushStack(e,a,d.selector)}}});c.extend({clean:function(a,b,d,e){b=b||u;if(typeof b.createElement==="undefined")b=b.ownerDocument||b[0]&&b[0].ownerDocument||u;for(var f=[],h=0,k;(k=a[h])!=null;h++){if(typeof k==="number")k+="";if(k){if(typeof k==="string"&&!bb.test(k))k=b.createTextNode(k);else if(typeof k==="string"){k=k.replace(ya,"<$1>");var l=(za.exec(k)||["",""])[1].toLowerCase(),n=O[l]||O._default, -s=n[0],v=b.createElement("div");for(v.innerHTML=n[1]+k+n[2];s--;)v=v.lastChild;if(!c.support.tbody){s=ab.test(k);l=l==="table"&&!s?v.firstChild&&v.firstChild.childNodes:n[1]===""&&!s?v.childNodes:[];for(n=l.length-1;n>=0;--n)c.nodeName(l[n],"tbody")&&!l[n].childNodes.length&&l[n].parentNode.removeChild(l[n])}!c.support.leadingWhitespace&&$.test(k)&&v.insertBefore(b.createTextNode($.exec(k)[0]),v.firstChild);k=v.childNodes}if(k.nodeType)f.push(k);else f=c.merge(f,k)}}if(d)for(h=0;f[h];h++)if(e&& -c.nodeName(f[h],"script")&&(!f[h].type||f[h].type.toLowerCase()==="text/javascript"))e.push(f[h].parentNode?f[h].parentNode.removeChild(f[h]):f[h]);else{f[h].nodeType===1&&f.splice.apply(f,[h+1,0].concat(c.makeArray(f[h].getElementsByTagName("script"))));d.appendChild(f[h])}return f},cleanData:function(a){for(var b,d,e=c.cache,f=c.event.special,h=c.support.deleteExpando,k=0,l;(l=a[k])!=null;k++)if(!(l.nodeName&&c.noData[l.nodeName.toLowerCase()]))if(d=l[c.expando]){if((b=e[d])&&b.events)for(var n in b.events)f[n]? -c.event.remove(l,n):c.removeEvent(l,n,b.handle);if(h)delete l[c.expando];else l.removeAttribute&&l.removeAttribute(c.expando);delete e[d]}}});var Ca=/alpha\([^)]*\)/i,db=/opacity=([^)]*)/,eb=/-([a-z])/ig,fb=/([A-Z])/g,Da=/^-?\d+(?:px)?$/i,gb=/^-?\d/,hb={position:"absolute",visibility:"hidden",display:"block"},La=["Left","Right"],Ma=["Top","Bottom"],W,ib=u.defaultView&&u.defaultView.getComputedStyle,jb=function(a,b){return b.toUpperCase()};c.fn.css=function(a,b){if(arguments.length===2&&b===A)return this; -return c.access(this,a,b,true,function(d,e,f){return f!==A?c.style(d,e,f):c.css(d,e)})};c.extend({cssHooks:{opacity:{get:function(a,b){if(b){var d=W(a,"opacity","opacity");return d===""?"1":d}else return a.style.opacity}}},cssNumber:{zIndex:true,fontWeight:true,opacity:true,zoom:true,lineHeight:true},cssProps:{"float":c.support.cssFloat?"cssFloat":"styleFloat"},style:function(a,b,d,e){if(!(!a||a.nodeType===3||a.nodeType===8||!a.style)){var f,h=c.camelCase(b),k=a.style,l=c.cssHooks[h];b=c.cssProps[h]|| -h;if(d!==A){if(!(typeof d==="number"&&isNaN(d)||d==null)){if(typeof d==="number"&&!c.cssNumber[h])d+="px";if(!l||!("set"in l)||(d=l.set(a,d))!==A)try{k[b]=d}catch(n){}}}else{if(l&&"get"in l&&(f=l.get(a,false,e))!==A)return f;return k[b]}}},css:function(a,b,d){var e,f=c.camelCase(b),h=c.cssHooks[f];b=c.cssProps[f]||f;if(h&&"get"in h&&(e=h.get(a,true,d))!==A)return e;else if(W)return W(a,b,f)},swap:function(a,b,d){var e={},f;for(f in b){e[f]=a.style[f];a.style[f]=b[f]}d.call(a);for(f in b)a.style[f]= -e[f]},camelCase:function(a){return a.replace(eb,jb)}});c.curCSS=c.css;c.each(["height","width"],function(a,b){c.cssHooks[b]={get:function(d,e,f){var h;if(e){if(d.offsetWidth!==0)h=ma(d,b,f);else c.swap(d,hb,function(){h=ma(d,b,f)});return h+"px"}},set:function(d,e){if(Da.test(e)){e=parseFloat(e);if(e>=0)return e+"px"}else return 
e}}});if(!c.support.opacity)c.cssHooks.opacity={get:function(a,b){return db.test((b&&a.currentStyle?a.currentStyle.filter:a.style.filter)||"")?parseFloat(RegExp.$1)/100+"": -b?"1":""},set:function(a,b){var d=a.style;d.zoom=1;var e=c.isNaN(b)?"":"alpha(opacity="+b*100+")",f=d.filter||"";d.filter=Ca.test(f)?f.replace(Ca,e):d.filter+" "+e}};if(ib)W=function(a,b,d){var e;d=d.replace(fb,"-$1").toLowerCase();if(!(b=a.ownerDocument.defaultView))return A;if(b=b.getComputedStyle(a,null)){e=b.getPropertyValue(d);if(e===""&&!c.contains(a.ownerDocument.documentElement,a))e=c.style(a,d)}return e};else if(u.documentElement.currentStyle)W=function(a,b){var d,e,f=a.currentStyle&&a.currentStyle[b], -h=a.style;if(!Da.test(f)&&gb.test(f)){d=h.left;e=a.runtimeStyle.left;a.runtimeStyle.left=a.currentStyle.left;h.left=b==="fontSize"?"1em":f||0;f=h.pixelLeft+"px";h.left=d;a.runtimeStyle.left=e}return f};if(c.expr&&c.expr.filters){c.expr.filters.hidden=function(a){var b=a.offsetHeight;return a.offsetWidth===0&&b===0||!c.support.reliableHiddenOffsets&&(a.style.display||c.css(a,"display"))==="none"};c.expr.filters.visible=function(a){return!c.expr.filters.hidden(a)}}var kb=c.now(),lb=/)<[^<]*)*<\/script>/gi, -mb=/^(?:select|textarea)/i,nb=/^(?:color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,ob=/^(?:GET|HEAD|DELETE)$/,Na=/\[\]$/,T=/\=\?(&|$)/,ia=/\?/,pb=/([?&])_=[^&]*/,qb=/^(\w+:)?\/\/([^\/?#]+)/,rb=/%20/g,sb=/#.*$/,Ea=c.fn.load;c.fn.extend({load:function(a,b,d){if(typeof a!=="string"&&Ea)return Ea.apply(this,arguments);else if(!this.length)return this;var e=a.indexOf(" ");if(e>=0){var f=a.slice(e,a.length);a=a.slice(0,e)}e="GET";if(b)if(c.isFunction(b)){d= -b;b=null}else if(typeof b==="object"){b=c.param(b,c.ajaxSettings.traditional);e="POST"}var h=this;c.ajax({url:a,type:e,dataType:"html",data:b,complete:function(k,l){if(l==="success"||l==="notmodified")h.html(f?c("
    ").append(k.responseText.replace(lb,"")).find(f):k.responseText);d&&h.each(d,[k.responseText,l,k])}});return this},serialize:function(){return c.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?c.makeArray(this.elements):this}).filter(function(){return this.name&& -!this.disabled&&(this.checked||mb.test(this.nodeName)||nb.test(this.type))}).map(function(a,b){var d=c(this).val();return d==null?null:c.isArray(d)?c.map(d,function(e){return{name:b.name,value:e}}):{name:b.name,value:d}}).get()}});c.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(a,b){c.fn[b]=function(d){return this.bind(b,d)}});c.extend({get:function(a,b,d,e){if(c.isFunction(b)){e=e||d;d=b;b=null}return c.ajax({type:"GET",url:a,data:b,success:d,dataType:e})}, -getScript:function(a,b){return c.get(a,null,b,"script")},getJSON:function(a,b,d){return c.get(a,b,d,"json")},post:function(a,b,d,e){if(c.isFunction(b)){e=e||d;d=b;b={}}return c.ajax({type:"POST",url:a,data:b,success:d,dataType:e})},ajaxSetup:function(a){c.extend(c.ajaxSettings,a)},ajaxSettings:{url:location.href,global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:function(){return new E.XMLHttpRequest},accepts:{xml:"application/xml, text/xml",html:"text/html", -script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},ajax:function(a){var b=c.extend(true,{},c.ajaxSettings,a),d,e,f,h=b.type.toUpperCase(),k=ob.test(h);b.url=b.url.replace(sb,"");b.context=a&&a.context!=null?a.context:b;if(b.data&&b.processData&&typeof b.data!=="string")b.data=c.param(b.data,b.traditional);if(b.dataType==="jsonp"){if(h==="GET")T.test(b.url)||(b.url+=(ia.test(b.url)?"&":"?")+(b.jsonp||"callback")+"=?");else if(!b.data|| -!T.test(b.data))b.data=(b.data?b.data+"&":"")+(b.jsonp||"callback")+"=?";b.dataType="json"}if(b.dataType==="json"&&(b.data&&T.test(b.data)||T.test(b.url))){d=b.jsonpCallback||"jsonp"+kb++;if(b.data)b.data=(b.data+"").replace(T,"="+d+"$1");b.url=b.url.replace(T,"="+d+"$1");b.dataType="script";var l=E[d];E[d]=function(m){f=m;c.handleSuccess(b,w,e,f);c.handleComplete(b,w,e,f);if(c.isFunction(l))l(m);else{E[d]=A;try{delete E[d]}catch(p){}}v&&v.removeChild(B)}}if(b.dataType==="script"&&b.cache===null)b.cache= -false;if(b.cache===false&&h==="GET"){var n=c.now(),s=b.url.replace(pb,"$1_="+n);b.url=s+(s===b.url?(ia.test(b.url)?"&":"?")+"_="+n:"")}if(b.data&&h==="GET")b.url+=(ia.test(b.url)?"&":"?")+b.data;b.global&&c.active++===0&&c.event.trigger("ajaxStart");n=(n=qb.exec(b.url))&&(n[1]&&n[1]!==location.protocol||n[2]!==location.host);if(b.dataType==="script"&&h==="GET"&&n){var v=u.getElementsByTagName("head")[0]||u.documentElement,B=u.createElement("script");if(b.scriptCharset)B.charset=b.scriptCharset;B.src= -b.url;if(!d){var D=false;B.onload=B.onreadystatechange=function(){if(!D&&(!this.readyState||this.readyState==="loaded"||this.readyState==="complete")){D=true;c.handleSuccess(b,w,e,f);c.handleComplete(b,w,e,f);B.onload=B.onreadystatechange=null;v&&B.parentNode&&v.removeChild(B)}}}v.insertBefore(B,v.firstChild);return A}var H=false,w=b.xhr();if(w){b.username?w.open(h,b.url,b.async,b.username,b.password):w.open(h,b.url,b.async);try{if(b.data!=null&&!k||a&&a.contentType)w.setRequestHeader("Content-Type", 
-b.contentType);if(b.ifModified){c.lastModified[b.url]&&w.setRequestHeader("If-Modified-Since",c.lastModified[b.url]);c.etag[b.url]&&w.setRequestHeader("If-None-Match",c.etag[b.url])}n||w.setRequestHeader("X-Requested-With","XMLHttpRequest");w.setRequestHeader("Accept",b.dataType&&b.accepts[b.dataType]?b.accepts[b.dataType]+", */*; q=0.01":b.accepts._default)}catch(G){}if(b.beforeSend&&b.beforeSend.call(b.context,w,b)===false){b.global&&c.active--===1&&c.event.trigger("ajaxStop");w.abort();return false}b.global&& -c.triggerGlobal(b,"ajaxSend",[w,b]);var M=w.onreadystatechange=function(m){if(!w||w.readyState===0||m==="abort"){H||c.handleComplete(b,w,e,f);H=true;if(w)w.onreadystatechange=c.noop}else if(!H&&w&&(w.readyState===4||m==="timeout")){H=true;w.onreadystatechange=c.noop;e=m==="timeout"?"timeout":!c.httpSuccess(w)?"error":b.ifModified&&c.httpNotModified(w,b.url)?"notmodified":"success";var p;if(e==="success")try{f=c.httpData(w,b.dataType,b)}catch(q){e="parsererror";p=q}if(e==="success"||e==="notmodified")d|| -c.handleSuccess(b,w,e,f);else c.handleError(b,w,e,p);d||c.handleComplete(b,w,e,f);m==="timeout"&&w.abort();if(b.async)w=null}};try{var g=w.abort;w.abort=function(){w&&g.call&&g.call(w);M("abort")}}catch(j){}b.async&&b.timeout>0&&setTimeout(function(){w&&!H&&M("timeout")},b.timeout);try{w.send(k||b.data==null?null:b.data)}catch(o){c.handleError(b,w,null,o);c.handleComplete(b,w,e,f)}b.async||M();return w}},param:function(a,b){var d=[],e=function(h,k){k=c.isFunction(k)?k():k;d[d.length]=encodeURIComponent(h)+ -"="+encodeURIComponent(k)};if(b===A)b=c.ajaxSettings.traditional;if(c.isArray(a)||a.jquery)c.each(a,function(){e(this.name,this.value)});else for(var f in a)ca(f,a[f],b,e);return d.join("&").replace(rb,"+")}});c.extend({active:0,lastModified:{},etag:{},handleError:function(a,b,d,e){a.error&&a.error.call(a.context,b,d,e);a.global&&c.triggerGlobal(a,"ajaxError",[b,a,e])},handleSuccess:function(a,b,d,e){a.success&&a.success.call(a.context,e,d,b);a.global&&c.triggerGlobal(a,"ajaxSuccess",[b,a])},handleComplete:function(a, -b,d){a.complete&&a.complete.call(a.context,b,d);a.global&&c.triggerGlobal(a,"ajaxComplete",[b,a]);a.global&&c.active--===1&&c.event.trigger("ajaxStop")},triggerGlobal:function(a,b,d){(a.context&&a.context.url==null?c(a.context):c.event).trigger(b,d)},httpSuccess:function(a){try{return!a.status&&location.protocol==="file:"||a.status>=200&&a.status<300||a.status===304||a.status===1223}catch(b){}return false},httpNotModified:function(a,b){var d=a.getResponseHeader("Last-Modified"),e=a.getResponseHeader("Etag"); -if(d)c.lastModified[b]=d;if(e)c.etag[b]=e;return a.status===304},httpData:function(a,b,d){var e=a.getResponseHeader("content-type")||"",f=b==="xml"||!b&&e.indexOf("xml")>=0;a=f?a.responseXML:a.responseText;f&&a.documentElement.nodeName==="parsererror"&&c.error("parsererror");if(d&&d.dataFilter)a=d.dataFilter(a,b);if(typeof a==="string")if(b==="json"||!b&&e.indexOf("json")>=0)a=c.parseJSON(a);else if(b==="script"||!b&&e.indexOf("javascript")>=0)c.globalEval(a);return a}});if(E.ActiveXObject)c.ajaxSettings.xhr= -function(){if(E.location.protocol!=="file:")try{return new E.XMLHttpRequest}catch(a){}try{return new E.ActiveXObject("Microsoft.XMLHTTP")}catch(b){}};c.support.ajax=!!c.ajaxSettings.xhr();var 
From 199cd423d19d1ba42ff4550ed252ee3f8f254e14 Mon Sep 17 00:00:00 2001
From: Dan McGee
Date: Wed, 15 Jun 2011 15:01:59 -0500
Subject: Update to jQuery 1.4.4

Signed-off-by: Dan McGee
---
 main/templatetags/cdn.py  |   2 +-
 media/jquery-1.4.3.min.js | 166 ---------------------------------------------
 media/jquery-1.4.4.min.js | 167 ++++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 168 insertions(+), 167 deletions(-)
 delete mode 100644 media/jquery-1.4.3.min.js
 create mode 100644 media/jquery-1.4.4.min.js

diff --git a/main/templatetags/cdn.py b/main/templatetags/cdn.py
index 1923af80..c25040c0 100644
--- a/main/templatetags/cdn.py
+++ b/main/templatetags/cdn.py
@@ -5,7 +5,7 @@

 @register.simple_tag
 def jquery():
-    version = '1.4.3'
+    version = '1.4.4'
     oncdn = getattr(settings, 'CDN_ENABLED', True)
     if oncdn:
         link = 'https://ajax.googleapis.com/ajax/libs/jquery/' \
diff --git a/media/jquery-1.4.3.min.js b/media/jquery-1.4.3.min.js
deleted file mode 100644
index c941a5f7..00000000
--- a/media/jquery-1.4.3.min.js
+++ /dev/null
@@ -1,166 +0,0 @@
-/*!
- * jQuery JavaScript Library v1.4.3
- * http://jquery.com/
- *
- * Copyright 2010, John Resig
- * Dual licensed under the MIT or GPL Version 2 licenses.
- * http://jquery.org/license
- *
- * Includes Sizzle.js
- * http://sizzlejs.com/
- * Copyright 2010, The Dojo Foundation
- * Released under the MIT, BSD, and GPL Licenses.
- *
- * Date: Thu Oct 14 23:10:06 2010 -0400
- */
 [... remaining 151 lines of minified jQuery 1.4.3 source elided ...]
diff --git a/media/jquery-1.4.4.min.js b/media/jquery-1.4.4.min.js
new file mode 100644
index 00000000..8f3ca2e2
--- /dev/null
+++ b/media/jquery-1.4.4.min.js
@@ -0,0 +1,167 @@
+/*!
+ * jQuery JavaScript Library v1.4.4
+ * http://jquery.com/
+ *
+ * Copyright 2010, John Resig
+ * Dual licensed under the MIT or GPL Version 2 licenses.
+ * http://jquery.org/license
+ *
+ * Includes Sizzle.js
+ * http://sizzlejs.com/
+ * Copyright 2010, The Dojo Foundation
+ * Released under the MIT, BSD, and GPL Licenses.
+ *
+ * Date: Thu Nov 11 19:04:53 2010 -0500
+ */
 [... remaining 152 lines of minified jQuery 1.4.4 source elided ...]
f,h;if(d.handler){f=d;d=f.handler}if(!d.guid)d.guid=c.guid++;if(h=c.data(a)){var l=a.nodeType?"events":"__events__",k=h[l],o=h.handle;if(typeof k==="function"){o=k.handle;k=k.events}else if(!k){a.nodeType||(h[l]=h=function(){});h.events=k={}}if(!o)h.handle=o=function(){return typeof c!=="undefined"&&!c.event.triggered?c.event.handle.apply(o.elem, +arguments):B};o.elem=a;b=b.split(" ");for(var x=0,r;l=b[x++];){h=f?c.extend({},f):{handler:d,data:e};if(l.indexOf(".")>-1){r=l.split(".");l=r.shift();h.namespace=r.slice(0).sort().join(".")}else{r=[];h.namespace=""}h.type=l;if(!h.guid)h.guid=d.guid;var A=k[l],C=c.event.special[l]||{};if(!A){A=k[l]=[];if(!C.setup||C.setup.call(a,e,r,o)===false)if(a.addEventListener)a.addEventListener(l,o,false);else a.attachEvent&&a.attachEvent("on"+l,o)}if(C.add){C.add.call(a,h);if(!h.handler.guid)h.handler.guid= +d.guid}A.push(h);c.event.global[l]=true}a=null}}},global:{},remove:function(a,b,d,e){if(!(a.nodeType===3||a.nodeType===8)){if(d===false)d=U;var f,h,l=0,k,o,x,r,A,C,J=a.nodeType?"events":"__events__",w=c.data(a),I=w&&w[J];if(w&&I){if(typeof I==="function"){w=I;I=I.events}if(b&&b.type){d=b.handler;b=b.type}if(!b||typeof b==="string"&&b.charAt(0)==="."){b=b||"";for(f in I)c.event.remove(a,f+b)}else{for(b=b.split(" ");f=b[l++];){r=f;k=f.indexOf(".")<0;o=[];if(!k){o=f.split(".");f=o.shift();x=RegExp("(^|\\.)"+ +c.map(o.slice(0).sort(),Ya).join("\\.(?:.*\\.)?")+"(\\.|$)")}if(A=I[f])if(d){r=c.event.special[f]||{};for(h=e||0;h=0){a.type=f=f.slice(0,-1);a.exclusive=true}if(!d){a.stopPropagation();c.event.global[f]&&c.each(c.cache,function(){this.events&&this.events[f]&&c.event.trigger(a,b,this.handle.elem)})}if(!d||d.nodeType===3||d.nodeType=== +8)return B;a.result=B;a.target=d;b=c.makeArray(b);b.unshift(a)}a.currentTarget=d;(e=d.nodeType?c.data(d,"handle"):(c.data(d,"__events__")||{}).handle)&&e.apply(d,b);e=d.parentNode||d.ownerDocument;try{if(!(d&&d.nodeName&&c.noData[d.nodeName.toLowerCase()]))if(d["on"+f]&&d["on"+f].apply(d,b)===false){a.result=false;a.preventDefault()}}catch(h){}if(!a.isPropagationStopped()&&e)c.event.trigger(a,b,e,true);else if(!a.isDefaultPrevented()){var l;e=a.target;var k=f.replace(X,""),o=c.nodeName(e,"a")&&k=== +"click",x=c.event.special[k]||{};if((!x._default||x._default.call(d,a)===false)&&!o&&!(e&&e.nodeName&&c.noData[e.nodeName.toLowerCase()])){try{if(e[k]){if(l=e["on"+k])e["on"+k]=null;c.event.triggered=true;e[k]()}}catch(r){}if(l)e["on"+k]=l;c.event.triggered=false}}},handle:function(a){var b,d,e,f;d=[];var h=c.makeArray(arguments);a=h[0]=c.event.fix(a||E.event);a.currentTarget=this;b=a.type.indexOf(".")<0&&!a.exclusive;if(!b){e=a.type.split(".");a.type=e.shift();d=e.slice(0).sort();e=RegExp("(^|\\.)"+ +d.join("\\.(?:.*\\.)?")+"(\\.|$)")}a.namespace=a.namespace||d.join(".");f=c.data(this,this.nodeType?"events":"__events__");if(typeof f==="function")f=f.events;d=(f||{})[a.type];if(f&&d){d=d.slice(0);f=0;for(var l=d.length;f-1?c.map(a.options,function(e){return e.selected}).join("-"):"";else if(a.nodeName.toLowerCase()==="select")d=a.selectedIndex;return d},Z=function(a,b){var d=a.target,e,f;if(!(!ia.test(d.nodeName)||d.readOnly)){e=c.data(d,"_change_data");f=xa(d);if(a.type!=="focusout"||d.type!=="radio")c.data(d,"_change_data",f);if(!(e===B||f===e))if(e!=null||f){a.type="change";a.liveFired= +B;return c.event.trigger(a,b,d)}}};c.event.special.change={filters:{focusout:Z,beforedeactivate:Z,click:function(a){var b=a.target,d=b.type;if(d==="radio"||d==="checkbox"||b.nodeName.toLowerCase()==="select")return 
Z.call(this,a)},keydown:function(a){var b=a.target,d=b.type;if(a.keyCode===13&&b.nodeName.toLowerCase()!=="textarea"||a.keyCode===32&&(d==="checkbox"||d==="radio")||d==="select-multiple")return Z.call(this,a)},beforeactivate:function(a){a=a.target;c.data(a,"_change_data",xa(a))}},setup:function(){if(this.type=== +"file")return false;for(var a in V)c.event.add(this,a+".specialChange",V[a]);return ia.test(this.nodeName)},teardown:function(){c.event.remove(this,".specialChange");return ia.test(this.nodeName)}};V=c.event.special.change.filters;V.focus=V.beforeactivate}t.addEventListener&&c.each({focus:"focusin",blur:"focusout"},function(a,b){function d(e){e=c.event.fix(e);e.type=b;return c.event.trigger(e,null,e.target)}c.event.special[b]={setup:function(){ua[b]++===0&&t.addEventListener(a,d,true)},teardown:function(){--ua[b]=== +0&&t.removeEventListener(a,d,true)}}});c.each(["bind","one"],function(a,b){c.fn[b]=function(d,e,f){if(typeof d==="object"){for(var h in d)this[b](h,e,d[h],f);return this}if(c.isFunction(e)||e===false){f=e;e=B}var l=b==="one"?c.proxy(f,function(o){c(this).unbind(o,l);return f.apply(this,arguments)}):f;if(d==="unload"&&b!=="one")this.one(d,e,f);else{h=0;for(var k=this.length;h0?this.bind(b,d,e):this.trigger(b)};if(c.attrFn)c.attrFn[b]=true});E.attachEvent&&!E.addEventListener&&c(E).bind("unload",function(){for(var a in c.cache)if(c.cache[a].handle)try{c.event.remove(c.cache[a].handle.elem)}catch(b){}}); +(function(){function a(g,i,n,m,p,q){p=0;for(var u=m.length;p0){F=y;break}}y=y[g]}m[p]=F}}}var d=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^\[\]]*\]|['"][^'"]*['"]|[^\[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,e=0,f=Object.prototype.toString,h=false,l=true;[0,0].sort(function(){l=false;return 0});var k=function(g,i,n,m){n=n||[];var p=i=i||t;if(i.nodeType!==1&&i.nodeType!==9)return[];if(!g||typeof g!=="string")return n;var q,u,y,F,M,N=true,O=k.isXML(i),D=[],R=g;do{d.exec("");if(q=d.exec(R)){R=q[3];D.push(q[1]);if(q[2]){F=q[3]; +break}}}while(q);if(D.length>1&&x.exec(g))if(D.length===2&&o.relative[D[0]])u=L(D[0]+D[1],i);else for(u=o.relative[D[0]]?[i]:k(D.shift(),i);D.length;){g=D.shift();if(o.relative[g])g+=D.shift();u=L(g,u)}else{if(!m&&D.length>1&&i.nodeType===9&&!O&&o.match.ID.test(D[0])&&!o.match.ID.test(D[D.length-1])){q=k.find(D.shift(),i,O);i=q.expr?k.filter(q.expr,q.set)[0]:q.set[0]}if(i){q=m?{expr:D.pop(),set:C(m)}:k.find(D.pop(),D.length===1&&(D[0]==="~"||D[0]==="+")&&i.parentNode?i.parentNode:i,O);u=q.expr?k.filter(q.expr, +q.set):q.set;if(D.length>0)y=C(u);else N=false;for(;D.length;){q=M=D.pop();if(o.relative[M])q=D.pop();else M="";if(q==null)q=i;o.relative[M](y,q,O)}}else y=[]}y||(y=u);y||k.error(M||g);if(f.call(y)==="[object Array]")if(N)if(i&&i.nodeType===1)for(g=0;y[g]!=null;g++){if(y[g]&&(y[g]===true||y[g].nodeType===1&&k.contains(i,y[g])))n.push(u[g])}else for(g=0;y[g]!=null;g++)y[g]&&y[g].nodeType===1&&n.push(u[g]);else n.push.apply(n,y);else C(y,n);if(F){k(F,p,n,m);k.uniqueSort(n)}return n};k.uniqueSort=function(g){if(w){h= +l;g.sort(w);if(h)for(var i=1;i0};k.find=function(g,i,n){var m;if(!g)return[];for(var p=0,q=o.order.length;p":function(g,i){var n,m=typeof i==="string",p=0,q=g.length;if(m&&!/\W/.test(i))for(i=i.toLowerCase();p=0))n||m.push(u);else if(n)i[q]=false;return false},ID:function(g){return g[1].replace(/\\/g,"")},TAG:function(g){return g[1].toLowerCase()},CHILD:function(g){if(g[1]==="nth"){var 
i=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(g[2]==="even"&&"2n"||g[2]==="odd"&&"2n+1"||!/\D/.test(g[2])&&"0n+"+g[2]||g[2]);g[2]=i[1]+(i[2]||1)-0;g[3]=i[3]-0}g[0]=e++;return g},ATTR:function(g,i,n, +m,p,q){i=g[1].replace(/\\/g,"");if(!q&&o.attrMap[i])g[1]=o.attrMap[i];if(g[2]==="~=")g[4]=" "+g[4]+" ";return g},PSEUDO:function(g,i,n,m,p){if(g[1]==="not")if((d.exec(g[3])||"").length>1||/^\w/.test(g[3]))g[3]=k(g[3],null,null,i);else{g=k.filter(g[3],i,n,true^p);n||m.push.apply(m,g);return false}else if(o.match.POS.test(g[0])||o.match.CHILD.test(g[0]))return true;return g},POS:function(g){g.unshift(true);return g}},filters:{enabled:function(g){return g.disabled===false&&g.type!=="hidden"},disabled:function(g){return g.disabled=== +true},checked:function(g){return g.checked===true},selected:function(g){return g.selected===true},parent:function(g){return!!g.firstChild},empty:function(g){return!g.firstChild},has:function(g,i,n){return!!k(n[3],g).length},header:function(g){return/h\d/i.test(g.nodeName)},text:function(g){return"text"===g.type},radio:function(g){return"radio"===g.type},checkbox:function(g){return"checkbox"===g.type},file:function(g){return"file"===g.type},password:function(g){return"password"===g.type},submit:function(g){return"submit"=== +g.type},image:function(g){return"image"===g.type},reset:function(g){return"reset"===g.type},button:function(g){return"button"===g.type||g.nodeName.toLowerCase()==="button"},input:function(g){return/input|select|textarea|button/i.test(g.nodeName)}},setFilters:{first:function(g,i){return i===0},last:function(g,i,n,m){return i===m.length-1},even:function(g,i){return i%2===0},odd:function(g,i){return i%2===1},lt:function(g,i,n){return in[3]-0},nth:function(g,i,n){return n[3]- +0===i},eq:function(g,i,n){return n[3]-0===i}},filter:{PSEUDO:function(g,i,n,m){var p=i[1],q=o.filters[p];if(q)return q(g,n,i,m);else if(p==="contains")return(g.textContent||g.innerText||k.getText([g])||"").indexOf(i[3])>=0;else if(p==="not"){i=i[3];n=0;for(m=i.length;n=0}},ID:function(g,i){return g.nodeType===1&&g.getAttribute("id")===i},TAG:function(g,i){return i==="*"&&g.nodeType===1||g.nodeName.toLowerCase()=== +i},CLASS:function(g,i){return(" "+(g.className||g.getAttribute("class"))+" ").indexOf(i)>-1},ATTR:function(g,i){var n=i[1];n=o.attrHandle[n]?o.attrHandle[n](g):g[n]!=null?g[n]:g.getAttribute(n);var m=n+"",p=i[2],q=i[4];return n==null?p==="!=":p==="="?m===q:p==="*="?m.indexOf(q)>=0:p==="~="?(" "+m+" ").indexOf(q)>=0:!q?m&&n!==false:p==="!="?m!==q:p==="^="?m.indexOf(q)===0:p==="$="?m.substr(m.length-q.length)===q:p==="|="?m===q||m.substr(0,q.length+1)===q+"-":false},POS:function(g,i,n,m){var p=o.setFilters[i[2]]; +if(p)return p(g,n,i,m)}}},x=o.match.POS,r=function(g,i){return"\\"+(i-0+1)},A;for(A in o.match){o.match[A]=RegExp(o.match[A].source+/(?![^\[]*\])(?![^\(]*\))/.source);o.leftMatch[A]=RegExp(/(^(?:.|\r|\n)*?)/.source+o.match[A].source.replace(/\\(\d+)/g,r))}var C=function(g,i){g=Array.prototype.slice.call(g,0);if(i){i.push.apply(i,g);return i}return g};try{Array.prototype.slice.call(t.documentElement.childNodes,0)}catch(J){C=function(g,i){var n=0,m=i||[];if(f.call(g)==="[object Array]")Array.prototype.push.apply(m, +g);else if(typeof g.length==="number")for(var p=g.length;n";n.insertBefore(g,n.firstChild);if(t.getElementById(i)){o.find.ID=function(m,p,q){if(typeof p.getElementById!=="undefined"&&!q)return(p=p.getElementById(m[1]))?p.id===m[1]||typeof p.getAttributeNode!=="undefined"&&p.getAttributeNode("id").nodeValue===m[1]?[p]:B:[]};o.filter.ID=function(m,p){var q=typeof 
m.getAttributeNode!=="undefined"&&m.getAttributeNode("id");return m.nodeType===1&&q&&q.nodeValue===p}}n.removeChild(g); +n=g=null})();(function(){var g=t.createElement("div");g.appendChild(t.createComment(""));if(g.getElementsByTagName("*").length>0)o.find.TAG=function(i,n){var m=n.getElementsByTagName(i[1]);if(i[1]==="*"){for(var p=[],q=0;m[q];q++)m[q].nodeType===1&&p.push(m[q]);m=p}return m};g.innerHTML="";if(g.firstChild&&typeof g.firstChild.getAttribute!=="undefined"&&g.firstChild.getAttribute("href")!=="#")o.attrHandle.href=function(i){return i.getAttribute("href",2)};g=null})();t.querySelectorAll&& +function(){var g=k,i=t.createElement("div");i.innerHTML="

    ";if(!(i.querySelectorAll&&i.querySelectorAll(".TEST").length===0)){k=function(m,p,q,u){p=p||t;m=m.replace(/\=\s*([^'"\]]*)\s*\]/g,"='$1']");if(!u&&!k.isXML(p))if(p.nodeType===9)try{return C(p.querySelectorAll(m),q)}catch(y){}else if(p.nodeType===1&&p.nodeName.toLowerCase()!=="object"){var F=p.getAttribute("id"),M=F||"__sizzle__";F||p.setAttribute("id",M);try{return C(p.querySelectorAll("#"+M+" "+m),q)}catch(N){}finally{F|| +p.removeAttribute("id")}}return g(m,p,q,u)};for(var n in g)k[n]=g[n];i=null}}();(function(){var g=t.documentElement,i=g.matchesSelector||g.mozMatchesSelector||g.webkitMatchesSelector||g.msMatchesSelector,n=false;try{i.call(t.documentElement,"[test!='']:sizzle")}catch(m){n=true}if(i)k.matchesSelector=function(p,q){q=q.replace(/\=\s*([^'"\]]*)\s*\]/g,"='$1']");if(!k.isXML(p))try{if(n||!o.match.PSEUDO.test(q)&&!/!=/.test(q))return i.call(p,q)}catch(u){}return k(q,null,null,[p]).length>0}})();(function(){var g= +t.createElement("div");g.innerHTML="
    ";if(!(!g.getElementsByClassName||g.getElementsByClassName("e").length===0)){g.lastChild.className="e";if(g.getElementsByClassName("e").length!==1){o.order.splice(1,0,"CLASS");o.find.CLASS=function(i,n,m){if(typeof n.getElementsByClassName!=="undefined"&&!m)return n.getElementsByClassName(i[1])};g=null}}})();k.contains=t.documentElement.contains?function(g,i){return g!==i&&(g.contains?g.contains(i):true)}:t.documentElement.compareDocumentPosition? +function(g,i){return!!(g.compareDocumentPosition(i)&16)}:function(){return false};k.isXML=function(g){return(g=(g?g.ownerDocument||g:0).documentElement)?g.nodeName!=="HTML":false};var L=function(g,i){for(var n,m=[],p="",q=i.nodeType?[i]:i;n=o.match.PSEUDO.exec(g);){p+=n[0];g=g.replace(o.match.PSEUDO,"")}g=o.relative[g]?g+"*":g;n=0;for(var u=q.length;n0)for(var h=d;h0},closest:function(a,b){var d=[],e,f,h=this[0];if(c.isArray(a)){var l,k={},o=1;if(h&&a.length){e=0;for(f=a.length;e-1:c(h).is(e))d.push({selector:l,elem:h,level:o})}h= +h.parentNode;o++}}return d}l=cb.test(a)?c(a,b||this.context):null;e=0;for(f=this.length;e-1:c.find.matchesSelector(h,a)){d.push(h);break}else{h=h.parentNode;if(!h||!h.ownerDocument||h===b)break}d=d.length>1?c.unique(d):d;return this.pushStack(d,"closest",a)},index:function(a){if(!a||typeof a==="string")return c.inArray(this[0],a?c(a):this.parent().children());return c.inArray(a.jquery?a[0]:a,this)},add:function(a,b){var d=typeof a==="string"?c(a,b||this.context): +c.makeArray(a),e=c.merge(this.get(),d);return this.pushStack(!d[0]||!d[0].parentNode||d[0].parentNode.nodeType===11||!e[0]||!e[0].parentNode||e[0].parentNode.nodeType===11?e:c.unique(e))},andSelf:function(){return this.add(this.prevObject)}});c.each({parent:function(a){return(a=a.parentNode)&&a.nodeType!==11?a:null},parents:function(a){return c.dir(a,"parentNode")},parentsUntil:function(a,b,d){return c.dir(a,"parentNode",d)},next:function(a){return c.nth(a,2,"nextSibling")},prev:function(a){return c.nth(a, +2,"previousSibling")},nextAll:function(a){return c.dir(a,"nextSibling")},prevAll:function(a){return c.dir(a,"previousSibling")},nextUntil:function(a,b,d){return c.dir(a,"nextSibling",d)},prevUntil:function(a,b,d){return c.dir(a,"previousSibling",d)},siblings:function(a){return c.sibling(a.parentNode.firstChild,a)},children:function(a){return c.sibling(a.firstChild)},contents:function(a){return c.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:c.makeArray(a.childNodes)}},function(a, +b){c.fn[a]=function(d,e){var f=c.map(this,b,d);Za.test(a)||(e=d);if(e&&typeof e==="string")f=c.filter(e,f);f=this.length>1?c.unique(f):f;if((this.length>1||ab.test(e))&&$a.test(a))f=f.reverse();return this.pushStack(f,a,bb.call(arguments).join(","))}});c.extend({filter:function(a,b,d){if(d)a=":not("+a+")";return b.length===1?c.find.matchesSelector(b[0],a)?[b[0]]:[]:c.find.matches(a,b)},dir:function(a,b,d){var e=[];for(a=a[b];a&&a.nodeType!==9&&(d===B||a.nodeType!==1||!c(a).is(d));){a.nodeType===1&& +e.push(a);a=a[b]}return e},nth:function(a,b,d){b=b||1;for(var e=0;a;a=a[d])if(a.nodeType===1&&++e===b)break;return a},sibling:function(a,b){for(var d=[];a;a=a.nextSibling)a.nodeType===1&&a!==b&&d.push(a);return d}});var za=/ jQuery\d+="(?:\d+|null)"/g,$=/^\s+/,Aa=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/ig,Ba=/<([\w:]+)/,db=/\s]+\/)>/g,P={option:[1, +""],legend:[1,"
    ","
    "],thead:[1,"","
    "],tr:[2,"","
    "],td:[3,"","
    "],col:[2,"","
    "],area:[1,"",""],_default:[0,"",""]};P.optgroup=P.option;P.tbody=P.tfoot=P.colgroup=P.caption=P.thead;P.th=P.td;if(!c.support.htmlSerialize)P._default=[1,"div
    ","
    "];c.fn.extend({text:function(a){if(c.isFunction(a))return this.each(function(b){var d= +c(this);d.text(a.call(this,b,d.text()))});if(typeof a!=="object"&&a!==B)return this.empty().append((this[0]&&this[0].ownerDocument||t).createTextNode(a));return c.text(this)},wrapAll:function(a){if(c.isFunction(a))return this.each(function(d){c(this).wrapAll(a.call(this,d))});if(this[0]){var b=c(a,this[0].ownerDocument).eq(0).clone(true);this[0].parentNode&&b.insertBefore(this[0]);b.map(function(){for(var d=this;d.firstChild&&d.firstChild.nodeType===1;)d=d.firstChild;return d}).append(this)}return this}, +wrapInner:function(a){if(c.isFunction(a))return this.each(function(b){c(this).wrapInner(a.call(this,b))});return this.each(function(){var b=c(this),d=b.contents();d.length?d.wrapAll(a):b.append(a)})},wrap:function(a){return this.each(function(){c(this).wrapAll(a)})},unwrap:function(){return this.parent().each(function(){c.nodeName(this,"body")||c(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.appendChild(a)})}, +prepend:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this)});else if(arguments.length){var a=c(arguments[0]);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b, +this.nextSibling)});else if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,c(arguments[0]).toArray());return a}},remove:function(a,b){for(var d=0,e;(e=this[d])!=null;d++)if(!a||c.filter(a,[e]).length){if(!b&&e.nodeType===1){c.cleanData(e.getElementsByTagName("*"));c.cleanData([e])}e.parentNode&&e.parentNode.removeChild(e)}return this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++)for(b.nodeType===1&&c.cleanData(b.getElementsByTagName("*"));b.firstChild;)b.removeChild(b.firstChild); +return this},clone:function(a){var b=this.map(function(){if(!c.support.noCloneEvent&&!c.isXMLDoc(this)){var d=this.outerHTML,e=this.ownerDocument;if(!d){d=e.createElement("div");d.appendChild(this.cloneNode(true));d=d.innerHTML}return c.clean([d.replace(za,"").replace(fb,'="$1">').replace($,"")],e)[0]}else return this.cloneNode(true)});if(a===true){na(this,b);na(this.find("*"),b.find("*"))}return b},html:function(a){if(a===B)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(za,""):null; +else if(typeof a==="string"&&!Ca.test(a)&&(c.support.leadingWhitespace||!$.test(a))&&!P[(Ba.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Aa,"<$1>");try{for(var b=0,d=this.length;b0||e.cacheable||this.length>1?h.cloneNode(true):h)}k.length&&c.each(k,Oa)}return this}});c.buildFragment=function(a,b,d){var e,f,h;b=b&&b[0]?b[0].ownerDocument||b[0]:t;if(a.length===1&&typeof a[0]==="string"&&a[0].length<512&&b===t&&!Ca.test(a[0])&&(c.support.checkClone||!Da.test(a[0]))){f=true;if(h=c.fragments[a[0]])if(h!==1)e=h}if(!e){e=b.createDocumentFragment();c.clean(a,b,e,d)}if(f)c.fragments[a[0]]=h?e:1;return{fragment:e,cacheable:f}};c.fragments={};c.each({appendTo:"append", +prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){c.fn[a]=function(d){var e=[];d=c(d);var 
f=this.length===1&&this[0].parentNode;if(f&&f.nodeType===11&&f.childNodes.length===1&&d.length===1){d[b](this[0]);return this}else{f=0;for(var h=d.length;f0?this.clone(true):this).get();c(d[f])[b](l);e=e.concat(l)}return this.pushStack(e,a,d.selector)}}});c.extend({clean:function(a,b,d,e){b=b||t;if(typeof b.createElement==="undefined")b=b.ownerDocument|| +b[0]&&b[0].ownerDocument||t;for(var f=[],h=0,l;(l=a[h])!=null;h++){if(typeof l==="number")l+="";if(l){if(typeof l==="string"&&!eb.test(l))l=b.createTextNode(l);else if(typeof l==="string"){l=l.replace(Aa,"<$1>");var k=(Ba.exec(l)||["",""])[1].toLowerCase(),o=P[k]||P._default,x=o[0],r=b.createElement("div");for(r.innerHTML=o[1]+l+o[2];x--;)r=r.lastChild;if(!c.support.tbody){x=db.test(l);k=k==="table"&&!x?r.firstChild&&r.firstChild.childNodes:o[1]===""&&!x?r.childNodes:[];for(o=k.length- +1;o>=0;--o)c.nodeName(k[o],"tbody")&&!k[o].childNodes.length&&k[o].parentNode.removeChild(k[o])}!c.support.leadingWhitespace&&$.test(l)&&r.insertBefore(b.createTextNode($.exec(l)[0]),r.firstChild);l=r.childNodes}if(l.nodeType)f.push(l);else f=c.merge(f,l)}}if(d)for(h=0;f[h];h++)if(e&&c.nodeName(f[h],"script")&&(!f[h].type||f[h].type.toLowerCase()==="text/javascript"))e.push(f[h].parentNode?f[h].parentNode.removeChild(f[h]):f[h]);else{f[h].nodeType===1&&f.splice.apply(f,[h+1,0].concat(c.makeArray(f[h].getElementsByTagName("script")))); +d.appendChild(f[h])}return f},cleanData:function(a){for(var b,d,e=c.cache,f=c.event.special,h=c.support.deleteExpando,l=0,k;(k=a[l])!=null;l++)if(!(k.nodeName&&c.noData[k.nodeName.toLowerCase()]))if(d=k[c.expando]){if((b=e[d])&&b.events)for(var o in b.events)f[o]?c.event.remove(k,o):c.removeEvent(k,o,b.handle);if(h)delete k[c.expando];else k.removeAttribute&&k.removeAttribute(c.expando);delete e[d]}}});var Ea=/alpha\([^)]*\)/i,gb=/opacity=([^)]*)/,hb=/-([a-z])/ig,ib=/([A-Z])/g,Fa=/^-?\d+(?:px)?$/i, +jb=/^-?\d/,kb={position:"absolute",visibility:"hidden",display:"block"},Pa=["Left","Right"],Qa=["Top","Bottom"],W,Ga,aa,lb=function(a,b){return b.toUpperCase()};c.fn.css=function(a,b){if(arguments.length===2&&b===B)return this;return c.access(this,a,b,true,function(d,e,f){return f!==B?c.style(d,e,f):c.css(d,e)})};c.extend({cssHooks:{opacity:{get:function(a,b){if(b){var d=W(a,"opacity","opacity");return d===""?"1":d}else return a.style.opacity}}},cssNumber:{zIndex:true,fontWeight:true,opacity:true, +zoom:true,lineHeight:true},cssProps:{"float":c.support.cssFloat?"cssFloat":"styleFloat"},style:function(a,b,d,e){if(!(!a||a.nodeType===3||a.nodeType===8||!a.style)){var f,h=c.camelCase(b),l=a.style,k=c.cssHooks[h];b=c.cssProps[h]||h;if(d!==B){if(!(typeof d==="number"&&isNaN(d)||d==null)){if(typeof d==="number"&&!c.cssNumber[h])d+="px";if(!k||!("set"in k)||(d=k.set(a,d))!==B)try{l[b]=d}catch(o){}}}else{if(k&&"get"in k&&(f=k.get(a,false,e))!==B)return f;return l[b]}}},css:function(a,b,d){var e,f=c.camelCase(b), +h=c.cssHooks[f];b=c.cssProps[f]||f;if(h&&"get"in h&&(e=h.get(a,true,d))!==B)return e;else if(W)return W(a,b,f)},swap:function(a,b,d){var e={},f;for(f in b){e[f]=a.style[f];a.style[f]=b[f]}d.call(a);for(f in b)a.style[f]=e[f]},camelCase:function(a){return a.replace(hb,lb)}});c.curCSS=c.css;c.each(["height","width"],function(a,b){c.cssHooks[b]={get:function(d,e,f){var h;if(e){if(d.offsetWidth!==0)h=oa(d,b,f);else c.swap(d,kb,function(){h=oa(d,b,f)});if(h<=0){h=W(d,b,b);if(h==="0px"&&aa)h=aa(d,b,b); +if(h!=null)return h===""||h==="auto"?"0px":h}if(h<0||h==null){h=d.style[b];return h===""||h==="auto"?"0px":h}return typeof 
h==="string"?h:h+"px"}},set:function(d,e){if(Fa.test(e)){e=parseFloat(e);if(e>=0)return e+"px"}else return e}}});if(!c.support.opacity)c.cssHooks.opacity={get:function(a,b){return gb.test((b&&a.currentStyle?a.currentStyle.filter:a.style.filter)||"")?parseFloat(RegExp.$1)/100+"":b?"1":""},set:function(a,b){var d=a.style;d.zoom=1;var e=c.isNaN(b)?"":"alpha(opacity="+b*100+")",f= +d.filter||"";d.filter=Ea.test(f)?f.replace(Ea,e):d.filter+" "+e}};if(t.defaultView&&t.defaultView.getComputedStyle)Ga=function(a,b,d){var e;d=d.replace(ib,"-$1").toLowerCase();if(!(b=a.ownerDocument.defaultView))return B;if(b=b.getComputedStyle(a,null)){e=b.getPropertyValue(d);if(e===""&&!c.contains(a.ownerDocument.documentElement,a))e=c.style(a,d)}return e};if(t.documentElement.currentStyle)aa=function(a,b){var d,e,f=a.currentStyle&&a.currentStyle[b],h=a.style;if(!Fa.test(f)&&jb.test(f)){d=h.left; +e=a.runtimeStyle.left;a.runtimeStyle.left=a.currentStyle.left;h.left=b==="fontSize"?"1em":f||0;f=h.pixelLeft+"px";h.left=d;a.runtimeStyle.left=e}return f===""?"auto":f};W=Ga||aa;if(c.expr&&c.expr.filters){c.expr.filters.hidden=function(a){var b=a.offsetHeight;return a.offsetWidth===0&&b===0||!c.support.reliableHiddenOffsets&&(a.style.display||c.css(a,"display"))==="none"};c.expr.filters.visible=function(a){return!c.expr.filters.hidden(a)}}var mb=c.now(),nb=/)<[^<]*)*<\/script>/gi, +ob=/^(?:select|textarea)/i,pb=/^(?:color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,qb=/^(?:GET|HEAD)$/,Ra=/\[\]$/,T=/\=\?(&|$)/,ja=/\?/,rb=/([?&])_=[^&]*/,sb=/^(\w+:)?\/\/([^\/?#]+)/,tb=/%20/g,ub=/#.*$/,Ha=c.fn.load;c.fn.extend({load:function(a,b,d){if(typeof a!=="string"&&Ha)return Ha.apply(this,arguments);else if(!this.length)return this;var e=a.indexOf(" ");if(e>=0){var f=a.slice(e,a.length);a=a.slice(0,e)}e="GET";if(b)if(c.isFunction(b)){d=b;b=null}else if(typeof b=== +"object"){b=c.param(b,c.ajaxSettings.traditional);e="POST"}var h=this;c.ajax({url:a,type:e,dataType:"html",data:b,complete:function(l,k){if(k==="success"||k==="notmodified")h.html(f?c("
    ").append(l.responseText.replace(nb,"")).find(f):l.responseText);d&&h.each(d,[l.responseText,k,l])}});return this},serialize:function(){return c.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?c.makeArray(this.elements):this}).filter(function(){return this.name&& +!this.disabled&&(this.checked||ob.test(this.nodeName)||pb.test(this.type))}).map(function(a,b){var d=c(this).val();return d==null?null:c.isArray(d)?c.map(d,function(e){return{name:b.name,value:e}}):{name:b.name,value:d}}).get()}});c.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(a,b){c.fn[b]=function(d){return this.bind(b,d)}});c.extend({get:function(a,b,d,e){if(c.isFunction(b)){e=e||d;d=b;b=null}return c.ajax({type:"GET",url:a,data:b,success:d,dataType:e})}, +getScript:function(a,b){return c.get(a,null,b,"script")},getJSON:function(a,b,d){return c.get(a,b,d,"json")},post:function(a,b,d,e){if(c.isFunction(b)){e=e||d;d=b;b={}}return c.ajax({type:"POST",url:a,data:b,success:d,dataType:e})},ajaxSetup:function(a){c.extend(c.ajaxSettings,a)},ajaxSettings:{url:location.href,global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:function(){return new E.XMLHttpRequest},accepts:{xml:"application/xml, text/xml",html:"text/html", +script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},ajax:function(a){var b=c.extend(true,{},c.ajaxSettings,a),d,e,f,h=b.type.toUpperCase(),l=qb.test(h);b.url=b.url.replace(ub,"");b.context=a&&a.context!=null?a.context:b;if(b.data&&b.processData&&typeof b.data!=="string")b.data=c.param(b.data,b.traditional);if(b.dataType==="jsonp"){if(h==="GET")T.test(b.url)||(b.url+=(ja.test(b.url)?"&":"?")+(b.jsonp||"callback")+"=?");else if(!b.data|| +!T.test(b.data))b.data=(b.data?b.data+"&":"")+(b.jsonp||"callback")+"=?";b.dataType="json"}if(b.dataType==="json"&&(b.data&&T.test(b.data)||T.test(b.url))){d=b.jsonpCallback||"jsonp"+mb++;if(b.data)b.data=(b.data+"").replace(T,"="+d+"$1");b.url=b.url.replace(T,"="+d+"$1");b.dataType="script";var k=E[d];E[d]=function(m){if(c.isFunction(k))k(m);else{E[d]=B;try{delete E[d]}catch(p){}}f=m;c.handleSuccess(b,w,e,f);c.handleComplete(b,w,e,f);r&&r.removeChild(A)}}if(b.dataType==="script"&&b.cache===null)b.cache= +false;if(b.cache===false&&l){var o=c.now(),x=b.url.replace(rb,"$1_="+o);b.url=x+(x===b.url?(ja.test(b.url)?"&":"?")+"_="+o:"")}if(b.data&&l)b.url+=(ja.test(b.url)?"&":"?")+b.data;b.global&&c.active++===0&&c.event.trigger("ajaxStart");o=(o=sb.exec(b.url))&&(o[1]&&o[1].toLowerCase()!==location.protocol||o[2].toLowerCase()!==location.host);if(b.dataType==="script"&&h==="GET"&&o){var r=t.getElementsByTagName("head")[0]||t.documentElement,A=t.createElement("script");if(b.scriptCharset)A.charset=b.scriptCharset; +A.src=b.url;if(!d){var C=false;A.onload=A.onreadystatechange=function(){if(!C&&(!this.readyState||this.readyState==="loaded"||this.readyState==="complete")){C=true;c.handleSuccess(b,w,e,f);c.handleComplete(b,w,e,f);A.onload=A.onreadystatechange=null;r&&A.parentNode&&r.removeChild(A)}}}r.insertBefore(A,r.firstChild);return B}var J=false,w=b.xhr();if(w){b.username?w.open(h,b.url,b.async,b.username,b.password):w.open(h,b.url,b.async);try{if(b.data!=null&&!l||a&&a.contentType)w.setRequestHeader("Content-Type", 
+b.contentType);if(b.ifModified){c.lastModified[b.url]&&w.setRequestHeader("If-Modified-Since",c.lastModified[b.url]);c.etag[b.url]&&w.setRequestHeader("If-None-Match",c.etag[b.url])}o||w.setRequestHeader("X-Requested-With","XMLHttpRequest");w.setRequestHeader("Accept",b.dataType&&b.accepts[b.dataType]?b.accepts[b.dataType]+", */*; q=0.01":b.accepts._default)}catch(I){}if(b.beforeSend&&b.beforeSend.call(b.context,w,b)===false){b.global&&c.active--===1&&c.event.trigger("ajaxStop");w.abort();return false}b.global&& +c.triggerGlobal(b,"ajaxSend",[w,b]);var L=w.onreadystatechange=function(m){if(!w||w.readyState===0||m==="abort"){J||c.handleComplete(b,w,e,f);J=true;if(w)w.onreadystatechange=c.noop}else if(!J&&w&&(w.readyState===4||m==="timeout")){J=true;w.onreadystatechange=c.noop;e=m==="timeout"?"timeout":!c.httpSuccess(w)?"error":b.ifModified&&c.httpNotModified(w,b.url)?"notmodified":"success";var p;if(e==="success")try{f=c.httpData(w,b.dataType,b)}catch(q){e="parsererror";p=q}if(e==="success"||e==="notmodified")d|| +c.handleSuccess(b,w,e,f);else c.handleError(b,w,e,p);d||c.handleComplete(b,w,e,f);m==="timeout"&&w.abort();if(b.async)w=null}};try{var g=w.abort;w.abort=function(){w&&Function.prototype.call.call(g,w);L("abort")}}catch(i){}b.async&&b.timeout>0&&setTimeout(function(){w&&!J&&L("timeout")},b.timeout);try{w.send(l||b.data==null?null:b.data)}catch(n){c.handleError(b,w,null,n);c.handleComplete(b,w,e,f)}b.async||L();return w}},param:function(a,b){var d=[],e=function(h,l){l=c.isFunction(l)?l():l;d[d.length]= +encodeURIComponent(h)+"="+encodeURIComponent(l)};if(b===B)b=c.ajaxSettings.traditional;if(c.isArray(a)||a.jquery)c.each(a,function(){e(this.name,this.value)});else for(var f in a)da(f,a[f],b,e);return d.join("&").replace(tb,"+")}});c.extend({active:0,lastModified:{},etag:{},handleError:function(a,b,d,e){a.error&&a.error.call(a.context,b,d,e);a.global&&c.triggerGlobal(a,"ajaxError",[b,a,e])},handleSuccess:function(a,b,d,e){a.success&&a.success.call(a.context,e,d,b);a.global&&c.triggerGlobal(a,"ajaxSuccess", +[b,a])},handleComplete:function(a,b,d){a.complete&&a.complete.call(a.context,b,d);a.global&&c.triggerGlobal(a,"ajaxComplete",[b,a]);a.global&&c.active--===1&&c.event.trigger("ajaxStop")},triggerGlobal:function(a,b,d){(a.context&&a.context.url==null?c(a.context):c.event).trigger(b,d)},httpSuccess:function(a){try{return!a.status&&location.protocol==="file:"||a.status>=200&&a.status<300||a.status===304||a.status===1223}catch(b){}return false},httpNotModified:function(a,b){var d=a.getResponseHeader("Last-Modified"), +e=a.getResponseHeader("Etag");if(d)c.lastModified[b]=d;if(e)c.etag[b]=e;return a.status===304},httpData:function(a,b,d){var e=a.getResponseHeader("content-type")||"",f=b==="xml"||!b&&e.indexOf("xml")>=0;a=f?a.responseXML:a.responseText;f&&a.documentElement.nodeName==="parsererror"&&c.error("parsererror");if(d&&d.dataFilter)a=d.dataFilter(a,b);if(typeof a==="string")if(b==="json"||!b&&e.indexOf("json")>=0)a=c.parseJSON(a);else if(b==="script"||!b&&e.indexOf("javascript")>=0)c.globalEval(a);return a}}); +if(E.ActiveXObject)c.ajaxSettings.xhr=function(){if(E.location.protocol!=="file:")try{return new E.XMLHttpRequest}catch(a){}try{return new E.ActiveXObject("Microsoft.XMLHTTP")}catch(b){}};c.support.ajax=!!c.ajaxSettings.xhr();var 
ea={},vb=/^(?:toggle|show|hide)$/,wb=/^([+\-]=)?([\d+.\-]+)(.*)$/,ba,pa=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];c.fn.extend({show:function(a,b,d){if(a||a===0)return this.animate(S("show", +3),a,b,d);else{d=0;for(var e=this.length;d=0;e--)if(d[e].elem===this){b&&d[e](true);d.splice(e,1)}});b||this.dequeue();return this}});c.each({slideDown:S("show",1),slideUp:S("hide",1),slideToggle:S("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(a,b){c.fn[a]=function(d,e,f){return this.animate(b, +d,e,f)}});c.extend({speed:function(a,b,d){var e=a&&typeof a==="object"?c.extend({},a):{complete:d||!d&&b||c.isFunction(a)&&a,duration:a,easing:d&&b||b&&!c.isFunction(b)&&b};e.duration=c.fx.off?0:typeof e.duration==="number"?e.duration:e.duration in c.fx.speeds?c.fx.speeds[e.duration]:c.fx.speeds._default;e.old=e.complete;e.complete=function(){e.queue!==false&&c(this).dequeue();c.isFunction(e.old)&&e.old.call(this)};return e},easing:{linear:function(a,b,d,e){return d+e*a},swing:function(a,b,d,e){return(-Math.cos(a* +Math.PI)/2+0.5)*e+d}},timers:[],fx:function(a,b,d){this.options=b;this.elem=a;this.prop=d;if(!b.orig)b.orig={}}});c.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this);(c.fx.step[this.prop]||c.fx.step._default)(this)},cur:function(){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];var a=parseFloat(c.css(this.elem,this.prop));return a&&a>-1E4?a:0},custom:function(a,b,d){function e(l){return f.step(l)} +var f=this,h=c.fx;this.startTime=c.now();this.start=a;this.end=b;this.unit=d||this.unit||"px";this.now=this.start;this.pos=this.state=0;e.elem=this.elem;if(e()&&c.timers.push(e)&&!ba)ba=setInterval(h.tick,h.interval)},show:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.show=true;this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur());c(this.elem).show()},hide:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.hide=true; +this.custom(this.cur(),0)},step:function(a){var b=c.now(),d=true;if(a||b>=this.options.duration+this.startTime){this.now=this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;for(var e in this.options.curAnim)if(this.options.curAnim[e]!==true)d=false;if(d){if(this.options.overflow!=null&&!c.support.shrinkWrapBlocks){var f=this.elem,h=this.options;c.each(["","X","Y"],function(k,o){f.style["overflow"+o]=h.overflow[k]})}this.options.hide&&c(this.elem).hide();if(this.options.hide|| +this.options.show)for(var l in this.options.curAnim)c.style(this.elem,l,this.options.orig[l]);this.options.complete.call(this.elem)}return false}else{a=b-this.startTime;this.state=a/this.options.duration;b=this.options.easing||(c.easing.swing?"swing":"linear");this.pos=c.easing[this.options.specialEasing&&this.options.specialEasing[this.prop]||b](this.state,a,0,1,this.options.duration);this.now=this.start+(this.end-this.start)*this.pos;this.update()}return true}};c.extend(c.fx,{tick:function(){for(var a= +c.timers,b=0;b-1;e={};var x={};if(o)x=f.position();l=o?x.top:parseInt(l,10)||0;k=o?x.left:parseInt(k,10)||0;if(c.isFunction(b))b=b.call(a,d,h);if(b.top!=null)e.top=b.top-h.top+l;if(b.left!=null)e.left=b.left-h.left+k;"using"in b?b.using.call(a, +e):f.css(e)}};c.fn.extend({position:function(){if(!this[0])return null;var 
-- cgit v1.2.3-54-g00ecf


From bbb81e86c4f7327a3c263fffec20f50fe1df9060 Mon Sep 17 00:00:00 2001
From: Dan McGee
Date: Wed, 15 Jun 2011 15:02:59 -0500
Subject: Add CSS for staging repos packages

Signed-off-by: Dan McGee
---
 media/archweb.css | 1 +
 1 file changed, 1 insertion(+)

diff --git a/media/archweb.css b/media/archweb.css
index 9832d586..fce88e21 100644
--- a/media/archweb.css
+++ b/media/archweb.css
@@ -125,6 +125,7 @@ ul.errorlist { color: red; }
 #pkg-updates td.pkg-name { white-space: normal; }
 #pkg-updates td.pkg-arch { text-align: right; }
 #pkg-updates span.testing, #pkg-updates span.community-testing, span.multilib-testing { font-style: italic; }
+#pkg-updates span.staging, #pkg-updates span.community-staging, span.multilib-staging { font-style: italic; color: #ff8040; }
 
 /* home: sidebar navigation */
 div#nav-sidebar ul { list-style: none; margin: 0.5em 0 0.5em 1em; padding: 0; }
-- cgit v1.2.3-54-g00ecf


From eb231660b017eab886ecd98828bf0295e36a1112 Mon Sep 17 00:00:00 2001
From: Dan McGee
Date: Wed, 15 Jun 2011 15:13:11 -0500
Subject: PositiveBigIntegerField: set get_internal_type() correctly

This is the key Django uses to return the DB type that matches this
field. Since we are basically just ripping off a BigIntegerField, we
can use its database types. This makes my prior checked-in migrations
actually work on MySQL.
Signed-off-by: Dan McGee
---
 main/models.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/main/models.py b/main/models.py
index 57e9650e..eae55c8b 100644
--- a/main/models.py
+++ b/main/models.py
@@ -15,7 +15,7 @@ class PositiveBigIntegerField(models.BigIntegerField):
     _south_introspects = True
 
     def get_internal_type(self):
-        return "PositiveBigIntegerField"
+        return "BigIntegerField"
 
     def formfield(self, **kwargs):
         defaults = {'min_value': 0}
-- cgit v1.2.3-54-g00ecf
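As background for this fix: get_internal_type() is the hook Django's backends consult when mapping a field class to a concrete column type, so returning the name of a stock field reuses that field's per-backend column definitions. A minimal sketch of the same trick — the field below is hypothetical, not something in the archweb tree:

    from django.db import models

    class UnsignedSmallIntegerField(models.SmallIntegerField):
        def get_internal_type(self):
            # Reuse SmallIntegerField's column type on every backend;
            # returning a name Django doesn't know would require
            # overriding db_type() instead.
            return "SmallIntegerField"

        def formfield(self, **kwargs):
            # Enforce the unsigned constraint at the form layer only.
            defaults = {'min_value': 0}
            defaults.update(kwargs)
            return super(UnsignedSmallIntegerField, self).formfield(**defaults)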
From 4a9b6867a3a2786435316ab7deefa54257bb931d Mon Sep 17 00:00:00 2001
From: Dan McGee
Date: Wed, 15 Jun 2011 15:50:14 -0500
Subject: Refactor common select_related into manager method

For a Package object query, we almost always did
.select_related('arch', 'repo'). Refactor this into the manager as a
'normal()' method so we can avoid sprinkling the same logic everywhere.

Signed-off-by: Dan McGee
---
 devel/views.py    |  4 ++--
 feeds.py          |  2 +-
 main/models.py    | 11 ++++++-----
 packages/utils.py |  2 +-
 packages/views.py | 15 +++++++--------
 public/utils.py   |  6 +++---
 sitemaps.py       |  3 +--
 7 files changed, 21 insertions(+), 22 deletions(-)

diff --git a/devel/views.py b/devel/views.py
index 2b8bd43e..1827f2ac 100644
--- a/devel/views.py
+++ b/devel/views.py
@@ -32,7 +32,7 @@ def index(request):
     '''the developer dashboard'''
     inner_q = PackageRelation.objects.filter(user=request.user).values('pkgbase')
-    flagged = Package.objects.select_related('arch', 'repo').filter(
+    flagged = Package.objects.normal().filter(
             flag_date__isnull=False, pkgbase__in=inner_q).order_by('pkgname')
 
     todopkgs = TodolistPkg.objects.select_related(
@@ -135,7 +135,7 @@ def change_profile(request):
 
 @login_required
 def report(request, report, username=None):
     title = 'Developer Report'
-    packages = Package.objects.select_related('arch', 'repo')
+    packages = Package.objects.normal()
     names = attrs = user = None
 
     if report == 'old':

diff --git a/feeds.py b/feeds.py
index 269d0a38..74ae9ff9 100644
--- a/feeds.py
+++ b/feeds.py
@@ -51,7 +51,7 @@ def __call__(self, request, *args, **kwargs):
 
     def get_object(self, request, arch='', repo=''):
         obj = dict()
-        qs = Package.objects.select_related('arch', 'repo').order_by(
+        qs = Package.objects.normal().order_by(
                 '-last_update')
 
         if arch != '':

diff --git a/main/models.py b/main/models.py
index eae55c8b..677647ac 100644
--- a/main/models.py
+++ b/main/models.py
@@ -74,8 +74,10 @@ def incomplete(self):
 
 class PackageManager(models.Manager):
     def flagged(self):
-        return self.get_query_set().filter(flag_date__isnull=False)
+        return self.filter(flag_date__isnull=False)
 
+    def normal(self):
+        return self.select_related('arch', 'repo')
 
 class Donor(models.Model):
     name = models.CharField(max_length=255, unique=True)
@@ -255,8 +257,7 @@ def get_depends(self):
         deps = []
         # TODO: we can use list comprehension and an 'in' query to make this more effective
         for dep in self.packagedepend_set.order_by('optional', 'depname'):
-            pkgs = Package.objects.select_related('arch', 'repo').filter(
-                    pkgname=dep.depname)
+            pkgs = Package.objects.normal().filter(pkgname=dep.depname)
             if not self.arch.agnostic:
                 # make sure we match architectures if possible
                 pkgs = pkgs.filter(arch__in=self.applicable_arches())
@@ -320,7 +321,7 @@ def in_testing(self):
         if self.repo.testing:
             return None
         try:
-            return Package.objects.get(repo__testing=True,
+            return Package.objects.normal().get(repo__testing=True,
                     pkgname=self.pkgname, arch=self.arch)
         except Package.DoesNotExist:
             return None
@@ -328,7 +329,7 @@ def elsewhere(self):
         '''attempt to locate this package anywhere else, regardless of
         architecture or repository. Excludes this package from the list.'''
-        return Package.objects.select_related('arch', 'repo').filter(
+        return Package.objects.normal().filter(
                 pkgname=self.pkgname).exclude(id=self.id).order_by(
                 'arch__name', 'repo__name')

diff --git a/packages/utils.py b/packages/utils.py
index 29a3087f..af4675bb 100644
--- a/packages/utils.py
+++ b/packages/utils.py
@@ -108,7 +108,7 @@ def get_differences_info(arch_a, arch_b):
         # column A will always have a value, column B might be NULL
         to_fetch.append(row[0])
     # fetch all of the necessary packages
-    pkgs = Package.objects.select_related('arch', 'repo').in_bulk(to_fetch)
+    pkgs = Package.objects.normal().in_bulk(to_fetch)
     # now build a list of tuples containing differences
     differences = []
     for row in results:

diff --git a/packages/views.py b/packages/views.py
index 44a1db96..5f559d6a 100644
--- a/packages/views.py
+++ b/packages/views.py
@@ -117,9 +117,8 @@ def details(request, name='', repo='', arch=''):
         arches = [ arch ]
         arches.extend(Arch.objects.filter(agnostic=True))
         repo = get_object_or_404(Repo, name__iexact=repo)
-        pkgs = Package.objects.filter(pkgbase=name,
-                repo__testing=repo.testing, arch__in=arches)
-        pkgs = pkgs.select_related('arch', 'repo').order_by('pkgname')
+        pkgs = Package.objects.normal().filter(pkgbase=name,
+                repo__testing=repo.testing, arch__in=arches).order_by('pkgname')
         if len(pkgs) == 0:
             raise Http404
         context = {
@@ -156,8 +155,8 @@ def group_details(request, arch, name):
     arch = get_object_or_404(Arch, name=arch)
     arches = [ arch ]
     arches.extend(Arch.objects.filter(agnostic=True))
-    pkgs = Package.objects.filter(groups__name=name, arch__in=arches)
-    pkgs = pkgs.select_related('arch', 'repo').order_by('pkgname')
+    pkgs = Package.objects.normal().filter(
+            groups__name=name, arch__in=arches).order_by('pkgname')
     if len(pkgs) == 0:
         raise Http404
     context = {
@@ -217,7 +216,7 @@ def __init__(self, *args, **kwargs):
 
 def search(request, page=None):
     limit = 50
-    packages = Package.objects.select_related('arch', 'repo')
+    packages = Package.objects.normal()
 
     if request.GET:
         form = PackageSearchForm(data=request.GET)
@@ -405,7 +404,7 @@ def flag(request, name, repo, arch):
         # already flagged. do nothing.
         return direct_to_template(request, 'packages/flagged.html', {'pkg': pkg})
     # find all packages from (hopefully) the same PKGBUILD
-    pkgs = Package.objects.select_related('arch', 'repo').filter(
+    pkgs = Package.objects.normal().filter(
             pkgbase=pkg.pkgbase, flag_date__isnull=True,
             repo__testing=pkg.repo.testing).order_by(
             'pkgname', 'repo__name', 'arch__name')
@@ -460,7 +459,7 @@ def flag(request, name, repo, arch):
 def flag_confirmed(request, name, repo, arch):
     pkg = get_object_or_404(Package, pkgname=name,
             repo__name__iexact=repo, arch__name=arch)
-    pkgs = Package.objects.select_related('arch', 'repo').filter(
+    pkgs = Package.objects.normal().filter(
             pkgbase=pkg.pkgbase, flag_date=pkg.flag_date,
             repo__testing=pkg.repo.testing).order_by(
             'pkgname', 'repo__name', 'arch__name')

diff --git a/public/utils.py b/public/utils.py
index fd29a845..0be3ebaa 100644
--- a/public/utils.py
+++ b/public/utils.py
@@ -1,6 +1,6 @@
 from operator import attrgetter
 
-from main.models import Arch, Package
+from main.models import Arch, Package, Repo
 from main.utils import cache_function
 
 class RecentUpdate(object):
@@ -57,8 +57,8 @@ def get_recent_updates(number=15):
     # grab a few extra so we can hopefully catch everything we need
     fetch = number * 6
     for arch in Arch.objects.all():
-        pkgs += list(Package.objects.select_related(
-            'arch', 'repo').filter(arch=arch).order_by('-last_update')[:fetch])
+        pkgs += list(Package.objects.normal().filter(
+            arch=arch).order_by('-last_update')[:fetch])
     pkgs.sort(key=attrgetter('last_update'))
 
     updates = []

diff --git a/sitemaps.py b/sitemaps.py
index e321fe85..1a3669ad 100644
--- a/sitemaps.py
+++ b/sitemaps.py
@@ -8,8 +8,7 @@ class PackagesSitemap(Sitemap):
     priority = "0.5"
 
     def items(self):
-        return Package.objects.select_related('arch', 'repo').all()
-        return Package.objects.all()
+        return Package.objects.normal()
 
     def lastmod(self, obj):
         return obj.last_update
-- cgit v1.2.3-54-g00ecf
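The payoff shows at the call sites, which now read as one ordinary queryset chain. A usage sketch against the patched models (illustrative, not part of the commit):

    from main.models import Package

    pkgs = Package.objects.normal().filter(pkgname__startswith='pac')
    for pkg in pkgs:
        # arch and repo were already fetched via the JOINs that
        # select_related() adds, so these attribute lookups hit the
        # cache instead of issuing one query per package.
        print pkg.pkgname, pkg.arch.name, pkg.repo.name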
From 2a44855556531a27e949884d4084c6e5d37082e1 Mon Sep 17 00:00:00 2001
From: Dan McGee
Date: Wed, 15 Jun 2011 15:52:37 -0500
Subject: Remove unused flagged() manager method

Signed-off-by: Dan McGee
---
 main/models.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/main/models.py b/main/models.py
index 677647ac..a5d6c42e 100644
--- a/main/models.py
+++ b/main/models.py
@@ -73,9 +73,6 @@ def incomplete(self):
         return self.filter(todolistpkg__complete=False).distinct()
 
 class PackageManager(models.Manager):
-    def flagged(self):
-        return self.filter(flag_date__isnull=False)
-
     def normal(self):
         return self.select_related('arch', 'repo')
-- cgit v1.2.3-54-g00ecf


From aaae7e599eeedbc7e1f23d2da1588ad4edcd0f5f Mon Sep 17 00:00:00 2001
From: Dan McGee
Date: Wed, 15 Jun 2011 15:53:41 -0500
Subject: Fix out of date test

A version of this view is now publicly available, so it returns 200.
Signed-off-by: Dan McGee
---
 devel/tests.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/devel/tests.py b/devel/tests.py
index da5459d6..604da74c 100644
--- a/devel/tests.py
+++ b/devel/tests.py
@@ -26,7 +26,4 @@ def test_newuser(self):
 
     def test_mirrors(self):
         response = self.client.get('/mirrors/')
-        self.assertEqual(response.status_code, 302)
-        self.assertEqual(response.has_header('Location'), True)
-        self.assertEqual(response['location'],
-            'http://testserver/login/?next=/mirrors/')
+        self.assertEqual(response.status_code, 200)
-- cgit v1.2.3-54-g00ecf


From b0bad20756549df5edf726771c8e6869caba6244 Mon Sep 17 00:00:00 2001
From: Dan McGee
Date: Wed, 15 Jun 2011 16:12:01 -0500
Subject: Use correct date in news sitemap

Signed-off-by: Dan McGee
---
 sitemaps.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sitemaps.py b/sitemaps.py
index 1a3669ad..37387f15 100644
--- a/sitemaps.py
+++ b/sitemaps.py
@@ -47,6 +47,6 @@ def items(self):
         return News.objects.all()
 
     def lastmod(self, obj):
-        return obj.postdate
+        return obj.last_modified
 
 # vim: set ts=4 sw=4 et:
-- cgit v1.2.3-54-g00ecf


From cda9c5bb34b450b391500636c8d3f5ff4bfe51ab Mon Sep 17 00:00:00 2001
From: Dan McGee
Date: Wed, 15 Jun 2011 16:55:03 -0500
Subject: Add a base pages sitemap

Signed-off-by: Dan McGee
---
 packages/urls.py |  4 ++--
 sitemaps.py      | 34 +++++++++++++++++++++++++++++++++-
 urls.py          |  1 +
 3 files changed, 36 insertions(+), 3 deletions(-)

diff --git a/packages/urls.py b/packages/urls.py
index 3c2c87b0..d408e6cf 100644
--- a/packages/urls.py
+++ b/packages/urls.py
@@ -19,10 +19,10 @@
         'signoff_package'),
     (r'^update/$', 'update'),
 
-    (r'^$', 'search'),
+    (r'^$', 'search', {}, 'packages-search'),
     (r'^(?P<page>\d+)/$', 'search'),
-    (r'^differences/$', 'arch_differences'),
+    (r'^differences/$', 'arch_differences', {}, 'packages-differences'),
     (r'^stale_relations/$', 'stale_relations'),
     (r'^stale_relations/update/$','stale_relations_update'),

diff --git a/sitemaps.py b/sitemaps.py
index 37387f15..8ac5bc4f 100644
--- a/sitemaps.py
+++ b/sitemaps.py
@@ -1,4 +1,6 @@
 from django.contrib.sitemaps import Sitemap
+from django.core.urlresolvers import reverse
+
 from main.models import Package
 from news.models import News
 from packages.utils import get_group_info
@@ -41,7 +43,7 @@ def location(self, obj):
 
 class NewsSitemap(Sitemap):
     changefreq = "never"
-    priority = "0.7"
+    priority = "0.8"
 
     def items(self):
         return News.objects.all()
 
     def lastmod(self, obj):
         return obj.last_modified
 
+
+class BaseSitemap(Sitemap):
+    base_viewnames = (
+        ('index', 1.0, 'hourly'),
+        ('packages-search', 0.8, 'hourly'),
+        'page-about', 'page-art', 'page-svn', 'page-devs', 'page-tus',
+        'page-fellows', 'page-donate', 'page-download', 'news-list',
+        'feeds-list', 'groups-list', 'mirror-list', 'mirror-status',
+        'mirrorlist', 'packages-differences', 'releng-test-overview',
+    )
+
+    def items(self):
+        return self.base_viewnames
+
+    def location(self, obj):
+        name = obj
+        if isinstance(obj, tuple):
+            name = obj[0]
+        return reverse(name)
+
+    def priority(self, obj):
+        if isinstance(obj, tuple):
+            return obj[1]
+        return 0.7
+
+    def changefreq(self, obj):
+        if isinstance(obj, tuple):
+            return obj[2]
+        return 'monthly'
+
 # vim: set ts=4 sw=4 et:
diff --git a/urls.py b/urls.py
index 36a2d79f..c9faf165 100644
--- a/urls.py
+++ b/urls.py
@@ -10,6 +10,7 @@ import sitemaps
 
 sitemaps = {
+    'base': sitemaps.BaseSitemap,
     'news': sitemaps.NewsSitemap,
     'packages': sitemaps.PackagesSitemap,
     'package-files': sitemaps.PackageFilesSitemap,
-- cgit v1.2.3-54-g00ecf
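A note on the BaseSitemap entries above: each one is either a bare view name or a (name, priority, changefreq) tuple, with the accessor methods falling back to 0.7/'monthly' for bare strings. The convention in isolation, as a standalone sketch independent of Django:

    DEFAULTS = (0.7, 'monthly')

    def resolve(entry):
        # Mirrors BaseSitemap.priority()/changefreq(): tuples carry
        # explicit values, bare view names get the defaults.
        if isinstance(entry, tuple):
            return entry
        return (entry,) + DEFAULTS

    assert resolve(('index', 1.0, 'hourly')) == ('index', 1.0, 'hourly')
    assert resolve('page-about') == ('page-about', 0.7, 'monthly')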

    Note: This file list was generated from a previous version of the package; it may be out of date.

    {% endif %} -{% if files.count %} +{% if pkg.files_last_update %} +{% if files|length %}
      {% for file in files %}
    • {{ file.directory }}{{ file.filename|default:'' }}
    • {% endfor %}
    {% else %} +

    Package has no files.

    +{% endif %} +{% else %}

    No file list available.

{% endif %} -- cgit v1.2.3-54-g00ecf From dcbee21c1ba281b061db302056c771936925c824 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Tue, 21 Jun 2011 08:56:50 -0500 Subject: Nicer formatting of PGP key display value Signed-off-by: Dan McGee --- main/templatetags/pgp.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/main/templatetags/pgp.py b/main/templatetags/pgp.py index d9126dbd..956de892 100644 --- a/main/templatetags/pgp.py +++ b/main/templatetags/pgp.py @@ -3,6 +3,16 @@ register = template.Library() +def format_key(key_id): + if len(key_id) in (8, 20): + return u'0x%s' % key_id + elif len(key_id) == 40: + # normal display format is 5 groups of 4 hex chars separated by spaces, + # double space, then 5 more groups of 4 hex chars + split = tuple(key_id[i:i+4] for i in range(0, 40, 4)) + return u'%s  %s' % (' '.join(split[0:5]), ' '.join(split[5:10])) + return u'0x%s' % key_id + @register.simple_tag def pgp_key_link(key_id): if not key_id: @@ -10,10 +21,10 @@ def pgp_key_link(key_id): # Something like 'pgp.mit.edu:11371' pgp_server = getattr(settings, 'PGP_SERVER', None) if not pgp_server: - return "0x%s" % key_id + return format_key(key_id) url = 'http://%s/pks/lookup?op=vindex&fingerprint=on&exact=on&search=0x%s' % \ (pgp_server, key_id) - values = (url, key_id, key_id) - return '0x%s' % values + values = (url, key_id, format_key(key_id)) + return '%s' % values # vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From 388973d6fd082148c85db0df44e0ee5ed3b6a54b Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Thu, 23 Jun 2011 18:48:39 -0500 Subject: Revert "Remove unused flagged() manager method" Far from unneeded, this is used on the developer dashboard. Silly me. Document this fact as well in the code so we don't screw it up again. This reverts commit 2a44855556531a27e949884d4084c6e5d37082e1. --- main/models.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/main/models.py b/main/models.py index a5d6c42e..05e63fed 100644 --- a/main/models.py +++ b/main/models.py @@ -73,6 +73,10 @@ def incomplete(self): return self.filter(todolistpkg__complete=False).distinct() class PackageManager(models.Manager): + def flagged(self): + """Used by dev dashboard.""" + return self.filter(flag_date__isnull=False) + def normal(self): return self.select_related('arch', 'repo') -- cgit v1.2.3-54-g00ecf From da20949c8cc185e91dbaae1b8369fcffa3447081 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Thu, 23 Jun 2011 19:11:12 -0500 Subject: Move find_user method to devel utils This could be handy elsewhere as well, and it is only loosely coupled to the rest of reporead.
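For illustration, a minimal sketch of how other code might call the relocated helper after this move; the import path matches the new location, while the packager string and email address are invented for the example:

    from devel.utils import find_user

    # resolve a repo-database packager string to a Django User;
    # returns None when no unambiguous match can be found
    user = find_user('A. U. Thor <author@example.com>')
    if user is not None:
        print user.username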
Signed-off-by: Dan McGee --- devel/management/commands/reporead.py | 51 +----------------------------- devel/utils.py | 58 +++++++++++++++++++++++++++++++++++ 2 files changed, 59 insertions(+), 50 deletions(-) diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py index baf7fee1..138931ff 100644 --- a/devel/management/commands/reporead.py +++ b/devel/management/commands/reporead.py @@ -16,7 +16,6 @@ from django.core.management.base import BaseCommand, CommandError from django.contrib.auth.models import User from django.db import transaction -from django.db.models import Q from collections import defaultdict import io @@ -28,6 +27,7 @@ from datetime import datetime from optparse import make_option +from devel.utils import find_user from main.models import Arch, Package, PackageDepend, PackageFile, Repo from packages.models import Conflict, Provision, Replacement @@ -130,55 +130,6 @@ def full_version(self): return u'%s-%s' % (self.ver, self.rel) -def find_user(userstring): - ''' - Attempt to find the corresponding User object for a standard - packager string, e.g. something like - 'A. U. Thor '. - We start by searching for a matching email address; we then move onto - matching by first/last name. If we cannot find a user, then return None. - ''' - if userstring in find_user.cache: - return find_user.cache[userstring] - matches = re.match(r'^([^<]+)? ?<([^>]*)>', userstring) - if not matches: - return None - - user = None - name = matches.group(1) - email = matches.group(2) - - def user_email(): - return User.objects.get(email=email) - def profile_email(): - return User.objects.get(userprofile__public_email=email) - def user_name(): - # yes, a bit odd but this is the easiest way since we can't always be - # sure how to split the name. Ensure every 'token' appears in at least - # one of the two name fields. - name_q = Q() - for token in name.split(): - # ignore quoted parts; e.g. nicknames in strings - if re.match(r'^[\'"].*[\'"]$', token): - continue - name_q &= (Q(first_name__icontains=token) | - Q(last_name__icontains=token)) - return User.objects.get(name_q) - - for matcher in (user_email, profile_email, user_name): - try: - user = matcher() - break - except (User.DoesNotExist, User.MultipleObjectsReturned): - pass - - find_user.cache[userstring] = user - return user - -# cached mappings of user strings -> User objects so we don't have to do the -# lookup more than strictly necessary. -find_user.cache = {} - DEPEND_RE = re.compile(r"^(.+?)((>=|<=|=|>|<)(.*))?$") def create_depend(package, dep_str, optional=False): diff --git a/devel/utils.py b/devel/utils.py index abfdabe5..9d7dfb2d 100644 --- a/devel/utils.py +++ b/devel/utils.py @@ -1,7 +1,11 @@ +import re + from django.contrib.auth.models import User from django.db import connection +from django.db.models import Count, Q from main.utils import cache_function +from main.models import Package from packages.models import PackageRelation @cache_function(300) @@ -28,10 +32,64 @@ def get_annotated_maintainers(): pkg_count[k] = total flag_count[k] = flagged + update_count = Package.objects.values_list('packager').annotate( + Count('packager')) + update_count = dict(update_count) + for m in maintainers: m.package_count = pkg_count.get(m.id, 0) m.flagged_count = flag_count.get(m.id, 0) + m.updated_count = update_count.get(m.id, 0) return maintainers +def find_user(userstring): + ''' + Attempt to find the corresponding User object for a standard + packager string, e.g. something like + 'A. U. Thor '. 
+ We start by searching for a matching email address; we then move onto + matching by first/last name. If we cannot find a user, then return None. + ''' + if userstring in find_user.cache: + return find_user.cache[userstring] + matches = re.match(r'^([^<]+)? ?<([^>]*)>', userstring) + if not matches: + return None + + user = None + name = matches.group(1) + email = matches.group(2) + + def user_email(): + return User.objects.get(email=email) + def profile_email(): + return User.objects.get(userprofile__public_email=email) + def user_name(): + # yes, a bit odd but this is the easiest way since we can't always be + # sure how to split the name. Ensure every 'token' appears in at least + # one of the two name fields. + name_q = Q() + for token in name.split(): + # ignore quoted parts; e.g. nicknames in strings + if re.match(r'^[\'"].*[\'"]$', token): + continue + name_q &= (Q(first_name__icontains=token) | + Q(last_name__icontains=token)) + return User.objects.get(name_q) + + for matcher in (user_email, profile_email, user_name): + try: + user = matcher() + break + except (User.DoesNotExist, User.MultipleObjectsReturned): + pass + + find_user.cache[userstring] = user + return user + +# cached mappings of user strings -> User objects so we don't have to do the +# lookup more than strictly necessary. +find_user.cache = {} + # vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From 26c54d017185b1c409dbd6ed4c09fb14986df0b3 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Thu, 23 Jun 2011 19:35:47 -0500 Subject: find_user: add tests and fix no email address case If a packager string was passed in without an email address, we would blow up on the matcher and not try to find a user. Signed-off-by: Dan McGee --- devel/tests.py | 45 ++++++++++++++++++++++++++++++++++++++++++++- devel/utils.py | 26 ++++++++++++++++++-------- 2 files changed, 62 insertions(+), 9 deletions(-) diff --git a/devel/tests.py b/devel/tests.py index 604da74c..33b02582 100644 --- a/devel/tests.py +++ b/devel/tests.py @@ -1,8 +1,10 @@ from django.test import TestCase +from django.contrib.auth.models import User +from devel.utils import find_user +from main.models import UserProfile class DevelTest(TestCase): - def test_index(self): response = self.client.get('/devel/') self.assertEqual(response.status_code, 302) @@ -27,3 +29,44 @@ def test_newuser(self): def test_mirrors(self): response = self.client.get('/mirrors/') self.assertEqual(response.status_code, 200) + +class FindUserTest(TestCase): + + def setUp(self): + self.user1 = User.objects.create(username="joeuser", first_name="Joe", + last_name="User", email="user1@example.com") + self.user2 = User.objects.create(username="john", first_name="John", + last_name="", email="user2@example.com") + self.user3 = User.objects.create(username="bjones", first_name="Bob", + last_name="Jones", email="user3@example.com") + + for user in (self.user1, self.user2, self.user3): + email_addr = "%s@awesome.com" % user.username + UserProfile.objects.create(user=user, public_email=email_addr) + + def test_not_matching(self): + self.assertIsNone(find_user(None)) + self.assertIsNone(find_user("")) + self.assertIsNone(find_user("Bogus")) + self.assertIsNone(find_user("Bogus ")) + self.assertIsNone(find_user("")) + self.assertIsNone(find_user("bogus@example.com")) + self.assertIsNone(find_user("Unknown Packager")) + + def test_by_email(self): + self.assertEqual(self.user1, find_user("XXX YYY ")) + self.assertEqual(self.user2, find_user("YYY ZZZ ")) + + def test_by_profile_email(self): + 
self.assertEqual(self.user1, find_user("XXX ")) + self.assertEqual(self.user2, find_user("YYY ")) + self.assertEqual(self.user3, find_user("ZZZ ")) + + def test_by_name(self): + self.assertEqual(self.user1, find_user("Joe User ")) + self.assertEqual(self.user1, find_user("Joe User")) + self.assertEqual(self.user2, find_user("John ")) + self.assertEqual(self.user3, find_user("Bob Jones ")) + +# vim: set ts=4 sw=4 et: diff --git a/devel/utils.py b/devel/utils.py index 9d7dfb2d..acdda959 100644 --- a/devel/utils.py +++ b/devel/utils.py @@ -51,24 +51,32 @@ def find_user(userstring): We start by searching for a matching email address; we then move onto matching by first/last name. If we cannot find a user, then return None. ''' + if not userstring: + return None if userstring in find_user.cache: return find_user.cache[userstring] matches = re.match(r'^([^<]+)? ?<([^>]*)>', userstring) if not matches: - return None - - user = None - name = matches.group(1) - email = matches.group(2) + name = userstring + email = None + else: + name = matches.group(1) + email = matches.group(2) def user_email(): - return User.objects.get(email=email) + if email: + return User.objects.get(email=email) + return None def profile_email(): - return User.objects.get(userprofile__public_email=email) + if email: + return User.objects.get(userprofile__public_email=email) + return None def user_name(): # yes, a bit odd but this is the easiest way since we can't always be # sure how to split the name. Ensure every 'token' appears in at least # one of the two name fields. + if not name: + return None name_q = Q() for token in name.split(): # ignore quoted parts; e.g. nicknames in strings @@ -78,10 +86,12 @@ def user_name(): Q(last_name__icontains=token)) return User.objects.get(name_q) + user = None for matcher in (user_email, profile_email, user_name): try: user = matcher() - break + if user != None: + break except (User.DoesNotExist, User.MultipleObjectsReturned): pass -- cgit v1.2.3-54-g00ecf From 9156003d2d93de57c663901c39ac66316a3d969e Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Thu, 23 Jun 2011 19:50:46 -0500 Subject: Turn find_user into UserFinder class This moves the cache inside an instance. Also add a few more tests. 
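A rough sketch of the new interface, reusing the invented packager string from the example above:

    from devel.utils import UserFinder

    finder = UserFinder()
    # the first lookup hits the database; the repeat call is served
    # from the per-instance cache
    user = finder.find('A. U. Thor <author@example.com>')
    user = finder.find('A. U. Thor <author@example.com>')
    # drop the cached string -> User mappings, e.g. between test cases
    finder.clear_cache()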
Signed-off-by: Dan McGee --- devel/management/commands/reporead.py | 6 ++- devel/tests.py | 69 ++++++++++++++++++++-------- devel/utils.py | 86 +++++++++++++++++++---------------- 3 files changed, 103 insertions(+), 58 deletions(-) diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py index 138931ff..470b785d 100644 --- a/devel/management/commands/reporead.py +++ b/devel/management/commands/reporead.py @@ -27,7 +27,7 @@ from datetime import datetime from optparse import make_option -from devel.utils import find_user +from devel.utils import UserFinder from main.models import Arch, Package, PackageDepend, PackageFile, Repo from packages.models import Conflict, Provision, Replacement @@ -182,6 +182,8 @@ def create_multivalued(dbpkg, repopkg, db_attr, repo_attr): for name in getattr(repopkg, repo_attr): collection.create(name=name) +finder = UserFinder() + def populate_pkg(dbpkg, repopkg, force=False, timestamp=None): db_score = 1 @@ -200,7 +202,7 @@ def populate_pkg(dbpkg, repopkg, force=False, timestamp=None): dbpkg.build_date = repopkg.builddate dbpkg.packager_str = repopkg.packager # attempt to find the corresponding django user for this string - dbpkg.packager = find_user(repopkg.packager) + dbpkg.packager = finder.find(repopkg.packager) if timestamp: dbpkg.flag_date = None diff --git a/devel/tests.py b/devel/tests.py index 33b02582..c982e502 100644 --- a/devel/tests.py +++ b/devel/tests.py @@ -1,7 +1,7 @@ from django.test import TestCase from django.contrib.auth.models import User -from devel.utils import find_user +from devel.utils import UserFinder from main.models import UserProfile class DevelTest(TestCase): @@ -33,6 +33,8 @@ def test_mirrors(self): class FindUserTest(TestCase): def setUp(self): + self.finder = UserFinder() + self.user1 = User.objects.create(username="joeuser", first_name="Joe", last_name="User", email="user1@example.com") self.user2 = User.objects.create(username="john", first_name="John", @@ -44,29 +46,60 @@ def setUp(self): email_addr = "%s@awesome.com" % user.username UserProfile.objects.create(user=user, public_email=email_addr) + self.user4 = User.objects.create(username="tim1", first_name="Tim", + last_name="One", email="tim@example.com") + self.user5 = User.objects.create(username="tim2", first_name="Tim", + last_name="Two", email="timtwo@example.com") + def test_not_matching(self): - self.assertIsNone(find_user(None)) - self.assertIsNone(find_user("")) - self.assertIsNone(find_user("Bogus")) - self.assertIsNone(find_user("Bogus ")) - self.assertIsNone(find_user("")) - self.assertIsNone(find_user("bogus@example.com")) - self.assertIsNone(find_user("Unknown Packager")) + self.assertIsNone(self.finder.find(None)) + self.assertIsNone(self.finder.find("")) + self.assertIsNone(self.finder.find("Bogus")) + self.assertIsNone(self.finder.find("Bogus ")) + self.assertIsNone(self.finder.find("")) + self.assertIsNone(self.finder.find("bogus@example.com")) + self.assertIsNone(self.finder.find("Unknown Packager")) def test_by_email(self): - self.assertEqual(self.user1, find_user("XXX YYY ")) - self.assertEqual(self.user2, find_user("YYY ZZZ ")) + self.assertEqual(self.user1, + self.finder.find("XXX YYY ")) + self.assertEqual(self.user2, + self.finder.find("YYY ZZZ ")) def test_by_profile_email(self): - self.assertEqual(self.user1, find_user("XXX ")) - self.assertEqual(self.user2, find_user("YYY ")) - self.assertEqual(self.user3, find_user("ZZZ ")) + self.assertEqual(self.user1, + self.finder.find("XXX ")) + 
self.assertEqual(self.user2, + self.finder.find("YYY ")) + self.assertEqual(self.user3, + self.finder.find("ZZZ ")) def test_by_name(self): - self.assertEqual(self.user1, find_user("Joe User ")) - self.assertEqual(self.user1, find_user("Joe User")) - self.assertEqual(self.user2, find_user("John ")) - self.assertEqual(self.user3, find_user("Bob Jones ")) + self.assertEqual(self.user1, + self.finder.find("Joe User ")) + self.assertEqual(self.user1, + self.finder.find("Joe User")) + self.assertEqual(self.user2, + self.finder.find("John ")) + self.assertEqual(self.user2, + self.finder.find("John")) + self.assertEqual(self.user3, + self.finder.find("Bob Jones ")) + + def test_cache(self): + # simply look two of them up, but then do it repeatedly + for i in range(50): + self.assertEqual(self.user1, + self.finder.find("XXX YYY ")) + self.assertEqual(self.user3, + self.finder.find("Bob Jones ")) + + def test_ambiguous(self): + self.assertEqual(self.user4, + self.finder.find("Tim One ")) + self.assertEqual(self.user5, + self.finder.find("Tim Two ")) + self.assertIsNone(self.finder.find("Tim ")) # vim: set ts=4 sw=4 et: diff --git a/devel/utils.py b/devel/utils.py index acdda959..3a6ad699 100644 --- a/devel/utils.py +++ b/devel/utils.py @@ -43,35 +43,25 @@ def get_annotated_maintainers(): return maintainers -def find_user(userstring): - ''' - Attempt to find the corresponding User object for a standard - packager string, e.g. something like - 'A. U. Thor '. - We start by searching for a matching email address; we then move onto - matching by first/last name. If we cannot find a user, then return None. - ''' - if not userstring: - return None - if userstring in find_user.cache: - return find_user.cache[userstring] - matches = re.match(r'^([^<]+)? ?<([^>]*)>', userstring) - if not matches: - name = userstring - email = None - else: - name = matches.group(1) - email = matches.group(2) - - def user_email(): + +class UserFinder(object): + def __init__(self): + self.cache = {} + + @staticmethod + def user_email(name, email): if email: return User.objects.get(email=email) return None - def profile_email(): + + @staticmethod + def profile_email(name, email): if email: return User.objects.get(userprofile__public_email=email) return None - def user_name(): + + @staticmethod + def user_name(name, email): # yes, a bit odd but this is the easiest way since we can't always be # sure how to split the name. Ensure every 'token' appears in at least # one of the two name fields. @@ -86,20 +76,40 @@ def user_name(): Q(last_name__icontains=token)) return User.objects.get(name_q) - user = None - for matcher in (user_email, profile_email, user_name): - try: - user = matcher() - if user != None: - break - except (User.DoesNotExist, User.MultipleObjectsReturned): - pass - - find_user.cache[userstring] = user - return user - -# cached mappings of user strings -> User objects so we don't have to do the -# lookup more than strictly necessary. -find_user.cache = {} + def find(self, userstring): + ''' + Attempt to find the corresponding User object for a standard + packager string, e.g. something like + 'A. U. Thor '. + We start by searching for a matching email address; we then move onto + matching by first/last name. If we cannot find a user, then return None. + ''' + if not userstring: + return None + if userstring in self.cache: + return self.cache[userstring] + matches = re.match(r'^([^<]+)? 
?<([^>]*)>', userstring) + if not matches: + name = userstring + email = None + else: + name = matches.group(1) + email = matches.group(2) + + user = None + find_methods = (self.user_email, self.profile_email, self.user_name) + for matcher in find_methods: + try: + user = matcher(name, email) + if user != None: + break + except (User.DoesNotExist, User.MultipleObjectsReturned): + pass + + self.cache[userstring] = user + return user + + def clear_cache(self): + self.cache = {} # vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From 82289ebb4432b3372b959430581afa0a2158acb9 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Thu, 23 Jun 2011 20:11:07 -0500 Subject: Add a rematch_packager management command This allows quick resolution of all unmatched packages, especially after tweaking the way find_user works. Signed-off-by: Dan McGee --- devel/management/commands/rematch_packager.py | 64 +++++++++++++++++++++++++++ devel/tests.py | 12 ++++- devel/utils.py | 8 ++-- 3 files changed, 80 insertions(+), 4 deletions(-) create mode 100644 devel/management/commands/rematch_packager.py diff --git a/devel/management/commands/rematch_packager.py b/devel/management/commands/rematch_packager.py new file mode 100644 index 00000000..ba6e6a54 --- /dev/null +++ b/devel/management/commands/rematch_packager.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +""" +rematch_packager command + +Match all packages with a packager_str but NULL packager_id to a packager if we +can find one. + +Usage: ./manage.py rematch_packager +""" + +from django.core.management.base import NoArgsCommand + +import sys +import logging + +from devel.utils import UserFinder +from main.models import Package + +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s -> %(levelname)s: %(message)s', + datefmt='%Y-%m-%d %H:%M:%S', + stream=sys.stderr) +logger = logging.getLogger() + +class Command(NoArgsCommand): + help = "Match all packages with a packager_str but NULL packager_id to a packager if we can find one."
+ + def handle_noargs(self, **options): + v = int(options.get('verbosity', None)) + if v == 0: + logger.level = logging.ERROR + elif v == 1: + logger.level = logging.INFO + elif v == 2: + logger.level = logging.DEBUG + + return match_packager() + +def match_packager(): + finder = UserFinder() + logger.info("getting all unmatched packages") + package_count = matched_count = 0 + unknown = set() + + for package in Package.objects.filter(packager__isnull=True): + logger.debug("package %s, packager string %s", + package.pkgname, package.packager_str) + package_count += 1 + user = finder.find(package.packager_str) + if user: + package.packager = user + logger.debug(" found user %s" % user.username) + package.save() + matched_count += 1 + else: + unknown.add(package.packager_str) + + logger.info("%d packages checked, %d newly matched", + package_count, matched_count) + logger.debug("unknown packagers:\n%s", + "\n".join(unknown)) + +# vim: set ts=4 sw=4 et: diff --git a/devel/tests.py b/devel/tests.py index c982e502..36691179 100644 --- a/devel/tests.py +++ b/devel/tests.py @@ -1,7 +1,7 @@ from django.test import TestCase from django.contrib.auth.models import User -from devel.utils import UserFinder +from devel.utils import UserFinder from main.models import UserProfile class DevelTest(TestCase): @@ -87,6 +87,16 @@ def test_by_name(self): self.assertEqual(self.user3, self.finder.find("Bob Jones ")) + def test_by_invalid(self): + self.assertEqual(self.user1, + self.finder.find("Joe User ]*)>', userstring) + + name = email = None + + matches = re.match(r'^([^<]+)? ?<([^>]*)>?', userstring) if not matches: - name = userstring - email = None + name = userstring.strip() else: name = matches.group(1) email = matches.group(2) -- cgit v1.2.3-54-g00ecf From dcbb859a259082bf8d0587a63385ece44c697e45 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Thu, 23 Jun 2011 20:13:01 -0500 Subject: Add (hidden) ability to search by last packager This is used from the developer dashboard to add a new column to the stats of # of packages for a given developer where they were the last to do the packaging. 
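The new filter boils down to ORM queries like the following sketch; 'somedev' is a placeholder username, not a real account:

    from main.models import Package

    # packages whose packager string could not be matched to a user
    unknown = Package.objects.filter(packager__isnull=True)
    # packages last packaged by a particular developer
    by_dev = Package.objects.filter(packager__username='somedev')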
Signed-off-by: Dan McGee --- devel/views.py | 2 ++ packages/views.py | 10 ++++++++++ templates/devel/index.html | 41 ++++++++++++++++++++++++++++------------- 3 files changed, 40 insertions(+), 13 deletions(-) diff --git a/devel/views.py b/devel/views.py index 1827f2ac..4399b73f 100644 --- a/devel/views.py +++ b/devel/views.py @@ -50,9 +50,11 @@ def index(request): total_orphans = Package.objects.exclude(pkgbase__in=maintained).count() total_flagged_orphans = Package.objects.filter( flag_date__isnull=False).exclude(pkgbase__in=maintained).count() + total_updated = Package.objects.filter(packager__isnull=True).count() orphan = { 'package_count': total_orphans, 'flagged_count': total_flagged_orphans, + 'updated_count': total_updated, } page_dict = { diff --git a/packages/views.py b/packages/views.py index 7b8c2e67..02b9f93b 100644 --- a/packages/views.py +++ b/packages/views.py @@ -191,6 +191,7 @@ class PackageSearchForm(forms.Form): arch = forms.MultipleChoiceField(required=False) q = forms.CharField(required=False) maintainer = forms.ChoiceField(required=False) + packager = forms.ChoiceField(required=False) last_update = forms.DateField(required=False, widget=AdminDateWidget(), label='Last Updated After') flagged = forms.ChoiceField( @@ -213,6 +214,9 @@ def __init__(self, *args, **kwargs): self.fields['maintainer'].choices = \ [('', 'All'), ('orphan', 'Orphan')] + \ [(m.username, m.get_full_name()) for m in maints] + self.fields['packager'].choices = \ + [('', 'All'), ('unknown', 'Unknown')] + \ + [(m.username, m.get_full_name()) for m in maints] def search(request, page=None): limit = 50 @@ -237,6 +241,12 @@ def search(request, page=None): user__username=form.cleaned_data['maintainer']).values('pkgbase') packages = packages.filter(pkgbase__in=inner_q) + if form.cleaned_data['packager'] == 'unknown': + packages = packages.filter(packager__isnull=True) + elif form.cleaned_data['packager']: + packages = packages.filter( + packager__username=form.cleaned_data['packager']) + if form.cleaned_data['flagged'] == 'Flagged': packages = packages.filter(flag_date__isnull=False) elif form.cleaned_data['flagged'] == 'Not Flagged': diff --git a/templates/devel/index.html b/templates/devel/index.html index 2a0058d3..f9ca2135 100644 --- a/templates/devel/index.html +++ b/templates/devel/index.html @@ -182,10 +182,10 @@

    Stats by Repository

    {# dash-by-arch #} {% endcache %} -{% cache 60 dev-dash-by-maintainer %} -
    +{% cache 60 dev-dash-by-developer %} +
    -

    Stats by Maintainer

    +

    Stats by Developer

    {% if perms.main.change_package %}

    Look for stale relations

    @@ -195,17 +195,24 @@

    Stats by Maintainer

    - + + - + + {{ orphan.package_count }} packages + + {{ orphan.flagged_count }} packages + + @@ -214,15 +221,21 @@

    Stats by Maintainer

    + {{ maint.package_count }} packages + + {{ maint.flagged_count }} packages + + {% endfor %}
    Maintainer# Packages# Maintained # Flagged# Last Packager
    OrphanOrphan/Unknown - {{ orphan.package_count }} packages - {{ orphan.flagged_count }} packages + {{ orphan.updated_count }} packages +
    {{ maint.get_full_name }} - {{ maint.package_count }} packages - {{ maint.flagged_count }} packages + {{ maint.updated_count }} packages +
    -
    {# #dash-by-maintainer #} +{# #dash-by-developer #} {% endcache %} {% load cdn %}{% jquery %} @@ -236,9 +249,11 @@

    Stats by Maintainer

    {widgets: ['zebra'], sortList: [[0,0], [1,0]]}); $("#dash-todo:not(:has(tbody tr.empty))").tablesorter( {widgets: ['zebra'], sortList: [[1,1]]}); - $(".dash-stats").tablesorter( - {widgets: ['zebra'], sortList: [[0,0]], - headers: { 1: { sorter: 'pkgcount' }, 2: { sorter: 'pkgcount' } } }); + $(".dash-stats").tablesorter({ + widgets: ['zebra'], + sortList: [[0,0]], + headers: { 1: { sorter: 'pkgcount' }, 2: { sorter: 'pkgcount' }, 3: { sorter: 'pkgcount' } } + }); }); {% endblock %} -- cgit v1.2.3-54-g00ecf From 55f6ad0c95323a5bfeca2c322918d21f413d1075 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Thu, 23 Jun 2011 20:19:49 -0500 Subject: Set up queries for staging repos This treats repo.staging special in much the way we already have to treat repo.testing as special. Signed-off-by: Dan McGee --- main/models.py | 17 +++++++++++------ packages/views.py | 13 ++++++++----- 2 files changed, 19 insertions(+), 11 deletions(-) diff --git a/main/models.py b/main/models.py index 05e63fed..e0017069 100644 --- a/main/models.py +++ b/main/models.py @@ -226,6 +226,7 @@ def get_requiredby(self): groupby(requiredby, lambda x: x.pkg.id)] # find another package by this name in the opposite testing setup + # TODO: figure out staging exclusions too if not Package.objects.filter(pkgname=self.pkgname, arch=self.arch).exclude(id=self.id).exclude( repo__testing=self.repo.testing).exists(): @@ -241,7 +242,8 @@ def get_requiredby(self): dep = dep_pkgs[0] if len(dep_pkgs) > 1: dep_pkgs = [d for d in dep_pkgs - if d.pkg.repo.testing == self.repo.testing] + if d.pkg.repo.testing == self.repo.testing and + d.pkg.repo.staging = self.repo.staging] if len(dep_pkgs) > 0: dep = dep_pkgs[0] trimmed.append(dep) @@ -273,7 +275,8 @@ def get_depends(self): # grab the first though in case we fail pkg = pkgs[0] # prevents yet more DB queries, these lists should be short - pkgs = [p for p in pkgs if p.repo.testing == self.repo.testing] + pkgs = [p for p in pkgs if p.repo.testing == self.repo.testing + and p.repo.staging = self.repo.staging] if len(pkgs) > 0: pkg = pkgs[0] deps.append({'dep': dep, 'pkg': pkg}) @@ -293,7 +296,8 @@ def base_package(self): # this package might be split across repos? just find one # that matches the correct [testing] repo flag pkglist = Package.objects.filter(arch=self.arch, - repo__testing=self.repo.testing, pkgname=self.pkgbase) + repo__testing=self.repo.testing, + repo__staging=self.repo.staging, pkgname=self.pkgbase) if len(pkglist) > 0: return pkglist[0] return None @@ -303,11 +307,12 @@ def split_packages(self): Return all packages that were built with this one (e.g. share a pkgbase value). The package this method is called on will never be in the list, and we will never return a package that does not have the same - repo.testing flag. For any non-split packages, the return value will be - an empty list. + repo.testing and repo.staging flags. For any non-split packages, the + return value will be an empty list. 
""" return Package.objects.filter(arch__in=self.applicable_arches(), - repo__testing=self.repo.testing, pkgbase=self.pkgbase).exclude(id=self.id) + repo__testing=self.repo.testing, repo__staging=self.repo.staging, + pkgbase=self.pkgbase).exclude(id=self.id) def is_same_version(self, other): 'is this package similar, name and version-wise, to another' diff --git a/packages/views.py b/packages/views.py index 02b9f93b..01d01e20 100644 --- a/packages/views.py +++ b/packages/views.py @@ -118,7 +118,8 @@ def details(request, name='', repo='', arch=''): arches.extend(Arch.objects.filter(agnostic=True)) repo = get_object_or_404(Repo, name__iexact=repo) pkgs = Package.objects.normal().filter(pkgbase=name, - repo__testing=repo.testing, arch__in=arches).order_by('pkgname') + repo__testing=repo.testing, repo__staging=repo.staging, + arch__in=arches).order_by('pkgname') if len(pkgs) == 0: raise Http404 context = { @@ -343,8 +344,8 @@ def unflag_all(request, name, repo, arch): pkg = get_object_or_404(Package, pkgname=name, repo__name__iexact=repo, arch__name=arch) # find all packages from (hopefully) the same PKGBUILD - pkgs = Package.objects.filter( - pkgbase=pkg.pkgbase, repo__testing=pkg.repo.testing) + pkgs = Package.objects.filter(pkgbase=pkg.pkgbase, + repo__testing=pkg.repo.testing, repo__staging=pkg.repo.staging) pkgs.update(flag_date=None) return redirect(pkg) @@ -416,7 +417,8 @@ def flag(request, name, repo, arch): # find all packages from (hopefully) the same PKGBUILD pkgs = Package.objects.normal().filter( pkgbase=pkg.pkgbase, flag_date__isnull=True, - repo__testing=pkg.repo.testing).order_by( + repo__testing=pkg.repo.testing, + repo__staging=pkg.repo.staging).order_by( 'pkgname', 'repo__name', 'arch__name') if request.POST: @@ -471,7 +473,8 @@ def flag_confirmed(request, name, repo, arch): pkgname=name, repo__name__iexact=repo, arch__name=arch) pkgs = Package.objects.normal().filter( pkgbase=pkg.pkgbase, flag_date=pkg.flag_date, - repo__testing=pkg.repo.testing).order_by( + repo__testing=pkg.repo.testing, + repo__staging=pkg.repo.staging).order_by( 'pkgname', 'repo__name', 'arch__name') context = {'package': pkg, 'packages': pkgs} -- cgit v1.2.3-54-g00ecf From 5bf28d1cabc54dfa68ff64804a31ead419780fdf Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Thu, 23 Jun 2011 20:24:15 -0500 Subject: Add Package.in_staging method Signed-off-by: Dan McGee --- main/models.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/main/models.py b/main/models.py index e0017069..b52c45fb 100644 --- a/main/models.py +++ b/main/models.py @@ -332,6 +332,17 @@ def in_testing(self): except Package.DoesNotExist: return None + def in_staging(self): + '''attempt to locate this package in a staging repo; if we are in + a staging repo we will always return None.''' + if self.repo.staging: + return None + try: + return Package.objects.normal().get(repo__staging=True, + pkgname=self.pkgname, arch=self.arch) + except Package.DoesNotExist: + return None + def elsewhere(self): '''attempt to locate this package anywhere else, regardless of architecture or repository. 
Excludes this package from the list.''' -- cgit v1.2.3-54-g00ecf From 8f3c60c8dec4c162ad5dc753b97ce5177fb12ef1 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Thu, 23 Jun 2011 20:27:21 -0500 Subject: Don't include staging packages in todolists Signed-off-by: Dan McGee --- todolists/views.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/todolists/views.py b/todolists/views.py index 9376ed68..8ad7be56 100644 --- a/todolists/views.py +++ b/todolists/views.py @@ -23,8 +23,8 @@ def clean_packages(self): package_names = [s.strip() for s in self.cleaned_data['packages'].split("\n")] package_names = set(package_names) - packages = Package.objects.filter(pkgname__in=package_names).exclude( - repo__testing=True).select_related( + packages = Package.objects.filter(pkgname__in=package_names).filter( + repo__testing=False, repo__staging=False).select_related( 'arch', 'repo').order_by('arch') return packages -- cgit v1.2.3-54-g00ecf From c584ffc9b6bc9bdab1616b096ccaa1c1cd762f69 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Thu, 23 Jun 2011 20:30:31 -0500 Subject: Fix syntax errors, == not = Signed-off-by: Dan McGee --- main/models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/main/models.py b/main/models.py index b52c45fb..9010817b 100644 --- a/main/models.py +++ b/main/models.py @@ -243,7 +243,7 @@ def get_requiredby(self): if len(dep_pkgs) > 1: dep_pkgs = [d for d in dep_pkgs if d.pkg.repo.testing == self.repo.testing and - d.pkg.repo.staging = self.repo.staging] + d.pkg.repo.staging == self.repo.staging] if len(dep_pkgs) > 0: dep = dep_pkgs[0] trimmed.append(dep) @@ -276,7 +276,7 @@ def get_depends(self): pkg = pkgs[0] # prevents yet more DB queries, these lists should be short pkgs = [p for p in pkgs if p.repo.testing == self.repo.testing - and p.repo.staging = self.repo.staging] + and p.repo.staging == self.repo.staging] if len(pkgs) > 0: pkg = pkgs[0] deps.append({'dep': dep, 'pkg': pkg}) -- cgit v1.2.3-54-g00ecf From 8734d6ccdf10fd03ed3dfea5fb1028812d0ba8cd Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Mon, 27 Jun 2011 16:22:05 -0500 Subject: Adjust CSS to make front page news look better A code element inside a pre block looked like ass, which is how markdown generates indented code blocks. Drop the bright yellow from these code elements, and while we are at it, also lighten up the yellow a bit as it stands out way too much. 
Signed-off-by: Dan McGee --- media/archweb.css | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/media/archweb.css b/media/archweb.css index fce88e21..85fdb610 100644 --- a/media/archweb.css +++ b/media/archweb.css @@ -20,8 +20,9 @@ body { min-width: 650px; background: #f6f9fc; color: #222; font: normal 100% san p { margin: .33em 0 1em; } ol, ul { margin-bottom: 1em; padding-left: 2em; } ul { list-style: square; } -code { font: 1.2em monospace; background: #ffa; padding: 0.15em 0.25em; } +code { font: 1.2em monospace; background: #ffd; padding: 0.15em 0.25em; } pre { font: 1.2em monospace; border: 1px solid #bdb; background: #dfd; padding: 0.5em; margin: 0.25em 2em; } +pre code { display: block; background: none; } blockquote { margin: 1.5em 2em; } input { vertical-align: middle; } select[multiple] { padding-top: 1px; padding-bottom: 1px; } -- cgit v1.2.3-54-g00ecf From f913bbcab4dc458d1566778a094bdd337cb91841 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Tue, 28 Jun 2011 00:11:49 -0500 Subject: Add order_by to packager count query No real idea why SQLite is returning wrong results without this, but it is likely a bug in the ORM layer I'm not interested in digging into. Signed-off-by: Dan McGee --- devel/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/devel/utils.py b/devel/utils.py index 6bc52c89..d7a154a8 100644 --- a/devel/utils.py +++ b/devel/utils.py @@ -32,8 +32,8 @@ def get_annotated_maintainers(): pkg_count[k] = total flag_count[k] = flagged - update_count = Package.objects.values_list('packager').annotate( - Count('packager')) + update_count = Package.objects.values_list('packager').order_by( + 'packager').annotate(Count('packager')) update_count = dict(update_count) for m in maintainers: -- cgit v1.2.3-54-g00ecf From d3f1763efefef9ff5095a49b075b27b38df83d16 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Tue, 28 Jun 2011 00:12:45 -0500 Subject: Add a bad compression ratio report Signed-off-by: Dan McGee --- devel/views.py | 19 ++++++++++++++++++- templates/devel/index.html | 3 +++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/devel/views.py b/devel/views.py index 4399b73f..79eef318 100644 --- a/devel/views.py +++ b/devel/views.py @@ -6,7 +6,7 @@ from django.contrib.sites.models import Site from django.core.mail import send_mail from django.db import transaction -from django.db.models import Q +from django.db.models import F, Q from django.http import Http404 from django.shortcuts import get_object_or_404 from django.template import loader, Context @@ -163,6 +163,23 @@ def report(request, report, username=None): package.compressed_size) package.installed_size_pretty = filesizeformat( package.installed_size) + elif report == 'badcompression': + title = 'Packages that have little need for compression' + cutoff = 0.90 * F('installed_size') + packages = packages.filter(compressed_size__gt=0, installed_size__gt=0, + compressed_size__gte=cutoff).order_by('-compressed_size') + names = [ 'Compressed Size', 'Installed Size', 'Ratio', 'Type' ] + attrs = [ 'compressed_size_pretty', 'installed_size_pretty', + 'ratio', 'compress_type' ] + # Format the compressed and installed sizes with MB/GB/etc suffixes + for package in packages: + package.compressed_size_pretty = filesizeformat( + package.compressed_size) + package.installed_size_pretty = filesizeformat( + package.installed_size) + ratio = package.compressed_size / float(package.installed_size) + package.ratio = '%.2f' % ratio + package.compress_type =
package.filename.split('.')[-1] elif report == 'uncompressed-man': title = 'Packages with uncompressed manpages' # magic going on here! Checking for all '.1'...'.9' extensions diff --git a/templates/devel/index.html b/templates/devel/index.html index f9ca2135..1689319b 100644 --- a/templates/devel/index.html +++ b/templates/devel/index.html @@ -115,6 +115,9 @@

    Developer Reports

  • Uncompressed Info Pages: Self-explanatory (yours only)
  • +
• Bad Compression: + Packages whose compression saves less than 10% + of their installed size (yours only)
  • Unneeded Orphans: Packages that have no maintainer and are not required by any other package in any repository
  • -- cgit v1.2.3-54-g00ecf From 33b9bf44aac4b70fa4cc6e9d1e82fb556b836801 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 29 Jun 2011 09:17:00 -0500 Subject: Use normal date format on signoffs page Fixes FS#24949. Signed-off-by: Dan McGee --- templates/packages/signoffs.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/templates/packages/signoffs.html b/templates/packages/signoffs.html index b7184039..93be4979 100644 --- a/templates/packages/signoffs.html +++ b/templates/packages/signoffs.html @@ -27,7 +27,7 @@

    Package Signoffs

    {{ pkg.pkgname }} {{ pkg.full_version }} - {{ pkg.last_update }} + {{ pkg.last_update|date }} {{ target }} {{ pkg.approved_for_signoff|yesno:"Yes,No" }} -- cgit v1.2.3-54-g00ecf From 5e85c5ac9ed09551d65ec07767094770d248f3b1 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 29 Jun 2011 10:52:30 -0500 Subject: Move set_created_field() to shared utils class Signed-off-by: Dan McGee --- main/utils.py | 10 ++++++++++ packages/models.py | 10 ++-------- releng/models.py | 13 +++---------- 3 files changed, 15 insertions(+), 18 deletions(-) diff --git a/main/utils.py b/main/utils.py index 12d12503..2ca69bcb 100644 --- a/main/utils.py +++ b/main/utils.py @@ -2,6 +2,9 @@ import cPickle as pickle except ImportError: import pickle + +from datetime import datetime + from django.core.cache import cache from django.utils.hashcompat import md5_constructor @@ -81,4 +84,11 @@ def retrieve_latest(sender): pass return None +def set_created_field(sender, **kwargs): + '''This will set the 'created' field on any object to datetime.utcnow() if + it is unset. For use as a pre_save signal handler.''' + obj = kwargs['instance'] + if hasattr(obj, 'created') and not obj.created: + obj.created = datetime.utcnow() + # vim: set ts=4 sw=4 et: diff --git a/packages/models.py b/packages/models.py index a950bddb..0983c642 100644 --- a/packages/models.py +++ b/packages/models.py @@ -1,9 +1,9 @@ -from datetime import datetime - from django.db import models from django.db.models.signals import pre_save, post_save from django.contrib.auth.models import User +from main.utils import set_created_field + class PackageRelation(models.Model): ''' Represents maintainership (or interest) in a package by a given developer. @@ -112,12 +112,6 @@ def remove_inactive_maintainers(sender, instance, created, **kwargs): type=PackageRelation.MAINTAINER) maint_relations.delete() -def set_created_field(sender, **kwargs): - # We use this same callback for both Isos and Tests - obj = kwargs['instance'] - if not obj.created: - obj.created = datetime.utcnow() - post_save.connect(remove_inactive_maintainers, sender=User, dispatch_uid="packages.models") pre_save.connect(set_created_field, sender=PackageRelation, diff --git a/releng/models.py b/releng/models.py index 07ede1c5..5510db6a 100644 --- a/releng/models.py +++ b/releng/models.py @@ -1,6 +1,7 @@ -from datetime import datetime - from django.db import models +from django.db.models.signals import pre_save + +from main.utils import set_created_field class IsoOption(models.Model): name = models.CharField(max_length=200) @@ -104,14 +105,6 @@ class Test(models.Model): success = models.BooleanField() comments = models.TextField(null=True, blank=True) -def set_created_field(sender, **kwargs): - # We use this same callback for both Isos and Tests - obj = kwargs['instance'] - if not obj.created: - obj.created = datetime.utcnow() - -from django.db.models.signals import pre_save - pre_save.connect(set_created_field, sender=Iso, dispatch_uid="releng.models") pre_save.connect(set_created_field, sender=Test, -- cgit v1.2.3-54-g00ecf From 65e274dcb6dc58990e60af892be941c6b53f195a Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Tue, 5 Jul 2011 09:43:36 -0500 Subject: Simplify package differences code Signed-off-by: Dan McGee --- packages/utils.py | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/packages/utils.py b/packages/utils.py index af4675bb..37f23244 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -92,21 +92,16 @@ def get_differences_info(arch_a, arch_b): WHERE 
p.arch_id IN (%s, %s) AND ( q.id IS NULL - OR - p.pkgver != q.pkgver - OR - p.pkgrel != q.pkgrel - OR - p.epoch != q.epoch + OR p.pkgver != q.pkgver + OR p.pkgrel != q.pkgrel + OR p.epoch != q.epoch ) """ cursor = connection.cursor() cursor.execute(sql, [arch_a.id, arch_b.id]) results = cursor.fetchall() - to_fetch = [] - for row in results: - # column A will always have a value, column B might be NULL - to_fetch.append(row[0]) + # column A will always have a value, column B might be NULL + to_fetch = [row[0] for row in results] # fetch all of the necessary packages pkgs = Package.objects.normal().in_bulk(to_fetch) # now build a list of tuples containing differences -- cgit v1.2.3-54-g00ecf From fa65115afccc612dd902b6324069a0b312f6a0f6 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Sun, 3 Jul 2011 18:56:29 -0500 Subject: Add rel="nofollow" to download package links Signed-off-by: Dan McGee --- templates/packages/details.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/templates/packages/details.html b/templates/packages/details.html index 1926abc2..68016501 100644 --- a/templates/packages/details.html +++ b/templates/packages/details.html @@ -38,7 +38,7 @@

    Package Actions

    onclick="return !window.open('/packages/flaghelp/','FlagHelp', 'height=350,width=450,location=no,scrollbars=yes,menubars=no,toolbars=no,resizable=no');">(?) {% endif %} -
  • Download From Mirror
  • +
  • Download From Mirror
  • {% if perms.main.change_package %} -- cgit v1.2.3-54-g00ecf From 1bcb2b7ed0bdb11ea3b22cdbc93192b4259303d5 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Sun, 3 Jul 2011 18:56:05 -0500 Subject: Add a default datetime format Signed-off-by: Dan McGee --- settings.py | 1 + templates/packages/details.html | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/settings.py b/settings.py index e0d1b1a2..5cca6047 100644 --- a/settings.py +++ b/settings.py @@ -32,6 +32,7 @@ # Default date format in templates for 'date' filter DATE_FORMAT = 'Y-m-d' +DATETIME_FORMAT = 'Y-m-d H:i' # URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a # trailing slash. diff --git a/templates/packages/details.html b/templates/packages/details.html index 68016501..7972b9ab 100644 --- a/templates/packages/details.html +++ b/templates/packages/details.html @@ -144,7 +144,7 @@

    Versions Elsewhere

    {% with pkg.packager as pkgr %}{% if pkgr %}{% userpkgs pkgr %}{% else %}{{ pkg.packager_str }}{% endif %}{% endwith %} Build Date: - {{ pkg.build_date }} UTC + {{ pkg.build_date|date:"DATETIME_FORMAT" }} UTC Last Updated: {{ pkg.last_update|date }} -- cgit v1.2.3-54-g00ecf From e37d465c4eb03259b3ce0c89e43c2c21badb0063 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Fri, 1 Jul 2011 18:59:28 -0500 Subject: Allow more lenient data entry for PGP key field Don't allow the max_length parameter to make it through to the HTML form, silently cutting off HTML cut and pastes. Trim out spaces automatically, as well as '0x' and '2048R/' type prefixes. Signed-off-by: Dan McGee --- .../0051_auto__chg_field_userprofile_pgp_key.py | 160 +++++++++++++++++++++ main/models.py | 21 ++- 2 files changed, 180 insertions(+), 1 deletion(-) create mode 100644 main/migrations/0051_auto__chg_field_userprofile_pgp_key.py diff --git a/main/migrations/0051_auto__chg_field_userprofile_pgp_key.py b/main/migrations/0051_auto__chg_field_userprofile_pgp_key.py new file mode 100644 index 00000000..6b8abf6e --- /dev/null +++ b/main/migrations/0051_auto__chg_field_userprofile_pgp_key.py @@ -0,0 +1,160 @@ +# encoding: utf-8 +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +class Migration(SchemaMigration): + + def forwards(self, orm): + db.alter_column('user_profiles', 'pgp_key', self.gf('main.models.PGPKeyField')(max_length=40, null=True)) + + + def backwards(self, orm): + db.alter_column('user_profiles', 'pgp_key', self.gf('django.db.models.fields.CharField')(max_length=40, null=True)) + + models = { + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], 
{'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'main.arch': { + 'Meta': {'ordering': "['name']", 'object_name': 'Arch', 'db_table': "'arches'"}, + 'agnostic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) + }, + 'main.donor': { + 'Meta': {'ordering': "['name']", 'object_name': 'Donor', 'db_table': "'donors'"}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'}) + }, + 'main.package': { + 'Meta': {'ordering': "('pkgname',)", 'object_name': 'Package', 'db_table': "'packages'"}, + 'arch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Arch']"}), + 'build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'compressed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), + 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'files_last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'flag_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'installed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'last_update': ('django.db.models.fields.DateTimeField', [], {}), + 'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}), + 'packager_str': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgdesc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), + 'pkgname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'repo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Repo']"}), + 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}) + }, + 'main.packagedepend': { + 'Meta': {'object_name': 'PackageDepend', 'db_table': "'package_depends'"}, + 'depname': ('django.db.models.fields.CharField', [], 
{'max_length': '255', 'db_index': 'True'}), + 'depvcmp': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}), + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'optional': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.packagefile': { + 'Meta': {'object_name': 'PackageFile', 'db_table': "'package_files'"}, + 'directory': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_directory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.repo': { + 'Meta': {'ordering': "['name']", 'object_name': 'Repo', 'db_table': "'repos'"}, + 'bugs_category': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), + 'bugs_project': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'staging': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'svn_root': ('django.db.models.fields.CharField', [], {'max_length': '64'}), + 'testing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) + }, + 'main.signoff': { + 'Meta': {'object_name': 'Signoff'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}), + 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'main.todolist': { + 'Meta': {'object_name': 'Todolist', 'db_table': "'todolists'"}, + 'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), + 'date_added': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}), + 'description': ('django.db.models.fields.TextField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'main.todolistpkg': { + 'Meta': {'unique_together': "(('list', 'pkg'),)", 'object_name': 'TodolistPkg', 'db_table': "'todolist_pkgs'"}, + 'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'list': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Todolist']"}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.userprofile': { + 'Meta': {'object_name': 'UserProfile', 'db_table': "'user_profiles'"}, + 'alias': ('django.db.models.fields.CharField', [], {'max_length': '50'}), + 'allowed_repos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Repo']", 'symmetrical': 'False', 'blank': 'True'}), + 'favorite_distros': 
('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'interests': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'languages': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'location': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'notify': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'occupation': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'other_contact': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), + 'pgp_key': ('main.models.PGPKeyField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'picture': ('django.db.models.fields.files.FileField', [], {'default': "'devs/silhouette.png'", 'max_length': '100'}), + 'public_email': ('django.db.models.fields.CharField', [], {'max_length': '50'}), + 'roles': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'time_zone': ('django.db.models.fields.CharField', [], {'default': "'UTC'", 'max_length': '100'}), + 'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'userprofile'", 'unique': 'True', 'to': "orm['auth.User']"}), + 'website': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), + 'yob': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}) + } + } + + complete_apps = ['main'] diff --git a/main/models.py b/main/models.py index 9010817b..2f549081 100644 --- a/main/models.py +++ b/main/models.py @@ -22,6 +22,25 @@ def formfield(self, **kwargs): defaults.update(kwargs) return super(PositiveBigIntegerField, self).formfield(**defaults) +class PGPKeyField(models.CharField): + _south_introspects = True + + def to_python(self, value): + if value == '': + return None + value = super(PGPKeyField, self).to_python(value) + # remove all spaces + value = value.replace(' ', '') + # prune prefixes, either 0x or 2048R/ type + if value.startswith('0x'): + value = value[2:] + value = value.split('/')[-1] + return value + + def formfield(self, **kwargs): + # override so we don't set max_length form field attribute + return models.Field.formfield(self, **kwargs) + def validate_pgp_key_length(value): if len(value) not in (8, 16, 40): raise ValidationError( @@ -45,7 +64,7 @@ class UserProfile(models.Model): max_length=50, help_text="Required field") other_contact = models.CharField(max_length=100, null=True, blank=True) - pgp_key = models.CharField(max_length=40, null=True, blank=True, + pgp_key = PGPKeyField(max_length=40, null=True, blank=True, verbose_name="PGP key", validators=[RegexValidator(r'^[0-9A-F]+$', "Ensure this value consists of only hex characters.", 'hex_char'), validate_pgp_key_length], -- cgit v1.2.3-54-g00ecf From f3262790b37da4883d74095ce34a84951432399b Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 29 Jun 2011 11:14:56 -0500 Subject: Convert tabs to spaces in devel packages template Signed-off-by: Dan McGee --- templates/devel/packages.html | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/templates/devel/packages.html b/templates/devel/packages.html index 051ee4a3..a9553d70 100644 --- a/templates/devel/packages.html +++ 
b/templates/devel/packages.html
@@ -5,12 +5,12 @@
 {% block content %}
    -

{{ title }}{% if maintainer %},
- maintained by {{ maintainer.get_full_name }}{% endif%}

    -

{{ packages|length }} package{{ packages|pluralize }} found.
- {% if maintainer %}This report only includes packages maintained by
- {{ maintainer.get_full_name }} ({{ maintainer.username }}).{% endif %}
-

    +

{{ title }}{% if maintainer %},
+ maintained by {{ maintainer.get_full_name }}{% endif%}

    +

{{ packages|length }} package{{ packages|pluralize }} found.
+ {% if maintainer %}This report only includes packages maintained by
+ {{ maintainer.get_full_name }} ({{ maintainer.username }}).{% endif %}
+

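The PGPKeyField and validate_pgp_key_length additions above keep all key normalization server-side. A standalone restatement of that pipeline, for illustration only (not part of the patch series; the fingerprint below is made up):

    import re

    def normalize_pgp_key(value):
        # mirrors PGPKeyField.to_python: drop spaces, then prune either
        # an '0x' prefix or a 'keytype/' prefix such as '2048R/'
        value = value.replace(' ', '')
        if value.startswith('0x'):
            value = value[2:]
        value = value.split('/')[-1]
        # the hex-only RegexValidator implies uppercase input; in the patch
        # the form layer takes care of casing before validation runs
        return value.upper()

    def validate_pgp_key(value):
        # same rules as the RegexValidator/validate_pgp_key_length pair:
        # hex digits only, as a short (8) or long (16) key ID or a full
        # 40-character fingerprint
        if not re.match(r'^[0-9A-F]+$', value):
            raise ValueError("Ensure this value consists of only hex characters.")
        if len(value) not in (8, 16, 40):
            raise ValueError("Ensure this value has 8, 16, or 40 characters.")

    validate_pgp_key(normalize_pgp_key('0x2048R/ABCD EF01 2345 6789'))
    # normalized form: 'ABCDEF0123456789', a 16-character key ID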
    -- cgit v1.2.3-54-g00ecf From c5308b75837dfcbb851bcf9e93553c72e4b8996e Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 29 Jun 2011 12:05:07 -0500 Subject: Recent updates refactor Pull out a few helpful objects and functions for use later elsewhere. Signed-off-by: Dan McGee --- main/utils.py | 35 +++++++++++++++++++++++++++++++++++ public/utils.py | 38 +++++++++++++------------------------- 2 files changed, 48 insertions(+), 25 deletions(-) diff --git a/main/utils.py b/main/utils.py index 2ca69bcb..81f689e7 100644 --- a/main/utils.py +++ b/main/utils.py @@ -91,4 +91,39 @@ def set_created_field(sender, **kwargs): if hasattr(obj, 'created') and not obj.created: obj.created = datetime.utcnow() +def groupby_preserve_order(iterable, keyfunc): + '''Take an iterable and regroup using keyfunc to determine whether items + belong to the same group. The order of the iterable is preserved and + similar keys do not have to be consecutive. This means the earliest + occurrence of a given key will determine the order of the lists in the + returned list.''' + seen_keys = {} + result = [] + for item in iterable: + key = keyfunc(item) + + group = seen_keys.get(key, None) + if group is None: + group = [] + seen_keys[key] = group + result.append(group) + + group.append(item) + + return result + +class PackageStandin(object): + '''Resembles a Package object, and has a few of the same fields, but is + really a link to a pkgbase that has no package with matching pkgname.''' + def __init__(self, package): + self.package = package + self.pkgname = package.pkgbase + + def __getattr__(self, name): + return getattr(self.package, name) + + def get_absolute_url(self): + return '/packages/%s/%s/%s/' % ( + self.repo.name.lower(), self.arch.name, self.pkgbase) + # vim: set ts=4 sw=4 et: diff --git a/public/utils.py b/public/utils.py index 0be3ebaa..5900c674 100644 --- a/public/utils.py +++ b/public/utils.py @@ -1,7 +1,7 @@ from operator import attrgetter from main.models import Arch, Package, Repo -from main.utils import cache_function +from main.utils import cache_function, groupby_preserve_order, PackageStandin class RecentUpdate(object): def __init__(self, packages): @@ -35,18 +35,12 @@ def package_links(self): for package in self.packages: yield package else: - # time to fake out the template, this is a tad dirty - arches = set(pkg.arch for pkg in self.others) - for arch in arches: - url = '/packages/%s/%s/%s/' % ( - self.repo.name.lower(), arch.name, self.pkgbase) - package_stub = { - 'pkgname': self.pkgbase, - 'arch': arch, - 'repo': self.repo, - 'get_absolute_url': url - } - yield package_stub + # fake out the template- this is slightly hacky but yields one + # 'package-like' object per arch which is what the normal loop does + arches = set() + for package in self.others: + if package.arch not in arches and not arches.add(package.arch): + yield PackageStandin(package) @cache_function(300) def get_recent_updates(number=15): @@ -59,20 +53,14 @@ def get_recent_updates(number=15): for arch in Arch.objects.all(): pkgs += list(Package.objects.normal().filter( arch=arch).order_by('-last_update')[:fetch]) - pkgs.sort(key=attrgetter('last_update')) + pkgs.sort(key=attrgetter('last_update'), reverse=True) - updates = [] - while len(pkgs) > 0: - pkg = pkgs.pop() - - in_group = lambda x: pkg.repo == x.repo and pkg.pkgbase == x.pkgbase - samepkgs = [other for other in pkgs if in_group(other)] - samepkgs.append(pkg) + same_pkgbase_key = lambda x: (x.repo.name, x.pkgbase) + grouped = groupby_preserve_order(pkgs, 
same_pkgbase_key) - # now remove all the packages we just pulled out - pkgs = [other for other in pkgs if other not in samepkgs] - - update = RecentUpdate(samepkgs) + updates = [] + for group in grouped: + update = RecentUpdate(group) updates.append(update) return updates[:number] -- cgit v1.2.3-54-g00ecf From 52363933c0b04848a26c8ed65e7f975ccfc5b846 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 6 Jul 2011 11:00:58 -0500 Subject: Initial signoff template changes Signed-off-by: Dan McGee --- templates/packages/signoffs.html | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/templates/packages/signoffs.html b/templates/packages/signoffs.html index 93be4979..ab00b645 100644 --- a/templates/packages/signoffs.html +++ b/templates/packages/signoffs.html @@ -3,11 +3,12 @@ {% block navbarclass %}anb-packages{% endblock %} {% block content %} -{% if packages %}

    Package Signoffs

    +

    {{ packages|length }} package{{ packages|pluralize }} found.

    +
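A note on the groupby_preserve_order() helper from the recent-updates refactor above, since the signoff rework further down leans on it as well: unlike itertools.groupby(), it does not require equal keys to be adjacent, and groups come back ordered by each key's first appearance. A small illustration (the sample data is made up):

    from itertools import groupby
    from main.utils import groupby_preserve_order

    items = ['apple', 'avocado', 'banana', 'almond', 'blueberry']
    first_letter = lambda word: word[0]

    # itertools.groupby starts a new group every time the key changes:
    [list(group) for key, group in groupby(items, first_letter)]
    # -> [['apple', 'avocado'], ['banana'], ['almond'], ['blueberry']]

    # groupby_preserve_order groups across the whole iterable instead:
    groupby_preserve_order(items, first_letter)
    # -> [['apple', 'avocado', 'almond'], ['banana', 'blueberry']]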
    @@ -43,7 +44,7 @@

    Package Signoffs

- {% endfor %}
+ {% endfor %}
    @@ -57,5 +58,4 @@

    Package Signoffs

    headers: { 6: { sorter: false } } }); }); -{% endif %} {% endblock %} -- cgit v1.2.3-54-g00ecf From 0f9a1da2cb9dd2a20a5e12bb346ec460b4335f9f Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 6 Jul 2011 11:08:22 -0500 Subject: Move package signoff URL to more logical location Signed-off-by: Dan McGee --- packages/models.py | 3 +-- packages/urls.py | 3 +-- packages/views.py | 6 ++---- templates/packages/signoffs.html | 2 +- 4 files changed, 5 insertions(+), 9 deletions(-) diff --git a/packages/models.py b/packages/models.py index 0983c642..faf5f398 100644 --- a/packages/models.py +++ b/packages/models.py @@ -25,8 +25,7 @@ class PackageRelation(models.Model): def get_associated_packages(self): # TODO: delayed import to avoid circular reference from main.models import Package - return Package.objects.filter(pkgbase=self.pkgbase).select_related( - 'arch', 'repo') + return Package.objects.normal().filter(pkgbase=self.pkgbase) def repositories(self): packages = self.get_associated_packages() diff --git a/packages/urls.py b/packages/urls.py index d408e6cf..d7d01170 100644 --- a/packages/urls.py +++ b/packages/urls.py @@ -9,14 +9,13 @@ (r'^flag/done/$', 'flag_confirmed', {}, 'package-flag-confirmed'), (r'^unflag/$', 'unflag'), (r'^unflag/all/$', 'unflag_all'), + (r'^signoff/$', 'signoff_package'), (r'^download/$', 'download'), ) urlpatterns = patterns('packages.views', (r'^flaghelp/$', 'flaghelp'), (r'^signoffs/$', 'signoffs', {}, 'package-signoffs'), - (r'^signoff_package/(?P[A-z0-9]+)/(?P[A-z0-9\-+.]+)/$', - 'signoff_package'), (r'^update/$', 'update'), (r'^$', 'search', {}, 'packages-search'), diff --git a/packages/views.py b/packages/views.py index 01d01e20..d12583f0 100644 --- a/packages/views.py +++ b/packages/views.py @@ -372,11 +372,9 @@ def signoffs(request): @permission_required('main.change_package') @never_cache -def signoff_package(request, arch, pkgname): +def signoff_package(request, name, repo, arch): pkg = get_object_or_404(Package, - arch__name=arch, - pkgname=pkgname, - repo__testing=True) + pkgname=name, repo__name__iexact=repo, arch__name=arch) signoff, created = Signoff.objects.get_or_create( pkg=pkg, diff --git a/templates/packages/signoffs.html b/templates/packages/signoffs.html index ab00b645..b1153d7c 100644 --- a/templates/packages/signoffs.html +++ b/templates/packages/signoffs.html @@ -34,7 +34,7 @@

    Package Signoffs

    {{ pkg.approved_for_signoff|yesno:"Yes,No" }}
      -
    • Signoff
    • {% for signoff in pkg.signoffs %} -- cgit v1.2.3-54-g00ecf From ba975112cbc36f7515293543baaacfbcb46a96c2 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 6 Jul 2011 11:20:42 -0500 Subject: Add new packages signoff model This one is centered around pkgbase, much as our PackageRelation object is. However, it also tracks all of the versioning fields we have in order to making joining against the current package testing list possible. Finally, additional metadata including a created date, an (optional) revoke date, and a comments field are added. Signed-off-by: Dan McGee --- packages/migrations/0008_add_signoff_model.py | 166 ++++++++++++++++++++++++++ packages/models.py | 39 +++++- 2 files changed, 204 insertions(+), 1 deletion(-) create mode 100644 packages/migrations/0008_add_signoff_model.py diff --git a/packages/migrations/0008_add_signoff_model.py b/packages/migrations/0008_add_signoff_model.py new file mode 100644 index 00000000..5feed909 --- /dev/null +++ b/packages/migrations/0008_add_signoff_model.py @@ -0,0 +1,166 @@ +# encoding: utf-8 +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +class Migration(SchemaMigration): + + def forwards(self, orm): + # Adding model 'Signoff' + db.create_table('packages_signoff', ( + ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('pkgbase', self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True)), + ('pkgver', self.gf('django.db.models.fields.CharField')(max_length=255)), + ('pkgrel', self.gf('django.db.models.fields.CharField')(max_length=255)), + ('epoch', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)), + ('arch', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Arch'])), + ('repo', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Repo'])), + ('user', self.gf('django.db.models.fields.related.ForeignKey')(related_name='package_signoffs', to=orm['auth.User'])), + ('created', self.gf('django.db.models.fields.DateTimeField')()), + ('revoked', self.gf('django.db.models.fields.DateTimeField')(null=True)), + ('comments', self.gf('django.db.models.fields.TextField')(null=True, blank=True)), + )) + db.send_create_signal('packages', ['Signoff']) + + + def backwards(self, orm): + # Deleting model 'Signoff' + db.delete_table('packages_signoff') + + + models = { + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 
'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'main.arch': { + 'Meta': {'ordering': "['name']", 'object_name': 'Arch', 'db_table': "'arches'"}, + 'agnostic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) + }, + 'main.package': { + 'Meta': {'ordering': "('pkgname',)", 'object_name': 'Package', 'db_table': "'packages'"}, + 'arch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Arch']"}), + 'build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'compressed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), + 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'files_last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'flag_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'installed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'last_update': ('django.db.models.fields.DateTimeField', [], {}), + 'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}), + 'packager_str': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgdesc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), + 'pkgname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': 
'255'}), + 'repo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Repo']"}), + 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}) + }, + 'main.repo': { + 'Meta': {'ordering': "['name']", 'object_name': 'Repo', 'db_table': "'repos'"}, + 'bugs_category': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), + 'bugs_project': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'staging': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'svn_root': ('django.db.models.fields.CharField', [], {'max_length': '64'}), + 'testing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) + }, + 'packages.conflict': { + 'Meta': {'ordering': "['name']", 'object_name': 'Conflict'}, + 'comparison': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'conflicts'", 'to': "orm['main.Package']"}), + 'version': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}) + }, + 'packages.license': { + 'Meta': {'ordering': "['name']", 'object_name': 'License'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'licenses'", 'to': "orm['main.Package']"}) + }, + 'packages.packagegroup': { + 'Meta': {'object_name': 'PackageGroup'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'groups'", 'to': "orm['main.Package']"}) + }, + 'packages.packagerelation': { + 'Meta': {'unique_together': "(('pkgbase', 'user', 'type'),)", 'object_name': 'PackageRelation'}, + 'created': ('django.db.models.fields.DateTimeField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'type': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_relations'", 'to': "orm['auth.User']"}) + }, + 'packages.provision': { + 'Meta': {'ordering': "['name']", 'object_name': 'Provision'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'provides'", 'to': "orm['main.Package']"}), + 'version': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}) + }, + 'packages.replacement': { + 'Meta': {'ordering': "['name']", 'object_name': 'Replacement'}, + 'comparison': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': 
('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'replaces'", 'to': "orm['main.Package']"}), + 'version': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}) + }, + 'packages.signoff': { + 'Meta': {'object_name': 'Signoff'}, + 'arch': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Arch']"}), + 'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'created': ('django.db.models.fields.DateTimeField', [], {}), + 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'repo': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Repo']"}), + 'revoked': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_signoffs'", 'to': "orm['auth.User']"}) + } + } + + complete_apps = ['packages'] diff --git a/packages/models.py b/packages/models.py index faf5f398..55725e8e 100644 --- a/packages/models.py +++ b/packages/models.py @@ -32,12 +32,47 @@ def repositories(self): return sorted(set([p.repo for p in packages])) def __unicode__(self): - return "%s: %s (%s)" % ( + return u'%s: %s (%s)' % ( self.pkgbase, self.user, self.get_type_display()) class Meta: unique_together = (('pkgbase', 'user', 'type'),) +class Signoff(models.Model): + ''' + A signoff for a package (by pkgbase) at a given point in time. These are + not keyed directly to a Package object so they don't ever get deleted when + Packages come and go from testing repositories. + ''' + pkgbase = models.CharField(max_length=255, db_index=True) + pkgver = models.CharField(max_length=255) + pkgrel = models.CharField(max_length=255) + epoch = models.PositiveIntegerField(default=0) + arch = models.ForeignKey('main.Arch') + repo = models.ForeignKey('main.Repo') + user = models.ForeignKey(User, related_name="package_signoffs") + created = models.DateTimeField(editable=False) + revoked = models.DateTimeField(null=True) + comments = models.TextField(null=True, blank=True) + + @property + def packages(self): + # TODO: delayed import to avoid circular reference + from main.models import Package + return Package.objects.normal().filter(pkgbase=self.pkgbase, + pkgver=self.pkgver, pkgrel=self.pkgrel, epoch=pkg.epoch, + arch=self.arch, repo=self.repo) + + @property + def full_version(self): + if self.epoch > 0: + return u'%d:%s-%s' % (self.epoch, self.pkgver, self.pkgrel) + return u'%s-%s' % (self.pkgver, self.pkgrel) + + def __unicode__(self): + return u'%s-%s: %s' % ( + self.pkgbase, self.full_version, self.user) + class PackageGroup(models.Model): ''' Represents a group a package is in. 
There is no actual group entity, @@ -115,5 +150,7 @@ def remove_inactive_maintainers(sender, instance, created, **kwargs): dispatch_uid="packages.models") pre_save.connect(set_created_field, sender=PackageRelation, dispatch_uid="packages.models") +pre_save.connect(set_created_field, sender=Signoff, + dispatch_uid="packages.models") # vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From 4876a1258042a85d9c0a1d57a1a185f48850c929 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 6 Jul 2011 11:37:35 -0500 Subject: Add get_current_signoffs utility method This is another SQL-based utility method that dramatically cuts back on how many queries we run and gets around the shortcoming of arbitrary joins in Django. It will be used by the new signoff page logic. Signed-off-by: Dan McGee --- packages/utils.py | 31 ++++++++++++++++++++++++++++--- 1 file changed, 28 insertions(+), 3 deletions(-) diff --git a/packages/utils.py b/packages/utils.py index 37f23244..c8c1f8a6 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -1,11 +1,12 @@ +from collections import defaultdict +from operator import itemgetter + from django.db import connection from django.db.models import Count, Max -from operator import itemgetter - from main.models import Package from main.utils import cache_function -from .models import PackageGroup, PackageRelation +from .models import PackageGroup, PackageRelation, Signoff @cache_function(300) def get_group_info(include_arches=None): @@ -147,4 +148,28 @@ def get_wrong_permissions(): id__in=to_fetch) return relations +def get_current_signoffs(): + '''Returns a mapping of pkgbase -> signoff objects.''' + sql = """ +SELECT DISTINCT s.id + FROM packages_signoff s + JOIN packages p ON ( + s.pkgbase = p.pkgbase + AND s.pkgver = p.pkgver + AND s.pkgrel = p.pkgrel + AND s.epoch = p.epoch + AND s.arch_id = p.arch_id + AND s.repo_id = p.repo_id + ) + JOIN repos r ON p.repo_id = r.id + WHERE r.testing = %s +""" + cursor = connection.cursor() + cursor.execute(sql, [True]) + results = cursor.fetchall() + # fetch all of the returned signoffs by ID + to_fetch = [row[0] for row in results] + signoffs = Signoff.objects.select_related('user').in_bulk(to_fetch) + return signoffs.values() + # vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From f95abca269aec1409ec1e57de4c6cb5ba1da6369 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 6 Jul 2011 12:05:19 -0500 Subject: Refactor code to use new signoff model This moves signoff creation and display to the new packages.Signoff model. 
Signed-off-by: Dan McGee --- main/models.py | 14 ++--- packages/models.py | 2 + packages/views.py | 120 ++++++++++++++++++++++++++++++--------- templates/packages/signoffs.html | 25 ++++---- 4 files changed, 115 insertions(+), 46 deletions(-) diff --git a/main/models.py b/main/models.py index 2f549081..473144da 100644 --- a/main/models.py +++ b/main/models.py @@ -6,6 +6,7 @@ from main.utils import cache_function, make_choice from packages.models import PackageRelation +from packages.models import Signoff as PackageSignoff from datetime import datetime from itertools import groupby @@ -206,16 +207,13 @@ def maintainers(self): @property def signoffs(self): - if 'signoffs_cache' in dir(self): - return self.signoffs_cache - self.signoffs_cache = list(Signoff.objects.filter( - pkg=self, - pkgver=self.pkgver, - pkgrel=self.pkgrel)) - return self.signoffs_cache + return PackageSignoff.objects.select_related('user').filter( + pkgbase=self.pkgbase, pkgver=self.pkgver, pkgrel=self.pkgrel, + epoch=self.epoch, arch=self.arch, repo=self.repo) def approved_for_signoff(self): - return len(self.signoffs) >= 2 + count = self.signoffs.filter(revoked__isnull=True).count() + return count >= PackageSignoff.REQUIRED @cache_function(300) def applicable_arches(self): diff --git a/packages/models.py b/packages/models.py index 55725e8e..7ae2d57a 100644 --- a/packages/models.py +++ b/packages/models.py @@ -55,6 +55,8 @@ class Signoff(models.Model): revoked = models.DateTimeField(null=True) comments = models.TextField(null=True, blank=True) + REQUIRED = 2 + @property def packages(self): # TODO: delayed import to avoid circular reference diff --git a/packages/views.py b/packages/views.py index d12583f0..3d9032e6 100644 --- a/packages/views.py +++ b/packages/views.py @@ -8,7 +8,7 @@ from django.core.serializers.json import DjangoJSONEncoder from django.db.models import Q from django.http import HttpResponse, Http404 -from django.shortcuts import get_object_or_404, redirect +from django.shortcuts import get_object_or_404, get_list_or_404, redirect from django.template import loader, Context from django.utils import simplejson from django.views.decorators.cache import never_cache @@ -18,15 +18,16 @@ from django.views.generic.simple import direct_to_template from datetime import datetime +from operator import attrgetter import string from urllib import urlencode -from main.models import Package, PackageFile -from main.models import Arch, Repo, Signoff -from main.utils import make_choice +from main.models import Package, PackageFile, Arch, Repo +from main.utils import make_choice, groupby_preserve_order, PackageStandin from mirrors.models import MirrorUrl -from .models import PackageRelation, PackageGroup -from .utils import get_group_info, get_differences_info, get_wrong_permissions +from .models import PackageRelation, PackageGroup, Signoff +from .utils import (get_group_info, get_differences_info, + get_wrong_permissions, get_current_signoffs) class PackageJSONEncoder(DjangoJSONEncoder): pkg_attributes = [ 'pkgname', 'pkgbase', 'repo', 'arch', 'pkgver', @@ -349,38 +350,101 @@ def unflag_all(request, name, repo, arch): pkgs.update(flag_date=None) return redirect(pkg) +class PackageSignoffGroup(object): + '''Encompasses all packages in testing with the same pkgbase.''' + def __init__(self, packages, target_repo=None, signoffs=None): + if len(packages) == 0: + raise Exception + self.packages = packages + self.target_repo = target_repo + self.signoffs = signoffs + + first = packages[0] + self.pkgbase = first.pkgbase + 
self.arch = first.arch + self.repo = first.repo + self.version = '' + + version = first.full_version + if all(version == pkg.full_version for pkg in packages): + self.version = version + + @property + def package(self): + '''Try and return a relevant single package object representing this + group. Start by seeing if there is only one package, then look for the + matching package by name, finally falling back to a standin package + object.''' + if len(self.packages) == 1: + return self.packages[0] + + same_pkgs = [p for p in self.packages if p.pkgname == p.pkgbase] + if same_pkgs: + return same_pkgs[0] + + return PackageStandin(self.packages[0]) + + def find_signoffs(self, all_signoffs): + '''Look through a list of Signoff objects for ones matching this + particular group and store them on the object.''' + if self.signoffs is None: + self.signoffs = [] + for s in all_signoffs: + if s.pkgbase != self.pkgbase: + continue + if self.version and not s.full_version == self.version: + continue + if s.arch_id == self.arch.id and s.repo_id == self.repo.id: + self.signoffs.append(s) + + def approved(self): + if self.signoffs: + good_signoffs = [s for s in self.signoffs if not s.revoked] + return len(good_signoffs) >= Signoff.REQUIRED + return False + @permission_required('main.change_package') @never_cache def signoffs(request): - packages = Package.objects.select_related('arch', 'repo', 'signoffs').filter(repo__testing=True).order_by("pkgname") - package_list = [] - - q_pkgname = Package.objects.filter(repo__testing=True).values('pkgname').distinct().query - package_repos = Package.objects.values('pkgname', 'repo__name').exclude(repo__testing=True).filter(pkgname__in=q_pkgname) - pkgtorepo = dict() - for pr in package_repos: - pkgtorepo[pr['pkgname']] = pr['repo__name'] - - for package in packages: - if package.pkgname in pkgtorepo: - repo = pkgtorepo[package.pkgname] - else: - repo = "Unknown" - package_list.append((package, repo)) + test_pkgs = Package.objects.normal().filter(repo__testing=True) + packages = test_pkgs.order_by('pkgname') + + # Collect all pkgbase values in testing repos + q_pkgbase = test_pkgs.values('pkgbase') + package_repos = Package.objects.order_by().values_list( + 'pkgbase', 'repo__name').filter( + repo__testing=False, repo__staging=False, + pkgbase__in=q_pkgbase).distinct() + pkgtorepo = dict(package_repos) + + # Collect all existing signoffs for these packages + signoffs = get_current_signoffs() + + same_pkgbase_key = lambda x: (x.repo.name, x.arch.name, x.pkgbase) + grouped = groupby_preserve_order(packages, same_pkgbase_key) + signoff_groups = [] + for group in grouped: + signoff_group = PackageSignoffGroup(group) + signoff_group.target_repo = pkgtorepo.get(signoff_group.pkgbase, + "Unknown") + signoff_group.find_signoffs(signoffs) + signoff_groups.append(signoff_group) + + signoff_groups.sort(key=attrgetter('pkgbase')) + return direct_to_template(request, 'packages/signoffs.html', - {'packages': package_list}) + {'signoff_groups': signoff_groups}) @permission_required('main.change_package') @never_cache def signoff_package(request, name, repo, arch): - pkg = get_object_or_404(Package, - pkgname=name, repo__name__iexact=repo, arch__name=arch) + packages = get_list_or_404(Package, pkgbase=name, + arch__name=arch, repo__name__iexact=repo, repo__testing=True) + pkg = packages[0] signoff, created = Signoff.objects.get_or_create( - pkg=pkg, - pkgver=pkg.pkgver, - pkgrel=pkg.pkgrel, - packager=request.user) + pkgbase=pkg.pkgbase, pkgver=pkg.pkgver, pkgrel=pkg.pkgrel, + 
epoch=pkg.epoch, arch=pkg.arch, repo=pkg.repo, user=request.user) if request.is_ajax(): data = { diff --git a/templates/packages/signoffs.html b/templates/packages/signoffs.html index b1153d7c..6014396c 100644 --- a/templates/packages/signoffs.html +++ b/templates/packages/signoffs.html @@ -7,13 +7,15 @@

      Package Signoffs

      -

      {{ packages|length }} package{{ packages|pluralize }} found.

      +

{{ signoff_groups|length }} signoff group{{ signoff_groups|pluralize }} found.
+ A "signoff group" consists of packages grouped by pkgbase, architecture, and repository.

      - + + @@ -22,28 +24,31 @@

      Package Signoffs

- {% for pkg,target in packages %}
+ {% for group in signoff_groups %}
+ {% with group.package as pkg %}
+
      ArchPackagePackage Base# of Packages Version Last Updated Target Repo
      {{ pkg.arch.name }} {{ pkg.pkgname }}{{ group.packages|length }} {{ pkg.full_version }} {{ pkg.last_update|date }}{{ target }} - {{ pkg.approved_for_signoff|yesno:"Yes,No" }}{{ group.target_repo }} + {{ group.approved|yesno:"Yes,No" }}
      • Signoff + title="Signoff {{ pkg.pkgname }} for {{ pkg.arch }}">Signoff
      • - {% for signoff in pkg.signoffs %} -
      • - {{signoff.packager}}
      • + {% for signoff in group.signoffs %} +
      • + {{ signoff.user }}{% if signoff.revoked %} (revoked){% endif %}
      • {% endfor %}
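To recap the flow this refactor sets up: the signoffs view pulls every package in a testing repo, groups them by (repo, arch, pkgbase) via groupby_preserve_order(), fetches all matching Signoff rows in one query with get_current_signoffs(), and lets each PackageSignoffGroup pick out its own signoffs. A condensed restatement of the matching and approval rules from the view code above (illustrative only; names follow the patch):

    REQUIRED = 2  # Signoff.REQUIRED in packages/models.py

    def matching_signoffs(group, all_signoffs):
        # the same tests PackageSignoffGroup.find_signoffs() applies
        return [s for s in all_signoffs
                if s.pkgbase == group.pkgbase
                and (not group.version or s.full_version == group.version)
                and s.arch_id == group.arch.id
                and s.repo_id == group.repo.id]

    def approved(signoffs):
        # as in PackageSignoffGroup.approved(): revoked signoffs do not count
        return len([s for s in signoffs if not s.revoked]) >= REQUIRED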
      -- cgit v1.2.3-54-g00ecf From d8e089fd30ac2dacac54c444cc522ee44ee332ed Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 6 Jul 2011 12:08:42 -0500 Subject: Delete old signoff model Signed-off-by: Dan McGee --- main/migrations/0052_auto__del_signoff.py | 166 ++++++++++++++++++++++++++++++ main/models.py | 6 -- 2 files changed, 166 insertions(+), 6 deletions(-) create mode 100644 main/migrations/0052_auto__del_signoff.py diff --git a/main/migrations/0052_auto__del_signoff.py b/main/migrations/0052_auto__del_signoff.py new file mode 100644 index 00000000..8da6becc --- /dev/null +++ b/main/migrations/0052_auto__del_signoff.py @@ -0,0 +1,166 @@ +# encoding: utf-8 +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +class Migration(SchemaMigration): + + depends_on = ( + ('packages', '0008_add_signoff_model'), + ) + + def forwards(self, orm): + # Deleting model 'Signoff' + db.delete_table('main_signoff') + + + def backwards(self, orm): + # Adding model 'Signoff' + db.create_table('main_signoff', ( + ('pkgrel', self.gf('django.db.models.fields.CharField')(max_length=255)), + ('packager', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])), + ('pkg', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Package'])), + ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('pkgver', self.gf('django.db.models.fields.CharField')(max_length=255)), + )) + db.send_create_signal('main', ['Signoff']) + + + models = { + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': 
'128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'main.arch': { + 'Meta': {'ordering': "['name']", 'object_name': 'Arch', 'db_table': "'arches'"}, + 'agnostic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) + }, + 'main.donor': { + 'Meta': {'ordering': "['name']", 'object_name': 'Donor', 'db_table': "'donors'"}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'}) + }, + 'main.package': { + 'Meta': {'ordering': "('pkgname',)", 'object_name': 'Package', 'db_table': "'packages'"}, + 'arch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Arch']"}), + 'build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'compressed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), + 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'files_last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'flag_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'installed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'last_update': ('django.db.models.fields.DateTimeField', [], {}), + 'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}), + 'packager_str': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgdesc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), + 'pkgname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'repo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Repo']"}), + 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}) + }, + 'main.packagedepend': { + 'Meta': {'object_name': 'PackageDepend', 'db_table': "'package_depends'"}, + 'depname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'depvcmp': ('django.db.models.fields.CharField', [], {'default': "''", 
'max_length': '255'}), + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'optional': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.packagefile': { + 'Meta': {'object_name': 'PackageFile', 'db_table': "'package_files'"}, + 'directory': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_directory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.repo': { + 'Meta': {'ordering': "['name']", 'object_name': 'Repo', 'db_table': "'repos'"}, + 'bugs_category': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), + 'bugs_project': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'staging': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'svn_root': ('django.db.models.fields.CharField', [], {'max_length': '64'}), + 'testing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) + }, + 'main.todolist': { + 'Meta': {'object_name': 'Todolist', 'db_table': "'todolists'"}, + 'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), + 'date_added': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}), + 'description': ('django.db.models.fields.TextField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'main.todolistpkg': { + 'Meta': {'unique_together': "(('list', 'pkg'),)", 'object_name': 'TodolistPkg', 'db_table': "'todolist_pkgs'"}, + 'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'list': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Todolist']"}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.userprofile': { + 'Meta': {'object_name': 'UserProfile', 'db_table': "'user_profiles'"}, + 'alias': ('django.db.models.fields.CharField', [], {'max_length': '50'}), + 'allowed_repos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Repo']", 'symmetrical': 'False', 'blank': 'True'}), + 'favorite_distros': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'interests': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'languages': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'location': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'notify': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'occupation': 
('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'other_contact': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), + 'pgp_key': ('main.models.PGPKeyField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'picture': ('django.db.models.fields.files.FileField', [], {'default': "'devs/silhouette.png'", 'max_length': '100'}), + 'public_email': ('django.db.models.fields.CharField', [], {'max_length': '50'}), + 'roles': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'time_zone': ('django.db.models.fields.CharField', [], {'default': "'UTC'", 'max_length': '100'}), + 'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'userprofile'", 'unique': 'True', 'to': "orm['auth.User']"}), + 'website': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), + 'yob': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}) + } + } + + complete_apps = ['main'] diff --git a/main/models.py b/main/models.py index 473144da..6e134810 100644 --- a/main/models.py +++ b/main/models.py @@ -367,12 +367,6 @@ def elsewhere(self): pkgname=self.pkgname).exclude(id=self.id).order_by( 'arch__name', 'repo__name') -class Signoff(models.Model): - pkg = models.ForeignKey(Package) - pkgver = models.CharField(max_length=255) - pkgrel = models.CharField(max_length=255) - packager = models.ForeignKey(User) - class PackageFile(models.Model): pkg = models.ForeignKey(Package) is_directory = models.BooleanField(default=False) -- cgit v1.2.3-54-g00ecf From 86c48cc4314b593ee6c9136490a72433a5dca1e2 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 6 Jul 2011 13:17:29 -0500 Subject: Add a few more valid package sort fields Just for fun and for people that know what they are doing. Signed-off-by: Dan McGee --- packages/views.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/views.py b/packages/views.py index 3d9032e6..1f2a38d3 100644 --- a/packages/views.py +++ b/packages/views.py @@ -279,7 +279,9 @@ def search(request, page=None): 'search_form': form, 'current_query': current_query } - allowed_sort = ["arch", "repo", "pkgname", "last_update", "flag_date"] + allowed_sort = ["arch", "repo", "pkgname", "pkgname", + "compressed_size", "installed_size", + "build_date", "last_update", "flag_date"] allowed_sort += ["-" + s for s in allowed_sort] sort = request.GET.get('sort', None) # TODO: sorting by multiple fields makes using a DB index much harder -- cgit v1.2.3-54-g00ecf From 59badd066238c946cbe64d7002a260b9de606938 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Fri, 15 Jul 2011 10:10:11 -0500 Subject: Add pacakge count to group/split package details page Signed-off-by: Dan McGee --- templates/packages/packages_list.html | 1 + 1 file changed, 1 insertion(+) diff --git a/templates/packages/packages_list.html b/templates/packages/packages_list.html index c897aac5..ccc091d8 100644 --- a/templates/packages/packages_list.html +++ b/templates/packages/packages_list.html @@ -5,6 +5,7 @@ {% block content %}

      {{ list_title }} - {{ name }} ({{ arch.name }})

      +

      {{ packages|length }} package{{ packages|pluralize }} found.

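The line added here uses the length/pluralize filter pair these templates rely on throughout. A minimal demonstration, runnable only inside a configured Django environment (the package names are placeholders):

    from django.template import Context, Template

    t = Template('{{ packages|length }} package{{ packages|pluralize }} found.')
    t.render(Context({'packages': ['linux']}))
    # -> u'1 package found.'
    t.render(Context({'packages': ['linux', 'pacman']}))
    # -> u'2 packages found.'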
      -- cgit v1.2.3-54-g00ecf From ae12aa58fd6c6965b3eadcf3843ea68602069c60 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Mon, 25 Jul 2011 12:27:46 -0500 Subject: Small template updates * Show full version in developer dashboard out of date pane * Link packages on flag confirmation screen Signed-off-by: Dan McGee --- templates/devel/index.html | 2 +- templates/packages/flag_confirmed.html | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/templates/devel/index.html b/templates/devel/index.html index 1689319b..ad101145 100644 --- a/templates/devel/index.html +++ b/templates/devel/index.html @@ -27,7 +27,7 @@

      My Flagged Packages

-
+
diff --git a/templates/packages/flag_confirmed.html b/templates/packages/flag_confirmed.html
index 02c24f72..9ef316cb 100644
--- a/templates/packages/flag_confirmed.html
+++ b/templates/packages/flag_confirmed.html
@@ -4,12 +4,13 @@
 {% block content %}
      -

      Package Flagged - {{ package.pkgname }}

      +

      Package Flagged - {{ package.pkgname }}

      Thank you, the maintainers have been notified the following packages are out-of-date:

        {% for pkg in packages %} -
      • {{ pkg.pkgname }} {{ pkg.full_version }} [{{ pkg.repo.name|lower }}] ({{ pkg.arch.name }})
      • +
      • {{ pkg.pkgname }} {{ pkg.full_version }} [{{ pkg.repo.name|lower }}] ({{ pkg.arch.name }})
      • {% endfor %}
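Both template tweaks render pkg.full_version, which folds the epoch into the usual pkgver-pkgrel string. A sketch of the formatting rule, matching the Signoff.full_version property added earlier in this series (Package presumably exposes the same property; its definition is not part of these patches):

    def full_version(epoch, pkgver, pkgrel):
        # the epoch is only shown when non-zero, as in Signoff.full_version
        if epoch > 0:
            return u'%d:%s-%s' % (epoch, pkgver, pkgrel)
        return u'%s-%s' % (pkgver, pkgrel)

    full_version(0, '3.0', '1')   # -> u'3.0-1'
    full_version(1, '1.9q', '2')  # -> u'1:1.9q-2'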
      -- cgit v1.2.3-54-g00ecf From ad65f4c98e6ea3e8078b4a482fa88fb4595ccedd Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Fri, 29 Jul 2011 12:23:03 -0500 Subject: Fix package sort fields Don't list pkgname twice, include pkgbase instead. Signed-off-by: Dan McGee --- packages/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/views.py b/packages/views.py index 1f2a38d3..8c9c1b18 100644 --- a/packages/views.py +++ b/packages/views.py @@ -279,7 +279,7 @@ def search(request, page=None): 'search_form': form, 'current_query': current_query } - allowed_sort = ["arch", "repo", "pkgname", "pkgname", + allowed_sort = ["arch", "repo", "pkgname", "pkgbase", "compressed_size", "installed_size", "build_date", "last_update", "flag_date"] allowed_sort += ["-" + s for s in allowed_sort] -- cgit v1.2.3-54-g00ecf From 82cb7c3586154da7196b513e349ea9c91efbc9ac Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Fri, 29 Jul 2011 12:26:27 -0500 Subject: Add index to package groups name field Signed-off-by: Dan McGee --- .../migrations/0009_add_packagegroup_name_index.py | 149 +++++++++++++++++++++ packages/models.py | 2 +- 2 files changed, 150 insertions(+), 1 deletion(-) create mode 100644 packages/migrations/0009_add_packagegroup_name_index.py diff --git a/packages/migrations/0009_add_packagegroup_name_index.py b/packages/migrations/0009_add_packagegroup_name_index.py new file mode 100644 index 00000000..f81e77fc --- /dev/null +++ b/packages/migrations/0009_add_packagegroup_name_index.py @@ -0,0 +1,149 @@ +# encoding: utf-8 +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +class Migration(SchemaMigration): + + def forwards(self, orm): + db.create_index('packages_packagegroup', ['name']) + + def backwards(self, orm): + db.delete_index('packages_packagegroup', ['name']) + + models = { + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': 
('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'main.arch': { + 'Meta': {'ordering': "['name']", 'object_name': 'Arch', 'db_table': "'arches'"}, + 'agnostic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) + }, + 'main.package': { + 'Meta': {'ordering': "('pkgname',)", 'object_name': 'Package', 'db_table': "'packages'"}, + 'arch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Arch']"}), + 'build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'compressed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), + 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'files_last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'flag_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'installed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'last_update': ('django.db.models.fields.DateTimeField', [], {}), + 'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}), + 'packager_str': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgdesc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), + 'pkgname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'repo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Repo']"}), + 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}) + }, + 'main.repo': { + 'Meta': {'ordering': "['name']", 'object_name': 'Repo', 'db_table': "'repos'"}, + 'bugs_category': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), + 'bugs_project': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}), + 'id': 
('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'staging': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'svn_root': ('django.db.models.fields.CharField', [], {'max_length': '64'}), + 'testing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) + }, + 'packages.conflict': { + 'Meta': {'ordering': "['name']", 'object_name': 'Conflict'}, + 'comparison': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'conflicts'", 'to': "orm['main.Package']"}), + 'version': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}) + }, + 'packages.license': { + 'Meta': {'ordering': "['name']", 'object_name': 'License'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'licenses'", 'to': "orm['main.Package']"}) + }, + 'packages.packagegroup': { + 'Meta': {'object_name': 'PackageGroup'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'groups'", 'to': "orm['main.Package']"}) + }, + 'packages.packagerelation': { + 'Meta': {'unique_together': "(('pkgbase', 'user', 'type'),)", 'object_name': 'PackageRelation'}, + 'created': ('django.db.models.fields.DateTimeField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'type': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_relations'", 'to': "orm['auth.User']"}) + }, + 'packages.provision': { + 'Meta': {'ordering': "['name']", 'object_name': 'Provision'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'provides'", 'to': "orm['main.Package']"}), + 'version': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}) + }, + 'packages.replacement': { + 'Meta': {'ordering': "['name']", 'object_name': 'Replacement'}, + 'comparison': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'replaces'", 'to': "orm['main.Package']"}), + 'version': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}) + }, + 'packages.signoff': { + 'Meta': {'object_name': 'Signoff'}, + 'arch': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Arch']"}), + 'comments': 
('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'created': ('django.db.models.fields.DateTimeField', [], {}), + 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'repo': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Repo']"}), + 'revoked': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_signoffs'", 'to': "orm['auth.User']"}) + } + } + + complete_apps = ['packages'] diff --git a/packages/models.py b/packages/models.py index 7ae2d57a..d2fe1878 100644 --- a/packages/models.py +++ b/packages/models.py @@ -81,7 +81,7 @@ class PackageGroup(models.Model): only names that link to given packages. ''' pkg = models.ForeignKey('main.Package', related_name='groups') - name = models.CharField(max_length=255) + name = models.CharField(max_length=255, db_index=True) def __unicode__(self): return "%s: %s" % (self.name, self.pkg) -- cgit v1.2.3-54-g00ecf From 2a73675f0ed894d57ed13c5799e988f8d10ccfe2 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Fri, 29 Jul 2011 23:08:33 -0500 Subject: Select arch/repo for split package related fields Signed-off-by: Dan McGee --- main/models.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/main/models.py b/main/models.py index 6e134810..70372823 100644 --- a/main/models.py +++ b/main/models.py @@ -307,12 +307,12 @@ def base_package(self): """ try: # start by looking for something in this repo - return Package.objects.get(arch=self.arch, + return Package.objects.normal().get(arch=self.arch, repo=self.repo, pkgname=self.pkgbase) except Package.DoesNotExist: # this package might be split across repos? just find one # that matches the correct [testing] repo flag - pkglist = Package.objects.filter(arch=self.arch, + pkglist = Package.objects.normal().filter(arch=self.arch, repo__testing=self.repo.testing, repo__staging=self.repo.staging, pkgname=self.pkgbase) if len(pkglist) > 0: @@ -327,7 +327,7 @@ def split_packages(self): repo.testing and repo.staging flags. For any non-split packages, the return value will be an empty list. 
""" - return Package.objects.filter(arch__in=self.applicable_arches(), + return Package.objects.normal().filter(arch__in=self.applicable_arches(), repo__testing=self.repo.testing, repo__staging=self.repo.staging, pkgbase=self.pkgbase).exclude(id=self.id) -- cgit v1.2.3-54-g00ecf From 0f6c80e9a36bc5770e95543b4374c5ace4383cf5 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Fri, 29 Jul 2011 23:34:24 -0500 Subject: mirrors: do protocol formatting in template Signed-off-by: Dan McGee --- mirrors/models.py | 2 +- templates/mirrors/mirrors.html | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mirrors/models.py b/mirrors/models.py index 4f70e5a9..a8217844 100644 --- a/mirrors/models.py +++ b/mirrors/models.py @@ -42,7 +42,7 @@ def __unicode__(self): def supported_protocols(self): protocols = MirrorProtocol.objects.filter( urls__mirror=self).order_by('protocol').distinct() - return ", ".join([p.protocol for p in protocols]) + return sorted(protocols) def downstream(self): return Mirror.objects.filter(upstream=self).order_by('name') diff --git a/templates/mirrors/mirrors.html b/templates/mirrors/mirrors.html index 67a678d9..ee76acbe 100644 --- a/templates/mirrors/mirrors.html +++ b/templates/mirrors/mirrors.html @@ -28,7 +28,7 @@

      Mirror Overview

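Editor's illustration (not part of the patch): supported_protocols() now hands
the template a sorted list of MirrorProtocol objects instead of a pre-joined
string, so the comma formatting moves into the template, e.g. with a `mirror`
object in context:

    {{ mirror.supported_protocols|join:", " }}

Django's built-in join filter coerces each item to text, so every protocol
renders through the model's unicode representation.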
-
+
      {% if user.is_authenticated %}
-- cgit v1.2.3-54-g00ecf

From 21461e78608bb687d7101dd55e72d44cbebf2ff6 Mon Sep 17 00:00:00 2001
From: Dan McGee
Date: Tue, 9 Aug 2011 22:53:01 -0500
Subject: Add package details link tag

Signed-off-by: Dan McGee
---
 packages/templatetags/package_extras.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/packages/templatetags/package_extras.py b/packages/templatetags/package_extras.py
index e089b723..14a519d4 100644
--- a/packages/templatetags/package_extras.py
+++ b/packages/templatetags/package_extras.py
@@ -36,6 +36,11 @@ def do_buildsortqs(parser, token):
                 "%r tag's argument should be in quotes" % tagname)
     return BuildQueryStringNode(sortfield[1:-1])
 
+@register.simple_tag
+def pkg_details_link(pkg):
+    template = '<a href="%s" title="View package details for %s">%s</a>'
+    return template % (pkg.get_absolute_url(), pkg.pkgname, pkg.pkgname)
+
 @register.simple_tag
 def userpkgs(user):
     if user:
@@ -48,7 +53,6 @@ def userpkgs(user):
         )
     return ''
 
-
 def svn_link(package, svnpath):
     '''Helper function for the two real SVN link methods.'''
     parts = (package.repo.svn_root, package.pkgbase, svnpath)
-- cgit v1.2.3-54-g00ecf

From 156b91eb5935df4afdb8f0f0311d36537808c2f5 Mon Sep 17 00:00:00 2001
From: Dan McGee
Date: Tue, 9 Aug 2011 23:16:00 -0500
Subject: Use new package details link tag in templates

This replaces a lot of boilerplate we had everywhere, and makes sure
things like the title are consistent across all links.

Signed-off-by: Dan McGee
---
 templates/devel/index.html             |  7 +++----
 templates/devel/packages.html          |  4 ++--
 templates/packages/details.html        | 17 +++++------------
 templates/packages/flag.html           |  4 +++-
 templates/packages/flag_confirmed.html |  8 ++++----
 templates/packages/flagged.html        |  7 +++----
 templates/packages/packages_list.html  |  5 +++--
 templates/packages/search.html         |  3 +--
 templates/packages/signoffs.html       |  5 +++--
 templates/todolists/public_list.html   |  4 ++--
 templates/todolists/view.html          |  5 +++--
 11 files changed, 32 insertions(+), 37 deletions(-)

diff --git a/templates/devel/index.html b/templates/devel/index.html
index ad101145..0c818d36 100644
--- a/templates/devel/index.html
+++ b/templates/devel/index.html
@@ -1,5 +1,6 @@
 {% extends "base.html" %}
 {% load cache %}
+{% load package_extras %}
 
 {% block title %}Arch Linux - Developer Dashboard{% endblock %}
 
@@ -24,8 +25,7 @@
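Editor's illustration (not part of the series): once `package_extras` is
loaded, the tag replaces the hand-written anchors shown in the hunks below.
A minimal sketch, assuming a Package object `pkg` in the template context:

    {% load package_extras %}
    ...
    <td>{% pkg_details_link pkg %}</td>

This expands to an anchor built from pkg.get_absolute_url() and pkg.pkgname,
e.g. <a href="/packages/core/x86_64/linux/" title="View package details for
linux">linux</a> (the URL here is a made-up example).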

      My Flagged Packages

      {% for pkg in flagged %} - + @@ -55,8 +55,7 @@

      My Incomplete Todo List Packages

      - + diff --git a/templates/devel/packages.html b/templates/devel/packages.html index a9553d70..9f01167c 100644 --- a/templates/devel/packages.html +++ b/templates/devel/packages.html @@ -1,5 +1,6 @@ {% extends "base.html" %} {% load attributes %} +{% load package_extras %} {% block title %}Arch Linux - {{ title }}{% endblock %} @@ -32,8 +33,7 @@

      {{ title }}{% if maintainer %},

      - + {% if pkg.flag_date %} {% else %} diff --git a/templates/packages/details.html b/templates/packages/details.html index 7972b9ab..8e3c0022 100644 --- a/templates/packages/details.html +++ b/templates/packages/details.html @@ -1,5 +1,6 @@ {% extends "base.html" %} {% load cache %} +{% load package_extras %} {% block title %}Arch Linux - {{ pkg.pkgname }} {{ pkg.full_version }} - Package Details{% endblock %} {% block navbarclass %}anb-packages{% endblock %} @@ -82,20 +83,14 @@

      Versions Elsewhere

      {% with pkg.split_packages as splits %}{% if splits %} - + {% endif %}{% endwith %} {% else %} {% if pkg.base_package %} - + {% else %} @@ -165,8 +160,7 @@

      {% ifequal depend.pkg None %}
    • {{ depend.dep.depname }} (virtual)
    • {% else %} -
    • {{ depend.dep.depname }}{{ depend.dep.depvcmp|default:"" }} +
    • {% pkg_details_link depend.pkg %}{{ depend.dep.depvcmp|default:"" }} {% if depend.pkg.repo.testing %}(testing){% endif %} {% if depend.dep.optional %}(optional){% endif %} {% if depend.dep.description %}- {{ depend.dep.description }}{% endif %} @@ -188,8 +182,7 @@

      {% if rqdby %}
        {% for req in rqdby %} -
      • {{ req.pkg.pkgname }} +
      • {% pkg_details_link req.pkg %} {% if req.pkg.repo.testing %}(testing){% endif %} {% if req.optional %}(optional){% endif %}
      • diff --git a/templates/packages/flag.html b/templates/packages/flag.html index 4a3c6966..261d6066 100644 --- a/templates/packages/flag.html +++ b/templates/packages/flag.html @@ -1,4 +1,6 @@ {% extends "base.html" %} +{% load package_extras %} + {% block title %}Arch Linux - Flag Package - {{ package.pkgname }}{% endblock %} {% block navbarclass %}anb-packages{% endblock %} @@ -13,7 +15,7 @@

        Flag Package: {{ package.pkgname }}

        Note that all of the following packages will be marked out of date:

          {% for pkg in packages %} -
        • {{ pkg.pkgname }} {{ pkg.full_version }} [{{ pkg.repo.name|lower }}] ({{ pkg.arch.name }})
        • +
        • {% pkg_details_link pkg %} {{ pkg.full_version }} [{{ pkg.repo.name|lower }}] ({{ pkg.arch.name }})
        • {% endfor %}
        diff --git a/templates/packages/flag_confirmed.html b/templates/packages/flag_confirmed.html index 9ef316cb..398212f8 100644 --- a/templates/packages/flag_confirmed.html +++ b/templates/packages/flag_confirmed.html @@ -1,4 +1,6 @@ {% extends "base.html" %} +{% load package_extras %} + {% block title %}Arch Linux - Package Flagged - {{ package.pkgname }}{% endblock %} {% block navbarclass %}anb-packages{% endblock %} @@ -9,12 +11,10 @@

        Package Flagged - {{ package.pkgname }}

        Thank you, the maintainers have been notified the following packages are out-of-date:

          {% for pkg in packages %} -
        • {{ pkg.pkgname }} {{ pkg.full_version }} [{{ pkg.repo.name|lower }}] ({{ pkg.arch.name }})
        • +
        • {% pkg_details_link pkg %} {{ pkg.full_version }} [{{ pkg.repo.name|lower }}] ({{ pkg.arch.name }})
        • {% endfor %}
        -

        You can return to the package details page for - {{package.pkgname}}.

        +

        You can return to the package details page for {% pkg_details_link package %}.

        {% endblock %} diff --git a/templates/packages/flagged.html b/templates/packages/flagged.html index 3a39d178..a99a6924 100644 --- a/templates/packages/flagged.html +++ b/templates/packages/flagged.html @@ -1,16 +1,15 @@ {% extends "base.html" %} +{% load package_extras %} + {% block title %}Arch Linux - Flag Package - {{ pkg.pkgname }}{% endblock %} {% block navbarclass %}anb-packages{% endblock %} {% block content %}
        -

        Error: Package already flagged

        {{pkg.pkgname}} has already been flagged out-of-date.

        -

        You can return to the package details page for - {{pkg.pkgname}}.

        - +

        You can return to the package details page for {% pkg_details_link pkg %}.

        {% endblock %} diff --git a/templates/packages/packages_list.html b/templates/packages/packages_list.html index ccc091d8..942e1073 100644 --- a/templates/packages/packages_list.html +++ b/templates/packages/packages_list.html @@ -1,4 +1,6 @@ {% extends "base.html" %} +{% load package_extras %} + {% block title %}Arch Linux - {{ name }} ({{ arch.name }}) - {{ list_title }}{% endblock %} {% block navbarclass %}anb-packages{% endblock %} @@ -23,8 +25,7 @@

        {{ list_title }} - {{ name }} ({{ arch.name }})

    • - + {% if pkg.flag_date %} {% else %} diff --git a/templates/packages/search.html b/templates/packages/search.html index 381ebb01..eb4aceca 100644 --- a/templates/packages/search.html +++ b/templates/packages/search.html @@ -104,8 +104,7 @@

      Package Search

      {% endif %} - + {% if pkg.flag_date %} {% else %} diff --git a/templates/packages/signoffs.html b/templates/packages/signoffs.html index 6014396c..baf85338 100644 --- a/templates/packages/signoffs.html +++ b/templates/packages/signoffs.html @@ -1,4 +1,6 @@ {% extends "base.html" %} +{% load package_extras %} + {% block title %}Arch Linux - Package Signoffs{% endblock %} {% block navbarclass %}anb-packages{% endblock %} @@ -28,8 +30,7 @@

      Package Signoffs

      {% with group.package as pkg %} - + diff --git a/templates/todolists/public_list.html b/templates/todolists/public_list.html index 0d14250d..c3054863 100644 --- a/templates/todolists/public_list.html +++ b/templates/todolists/public_list.html @@ -1,4 +1,5 @@ {% extends "base.html" %} +{% load package_extras %} {% block title %}Arch Linux - Todo Lists{% endblock %} @@ -43,8 +44,7 @@

      {{ list.name }}

      {% for pkg in list.packages %} - + diff --git a/templates/todolists/view.html b/templates/todolists/view.html index 9a6e3c0f..8f515c9b 100644 --- a/templates/todolists/view.html +++ b/templates/todolists/view.html @@ -1,4 +1,6 @@ {% extends "base.html" %} +{% load package_extras %} + {% block title %}Arch Linux - Todo: {{ list.name }}{% endblock %} {% block content %} @@ -34,8 +36,7 @@

      Todo List: {{ list.name }}

      {% for pkg in list.packages %} - + -- cgit v1.2.3-54-g00ecf From 02c0dbc482fc6037c19e713f4645d627eb004e9c Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Tue, 9 Aug 2011 23:33:27 -0500 Subject: Add some methods to PackageDepend object This will allow for some future "find the best provider link" stuff as well as refactoring of the get_depends() method on Package. Signed-off-by: Dan McGee --- main/models.py | 58 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) diff --git a/main/models.py b/main/models.py index 70372823..4897cb65 100644 --- a/main/models.py +++ b/main/models.py @@ -386,6 +386,64 @@ class PackageDepend(models.Model): optional = models.BooleanField(default=False) description = models.TextField(null=True, blank=True) + def get_best_satisfier(self, arches=None, testing=None, staging=None): + '''Find a satisfier for this dependency that best matches the given + criteria. It will not search provisions, but will find packages named + and matching repo characteristics if possible.''' + pkgs = Package.objects.normal().filter(pkgname=self.depname) + if arches is not None: + # make sure we match architectures if possible + pkgs = pkgs.filter(arch__in=arches) + if len(pkgs) == 0: + # couldn't find a package in the DB + # it should be a virtual depend (or a removed package) + return None + if len(pkgs) == 1: + return pkgs[0] + # more than one package, see if we can't shrink it down + # grab the first though in case we fail + pkg = pkgs[0] + # prevents yet more DB queries, these lists should be short; + # after each grab the best available in case we remove all entries + if staging is not None: + pkgs = [p for p in pkgs if p.repo.staging == staging] + if len(pkgs) > 0: + pkg = pkgs[0] + + if testing is not None: + pkgs = [p for p in pkgs if p.repo.testing == testing] + if len(pkgs) > 0: + pkg = pkgs[0] + + return pkg + + def get_providers(self, arches=None, testing=None, staging=None): + '''Return providers of this dep. Does *not* include exact matches as it + checks the Provision names only, use get_best_satisfier() instead.''' + pkgs = Package.objects.normal().filter( + provides__name=self.depname).distinct() + if arches is not None: + pkgs = pkgs.filter(arch__in=arches) + + # Logic here is to filter out packages that are in multiple repos if + # they are not requested. For example, if testing is False, only show a + # testing package if it doesn't exist in a non-testing repo. + if staging is not None: + filtered = {} + for p in pkgs: + if p.pkgname not in filtered or p.repo.staging == staging: + filtered[p.pkgname] = p + pkgs = filtered.values() + + if testing is not None: + filtered = {} + for p in pkgs: + if p.pkgname not in filtered or p.repo.testing == testing: + filtered[p.pkgname] = p + pkgs = filtered.values() + + return pkgs + def __unicode__(self): return "%s%s" % (self.depname, self.depvcmp) -- cgit v1.2.3-54-g00ecf From b167fcb08725da983344fc18d449db6fc87d2f8a Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Tue, 9 Aug 2011 23:34:29 -0500 Subject: Refactor get_depends() to use PackageDepends methods The returned objects now also have a providers list if it makes sense. 
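Editor's illustration (not part of the patch): the two methods are meant to be
driven the way the refactored get_depends() in the diff below drives them;
`pkg` and `dep` here are assumed to be a Package and one of its PackageDepend
rows:

    arches = None
    if not pkg.arch.agnostic:
        arches = pkg.applicable_arches()
    satisfier = dep.get_best_satisfier(arches, testing=pkg.repo.testing,
            staging=pkg.repo.staging)
    if satisfier is None:
        # no package with that exact name; fall back to provision search
        providers = dep.get_providers(arches, testing=pkg.repo.testing,
                staging=pkg.repo.staging)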
Signed-off-by: Dan McGee
---
 main/models.py | 41 +++++++++++++++++------------------------
 1 file changed, 17 insertions(+), 24 deletions(-)

diff --git a/main/models.py b/main/models.py
index 4897cb65..0125cc0e 100644
--- a/main/models.py
+++ b/main/models.py
@@ -269,36 +269,29 @@ def get_requiredby(self):
     @cache_function(300)
     def get_depends(self):
         """
-        Returns a list of dicts. Each dict contains ('pkg' and 'dep'). If it
-        represents a found package both vars will be available; else pkg will
-        be None if it is a 'virtual' dependency. Packages will match the
-        testing status of this package if possible.
+        Returns a list of dicts. Each dict contains ('dep', 'pkg', and
+        'providers'). If it represents a found package both vars will be
+        available; else pkg will be None if it is a 'virtual' dependency.
+        If pkg is None and providers are known, they will be available in
+        providers.
+        Packages will match the testing status of this package if possible.
         """
         deps = []
+        arches = None
+        if not self.arch.agnostic:
+            arches = self.applicable_arches()
         # TODO: we can use list comprehension and an 'in' query to make this more effective
         for dep in self.packagedepend_set.order_by('optional', 'depname'):
-            pkgs = Package.objects.normal().filter(pkgname=dep.depname)
-            if not self.arch.agnostic:
-                # make sure we match architectures if possible
-                pkgs = pkgs.filter(arch__in=self.applicable_arches())
-            if len(pkgs) == 0:
-                # couldn't find a package in the DB
-                # it should be a virtual depend (or a removed package)
-                pkg = None
-            elif len(pkgs) == 1:
-                pkg = pkgs[0]
-            else:
-                # more than one package, see if we can't shrink it down
-                # grab the first though in case we fail
-                pkg = pkgs[0]
-                # prevents yet more DB queries, these lists should be short
-                pkgs = [p for p in pkgs if p.repo.testing == self.repo.testing
-                        and p.repo.staging == self.repo.staging]
-                if len(pkgs) > 0:
-                    pkg = pkgs[0]
-            deps.append({'dep': dep, 'pkg': pkg})
+            pkg = dep.get_best_satisfier(arches, testing=self.repo.testing,
+                    staging=self.repo.staging)
+            providers = None
+            if not pkg:
+                providers = dep.get_providers(arches,
+                        testing=self.repo.testing, staging=self.repo.staging)
+            deps.append({'dep': dep, 'pkg': pkg, 'providers': providers})
         return deps
 
+    @cache_function(300)
     def base_package(self):
         """
         Locate the base package for this package. It may be this very package,
-- cgit v1.2.3-54-g00ecf

From d14e80e2f04edb2f52811dba805a7ed2aa680fab Mon Sep 17 00:00:00 2001
From: Dan McGee
Date: Tue, 9 Aug 2011 23:35:27 -0500
Subject: Add a template tag to link multiple packages at once

Comma-separated list.

Signed-off-by: Dan McGee
---
 packages/templatetags/package_extras.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/packages/templatetags/package_extras.py b/packages/templatetags/package_extras.py
index 14a519d4..7bc868de 100644
--- a/packages/templatetags/package_extras.py
+++ b/packages/templatetags/package_extras.py
@@ -41,6 +41,10 @@ def pkg_details_link(pkg):
     template = '<a href="%s" title="View package details for %s">%s</a>'
     return template % (pkg.get_absolute_url(), pkg.pkgname, pkg.pkgname)
 
+@register.simple_tag
+def multi_pkg_details(pkgs):
+    return ', '.join([pkg_details_link(pkg) for pkg in pkgs])
+
 @register.simple_tag
 def userpkgs(user):
     if user:
-- cgit v1.2.3-54-g00ecf

From b158cdedb5510becc39cc4ab9baf6eddb38c8389 Mon Sep 17 00:00:00 2001
From: Dan McGee
Date: Tue, 9 Aug 2011 23:36:04 -0500
Subject: Link package provisions if available

Rather than just the blank 'virtual' text we used to have in the
dependencies listing on the package page.
Signed-off-by: Dan McGee --- templates/packages/details.html | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/templates/packages/details.html b/templates/packages/details.html index 8e3c0022..a3f2cef4 100644 --- a/templates/packages/details.html +++ b/templates/packages/details.html @@ -158,7 +158,10 @@

        {% for depend in deps %} {% ifequal depend.pkg None %} -
      • {{ depend.dep.depname }} (virtual)
      • + {% if depend.providers %} +
      • {{ depend.dep.depname }} ({% multi_pkg_details depend.providers %})
      • + {% else %}
      • {{ depend.dep.depname }} (virtual)
      • + {% endif %} {% else %}
      • {% pkg_details_link depend.pkg %}{{ depend.dep.depvcmp|default:"" }} {% if depend.pkg.repo.testing %}(testing){% endif %} -- cgit v1.2.3-54-g00ecf From 110c79ad0637c8e35b18c9ccc201d33d456d5453 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Tue, 16 Aug 2011 14:23:57 -0500 Subject: Small Python convention fixup Signed-off-by: Dan McGee --- mirrors/management/commands/mirrorcheck.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mirrors/management/commands/mirrorcheck.py b/mirrors/management/commands/mirrorcheck.py index 7bd79c83..8eb8b010 100644 --- a/mirrors/management/commands/mirrorcheck.py +++ b/mirrors/management/commands/mirrorcheck.py @@ -68,7 +68,7 @@ def check_mirror_url(mirror_url): log.last_sync = parsed_time # if we couldn't parse a time, this is a failure - if parsed_time == None: + if parsed_time is None: log.error = "Could not parse time from lastsync" log.is_success = False log.duration = end - start -- cgit v1.2.3-54-g00ecf From f7626c05719ebb4362836d2ba7e1d297bd8a92a6 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Tue, 16 Aug 2011 14:39:23 -0500 Subject: Add a removed date for releng ISOs Signed-off-by: Dan McGee --- releng/management/commands/syncisos.py | 5 +- .../migrations/0002_auto__add_field_iso_removed.py | 99 ++++++++++++++++++++++ releng/models.py | 1 + 3 files changed, 104 insertions(+), 1 deletion(-) create mode 100644 releng/migrations/0002_auto__add_field_iso_removed.py diff --git a/releng/management/commands/syncisos.py b/releng/management/commands/syncisos.py index ba174131..23955afe 100644 --- a/releng/management/commands/syncisos.py +++ b/releng/management/commands/syncisos.py @@ -1,3 +1,4 @@ +from datetime import datetime import re import urllib from HTMLParser import HTMLParser, HTMLParseError @@ -45,7 +46,9 @@ def handle(self, *args, **options): if iso not in isonames: new = Iso(name=iso, active=True) new.save() + now = datetime.utcnow() # and then mark all other names as no longer active - Iso.objects.exclude(name__in=active_isos).update(active=False) + Iso.objects.filter(active=True).exclude(name__in=active_isos).update( + active=False, removed=now) # vim: set ts=4 sw=4 et: diff --git a/releng/migrations/0002_auto__add_field_iso_removed.py b/releng/migrations/0002_auto__add_field_iso_removed.py new file mode 100644 index 00000000..d5cd09c8 --- /dev/null +++ b/releng/migrations/0002_auto__add_field_iso_removed.py @@ -0,0 +1,99 @@ +# encoding: utf-8 +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +class Migration(SchemaMigration): + + def forwards(self, orm): + db.add_column('releng_iso', 'removed', self.gf('django.db.models.fields.DateTimeField')(default=None, null=True, blank=True), keep_default=False) + + def backwards(self, orm): + db.delete_column('releng_iso', 'removed') + + models = { + 'releng.architecture': { + 'Meta': {'object_name': 'Architecture'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}) + }, + 'releng.bootloader': { + 'Meta': {'object_name': 'Bootloader'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}) + }, + 'releng.boottype': { + 'Meta': {'object_name': 'BootType'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}) + }, + 
'releng.clockchoice': { + 'Meta': {'object_name': 'ClockChoice'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}) + }, + 'releng.filesystem': { + 'Meta': {'object_name': 'Filesystem'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}) + }, + 'releng.hardwaretype': { + 'Meta': {'object_name': 'HardwareType'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}) + }, + 'releng.installtype': { + 'Meta': {'object_name': 'InstallType'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}) + }, + 'releng.iso': { + 'Meta': {'object_name': 'Iso'}, + 'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'created': ('django.db.models.fields.DateTimeField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'removed': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}) + }, + 'releng.isotype': { + 'Meta': {'object_name': 'IsoType'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}) + }, + 'releng.module': { + 'Meta': {'object_name': 'Module'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}) + }, + 'releng.source': { + 'Meta': {'object_name': 'Source'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}) + }, + 'releng.test': { + 'Meta': {'object_name': 'Test'}, + 'architecture': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['releng.Architecture']"}), + 'boot_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['releng.BootType']"}), + 'bootloader': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['releng.Bootloader']"}), + 'clock_choice': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['releng.ClockChoice']"}), + 'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'created': ('django.db.models.fields.DateTimeField', [], {}), + 'filesystem': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['releng.Filesystem']"}), + 'hardware_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['releng.HardwareType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'install_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['releng.InstallType']"}), + 'ip_address': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}), + 'iso': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['releng.Iso']"}), + 'iso_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['releng.IsoType']"}), + 'modules': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['releng.Module']", 'null': 'True', 'blank': 'True'}), + 'rollback_filesystem': ('django.db.models.fields.related.ForeignKey', [], {'blank': 
'True', 'related_name': "'rollback_test_set'", 'null': 'True', 'to': "orm['releng.Filesystem']"}), + 'rollback_modules': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'rollback_test_set'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['releng.Module']"}), + 'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['releng.Source']"}), + 'success': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'user_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}), + 'user_name': ('django.db.models.fields.CharField', [], {'max_length': '500'}) + } + } + + complete_apps = ['releng'] diff --git a/releng/models.py b/releng/models.py index 5510db6a..ceac948b 100644 --- a/releng/models.py +++ b/releng/models.py @@ -45,6 +45,7 @@ class Meta: class Iso(models.Model): name = models.CharField(max_length=255) created = models.DateTimeField(editable=False) + removed = models.DateTimeField(null=True, blank=True, default=None) active = models.BooleanField(default=True) def __unicode__(self): -- cgit v1.2.3-54-g00ecf From 0df3567ae25bb2856bc62951844d9dab5ea97990 Mon Sep 17 00:00:00 2001 From: Sergej Pupykin Date: Sat, 13 Aug 2011 00:23:36 +0400 Subject: add "search wiki" link to package details page Dan: fix usage of urlencode() function. Signed-off-by: Dan McGee --- packages/templatetags/package_extras.py | 8 ++++++++ templates/packages/details.html | 1 + 2 files changed, 9 insertions(+) diff --git a/packages/templatetags/package_extras.py b/packages/templatetags/package_extras.py index 7bc868de..e4c7a010 100644 --- a/packages/templatetags/package_extras.py +++ b/packages/templatetags/package_extras.py @@ -72,6 +72,14 @@ def svn_arch(package): def svn_trunk(package): return svn_link(package, "trunk") +@register.simple_tag +def get_wiki_link(package): + data = { + 'search': package.pkgname, + } + return "https://wiki.archlinux.org/index.php/Special:Search?%s" % \ + urlencode(data) + @register.simple_tag def bugs_list(package): data = { diff --git a/templates/packages/details.html b/templates/packages/details.html index a3f2cef4..bec4bdff 100644 --- a/templates/packages/details.html +++ b/templates/packages/details.html @@ -18,6 +18,7 @@
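Editor's illustration (not part of the patch): get_wiki_link() simply
URL-encodes the package name into the wiki's search entry point, so for a
hypothetical package named openssh, {% get_wiki_link pkg %} renders:

    https://wiki.archlinux.org/index.php/Special:Search?search=openssh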

        Package Actions

        • SVN Entries ({{pkg.repo|lower}}-{{pkg.arch}})
        • SVN Entries (trunk)
        • +
        • Search Wiki
        • Bug Reports
        • Report a Bug
        • {% if pkg.flag_date %} -- cgit v1.2.3-54-g00ecf From f5b5c59b3c2e2aed161bc71a0e26cdeb59055a11 Mon Sep 17 00:00:00 2001 From: Evangelos Foutras Date: Sat, 13 Aug 2011 17:25:51 +0300 Subject: Update README to use virtualenv2 (FS#25520) Signed-off-by: Dan McGee --- README | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/README b/README index 184d1c8a..0d3ee082 100644 --- a/README +++ b/README @@ -14,8 +14,8 @@ See AUTHORS file. # Dependencies -- python -- python-virtualenv +- python2 +- python2-virtualenv # Python dependencies @@ -31,13 +31,13 @@ packages, you will probably want the following: # Testing Installation -1. Run `virtualenv`. +1. Run `virtualenv2`. - $ cd /path/to/archweb && virtualenv ../archweb-env + $ cd /path/to/archweb && virtualenv2 ../archweb-env -2. Source the virtualenv. +2. Activate the virtualenv. - $ . ../archweb-env/bin/activate + $ source ../archweb-env/bin/activate 2. Install dependencies through `pip`. @@ -58,7 +58,7 @@ packages, you will probably want the following: provided data, adjust the file glob accordingly. (archweb-env) $ ./manage.py loaddata */fixtures/*.json - + 7. Use the following commands to start a service instance (archweb-env) $ ./manage.py runserver @@ -69,7 +69,8 @@ packages, you will probably want the following: (archweb-env) $ ./manage.py reporead i686 core.db.tar.gz (archweb-env) $ ./manage.py syncisos -Alter architecture and repo to get x86\_64 and packages from other repos if needed. +Alter architecture and repo to get x86\_64 and packages from other repos if +needed. # Production Installation -- cgit v1.2.3-54-g00ecf From 8e1cae30596e48bfcc958dcd840c09b4cb8987ba Mon Sep 17 00:00:00 2001 From: Thomas Bächler Date: Sun, 14 Aug 2011 16:32:35 +0200 Subject: templates/flag.html: Improve the note about bug reports. There are tons of morons out there who are not able to read. Thus, I keep getting bug reports via the "flag out of date" feature. This patch removes all occurrences of "please" from the note and adds a new sentence that explains our ignorance properly. The hint to file a bug report is now longer, so idiots might see it. Dan: add one more line in the early text. Signed-off-by: Dan McGee --- templates/packages/flag.html | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/templates/packages/flag.html b/templates/packages/flag.html index 261d6066..34dfe34e 100644 --- a/templates/packages/flag.html +++ b/templates/packages/flag.html @@ -10,7 +10,7 @@

          Flag Package: {{ package.pkgname }}

          If you notice a package is out-of-date (i.e., there is a newer stable release available), then please notify us using - the form below.

          + the form below. Do not report bugs via this form!

          Note that all of the following packages will be marked out of date:

            @@ -26,9 +26,10 @@

            Flag Package: {{ package.pkgname }}

            title="Visit the arch-general mailing list">arch-general mailing list with your additional text.

            -

            Note: Please do not use this facility if the - package is broken! Please file a bug instead.

            +

            Note: Do not use this facility if the + package is broken! The package will be unflagged and the report will be ignored! + Use the + bugtracker to file a bug instead.

            Please confirm your flag request for {{package.pkgname}}:

            -- cgit v1.2.3-54-g00ecf From a06ab5c1eb2053d33e15244ea579875aa72c0c1f Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Tue, 16 Aug 2011 15:35:59 -0500 Subject: Tabs -> spaces in templates Signed-off-by: Dan McGee --- templates/packages/search.html | 12 ++++++------ templates/public/about.html | 2 +- templates/public/index.html | 4 ++-- templates/public/svn.html | 18 +++++++++--------- templates/registration/logout.html | 2 +- 5 files changed, 19 insertions(+), 19 deletions(-) diff --git a/templates/packages/search.html b/templates/packages/search.html index eb4aceca..8a357024 100644 --- a/templates/packages/search.html +++ b/templates/packages/search.html @@ -153,12 +153,12 @@

            Package Search

            {% else %}
            -

            We couldn't find any packages matching your query. Try searching again - using different criteria, or try - {% if search_form.q.data %} - searching the AUR - {% else %}searching the AUR{% endif %} - to see if the package can be found there.

            +

            We couldn't find any packages matching your query. Try searching again + using different criteria, or try + {% if search_form.q.data %} + searching the AUR + {% else %}searching the AUR{% endif %} + to see if the package can be found there.

            {% endif %} diff --git a/templates/public/about.html b/templates/public/about.html index 0e0601e8..bf38e8a1 100644 --- a/templates/public/about.html +++ b/templates/public/about.html @@ -3,7 +3,7 @@ {% block content %}

            About Arch Linux

            -

            +

            Arch Linux is an independently developed, i686/x86-64 general purpose GNU/Linux distribution versatile enough to suit any role. Development focuses on simplicity, minimalism, and code elegance. Arch is installed as a diff --git a/templates/public/index.html b/templates/public/index.html index add921d1..e68943c8 100644 --- a/templates/public/index.html +++ b/templates/public/index.html @@ -82,7 +82,7 @@

            Recent Updates (

      - @@ -161,7 +161,7 @@

      Development

      title="Developer Wiki articles">DeveloperWiki
    • Todo Lists
    • -
    • Releng Testbuild Feedback
    • diff --git a/templates/public/svn.html b/templates/public/svn.html index dd175bcc..aedd9a73 100644 --- a/templates/public/svn.html +++ b/templates/public/svn.html @@ -7,25 +7,25 @@

      SVN Repositories

      The PKGBUILD files can be fetched via the ABS utility. To learn more about ABS, see the ABS wiki page.

      -

      The SVN repositories have been cloned into git repositories and can be - viewed via the cgit interface. - All - packages are available here except for - community - and multilib which are available in a different repository.

      +

      The SVN repositories have been cloned into git repositories and can be + viewed via the cgit interface. + All + packages are available here except for + community + and multilib which are available in a different repository.

      You can also get individual PKGBUILDs directly from SVN. This can be especially useful if you need to compile an older version of a package. DO NOT CHECK OUT THE ENTIRE SVN REPO. Your address may be blocked. Use the following commands to check out a specific package: -

      +

      svn checkout --depth=empty svn://svn.archlinux.org/packages
       cd packages
       svn update <your-package-name>
      - For the community and multilib repositories, use the following commands - instead: + For the community and multilib repositories, use the following commands + instead:
      svn checkout --depth=empty svn://svn.archlinux.org/community
       cd community
       svn update <your-package-name>
      diff --git a/templates/registration/logout.html b/templates/registration/logout.html index e890ce99..50b3574b 100644 --- a/templates/registration/logout.html +++ b/templates/registration/logout.html @@ -5,7 +5,7 @@

      Developer Logout

      -

      Logout was successful.

      +

      Logout was successful.

      {% endblock %} -- cgit v1.2.3-54-g00ecf From a4895f06680beaf447ed43ee326423fcc8232815 Mon Sep 17 00:00:00 2001 From: Olivier Keun Date: Fri, 22 Jul 2011 16:25:57 +0200 Subject: News frontpage layout changes Signed-off-by: Dan McGee --- media/archweb.css | 21 ++++++++++++++++----- public/views.py | 2 +- templates/public/index.html | 31 +++++++++++++++++++++++++++---- 3 files changed, 44 insertions(+), 10 deletions(-) diff --git a/media/archweb.css b/media/archweb.css index 85fdb610..ce1ece1d 100644 --- a/media/archweb.css +++ b/media/archweb.css @@ -21,7 +21,7 @@ p { margin: .33em 0 1em; } ol, ul { margin-bottom: 1em; padding-left: 2em; } ul { list-style: square; } code { font: 1.2em monospace; background: #ffd; padding: 0.15em 0.25em; } -pre { font: 1.2em monospace; border: 1px solid #bdb; background: #dfd; padding: 0.5em; margin: 0.25em 2em; } +pre { font: 1.2em monospace; border: 1px solid #bdb; background: #dfd; padding: 0.5em; margin: 1em; } pre code { display: block; background: none; } blockquote { margin: 1.5em 2em; } input { vertical-align: middle; } @@ -44,7 +44,7 @@ a:active { color: #e90; } /* headings */ h2 { font-size: 1.5em; margin-bottom: 0.5em; border-bottom: 1px solid #888; } -h3 { font-size: 1.25em; margin-top: 1em; } +h3 { font-size: 1.25em; margin-top: .5em; } h4 { font-size: 1.15em; margin-top: 1em; } h5 { font-size: 1em; margin-top: 1em; } @@ -76,6 +76,12 @@ table.pretty2 { width: auto; margin-top: 0.25em; margin-bottom: 0.5em; border-co table.pretty2 th { padding: 0.35em; background: #eee; border: 1px solid #bbb; } table.pretty2 td { padding: 0.35em; border: 1px dotted #bbb; } +/* definition lists */ +dl { clear:both; } +dl dt, +dl dd { margin-bottom:4px; padding: 8px 0px 4px; font-weight:bold; border-top:1px solid #888; } +dl dt { color: #333; float:left; padding-right:15px; } + /* forms and input styling */ form p { margin: 0.5em 0; } fieldset { border: 0; } @@ -105,14 +111,19 @@ ul.errorlist { color: red; } /* home: news */ #news { margin-top: 1.5em; } -#news h3 { border-bottom: 1px solid #888; } +#news h3 { float:left; padding-bottom: .5em } #news div { margin-bottom: 1em; } #news div p { margin-bottom: 0.5em; } #news .more { font-weight: normal; } -#news .rss-icon { float: right; margin: -1.6em 0.4em 0 0; } -#news h4 { font-size: 1em; margin-top: 1.5em; border-bottom: 1px dotted #bbb; } +#news .rss-icon { float: right; margin-top: 1em; } +#news h4 { clear:both; font-size: 1em; margin-top: 1.5em; border-bottom: 1px dotted #bbb; } #news .timestamp { float: right; font-size: 0.85em; margin: -1.8em 0.5em 0 0; } +/* home: arrowed headings */ +#news h3 a { display: block; background: #1794D1; font-size: 15px; padding: 2px 10px; color: white; } +#news a:active { color: white; } +h3 span.arrow { display: block; width: 0px; height: 0px; border-left: 6px solid transparent; border-right: 6px solid transparent; border-top: 6px solid #1794D1; margin: 0 auto; font-size: 0px; line-height: 0px; } + /* home: pkgsearch box */ #pkgsearch { padding: 1em 0.75em; background: #3ad; color: #fff; border: 1px solid #08b; } #pkgsearch label { width: auto; padding: 0.1em 0; } diff --git a/public/views.py b/public/views.py index 46291b88..e3f3b02f 100644 --- a/public/views.py +++ b/public/views.py @@ -13,7 +13,7 @@ def index(request): pkgs = utils.get_recent_updates() context = { - 'news_updates': News.objects.order_by('-postdate', '-id')[:10], + 'news_updates': News.objects.order_by('-postdate', '-id')[:15], 'pkg_updates': pkgs, } return direct_to_template(request, 
'public/index.html', context) diff --git a/templates/public/index.html b/templates/public/index.html index e68943c8..c8b6def0 100644 --- a/templates/public/index.html +++ b/templates/public/index.html @@ -40,17 +40,40 @@

      A simple, lightweight distribution

      -

      Latest News (more)

      +

      + Latest News + +

      RSS Feed {% for news in news_updates %} -

      {{ news.title }}

      + {% if forloop.counter0 < 5 %} +

      + {{ news.title }} +

      {{ news.postdate|date }}

      {{ news.content|markdown|truncatewords_html:75 }}
      + {% else %} + {% if forloop.counter0 == 5 %} +

      + Older News + +

      +
      + {% endif %} +
      {{ news.postdate|date }}
      +
      + {{ news.title }} +
      + {% if forloop.last %} +
      + {% endif %} + {% endif %} {% endfor %}
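Editor's illustration (not part of the patch): the view now hands the template
15 news items and the forloop.counter0 checks split them, roughly equivalent
to the following slicing (`news_updates` as passed in by public.views.index):

    recent, older = news_updates[:5], news_updates[5:]
    # recent: rendered in full (title, date, first 75 words)
    # older: title and date only, under the "Older News" heading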
-- cgit v1.2.3-54-g00ecf

From c35e8b867c619a4191b5ab93fa1653cc9847e9e7 Mon Sep 17 00:00:00 2001
From: Olivier Keun
Date: Mon, 25 Jul 2011 22:34:26 +0200
Subject: Clean up and reformat default stylesheet

Signed-off-by: Dan McGee
---
 media/archweb.css | 1017 +++++++++++++++++++++++++++++++++++++++++++----------
 1 file changed, 835 insertions(+), 182 deletions(-)

diff --git a/media/archweb.css b/media/archweb.css
index ce1ece1d..be455680 100644
--- a/media/archweb.css
+++ b/media/archweb.css
@@ -13,273 +13,926 @@
 @import url('archnavbar/archnavbar.css');
 
 /* simple reset */
-* { margin: 0; padding: 0; line-height: 1.4; }
+* {
+    margin: 0;
+    padding: 0;
+    line-height: 1.4;
+}
 
 /* general styling */
-body { min-width: 650px; background: #f6f9fc; color: #222; font: normal 100% sans-serif; text-align: center; }
-p { margin: .33em 0 1em; }
-ol, ul { margin-bottom: 1em; padding-left: 2em; }
-ul { list-style: square; }
-code { font: 1.2em monospace; background: #ffd; padding: 0.15em 0.25em; }
-pre { font: 1.2em monospace; border: 1px solid #bdb; background: #dfd; padding: 0.5em; margin: 1em; }
-pre code { display: block; background: none; }
-blockquote { margin: 1.5em 2em; }
-input { vertical-align: middle; }
-select[multiple] { padding-top: 1px; padding-bottom: 1px; }
-select[multiple] option { padding-left: 0.3em; padding-right: 0.5em; }
-input[type=submit] { padding-left: 0.6em; padding-right: 0.6em; }
-.clear { clear: both; }
-hr { border: none; border-top: 1px solid #888; }
-img { border: 0; }
+body {
+    min-width: 650px;
+    background: #f6f9fc;
+    color: #222;
+    font: normal 100% sans-serif;
+    text-align: center;
+}
+
+p {
+    margin: .33em 0 1em;
+}
+
+ol,
+ul {
+    margin-bottom: 1em;
+    padding-left: 2em;
+}
+
+    ul {
+        list-style: square;
+    }
+
+code {
+    font: 1.2em monospace;
+    background: #ffd;
+    padding: 0.15em 0.25em;
+}
+
+pre {
+    font: 1.2em monospace;
+    border: 1px solid #bdb;
+    background: #dfd;
+    padding: 0.5em;
+    margin: 1em;
+}
+
+    pre code {
+        display: block;
+        background: none;
+    }
+
+blockquote {
+    margin: 1.5em 2em;
+}
+
+input {
+    vertical-align: middle;
+}
+
+select[multiple] {
+    padding: 1px 0;
+}
+
+    select[multiple] option {
+        padding: 0 0.5em 0 0.3em;
+    }
+
+input[type=submit] {
+    padding: 0 0.6em;
+}
+
+.clear {
+    clear: both;
+}
+
+hr {
+    border: none;
+    border-top: 1px solid #888;
+}
+
+img {
+    border: 0;
+}
 
 /* scale fonts down to a sane default (16 * .812 = 13px) */
-#content { font-size: 0.812em; }
+#content {
+    font-size: 0.812em;
+}
 
 /* link style */
-a { text-decoration: none; }
-a:link, th a:visited { color: #07b; }
-a:visited { color: #666; }
-a:hover { text-decoration: underline; color: #666; }
-a:active { color: #e90; }
+a {
+    text-decoration: none;
+}
+
+    a:link,
+    th a:visited {
+        color: #07b;
+    }
+
+    a:visited {
+        color: #666;
+    }
+
+    a:hover {
+        text-decoration: underline;
+        color: #666;
+    }
+
+    a:active {
+        color: #e90;
+    }
 
 /* headings */
-h2 { font-size: 1.5em; margin-bottom: 0.5em; border-bottom: 1px solid #888; }
-h3 { font-size: 1.25em; margin-top: .5em; }
-h4 { font-size: 1.15em; margin-top: 1em; }
-h5 { font-size: 1em; margin-top: 1em; }
+h2 {
+    font-size: 1.5em;
+    margin-bottom: 0.5em;
+    border-bottom: 1px solid #888;
+}
+
+h3 {
+    font-size: 1.25em;
+    margin-top: .5em;
+}
+
+h4 {
+    font-size: 1.15em;
+    margin-top: 1em;
+}
+
+h5 {
+    font-size: 1em;
+    margin-top: 1em;
+}
 
 /* general layout */
-div#content { width: 95%; margin: 0 auto; text-align: left; }
-div#content-left-wrapper { float: left; width: 100%; } /* req to keep content above sidebar in
source code */ -div#content-left { margin: 0 340px 0 0; } -div#content-right { float: left; width: 300px; margin-left: -300px; } -div.box { margin-bottom: 1.5em; padding: 0.65em; background: #ecf2f5; border: 1px solid #bcd; } -div#footer { clear: both; margin: 2em 0 1em; } -div#footer p { margin: 0; text-align: center; font-size: 0.85em; } +div#content { + width: 95%; + margin: 0 auto; + text-align: left; +} + +div#content-left-wrapper { + float: left; + width: 100%; /* req to keep content above sidebar in source code */ +} + +div#content-left { + margin: 0 340px 0 0; +} + +div#content-right { + float: left; + width: 300px; + margin-left: -300px; +} + +div.box { + margin-bottom: 1.5em; + padding: 0.65em; + background: #ecf2f5; + border: 1px solid #bcd; +} + +div#footer { + clear: both; + margin: 2em 0 1em; +} + + div#footer p { + margin: 0; + text-align: center; + font-size: 0.85em; + } /* alignment */ -div.center, table.center, img.center { width: auto; margin-left: auto; margin-right: auto; } -p.center, td.center, th.center { text-align: center; } +div.center, +table.center, +img.center { + width: auto; + margin-left: auto; + margin-right: auto; +} + +p.center, +td.center, +th.center { + text-align: center; +} /* table generics */ -table { width: 100%; border-collapse: collapse; } -table .wrap { white-space: normal; } -th, td { white-space: nowrap; text-align: left; } -th { vertical-align: middle; font-weight: bold; } -td { vertical-align: top; } +table { + width: 100%; + border-collapse: collapse; +} + + table .wrap { + white-space: normal; + } + +th, +td { + white-space: nowrap; + text-align: left; +} + + th { + vertical-align: middle; + font-weight: bold; + } + + td { + vertical-align: top; + } /* table pretty styles */ -table.pretty1 { width: auto; margin-top: 0.25em; margin-bottom: 0.5em; border-collapse: collapse; border: 1px solid #bcd; } -table.pretty1 th { padding: 0.35em; background: #e4eeff; border: 1px solid #bcd; } -table.pretty1 td { padding: 0.35em; border: 1px dotted #bcd; } -table.pretty2 { width: auto; margin-top: 0.25em; margin-bottom: 0.5em; border-collapse: collapse; border: 1px solid #bbb; } -table.pretty2 th { padding: 0.35em; background: #eee; border: 1px solid #bbb; } -table.pretty2 td { padding: 0.35em; border: 1px dotted #bbb; } +table.pretty1 { + width: auto; + margin-top: 0.25em; + margin-bottom: 0.5em; + border-collapse: collapse; + border: 1px solid #bcd; +} + + table.pretty1 th { + padding: 0.35em; + background: #e4eeff; + border: 1px solid #bcd; + } + + table.pretty1 td { + padding: 0.35em; + border: 1px dotted #bcd; + } + +table.pretty2 { + width: auto; + margin-top: 0.25em; + margin-bottom: 0.5em; + border-collapse: collapse; + border: 1px solid #bbb; +} + + table.pretty2 th { + padding: 0.35em; + background: #eee; + border: 1px solid #bbb; + } + + table.pretty2 td { + padding: 0.35em; + border: 1px dotted #bbb; + } /* definition lists */ -dl { clear:both; } -dl dt, -dl dd { margin-bottom:4px; padding: 8px 0px 4px; font-weight:bold; border-top:1px solid #888; } -dl dt { color: #333; float:left; padding-right:15px; } +dl { + clear: both; +} + + dl dt, + dl dd { + margin-bottom: 4px; + padding: 8px 0px 4px; + font-weight: bold; + border-top: 1px solid #888; + } + + dl dt { + color: #333; + float:left; + padding-right:15px; + } /* forms and input styling */ -form p { margin: 0.5em 0; } -fieldset { border: 0; } -label { width: 12em; vertical-align: top; display: inline-block; font-weight: bold; } -input[type=text], input[type=password], textarea { padding: 
0.10em; } -form.general-form label, form.general-form .form-help { width: 10em; vertical-align: top; display: inline-block; } -form.general-form input[type=text], form.general-form textarea { width: 45%; } +form p { + margin: 0.5em 0; +} + +fieldset { + border: 0; +} + +label { + width: 12em; + vertical-align: top; + display: inline-block; + font-weight: bold; +} + +input[type=text], +input[type=password], +textarea { + padding: 0.10em; +} + +form.general-form label, +form.general-form .form-help { + width: 10em; + vertical-align: top; + display: inline-block; +} + +form.general-form input[type=text], +form.general-form textarea { + width: 45%; +} /* archdev navbar */ -div#archdev-navbar { margin: 1.5em 0; } -div#archdev-navbar ul { list-style: none; margin: -0.5em 0; padding: 0; } -div#archdev-navbar li { display: inline; margin: 0; padding: 0; font-size: 0.9em; } -div#archdev-navbar li a { padding: 0 0.5em; color: #07b; } +div#archdev-navbar { + margin: 1.5em 0; +} + + div#archdev-navbar ul { + list-style: none; + margin: -0.5em 0; + padding: 0; + } + + div#archdev-navbar li { + display: inline; + margin: 0; + padding: 0; + font-size: 0.9em; + } + + div#archdev-navbar li a { + padding: 0 0.5em; + color: #07b; + } /* error/info messages (x pkg is already flagged out-of-date, etc) */ -#sys-message { width: 35em; text-align: center; margin: 1em auto; padding: 0.5em; background: #fff; border: 1px solid #f00; } -#sys-message p { margin: 0; } - -ul.errorlist { color: red; } - -/* +#sys-message { + width: 35em; + text-align: center; + margin: 1em auto; + padding: 0.5em; + background: #fff; + border: 1px solid #f00; +} + + #sys-message p { + margin: 0; + } + +ul.errorlist { + color: red; +} + +/** * PAGE SPECIFIC STYLES */ /* home: introduction */ -#intro p.readmore { margin: -0.5em 0 0 0; font-size: .9em; text-align: right; } +#intro p.readmore { + margin: -0.5em 0 0 0; + font-size: .9em; + text-align: right; +} /* home: news */ -#news { margin-top: 1.5em; } -#news h3 { float:left; padding-bottom: .5em } -#news div { margin-bottom: 1em; } -#news div p { margin-bottom: 0.5em; } -#news .more { font-weight: normal; } -#news .rss-icon { float: right; margin-top: 1em; } -#news h4 { clear:both; font-size: 1em; margin-top: 1.5em; border-bottom: 1px dotted #bbb; } -#news .timestamp { float: right; font-size: 0.85em; margin: -1.8em 0.5em 0 0; } +#news { + margin-top: 1.5em; +} + + #news h3 { + float: left; + padding-bottom: .5em + } + + #news div { + margin-bottom: 1em; + } + + #news div p { + margin-bottom: 0.5em; + } + + #news .more { + font-weight: normal; + } + + #news .rss-icon { + float: right; + margin-top: 1em; + } + + #news h4 { + clear: both; + font-size: 1em; + margin-top: 1.5em; + border-bottom: 1px dotted #bbb; + } + + #news .timestamp { + float: right; + font-size: 0.85em; + margin: -1.8em 0.5em 0 0; + } /* home: arrowed headings */ -#news h3 a { display: block; background: #1794D1; font-size: 15px; padding: 2px 10px; color: white; } -#news a:active { color: white; } -h3 span.arrow { display: block; width: 0px; height: 0px; border-left: 6px solid transparent; border-right: 6px solid transparent; border-top: 6px solid #1794D1; margin: 0 auto; font-size: 0px; line-height: 0px; } +#news h3 a { + display: block; + background: #1794D1; + font-size: 15px; + padding: 2px 10px; + color: white; +} + + #news a:active { + color: white; + } + +h3 span.arrow { + display: block; + width: 0px; + height: 0px; + border-left: 6px solid transparent; + border-right: 6px solid transparent; + border-top: 6px 
solid #1794D1; + margin: 0 auto; + font-size: 0px; + line-height: 0px; +} /* home: pkgsearch box */ -#pkgsearch { padding: 1em 0.75em; background: #3ad; color: #fff; border: 1px solid #08b; } -#pkgsearch label { width: auto; padding: 0.1em 0; } -#pkgsearch input { width: 10em; float: right; font-size: 1em; color: #000; background: #fff; border: 1px solid #09c; } +#pkgsearch { + padding: 1em 0.75em; + background: #3ad; + color: #fff; + border: 1px solid #08b; +} + + #pkgsearch label { + width: auto; + padding: 0.1em 0; + } + + #pkgsearch input { + width: 10em; + float: right; + font-size: 1em; + color: #000; + background: #fff; + border: 1px solid #09c; + } /* home: recent pkg updates */ -#pkg-updates h3 { margin: 0 0 0.3em; } -#pkg-updates .more { font-weight: normal; } -#pkg-updates .rss-icon { float: right; margin: -2em 0 0 0; } -#pkg-updates table { margin: 0; } -#pkg-updates td.pkg-name { white-space: normal; } -#pkg-updates td.pkg-arch { text-align: right; } -#pkg-updates span.testing, #pkg-updates span.community-testing, span.multilib-testing { font-style: italic; } -#pkg-updates span.staging, #pkg-updates span.community-staging, span.multilib-staging { font-style: italic; color: #ff8040; } +#pkg-updates h3 { + margin: 0 0 0.3em; +} + + #pkg-updates .more { + font-weight: normal; + } + + #pkg-updates .rss-icon { + float: right; + margin: -2em 0 0 0; + } + + #pkg-updates table { + margin: 0; + } + + #pkg-updates td.pkg-name { + white-space: normal; + } + + #pkg-updates td.pkg-arch { + text-align: right; + } + + #pkg-updates span.testing, + #pkg-updates span.community-testing, + span.multilib-testing { + font-style: italic; + } + + #pkg-updates span.staging, + #pkg-updates span.community-staging, + span.multilib-staging { + font-style: italic; + color: #ff8040; + } /* home: sidebar navigation */ -div#nav-sidebar ul { list-style: none; margin: 0.5em 0 0.5em 1em; padding: 0; } +div#nav-sidebar ul { + list-style: none; + margin: 0.5em 0 0.5em 1em; + padding: 0; +} /* home: sponsor banners */ -div#arch-sponsors img { padding: 0.3em 0; } +div#arch-sponsors img { + padding: 0.3em 0; +} /* home: sidebar components (navlist, sponsors, pkgsearch, etc) */ -div.widget { margin-bottom: 1.5em; } +div.widget { + margin-bottom: 1.5em; +} /* feeds page */ -#rss-feeds .rss { padding-right: 20px; background: url(rss.png) top right no-repeat; } +#rss-feeds .rss { + padding-right: 20px; + background: url(rss.png) top right no-repeat; +} /* artwork: logo images */ -#artwork img.inverted { background: #333; padding: 0; } -#artwork div.imagelist img { display: inline; margin: 0.75em; } +#artwork img.inverted { + background: #333; + padding: 0; +} + +#artwork div.imagelist img { + display: inline; + margin: 0.75em; +} /* news: article list */ -.news-nav { float: right; margin-top: -2.2em; } -.news-nav .prev, .news-nav .next { margin-left: 1em; margin-right: 1em; } +.news-nav { + float: right; + margin-top: -2.2em; +} + + .news-nav .prev, + .news-nav .next { + margin: 0 1em; + } /* news: article pages */ -div.news-article .article-info { margin: 0; color: #999; } +div.news-article .article-info { + margin: 0; + color: #999; +} /* news: add/edit article */ -form#newsform { width: 60em; } -form#newsform input[type=text], form#newsform textarea { width: 75%; } +form#newsform { + width: 60em; +} + + form#newsform input[type=text], + form#newsform textarea { + width: 75%; + } /* donate: donor list */ -div#donor-list ul { width: 100%; } -/* max 4 columns, but possibly fewer if screen size doesn't allow for more */ 
-div#donor-list li { float: left; width: 25%; min-width: 20em; } +div#donor-list ul { + width: 100%; +} + /* max 4 columns, but possibly fewer if screen size doesn't allow for more */ + div#donor-list li { + float: left; + width: 25%; + min-width: 20em; + } /* download page */ -#arch-downloads h3 { border-bottom: 1px dotted #aaa; } -table#download-torrents .cpu-arch { text-align: center; } -table#download-mirrors { width: auto; margin-bottom: 1em; } -table#download-mirrors td.mirror-country { padding-top: 1em; } -table#download-mirrors td.mirror-server { padding-right: 1em; } -table#download-mirrors a { display: block; float: right; width: 4em; } +#arch-downloads h3 { + border-bottom: 1px dotted #aaa; +} + +table#download-torrents .cpu-arch { + text-align: center; +} + +table#download-mirrors { + width: auto; + margin-bottom: 1em; +} + + table#download-mirrors td.mirror-country { + padding-top: 1em; + } + + table#download-mirrors td.mirror-server { + padding-right: 1em; + } + + table#download-mirrors a { + display: block; + float: right; + width: 4em; + } /* pkglists/devlists */ -table.results { font-size: 0.846em; border-top: 1px dotted #999; border-bottom: 1px dotted #999; } -table.results th { padding: 0.5em 1em 0.25em 0.25em; border-bottom: 1px solid #999; white-space: nowrap; background-color:#fff; } -table.results td { padding: .3em 1em .3em 3px; } -table.results tr.odd { background: #fff; } -table.results tr.even { background: #e4eeff; } -/* additional styles for JS sorting */ -table.results th.header { padding-right: 20px; background-image: url(nosort.gif); background-repeat: no-repeat; background-position: center right; cursor: pointer; } -table.results th.headerSortDown { background-color: #e4eeff; background-image: url(desc.gif); } -table.results th.headerSortUp { background-color: #e4eeff; background-image: url(asc.gif); } -table.results .flagged { color: red; } +table.results { + font-size: 0.846em; + border-top: 1px dotted #999; + border-bottom: 1px dotted #999; +} + + table.results th { + padding: 0.5em 1em 0.25em 0.25em; + border-bottom: 1px solid #999; + white-space: nowrap; + background-color:#fff; + } + + /* additional styles for JS sorting */ + table.results th.header { + padding-right: 20px; + background-image: url(nosort.gif); + background-repeat: no-repeat; + background-position: center right; + cursor: pointer; + } + + table.results th.headerSortDown { + background-color: #e4eeff; + background-image: url(desc.gif); + } + + table.results th.headerSortUp { + background-color: #e4eeff; + background-image: url(asc.gif); + } + + table.results td { + padding: .3em 1em .3em 3px; + } + + table.results tr.odd { + background: #fff; + } + + table.results tr.even { + background: #e4eeff; + } + + table.results .flagged { + color: red; + } /* pkglist: layout */ -div#pkglist-about { margin-top: 1.5em; } +div#pkglist-about { + margin-top: 1.5em; +} /* pkglist: results navigation */ -#pkglist-stats-top, #pkglist-stats-bottom { font-size: 0.85em; } -#pkglist-results .pkglist-nav { float: right; margin-top: -2.2em; } -.pkglist-nav .prev { margin-right: 1em; } -.pkglist-nav .next { margin-right: 1em; } +#pkglist-stats-top, +#pkglist-stats-bottom { + font-size: 0.85em; +} + +#pkglist-results .pkglist-nav { + float: right; + margin-top: -2.2em; +} + +.pkglist-nav .prev { + margin-right: 1em; +} + +.pkglist-nav .next { + margin-right: 1em; +} /* search fields and other filter selections */ -.filter-criteria h3 { font-size: 1em; margin-top:0; } -.filter-criteria div { float: left; 
margin-right: 1.65em; font-size: 0.85em; } -.filter-criteria legend { display: none; } -.filter-criteria label { width: auto; display: block; font-weight: normal; } +.filter-criteria h3 { + font-size: 1em; + margin-top:0; +} + +.filter-criteria div { + float: left; + margin-right: 1.65em; + font-size: 0.85em; +} + +.filter-criteria legend { + display: none; +} + +.filter-criteria label { + width: auto; + display: block; + font-weight: normal; +} /* pkgdetails: details links that float on the right */ -#pkgdetails #detailslinks { float: right; } -#pkgdetails #detailslinks h4 { margin-top: 0; margin-bottom: 0.25em; } -#pkgdetails #detailslinks ul { list-style: none; padding: 0; margin-bottom: 0; font-size: 0.846em; } -#pkgdetails #detailslinks > div { padding: 0.5em; margin-bottom: 1em; background: #eee; border: 1px solid #bbb; } -#pkgdetails #actionlist .flagged { color: red; font-size: 0.9em; font-style: italic; } +#pkgdetails #detailslinks { + float: right; +} + + #pkgdetails #detailslinks h4 { + margin-top: 0; + margin-bottom: 0.25em; + } + + #pkgdetails #detailslinks ul { + list-style: none; + padding: 0; + margin-bottom: 0; + font-size: 0.846em; + } + + #pkgdetails #detailslinks > div { + padding: 0.5em; + margin-bottom: 1em; + background: #eee; + border: 1px solid #bbb; + } + +#pkgdetails #actionlist .flagged { + color: red; + font-size: 0.9em; + font-style: italic; +} /* pkgdetails: pkg info */ -#pkgdetails #pkginfo { width: auto; } -#pkgdetails #pkginfo td { padding: 0.25em 0 0.25em 1.5em; } +#pkgdetails #pkginfo { + width: auto; +} + + #pkgdetails #pkginfo td { + padding: 0.25em 0 0.25em 1.5em; + } /* pkgdetails: flag package */ -form#flag-pkg-form label { width: 10em; } -form#flag-pkg-form textarea, form#flag-pkg-form input[type=text] { width: 45%; } +form#flag-pkg-form label { + width: 10em; +} + +form#flag-pkg-form textarea, +form#flag-pkg-form input[type=text] { + width: 45%; +} /* pkgdetails: deps, required by and file lists */ -#pkgdetails #metadata h3 { background: #555; color: #fff; font-size: 1em; margin-bottom: 0.5em; padding: 0.2em 0.35em; } -#pkgdetails #metadata ul { list-style: none; margin: 0; padding: 0; } -#pkgdetails #metadata li { padding-left: 0.5em; } -#pkgdetails #metadata p { padding-left: 0.5em; } -#pkgdetails #metadata .message { font-style: italic; } -#pkgdetails #metadata br { clear: both; } -#pkgdetails #pkgdeps { float: left; width: 48%; margin-right: 2%; } -#pkgdetails #metadata .virtual-dep { font-style: italic; } -#pkgdetails #metadata .testing-dep { font-style: italic; } -#pkgdetails #metadata .opt-dep { font-style: italic; } -#pkgdetails #metadata .dep-desc { font-style: italic; } -#pkgdetails #pkgreqs { float: left; width: 50%; } -#pkgdetails #pkgfiles { clear: left; padding-top: 1em; } +#pkgdetails #metadata h3 { + background: #555; + color: #fff; + font-size: 1em; + margin-bottom: 0.5em; + padding: 0.2em 0.35em; +} + +#pkgdetails #metadata ul { + list-style: none; + margin: 0; + padding: 0; +} + +#pkgdetails #metadata li { + padding-left: 0.5em; +} + +#pkgdetails #metadata p { + padding-left: 0.5em; +} + +#pkgdetails #metadata .message { + font-style: italic; +} + +#pkgdetails #metadata br { + clear: both; +} + +#pkgdetails #pkgdeps { + float: left; + width: 48%; + margin-right: 2%; +} + +#pkgdetails #metadata .virtual-dep, +#pkgdetails #metadata .testing-dep, +#pkgdetails #metadata .opt-dep, +#pkgdetails #metadata .dep-desc { + font-style: italic; +} +#pkgdetails #pkgreqs { + float: left; + width: 50%; +} + +#pkgdetails #pkgfiles { + clear: 
left; + padding-top: 1em; +} /* dev/TU biographies */ -div#arch-bio-toc { width: 75%; margin: 0 auto; text-align: center; } -table.arch-bio-entry td.pic { vertical-align: top; padding-right: 15px; padding-top: 10px; } -table.arch-bio-entry td.pic img { padding: 4px; border: 1px solid #ccc; } -table.arch-bio-entry table.bio { margin-bottom: 2em; } -table.arch-bio-entry table.bio th { text-align: left; padding-right: 0.5em; vertical-align: top; white-space: nowrap; } -table.arch-bio-entry table.bio td { width: 100%; padding-bottom: 0.25em; } +div#arch-bio-toc { + width: 75%; + margin: 0 auto; + text-align: center; +} + +table.arch-bio-entry td.pic { + vertical-align: top; + padding-right: 15px; + padding-top: 10px; +} + + table.arch-bio-entry td.pic img { + padding: 4px; + border: 1px solid #ccc; + } + +table.arch-bio-entry table.bio { + margin-bottom: 2em; +} + + table.arch-bio-entry table.bio th { + text-align: left; + padding-right: 0.5em; + vertical-align: top; + white-space: nowrap; + } + + table.arch-bio-entry table.bio td { + width: 100%; + padding-bottom: 0.25em; + } /* dev: login/out */ -p.login-error {} -table#dev-login { width: auto; } +table#dev-login { + width: auto; +} /* dev dashboard: flagged packages */ -form#dash-pkg-notify { text-align: right; padding: 1em 0 0; margin-top: 1em; font-size: 0.85em; border-top: 1px dotted #aaa; } -form#dash-pkg-notify label { width: auto; font-weight: normal; } -form#dash-pkg-notify input { vertical-align: middle; margin: 0 0.25em; } -form#dash-pkg-notify input[type=submit] { margin-top: -0.25em; } -form#dash-pkg-notify p { margin: 0; } - -table.dash-stats .key { width: 50%; } +form#dash-pkg-notify { + text-align: right; + padding: 1em 0 0; + margin-top: 1em; + font-size: 0.85em; + border-top: 1px dotted #aaa; +} + + form#dash-pkg-notify label { + width: auto; + font-weight: normal; + } + + form#dash-pkg-notify input { + vertical-align: middle; + margin: 0 0.25em; + } + + form#dash-pkg-notify input[type=submit] { + margin-top: -0.25em; + } + + form#dash-pkg-notify p { + margin: 0; + } + +table.dash-stats .key { + width: 50%; +} /* dev dashboard: admin actions (add news items, todo list, etc) */ -ul.admin-actions { float: right; list-style: none; margin-top: -2.5em; } -ul.admin-actions li { display: inline; padding-left: 1.5em; } +ul.admin-actions { + float: right; + list-style: none; + margin-top: -2.5em; +} + + ul.admin-actions li { + display: inline; + padding-left: 1.5em; + } /* todo lists (public and private) */ -.todo-table .complete { color: green; } -.todo-table .incomplete { color: red; } -.todo-info { margin: 0; color: #999; } -.todo-list h4 { margin-top: 0; margin-bottom: 0.4em; } +.todo-table .complete { + color: green; +} + +.todo-table .incomplete { + color: red; +} +.todo-info { + margin: 0; color: #999; +} + +.todo-list h4 { + margin-top: 0; + margin-bottom: 0.4em; +} /* dev: signoff page */ -#dev-signoffs ul { list-style: none; margin: 0; padding: 0; } -#dev-signoffs .signoff-yes { color: green; font-weight: bold; } -#dev-signoffs .signoff-no { color: red; } -#dev-signoffs .signed-username { color: #888; margin-left: 0.5em; } +#dev-signoffs ul { + list-style: none; + margin: 0; + padding: 0; +} + +#dev-signoffs .signoff-yes { + color: green; + font-weight: bold; +} + +#dev-signoffs .signoff-no { + color: red; +} + +#dev-signoffs .signed-username { + color: #888; + margin-left: 0.5em; +} /* iso testing feedback form */ -#releng-feedback label { width: auto; display: inline; font-weight: normal; } -#releng-feedback ul { 
padding-left: 1em; } -#releng-feedback li { list-style: none; } -#releng-feedback ul+.helptext { position: relative; top: -0.9em; } +#releng-feedback label { + width: auto; + display: inline; + font-weight: normal; +} + +#releng-feedback ul { + padding-left: 1em; +} + +#releng-feedback li { + list-style: none; +} + +#releng-feedback ul+.helptext { + position: relative; top: -0.9em; +} /* highlight current website in the navbar */ -#archnavbar.anb-home ul li#anb-home a { color: white !important; } -#archnavbar.anb-packages ul li#anb-packages a { color: white !important; } -#archnavbar.anb-download ul li#anb-download a { color: white !important; } +#archnavbar.anb-home ul li#anb-home a, +#archnavbar.anb-packages ul li#anb-packages a, +#archnavbar.anb-download ul li#anb-download a { + color: white !important; +} -- cgit v1.2.3-54-g00ecf From b6f86d9ab0d6910f0f70398b07e965d337bd9e78 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Tue, 16 Aug 2011 16:04:16 -0500 Subject: Add two new DB fields to reporead Signed-off-by: Dan McGee --- devel/management/commands/reporead.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py index 470b785d..97fdbb73 100644 --- a/devel/management/commands/reporead.py +++ b/devel/management/commands/reporead.py @@ -73,7 +73,7 @@ def handle(self, arch=None, filename=None, **options): class Pkg(object): """An interim 'container' object for holding Arch package data.""" bare = ( 'name', 'base', 'arch', 'desc', 'filename', - 'md5sum', 'url', 'packager' ) + 'md5sum', 'sha256sum', 'pgpsig', 'url', 'packager' ) number = ( 'csize', 'isize' ) collections = ( 'depends', 'optdepends', 'conflicts', 'provides', 'replaces', 'groups', 'license', 'files' ) -- cgit v1.2.3-54-g00ecf From e3ac14389448a73d9f9b8286a25930f3978784fb Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Tue, 16 Aug 2011 16:30:14 -0500 Subject: Settings refresh Update a few things in settings.py, but more importantly, update local_settings.py.template to be more in line with modern Django settings. 
Signed-off-by: Dan McGee --- local_settings.py.example | 58 +++++++++++++++++++++++++++++++---------------- settings.py | 5 +++- 2 files changed, 42 insertions(+), 21 deletions(-) diff --git a/local_settings.py.example b/local_settings.py.example index 9af4ebc8..beb48f84 100644 --- a/local_settings.py.example +++ b/local_settings.py.example @@ -2,41 +2,59 @@ ## Debug settings DEBUG = False +TEMPLATE_DEBUG = True +DEBUG_TOOLBAR = True + +## For django debug toolbar +INTERNAL_IPS = ('127.0.0.1',) ## Notification admins ADMINS = ( # ('Joe Admin', 'joeadmin@example.com'), ) -## Sqlite Database settings -#DATABASE_ENGINE = 'sqlite3' -#DATABASE_NAME = 'archweb.db' - ## MySQL Database settings -#DATABASE_ENGINE = 'mysql' -#DATABASE_NAME = 'archlinux' -#DATABASE_USER = 'archlinux' -#DATABASE_PASSWORD = 'archlinux' -#DATABASE_HOST = '' -#DATABASE_PORT = '' - -## Define cache middleware settings -CACHE_BACKEND = 'memcached://127.0.0.1:11211' -CACHE_MIDDLEWARE_SECONDS = 900 +DATABASES = { + 'default': { + 'ENGINE' : 'django.db.backends.mysql', + 'NAME' : 'archlinux', + 'USER' : 'archlinux', + 'PASSWORD': 'archlinux', + 'HOST' : '', + 'PORT' : '', + 'OPTIONS' : {'init_command': 'SET storage_engine=InnoDB'}, + }, +} + +## Define cache settings +CACHES = { + 'default': { + 'BACKEND' : 'django.core.cache.backends.dummy.DummyCache', + #'BACKEND' : 'django.core.cache.backends.memcached.MemcachedCache', + #'LOCATION': '127.0.0.1:11211', + } +} CACHE_MIDDLEWARE_KEY_PREFIX = 'arch' +CACHE_MIDDLEWARE_SECONDS = 300 + +## Use secure session cookies? Make this true if you want all +## logged-in actions to take place over HTTPS only. If developing +## locally, you will want to use False. +SESSION_COOKIE_SECURE = False ## location for saving dev pictures -MEDIA_ROOT = '/var/www/archlinux/htdocs/img/' +MEDIA_ROOT = '/srv/example.com/img/' ## web url for serving image files -MEDIA_URL = 'http://www.archlinux.org/img/' +MEDIA_URL = 'http://example.com/img/' ## Make this unique, and don't share it with anybody. SECRET_KEY = '00000000000000000000000000000000000000000000000' -#dummy cache -if DEBUG: - CACHE_BACKEND = 'dummy:///' +## CDN settings +CDN_ENABLED = False +CDN_PATH = 'http://example.com/path/' +CDN_PATH_SECURE = 'https://example.com/path/' -# vim: set ts=4 sw=4 et: +# vim: set ts=4 sw=4 et: diff --git a/settings.py b/settings.py index 5cca6047..18437098 100644 --- a/settings.py +++ b/settings.py @@ -84,9 +84,12 @@ ROOT_URLCONF = 'urls' -# Configure where sessions and messages should reside +# Configure where messages should reside MESSAGE_STORAGE = 'django.contrib.messages.storage.session.SessionStorage' + +# Session configuration SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db' +SESSION_COOKIE_HTTPONLY = True INSTALLED_APPS = ( 'django.contrib.auth', -- cgit v1.2.3-54-g00ecf From 7954617a0593632d749d7e8b60beab2aae1669ba Mon Sep 17 00:00:00 2001 From: Tom Willemsen Date: Wed, 17 Aug 2011 01:26:23 +0200 Subject: New page ISO Overview ISO Overview shows a simple list of all the ISOs that are available and how many times they've been tested successfully or have failed. 
Signed-off-by: Tom Willemsen Signed-off-by: Dan McGee --- releng/urls.py | 1 + releng/views.py | 14 +++++++++++ templates/releng/iso_overview.html | 48 ++++++++++++++++++++++++++++++++++++++ templates/releng/results.html | 3 +++ templates/releng/thanks.html | 4 +++- 5 files changed, 69 insertions(+), 1 deletion(-) create mode 100644 templates/releng/iso_overview.html diff --git a/releng/urls.py b/releng/urls.py index 4a125dff..239ad02b 100644 --- a/releng/urls.py +++ b/releng/urls.py @@ -6,6 +6,7 @@ (r'^thanks/$', 'submit_test_thanks', {}, 'releng-test-thanks'), (r'^iso/(?P\d+)/$', 'test_results_iso', {}, 'releng-results-iso'), (r'^(?P
{{ pkg.pkgname }} {{ pkg.repo.name }} - {{ pkg.pkgver }} + {{ pkg.full_version }} {{ pkg.arch.name }} {{ pkg.flag_date|date }} {{ pkg.last_update|date }} {{mirror.get_tier_display}} {{mirror.country}} {{mirror.isos|yesno}} - {{mirror.supported_protocols}} + {{mirror.supported_protocols|join:", "}} {{mirror.public|yesno}} {{mirror.active|yesno}}
- {{ pkg.pkgname }} + {% pkg_details_link pkg %} {{ pkg.repo.name }} {{ pkg.full_version }} {{ pkg.arch.name }}
{{ todopkg.list.name }} - {{ todopkg.pkg.pkgname }} + {% pkg_details_link todopkg.pkg %} {{ todopkg.pkg.repo.name }} {{ todopkg.pkg.arch.name }} {{ todopkg.pkg.maintainers|join:', ' }}
{{ pkg.arch.name }} {{ pkg.repo.name|capfirst }} - {{ pkg.pkgname }} + {% pkg_details_link pkg %} {{ pkg.full_version }}
Split Packages: - {% for s in splits %} - {{ s.pkgname }}
- {% endfor %} -
+ {% for s in splits %}{% pkg_details_link s %}
+ {% endfor %}
Base Package: - {{ pkg.pkgbase }} + {% pkg_details_link pkg.base_package %} {{ pkg.pkgbase }}
{{ pkg.arch.name }} {{ pkg.repo.name|capfirst }} - {{ pkg.pkgname }} + {% pkg_details_link pkg %} {{ pkg.full_version }}
{{ pkg.arch.name }} {{ pkg.repo.name|capfirst }} - {{ pkg.pkgname }} + {% pkg_details_link pkg %} {{ pkg.full_version }}
{{ pkg.arch.name }} - {{ pkg.pkgname }} + {% pkg_details_link pkg %} {{ group.packages|length }} {{ pkg.full_version }} {{ pkg.last_update|date }}
- {{ pkg.pkg.pkgname }} + {% pkg_details_link pkg.pkg %} {{ pkg.pkg.arch.name }} {{ pkg.pkg.repo.name|capfirst }} {{ pkg.pkg.maintainers|join:', ' }}
- {{ pkg.pkg.pkgname }} + {% pkg_details_link pkg.pkg %} {{ pkg.pkg.arch.name }} {{ pkg.pkg.repo.name|capfirst }} {{ pkg.pkg.maintainers|join:', ' }}
{{ update.pkgbase }} {{ update.version }} + {% for pkg in update.package_links %}{{ pkg.arch }}{% if not forloop.last %}/{% endif %}{% endfor %}
      + + + + + + + + + + {% for iso in isos %} + + + + + + + {% endfor %} + +
ISO Currently Available # Successes # Failures
      + + {{ iso.name }} + + + {{ iso.active|yesno }} + + {{ iso.successes }} + + {{ iso.failures }} +
      +
      +{% load cdn %}{% jquery %} + + + +{% endblock %} diff --git a/templates/releng/results.html b/templates/releng/results.html index c3e7d99a..4ff3c864 100644 --- a/templates/releng/results.html +++ b/templates/releng/results.html @@ -13,6 +13,9 @@

      Release Engineering Testbuild Results

      if you have tested and used any ISOs. Both successful and failed results are encouraged and welcome.

      +

For an overview of which ISOs tested best, have a look at + the overview.

      +

      For more information, see the documentation on the wiki.
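For reference, the overview template above renders {{ iso.successes }} and {{ iso.failures }} per ISO. A naive version of the view feeding it could look like the following sketch (hypothetical standalone code, not part of the patch; the one-query-per-row concern it illustrates is what a later commit in this series batches away for the options overview):

    # Sketch, assuming the Iso/Test models from this series: Test has a
    # boolean `success` field and a ForeignKey back to Iso, so each tally
    # is a filtered count on the reverse relation.
    def iso_overview_data():
        isos = Iso.objects.all()
        for iso in isos:
            iso.successes = iso.test_set.filter(success=True).count()
            iso.failures = iso.test_set.filter(success=False).count()
        return isos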

      diff --git a/templates/releng/thanks.html b/templates/releng/thanks.html index b261426d..80018b03 100644 --- a/templates/releng/thanks.html +++ b/templates/releng/thanks.html @@ -8,6 +8,8 @@

      Thanks!

Thank you for taking the time to give us this information! Your results have been successfully added to our database.

      You can now go back to the results, - or give more feedback.

+ give more feedback, or + have a look at + the ISO test overview.

      {% endblock %} -- cgit v1.2.3-54-g00ecf From a489f355ec52dd54946bc7476615aa7cab9e384c Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 17 Aug 2011 07:57:41 -0500 Subject: Implement get_absolute_url for Iso model Signed-off-by: Dan McGee --- releng/models.py | 4 ++++ templates/releng/iso_overview.html | 4 +--- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/releng/models.py b/releng/models.py index ceac948b..3afef55e 100644 --- a/releng/models.py +++ b/releng/models.py @@ -1,3 +1,4 @@ +from django.core.urlresolvers import reverse from django.db import models from django.db.models.signals import pre_save @@ -48,6 +49,9 @@ class Iso(models.Model): removed = models.DateTimeField(null=True, blank=True, default=None) active = models.BooleanField(default=True) + def get_absolute_url(self): + return reverse('releng-results-iso', args=[self.pk]) + def __unicode__(self): return self.name diff --git a/templates/releng/iso_overview.html b/templates/releng/iso_overview.html index ee3aaed3..054e0fa7 100644 --- a/templates/releng/iso_overview.html +++ b/templates/releng/iso_overview.html @@ -19,9 +19,7 @@

      Failures and Successes for Testing ISOs

      {% for iso in isos %} - - {{ iso.name }} - + {{ iso.name }} {{ iso.active|yesno }} -- cgit v1.2.3-54-g00ecf From 61311701a51cc5b060d5baa56536805aa271f9d6 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 17 Aug 2011 08:11:23 -0500 Subject: Add architecture to releng results listing Signed-off-by: Dan McGee --- releng/views.py | 5 +++-- templates/releng/result_list.html | 4 +++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/releng/views.py b/releng/views.py index d90a5b9a..796fc044 100644 --- a/releng/views.py +++ b/releng/views.py @@ -115,7 +115,7 @@ def test_results_overview(request): def test_results_iso(request, iso_id): iso = get_object_or_404(Iso, pk=iso_id) - test_list = iso.test_set.all() + test_list = iso.test_set.select_related() context = { 'iso_name': iso.name, 'test_list': test_list @@ -127,7 +127,8 @@ def test_results_for(request, option, value): raise Http404 option_model = getattr(Test, option).field.rel.to real_value = get_object_or_404(option_model, pk=value) - test_list = real_value.test_set.order_by('-iso__name', '-pk') + test_list = real_value.test_set.select_related().order_by( + '-iso__name', '-pk') context = { 'option': option, 'value': real_value, diff --git a/templates/releng/result_list.html b/templates/releng/result_list.html index b3ae025b..a343257e 100644 --- a/templates/releng/result_list.html +++ b/templates/releng/result_list.html @@ -12,9 +12,10 @@

      Results for: - + + @@ -24,6 +25,7 @@

      Results for:

      + {% endfor %} -- cgit v1.2.3-54-g00ecf From c80afa08c7aec23984e84bfb3ab5683c9c392115 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 17 Aug 2011 09:01:07 -0500 Subject: releng: refactor results overview page for performance Use some annotation stuff and trickeration to reduce the number of queries we need on the results overview page by quite a bit. Signed-off-by: Dan McGee --- releng/models.py | 26 -------------------------- releng/views.py | 49 +++++++++++++++++++++++++++++++++++++++++-------- 2 files changed, 41 insertions(+), 34 deletions(-) diff --git a/releng/models.py b/releng/models.py index 3afef55e..a958288b 100644 --- a/releng/models.py +++ b/releng/models.py @@ -10,36 +10,10 @@ class IsoOption(models.Model): def __unicode__(self): return self.name - def get_test_result(self, success): - try: - return self.test_set.filter(success=success).select_related( - 'iso').latest('iso__id').iso - except Test.DoesNotExist: - return None - - def get_last_success(self): - return self.get_test_result(True) - - def get_last_failure(self): - return self.get_test_result(False) - class Meta: abstract = True class RollbackOption(IsoOption): - def get_rollback_test_result(self, success): - try: - return self.rollback_test_set.filter(success=success).select_related( - 'iso').latest('iso__id').iso - except Test.DoesNotExist: - return None - - def get_last_rollback_success(self): - return self.get_rollback_test_result(True) - - def get_last_rollback_failure(self): - return self.get_rollback_test_result(False) - class Meta: abstract = True diff --git a/releng/views.py b/releng/views.py index 796fc044..7f895a45 100644 --- a/releng/views.py +++ b/releng/views.py @@ -1,6 +1,6 @@ from django import forms from django.conf import settings -from django.db.models import Count +from django.db.models import Count, Max from django.http import Http404 from django.shortcuts import get_object_or_404, redirect from django.views.generic.simple import direct_to_template @@ -85,18 +85,49 @@ def calculate_option_overview(field_name): 'is_rollback': is_rollback, 'values': [] } + if not is_rollback: + successes = dict(model.objects.values_list('pk').filter( + test__success=True).annotate(latest=Max('test__iso__id'))) + failures = dict(model.objects.values_list('pk').filter( + test__success=False).annotate(latest=Max('test__iso__id'))) + else: + successes = dict(model.objects.values_list('pk').filter( + rollback_test_set__success=True).annotate( + latest=Max('rollback_test_set__iso__id'))) + failures = dict(model.objects.values_list('pk').filter( + rollback_test_set__success=False).annotate( + latest=Max('rollback_test_set__iso__id'))) + for value in model.objects.all(): - data = { 'value': value } - if is_rollback: - data['success'] = value.get_last_rollback_success() - data['failure'] = value.get_last_rollback_failure() - else: - data['success'] = value.get_last_success() - data['failure'] = value.get_last_failure() + data = { + 'value': value, + 'success': successes.get(value.pk), + 'failure': failures.get(value.pk), + } option['values'].append(data) return option +def options_fetch_iso(options): + '''Replaces the Iso PK with a full Iso model object in a list of options + used on the overview page. 
We do it this way to only have to query the Iso + table once rather than once per option.''' + # collect all necessary Iso PKs + all_pks = set() + for option in options: + all_pks.update(v['success'] for v in option['values']) + all_pks.update(v['failure'] for v in option['values']) + + all_pks.discard(None) + all_isos = Iso.objects.in_bulk(all_pks) + + for option in options: + for value in option['values']: + value['success'] = all_isos.get(value['success']) + value['failure'] = all_isos.get(value['failure']) + + return options + def test_results_overview(request): # data structure produced: # [ { option, name, is_rollback, values: [ { value, success, failure } ... ] } ... ] @@ -107,6 +138,8 @@ def test_results_overview(request): for field in fields: all_options.append(calculate_option_overview(field)) + all_options = options_fetch_iso(all_options) + context = { 'options': all_options, 'iso_url': settings.ISO_LIST_URL, -- cgit v1.2.3-54-g00ecf From 366e438ed5b6b9f2fc2651e64e3e41444fb781fb Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 17 Aug 2011 16:14:49 -0500 Subject: releng syncisos: reactive inactive ISOs if available Signed-off-by: Dan McGee --- releng/management/commands/syncisos.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/releng/management/commands/syncisos.py b/releng/management/commands/syncisos.py index 23955afe..270c6c34 100644 --- a/releng/management/commands/syncisos.py +++ b/releng/management/commands/syncisos.py @@ -34,18 +34,25 @@ def parse(self, url): raise CommandError('Couldn\'t parse "%s"' % url) class Command(BaseCommand): - help = 'Gets new isos from %s' % settings.ISO_LIST_URL + help = 'Gets new ISOs from %s' % settings.ISO_LIST_URL def handle(self, *args, **options): parser = IsoListParser() isonames = Iso.objects.values_list('name', flat=True) active_isos = parser.parse(settings.ISO_LIST_URL) - # create any names that don't already exist for iso in active_isos: + # create any names that don't already exist if iso not in isonames: new = Iso(name=iso, active=True) new.save() + # update those that do if they were marked inactive + else: + existing = Iso.objects.get(name=iso) + if not existing.active: + existing.active = True + existing.removed = None + existing.save() now = datetime.utcnow() # and then mark all other names as no longer active Iso.objects.filter(active=True).exclude(name__in=active_isos).update( -- cgit v1.2.3-54-g00ecf From a52c2744bf3b532f3f02ce45ae9d902706f9f518 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 17 Aug 2011 16:16:32 -0500 Subject: Add capfirst filter to yesno usage in templates Signed-off-by: Dan McGee --- templates/mirrors/mirror_details.html | 10 +++++----- templates/mirrors/mirrors.html | 6 +++--- templates/packages/signoffs.html | 4 ++-- templates/releng/iso_overview.html | 12 +++--------- templates/releng/result_list.html | 2 +- 5 files changed, 14 insertions(+), 20 deletions(-) diff --git a/templates/mirrors/mirror_details.html b/templates/mirrors/mirror_details.html index 1795d0f5..3daf1a2d 100644 --- a/templates/mirrors/mirror_details.html +++ b/templates/mirrors/mirror_details.html @@ -24,16 +24,16 @@

      Mirror Details: {{ mirror.name }}

      - + {% if user.is_authenticated %} - + - + @@ -91,8 +91,8 @@

      Available URLs

      {% for m_url in urls %} - - + + diff --git a/templates/mirrors/mirrors.html b/templates/mirrors/mirrors.html index ee76acbe..bf356080 100644 --- a/templates/mirrors/mirrors.html +++ b/templates/mirrors/mirrors.html @@ -27,11 +27,11 @@

      Mirror Overview

      title="Mirror details for {{ mirror.name }}">{{ mirror.name }} - + {% if user.is_authenticated %} - - + + {% endif %} diff --git a/templates/packages/signoffs.html b/templates/packages/signoffs.html index baf85338..a8aa4de2 100644 --- a/templates/packages/signoffs.html +++ b/templates/packages/signoffs.html @@ -35,8 +35,8 @@

      Package Signoffs

      - + - - - + + + {% endfor %} diff --git a/templates/releng/result_list.html b/templates/releng/result_list.html index a343257e..7f9ed452 100644 --- a/templates/releng/result_list.html +++ b/templates/releng/result_list.html @@ -26,7 +26,7 @@

      Results for:

      - + {% endfor %} -- cgit v1.2.3-54-g00ecf From e5d09fb7e9003b7f96685af9c0a722b45746448e Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 17 Aug 2011 16:18:12 -0500 Subject: Add PGP signature package field And add eventual display code for it to the details template, but don't show it yet as no packages will have it. Signed-off-by: Dan McGee --- devel/management/commands/reporead.py | 1 + .../0053_auto__add_field_package_pgp_signature.py | 152 +++++++++++++++++++++ main/models.py | 6 +- templates/packages/details.html | 5 + 4 files changed, 163 insertions(+), 1 deletion(-) create mode 100644 main/migrations/0053_auto__add_field_package_pgp_signature.py diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py index 97fdbb73..cf597577 100644 --- a/devel/management/commands/reporead.py +++ b/devel/management/commands/reporead.py @@ -203,6 +203,7 @@ def populate_pkg(dbpkg, repopkg, force=False, timestamp=None): dbpkg.packager_str = repopkg.packager # attempt to find the corresponding django user for this string dbpkg.packager = finder.find(repopkg.packager) + dbpkg.pgp_signature = repopkg.pgpsig if timestamp: dbpkg.flag_date = None diff --git a/main/migrations/0053_auto__add_field_package_pgp_signature.py b/main/migrations/0053_auto__add_field_package_pgp_signature.py new file mode 100644 index 00000000..a828d1ef --- /dev/null +++ b/main/migrations/0053_auto__add_field_package_pgp_signature.py @@ -0,0 +1,152 @@ +# encoding: utf-8 +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +class Migration(SchemaMigration): + + def forwards(self, orm): + db.add_column('packages', 'pgp_signature', self.gf('django.db.models.fields.TextField')(null=True, blank=True), keep_default=False) + + def backwards(self, orm): + db.delete_column('packages', 'pgp_signature') + + models = { + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'main.arch': { + 'Meta': {'ordering': "['name']", 'object_name': 'Arch', 'db_table': "'arches'"}, + 'agnostic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) + }, + 'main.donor': { + 'Meta': {'ordering': "['name']", 'object_name': 'Donor', 'db_table': "'donors'"}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'}) + }, + 'main.package': { + 'Meta': {'ordering': "('pkgname',)", 'object_name': 'Package', 'db_table': "'packages'"}, + 'arch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Arch']"}), + 'build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'compressed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), + 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'files_last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'flag_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'installed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'last_update': ('django.db.models.fields.DateTimeField', [], {}), + 'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}), + 'packager_str': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pgp_signature': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgdesc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), + 'pkgname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'repo': 
('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Repo']"}), + 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}) + }, + 'main.packagedepend': { + 'Meta': {'object_name': 'PackageDepend', 'db_table': "'package_depends'"}, + 'depname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'depvcmp': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}), + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'optional': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.packagefile': { + 'Meta': {'object_name': 'PackageFile', 'db_table': "'package_files'"}, + 'directory': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_directory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.repo': { + 'Meta': {'ordering': "['name']", 'object_name': 'Repo', 'db_table': "'repos'"}, + 'bugs_category': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), + 'bugs_project': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'staging': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'svn_root': ('django.db.models.fields.CharField', [], {'max_length': '64'}), + 'testing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) + }, + 'main.todolist': { + 'Meta': {'object_name': 'Todolist', 'db_table': "'todolists'"}, + 'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), + 'date_added': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}), + 'description': ('django.db.models.fields.TextField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'main.todolistpkg': { + 'Meta': {'unique_together': "(('list', 'pkg'),)", 'object_name': 'TodolistPkg', 'db_table': "'todolist_pkgs'"}, + 'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'list': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Todolist']"}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.userprofile': { + 'Meta': {'object_name': 'UserProfile', 'db_table': "'user_profiles'"}, + 'alias': ('django.db.models.fields.CharField', [], {'max_length': '50'}), + 'allowed_repos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Repo']", 'symmetrical': 'False', 'blank': 'True'}), + 'favorite_distros': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], 
{'primary_key': 'True'}), + 'interests': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'languages': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'location': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'notify': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'occupation': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'other_contact': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), + 'pgp_key': ('main.models.PGPKeyField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'picture': ('django.db.models.fields.files.FileField', [], {'default': "'devs/silhouette.png'", 'max_length': '100'}), + 'public_email': ('django.db.models.fields.CharField', [], {'max_length': '50'}), + 'roles': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'time_zone': ('django.db.models.fields.CharField', [], {'default': "'UTC'", 'max_length': '100'}), + 'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'userprofile'", 'unique': 'True', 'to': "orm['auth.User']"}), + 'website': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), + 'yob': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}) + } + } + + complete_apps = ['main'] diff --git a/main/models.py b/main/models.py index 0125cc0e..e0681abf 100644 --- a/main/models.py +++ b/main/models.py @@ -82,6 +82,7 @@ class UserProfile(models.Model): help_text="Ideally 125px by 125px") user = models.OneToOneField(User, related_name='userprofile') allowed_repos = models.ManyToManyField('Repo', blank=True) + class Meta: db_table = 'user_profiles' verbose_name = 'Additional Profile Data' @@ -173,6 +174,7 @@ class Package(models.Model): packager_str = models.CharField(max_length=255) packager = models.ForeignKey(User, null=True, on_delete=models.SET_NULL) + pgp_signature = models.TextField(null=True, blank=True) flag_date = models.DateTimeField(null=True) objects = PackageManager() @@ -199,6 +201,9 @@ def get_full_url(self, proto='http'): domain = Site.objects.get_current().domain return '%s://%s%s' % (proto, domain, self.get_absolute_url()) + def is_signed(self): + return bool(self.pgp_signature) + @property def maintainers(self): return User.objects.filter( @@ -288,7 +293,6 @@ def get_depends(self): if not pkg: providers = dep.get_providers(arches, testing=self.repo.testing, staging=self.repo.staging) - print providers deps.append({'dep': dep, 'pkg': pkg, 'providers': providers}) return deps diff --git a/templates/packages/details.html b/templates/packages/details.html index bec4bdff..2f1031a6 100644 --- a/templates/packages/details.html +++ b/templates/packages/details.html @@ -138,6 +138,11 @@

      Versions Elsewhere

      + + {% comment %} + + + {% endcomment %} -- cgit v1.2.3-54-g00ecf From 871cf5ad2d8a97c2c9f9f20e237999acbf5e5f2a Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 17 Aug 2011 20:53:57 -0500 Subject: Use verbose name for releng options display So we see something like 'Hardware Type' instead of 'Hardware_Type'. Signed-off-by: Dan McGee --- releng/views.py | 5 +++-- templates/releng/result_list.html | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/releng/views.py b/releng/views.py index 7f895a45..2b3d0936 100644 --- a/releng/views.py +++ b/releng/views.py @@ -81,7 +81,7 @@ def calculate_option_overview(field_name): is_rollback = field_name.startswith('rollback_') option = { 'option': model, - 'name': field_name, + 'name': model._meta.verbose_name, 'is_rollback': is_rollback, 'values': [] } @@ -159,11 +159,12 @@ def test_results_for(request, option, value): if option not in Test._meta.get_all_field_names(): raise Http404 option_model = getattr(Test, option).field.rel.to + option_model.verbose_name = option_model._meta.verbose_name real_value = get_object_or_404(option_model, pk=value) test_list = real_value.test_set.select_related().order_by( '-iso__name', '-pk') context = { - 'option': option, + 'option': option_model, 'value': real_value, 'value_id': value, 'test_list': test_list diff --git a/templates/releng/result_list.html b/templates/releng/result_list.html index 7f9ed452..0233bf8c 100644 --- a/templates/releng/result_list.html +++ b/templates/releng/result_list.html @@ -3,7 +3,7 @@ {% block content %}

      Results for: - {% if option %}{{ option|title }}: {{ value }}{% endif %} + {% if option %}{{ option.verbose_name|title }}: {{ value }}{% endif %} {{ iso_name|default:"" }}
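A note on the verbose_name lookup above: it is plain model metadata, so anything set in a model's Meta class surfaces through _meta. A minimal sketch with a hypothetical model:

    # Hypothetical model; only the Meta option matters here.
    from django.db import models

    class HardwareType(models.Model):
        name = models.CharField(max_length=200)

        class Meta:
            verbose_name = 'hardware type'

    # HardwareType._meta.verbose_name -> u'hardware type', which the
    # template's |title filter then renders as "Hardware Type".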

      -- cgit v1.2.3-54-g00ecf From 6bc2b84516c93ef84799671c51d9a51d46b5570f Mon Sep 17 00:00:00 2001 From: Thomas Bächler Date: Fri, 19 Aug 2011 19:14:22 +0200 Subject: download: update to new 2011.08.19 release Signed-off-by: Dan McGee --- templates/public/download.html | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/templates/public/download.html b/templates/public/download.html index 8bd2e2db..78a2f964 100644 --- a/templates/public/download.html +++ b/templates/public/download.html @@ -9,7 +9,7 @@

      Arch Linux Downloads

      - {% with "2010.05" as version %} + {% with "2011.08.19" as version %}

      Release Info

      All available images can be burned to a CD, mounted as an ISO file, @@ -19,7 +19,7 @@

      Release Info

- Iso + ISO Submitted By Date Submitted + Architecture Success
{{ test.iso.name }} {{ test.user_name }} {{ test.created|date }} + {{ test.architecture }} {{ test.success|yesno }}
Has ISOs: - {{ mirror.isos|yesno }} + {{ mirror.isos|yesno|capfirst }}
Public: - {{ mirror.public|yesno }} + {{ mirror.public|yesno|capfirst }}
Active: - {{ mirror.active|yesno }} + {{ mirror.active|yesno|capfirst }}
      Rsync IPs:
{% if m_url.protocol.is_download %}{{ m_url.url }}{% else %}{{ m_url.url }}{% endif %} - {{ m_url.has_ipv4|yesno }} - {{ m_url.has_ipv6|yesno }} + {{ m_url.has_ipv4|yesno|capfirst }} + {{ m_url.has_ipv6|yesno|capfirst }} {{ m_url.last_sync|date:'Y-m-d H:i'|default:'unknown' }} {{ m_url.completion_pct|percentage:1 }} {{ m_url.delay|duration|default:'unknown' }}
{{mirror.get_tier_display}} {{mirror.country}} - {{mirror.isos|yesno}} + {{mirror.isos|yesno|capfirst}} {{mirror.supported_protocols|join:", "}} - {{mirror.public|yesno}} - {{mirror.active|yesno}} + {{mirror.public|yesno|capfirst}} + {{mirror.active|yesno|capfirst}} {{mirror.admin_email}} {{mirror.notes|linebreaks}}
{{ pkg.full_version }} {{ pkg.last_update|date }} {{ group.target_repo }} - {{ group.approved|yesno:"Yes,No" }} + {{ group.approved|yesno|capfirst }}
{{ iso.name }} - {{ iso.active|yesno }} - {{ iso.successes }} - {{ iso.failures }} + {{ iso.active|yesno|capfirst }} + {{ iso.successes }} + {{ iso.failures }}
{{ test.user_name }} {{ test.created|date }} {{ test.architecture }} - {{ test.success|yesno }} + {{ test.success|yesno|capfirst }}
      Last Packager: {% with pkg.packager as pkgr %}{% if pkgr %}{% userpkgs pkgr %}{% else %}{{ pkg.packager_str }}{% endif %}{% endwith %}
Signed: {{ pkg.is_signed|yesno|capfirst }}
      Build Date: {{ pkg.build_date|date:"DATETIME_FORMAT" }} UTC
      @@ -154,13 +153,14 @@

      Checksums

      {{mirror_url.mirror.name}} {% endifchanged %} - {{mirror_url.protocol.protocol|upper}} + {{mirror_url.protocol.protocol|upper}} {% endfor %}
      {% endcache %} + {% endwith %}

      If you want to become an Official Arch Linux Mirror please follow the instructions listed here.
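The recurring change in the template rows above chains two built-in filters, yesno and capfirst. Assuming yesno's default "yes,no,maybe" mapping, the combination behaves like this plain-Python sketch:

    # Rough equivalent of {{ value|yesno|capfirst }} for a boolean (or
    # None) input, using yesno's documented default arguments.
    def yesno_capfirst(value):
        text = {True: 'yes', False: 'no', None: 'maybe'}[value]
        return text[0].upper() + text[1:]

    # yesno_capfirst(True) -> 'Yes'; yesno_capfirst(False) -> 'No'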

      -- cgit v1.2.3-54-g00ecf From 6b15298483db6be873edc72c3e96fd5668a3cbc3 Mon Sep 17 00:00:00 2001 From: Thomas Bächler Date: Fri, 19 Aug 2011 19:38:17 +0200 Subject: download: add link to the ISO snapshots Dan: use relative links if possible, use releng link from settings, fix HTML closing tags. Signed-off-by: Dan McGee --- public/views.py | 7 ++++++- templates/public/download.html | 10 ++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/public/views.py b/public/views.py index e3f3b02f..821e4d5c 100644 --- a/public/views.py +++ b/public/views.py @@ -3,6 +3,7 @@ from news.models import News from . import utils +from django.conf import settings from django.contrib.auth.models import User from django.db.models import Q from django.http import Http404 @@ -59,10 +60,14 @@ def download(request): protocol__is_download=True, mirror__public=True, mirror__active=True, mirror__isos=True ) + context = { + 'releng_iso_url': settings.ISO_LIST_URL, + } return list_detail.object_list(request, qset.order_by('mirror__country', 'mirror__name', 'protocol'), template_name="public/download.html", - template_object_name="mirror_url") + template_object_name="mirror_url", + extra_context=context) def feeds(request): context = { diff --git a/templates/public/download.html b/templates/public/download.html index ea1a5db4..36aca9ad 100644 --- a/templates/public/download.html +++ b/templates/public/download.html @@ -115,6 +115,16 @@

      Get Arch Linux on CD

      title="Purchase a CD from LinuxCD">Purchase a CD from LinuxCD

    +

    Test ISO Info

    + +

    We provide daily snapshot ISOs. Those are largely untested, + but may be more up to date than the releases.

    +

    HTTP/FTP Direct Downloads
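The view change above hands settings.ISO_LIST_URL to the template via extra_context, the stock hook on Django 1.x function-based generic views. A minimal sketch of the pattern, with a made-up URL value:

    # Sketch only: the URL is hypothetical, and list_detail was later
    # removed from Django in favor of class-based views.
    from django.views.generic import list_detail
    from mirrors.models import MirrorUrl

    def download(request):
        qset = MirrorUrl.objects.filter(mirror__isos=True)  # abbreviated filter
        context = {'releng_iso_url': 'https://releng.example.org/isos/'}
        return list_detail.object_list(request, qset,
                template_name='public/download.html',
                template_object_name='mirror_url',
                extra_context=context)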

    -- cgit v1.2.3-54-g00ecf From 5e5182ba9b727e112b3bd16f94f41a2fc5847804 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Fri, 19 Aug 2011 13:08:26 -0500 Subject: Add color to releng results success column Signed-off-by: Dan McGee --- media/archweb.css | 8 ++++++++ templates/releng/result_list.html | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/media/archweb.css b/media/archweb.css index be455680..92487d93 100644 --- a/media/archweb.css +++ b/media/archweb.css @@ -930,6 +930,14 @@ ul.admin-actions { position: relative; top: -0.9em; } +#releng-result .success-yes { + color: green; +} + +#releng-result .success-no { + color: red; +} + /* highlight current website in the navbar */ #archnavbar.anb-home ul li#anb-home a, #archnavbar.anb-packages ul li#anb-packages a, diff --git a/templates/releng/result_list.html b/templates/releng/result_list.html index 0233bf8c..845d330d 100644 --- a/templates/releng/result_list.html +++ b/templates/releng/result_list.html @@ -26,7 +26,7 @@

Results for: {{ test.user_name }} {{ test.created|date }} {{ test.architecture }} - {{ test.success|yesno|capfirst }} + {{ test.success|yesno|capfirst }} {% endfor %} -- cgit v1.2.3-54-g00ecf From 1908b3c4609d1c3acf04c94a490516ff5284d2c7 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Fri, 19 Aug 2011 18:03:39 -0500 Subject: Fix CSS validation error Signed-off-by: Dan McGee --- media/archweb.css | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/media/archweb.css b/media/archweb.css index 92487d93..865a4f0b 100644 --- a/media/archweb.css +++ b/media/archweb.css @@ -78,7 +78,7 @@ select[multiple] { } input[type=submit] { - padding-left: 0 0.6em; + padding-left: 0.6em; } .clear { -- cgit v1.2.3-54-g00ecf From c9c3dffdec1afa8ce4ff8d26113ff871a25d224d Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Fri, 19 Aug 2011 18:32:17 -0500 Subject: Ensure ampersands are properly escaped in hrefs This was pointed out by the W3C validator. Signed-off-by: Dan McGee --- packages/templatetags/package_extras.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/packages/templatetags/package_extras.py b/packages/templatetags/package_extras.py index e4c7a010..42001aa5 100644 --- a/packages/templatetags/package_extras.py +++ b/packages/templatetags/package_extras.py @@ -9,6 +9,10 @@ register = template.Library() +def link_encode(url, query, doseq=False): + data = urlencode(query, doseq).replace('&', '&amp;') + return "%s?%s" % (url, data) + class BuildQueryStringNode(template.Node): def __init__(self, sortfield): self.sortfield = sortfield @@ -22,7 +26,7 @@ def render(self, context): qs['sort'] = ['-' + self.sortfield] else: qs['sort'] = [self.sortfield] - return urlencode(qs, True) + return urlencode(qs, True).replace('&', '&amp;') @register.tag(name='buildsortqs') def do_buildsortqs(parser, token): @@ -74,27 +78,29 @@ def svn_trunk(package): @register.simple_tag def get_wiki_link(package): + url = "https://wiki.archlinux.org/index.php/Special:Search" data = { 'search': package.pkgname, } - return "https://wiki.archlinux.org/index.php/Special:Search?%s" % \ urlencode(data) + return link_encode(url, data) @register.simple_tag def bugs_list(package): + url = "https://bugs.archlinux.org/" data = { 'project': package.repo.bugs_project, 'string': package.pkgname, } - return "https://bugs.archlinux.org/?%s" % urlencode(data) + return link_encode(url, data) @register.simple_tag def bug_report(package): + url = "https://bugs.archlinux.org/newtask" data = { 'project': package.repo.bugs_project, 'product_category': package.repo.bugs_category, 'item_summary': '[%s]' % package.pkgname, } - return "https://bugs.archlinux.org/newtask?%s" % urlencode(data) + return link_encode(url, data) # vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From 60738969375fc72ff254517107ce53e14b068e24 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Fri, 19 Aug 2011 18:36:47 -0500 Subject: Fix template comment guard Signed-off-by: Dan McGee --- templates/packages/details.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/templates/packages/details.html index 2f1031a6..d09d8ada 100644 --- a/templates/packages/details.html +++ b/templates/packages/details.html @@ -142,8 +142,8 @@

    Versions Elsewhere

    {% comment %} Signed: {{ pkg.is_signed|yesno|capfirst }} - {% endcomment %} + {% endcomment %} Build Date: {{ pkg.build_date|date:"DATETIME_FORMAT" }} UTC -- cgit v1.2.3-54-g00ecf From c5b370f432215eb69dabd6bcb911b6429b16447a Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Mon, 22 Aug 2011 16:42:06 -0500 Subject: Admin setup tweaks Signed-off-by: Dan McGee --- mirrors/admin.py | 3 ++- packages/admin.py | 3 ++- releng/admin.py | 4 ++-- releng/models.py | 6 +++++- 4 files changed, 11 insertions(+), 5 deletions(-) diff --git a/mirrors/admin.py b/mirrors/admin.py index b7b478de..0632872d 100644 --- a/mirrors/admin.py +++ b/mirrors/admin.py @@ -59,7 +59,8 @@ class Meta: class MirrorAdmin(admin.ModelAdmin): form = MirrorAdminForm - list_display = ('name', 'tier', 'country', 'active', 'public', 'isos', 'admin_email', 'supported_protocols') + list_display = ('name', 'tier', 'country', 'active', 'public', + 'isos', 'admin_email') list_filter = ('tier', 'active', 'public', 'country') search_fields = ('name',) inlines = [ diff --git a/packages/admin.py b/packages/admin.py index 3ecfdbb1..01b6ed6c 100644 --- a/packages/admin.py +++ b/packages/admin.py @@ -3,8 +3,9 @@ from .models import PackageRelation class PackageRelationAdmin(admin.ModelAdmin): - list_display = ('user', 'pkgbase', 'type') + list_display = ('user', 'pkgbase', 'type', 'created') list_filter = ('type', 'user') + search_fields = ('user__username', 'pkgbase') admin.site.register(PackageRelation, PackageRelationAdmin) diff --git a/releng/admin.py b/releng/admin.py index be5e211f..e1411b84 100644 --- a/releng/admin.py +++ b/releng/admin.py @@ -5,8 +5,8 @@ Test) class IsoAdmin(admin.ModelAdmin): - list_display = ('name', 'created', 'active') - list_filter = ('active',) + list_display = ('name', 'created', 'active', 'removed') + list_filter = ('active', 'created') class TestAdmin(admin.ModelAdmin): list_display = ('user_name', 'user_email', 'created', 'ip_address', diff --git a/releng/models.py b/releng/models.py index a958288b..56187766 100644 --- a/releng/models.py +++ b/releng/models.py @@ -29,11 +29,15 @@ def get_absolute_url(self): def __unicode__(self): return self.name + class Meta: + verbose_name = 'ISO' + class Architecture(IsoOption): pass class IsoType(IsoOption): - pass + class Meta: + verbose_name = 'ISO type' class BootType(IsoOption): pass -- cgit v1.2.3-54-g00ecf From 6b34f60ba641c124055d8ae3e9c4cf425ef540c9 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Thu, 25 Aug 2011 11:10:06 -0500 Subject: Ensure package depends/required by div clears top details pane Signed-off-by: Dan McGee --- media/archweb.css | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/media/archweb.css b/media/archweb.css index 865a4f0b..0efd53db 100644 --- a/media/archweb.css +++ b/media/archweb.css @@ -739,6 +739,10 @@ form#flag-pkg-form input[type=text] { } /* pkgdetails: deps, required by and file lists */ +#pkgdetails #metadata { + clear: both; +} + #pkgdetails #metadata h3 { background: #555; color: #fff; -- cgit v1.2.3-54-g00ecf From e4b75dc124a53090b429bafbc3621c0e0a0f6247 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Thu, 25 Aug 2011 11:10:56 -0500 Subject: Enable display of package signed status Signed-off-by: Dan McGee --- templates/packages/details.html | 2 -- 1 file changed, 2 deletions(-) diff --git a/templates/packages/details.html b/templates/packages/details.html index d09d8ada..afbf9103 100644 --- a/templates/packages/details.html +++ b/templates/packages/details.html @@ -139,11 +139,9 @@

    Versions Elsewhere

    Last Packager: {% with pkg.packager as pkgr %}{% if pkgr %}{% userpkgs pkgr %}{% else %}{{ pkg.packager_str }}{% endif %}{% endwith %} - {% comment %} Signed: {{ pkg.is_signed|yesno|capfirst }} - {% endcomment %} Build Date: {{ pkg.build_date|date:"DATETIME_FORMAT" }} UTC -- cgit v1.2.3-54-g00ecf From 2ed3676f61af821e71a5c070e65419cd60906cb8 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Tue, 23 Aug 2011 17:17:30 -0500 Subject: Escape parameter to search AUR link Addresses FS#25732. Signed-off-by: Dan McGee --- templates/packages/search.html | 2 +- templates/public/index.html | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/templates/packages/search.html b/templates/packages/search.html index 8a357024..4744aa88 100644 --- a/templates/packages/search.html +++ b/templates/packages/search.html @@ -156,7 +156,7 @@

    Package Search

    We couldn't find any packages matching your query. Try searching again using different criteria, or try {% if search_form.q.data %} - searching the AUR + searching the AUR {% else %}searching the AUR{% endif %} to see if the package can be found there.

    diff --git a/templates/public/index.html b/templates/public/index.html index c8b6def0..bea19e0f 100644 --- a/templates/public/index.html +++ b/templates/public/index.html @@ -20,7 +20,7 @@

    A simple, lightweight distribution

    Currently we have official packages optimized for the i686 and x86-64 architectures. We complement our official package sets with a - + community-operated package repository that grows in size and quality each and every day.
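This fix and the earlier link_encode() commit address the same class of problem at two layers: the query value itself must be URL-encoded, and the '&' separating parameters must be entity-encoded before it lands in an HTML attribute. A sketch with made-up values (Python 2 stdlib, matching this codebase):

    from urllib import quote, urlencode

    quote('g++')             # 'g%2B%2B'    value-level escaping, what |urlencode adds
    urlencode({'K': 'g++'})  # 'K=g%2B%2B'
    urlencode({'K': 'g++', 'SB': 'n'}).replace('&', '&amp;')
    # e.g. 'K=g%2B%2B&amp;SB=n'   separator-level escaping, what link_encode() does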

    -- cgit v1.2.3-54-g00ecf From 25a15d4c570823c6e28693d68d57b803dc2673fa Mon Sep 17 00:00:00 2001 From: Evangelos Foutras Date: Thu, 1 Sep 2011 19:56:05 +0300 Subject: Use package branches to display commit history We now have one link pointing to the tree of /trunk, and another pointing to the log of /trunk. Both links specify a package branch. Signed-off-by: Evangelos Foutras Signed-off-by: Dan McGee --- packages/templatetags/package_extras.py | 19 ++++++------------- templates/packages/details.html | 6 ++++-- 2 files changed, 10 insertions(+), 15 deletions(-) diff --git a/packages/templatetags/package_extras.py b/packages/templatetags/package_extras.py index 42001aa5..01bf7510 100644 --- a/packages/templatetags/package_extras.py +++ b/packages/templatetags/package_extras.py @@ -61,20 +61,13 @@ def userpkgs(user): ) return '' -def svn_link(package, svnpath): - '''Helper function for the two real SVN link methods.''' - parts = (package.repo.svn_root, package.pkgbase, svnpath) - linkbase = "http://projects.archlinux.org/svntogit/%s.git/tree/%s/%s/" - return linkbase % tuple(urlquote(part) for part in parts) - @register.simple_tag -def svn_arch(package): - repo = package.repo.name.lower() - return svn_link(package, "repos/%s-%s" % (repo, package.arch.name)) - -@register.simple_tag -def svn_trunk(package): - return svn_link(package, "trunk") +def scm_link(package, operation): + parts = (package.repo.svn_root, operation, package.pkgbase) + linkbase = ( + "http://projects.archlinux.org/svntogit/%s.git/%s/trunk?" + "h=packages/%s") + return linkbase % tuple(urlquote(part) for part in parts) @register.simple_tag def get_wiki_link(package): diff --git a/templates/packages/details.html b/templates/packages/details.html index afbf9103..fa8283ed 100644 --- a/templates/packages/details.html +++ b/templates/packages/details.html @@ -16,8 +16,10 @@

    Package Details: {{ pkg.pkgname }} {{ pkg.full_version }}

    Package Actions

      -
    • SVN Entries ({{pkg.repo|lower}}-{{pkg.arch}})
    • -
    • SVN Entries (trunk)
    • +
    • + Source Files / + View Changes +
    • Search Wiki
    • Bug Reports
    • Report a Bug
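The link list above reflects the scm_link() helper added in this patch: the two "SVN Entries" links become "Source Files" (a cgit tree view of trunk) and "View Changes" (the cgit log), both pinned to a per-package branch. A runnable sketch of that URL construction, with urllib's quote() standing in for Django's urlquote and illustrative argument values:

    # Hedged sketch of scm_link() from the diff above; quote() plays the
    # role of Django's urlquote on each URL component.
    from urllib.parse import quote

    def scm_link(svn_root, operation, pkgbase):
        # operation is 'tree' (browse sources) or 'log' (commit history);
        # ?h=packages/<pkgbase> selects the package branch in svntogit.
        parts = tuple(quote(p) for p in (svn_root, operation, pkgbase))
        return ("http://projects.archlinux.org/svntogit/%s.git/%s/trunk?"
                "h=packages/%s") % parts

    print(scm_link("packages", "log", "linux"))
    # -> http://projects.archlinux.org/svntogit/packages.git/log/trunk?h=packages/linux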
-- cgit v1.2.3-54-g00ecf From 6aaa9119c55503f382cecbef72532db87a87c782 Mon Sep 17 00:00:00 2001 From: Evangelos Foutras Date: Thu, 1 Sep 2011 19:56:06 +0300 Subject: Compact bug report links in "Package Actions" box Using the same style as the SCM links. Signed-off-by: Evangelos Foutras Signed-off-by: Dan McGee --- templates/packages/details.html | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/templates/packages/details.html b/templates/packages/details.html index fa8283ed..76d3ce86 100644 --- a/templates/packages/details.html +++ b/templates/packages/details.html @@ -20,9 +20,11 @@

      Package Actions

      Source Files / View Changes +
    • + Bug Reports / + Add New Bug +
    • Search Wiki
    • -
    • Bug Reports
    • -
    • Report a Bug
    • {% if pkg.flag_date %}
    • Flagged out-of-date on {{ pkg.flag_date|date }}
    • {% with pkg.in_testing as tp %}{% if tp %} -- cgit v1.2.3-54-g00ecf From b283e5ff83efcc8e8b160d8e8084ff25d14c3ad3 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Fri, 2 Sep 2011 10:51:13 -0500 Subject: Correctly fix archweb.css style error Commit 1908b3c460 attempted to fix a validation error; we really wanted to change the left hand side rather than the right hand side. Signed-off-by: Dan McGee --- media/archweb.css | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/media/archweb.css b/media/archweb.css index 0efd53db..0cadd7a7 100644 --- a/media/archweb.css +++ b/media/archweb.css @@ -78,7 +78,7 @@ select[multiple] { } input[type=submit] { - padding-left: 0.6em; + padding: 0 0.6em; } .clear { -- cgit v1.2.3-54-g00ecf From f173a03857e92268412196cd8e7c0f5d27fb6a38 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Thu, 8 Sep 2011 14:09:45 -0500 Subject: Show provides in package details view Signed-off-by: Dan McGee --- templates/packages/details.html | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/templates/packages/details.html b/templates/packages/details.html index 76d3ce86..8be408fb 100644 --- a/templates/packages/details.html +++ b/templates/packages/details.html @@ -123,7 +123,16 @@

      Versions Elsewhere

      {% else %}None{% endif %} {% endwith %} - + + {% with pkg.provides.all as provides %} + {% if provides %} + + Provides: + {% for p in provides %}{{ p.name }}{% if p.version %}={{ p.version }}{% endif %}
{% endfor %} + + {% endif %} + {% endwith %} + Maintainers: {% with pkg.maintainers as maints %} {% if maints %} -- cgit v1.2.3-54-g00ecf From 293e42fc55ca0ee8f67d5bdcb3d32fd0ec93786e Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Thu, 8 Sep 2011 14:10:13 -0500 Subject: List packages in required by list with depend provided by this package Implements FS#25862. Most noticeable on packages like util-linux (util-linux-ng) and openjdk6 (java-runtime, java-environment). Signed-off-by: Dan McGee --- main/models.py | 4 +++- templates/packages/details.html | 1 + 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/main/models.py b/main/models.py index e0681abf..1efca78d 100644 --- a/main/models.py +++ b/main/models.py @@ -234,9 +234,11 @@ def get_requiredby(self): list slim by including the corresponding package in the same testing category as this package if that check makes sense. """ + provides = set(self.provides.values_list('name', flat=True)) + provides.add(self.pkgname) requiredby = PackageDepend.objects.select_related('pkg', 'pkg__arch', 'pkg__repo').filter( - depname=self.pkgname).order_by( + depname__in=provides).order_by( 'pkg__pkgname', 'pkg__arch__name', 'pkg__repo__name') if not self.arch.agnostic: # make sure we match architectures if possible diff --git a/templates/packages/details.html b/templates/packages/details.html index 8be408fb..1016b43a 100644 --- a/templates/packages/details.html +++ b/templates/packages/details.html @@ -203,6 +203,7 @@

        {% for req in rqdby %}
      • {% pkg_details_link req.pkg %} + {% if req.depname != pkg.pkgname %}(requires {{ req.depname }}){% endif %} {% if req.pkg.repo.testing %}(testing){% endif %} {% if req.optional %}(optional){% endif %}
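The get_requiredby() hunk above widens the reverse-dependency match from the package's own name to the full set of names it provides, which is how util-linux now picks up packages that depend on util-linux-ng; the template line above then prints (requires {{ req.depname }}) whenever the matched name differs from the package's own. The core of the matching as a self-contained sketch, with plain tuples standing in for the queryset (package names below are illustrative):

    # Hedged sketch of the widened match in get_requiredby(): a dependency
    # now counts if it names the package itself or anything it provides.
    def required_by(pkgname, provides, depends):
        # depends: (dependent package, depname) pairs
        names = set(provides) | {pkgname}
        return [(pkg, dep) for pkg, dep in depends if dep in names]

    depends = [('fsck-frontend', 'util-linux-ng'), ('bash', 'glibc')]
    print(required_by('util-linux', ['util-linux-ng'], depends))
    # -> [('fsck-frontend', 'util-linux-ng')]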
-- cgit v1.2.3-54-g00ecf From 2cc3385aad120b9e9e0d3271b417c6764751c2a6 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Sat, 10 Sep 2011 07:08:16 -0500 Subject: Use HTTPS by default in email URLs Signed-off-by: Dan McGee --- main/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/main/models.py b/main/models.py index 1efca78d..b5cd8638 100644 --- a/main/models.py +++ b/main/models.py @@ -196,7 +196,7 @@ def get_absolute_url(self): return '/packages/%s/%s/%s/' % (self.repo.name.lower(), self.arch.name, self.pkgname) - def get_full_url(self, proto='http'): + def get_full_url(self, proto='https'): '''get a URL suitable for things like email including the domain''' domain = Site.objects.get_current().domain return '%s://%s%s' % (proto, domain, self.get_absolute_url()) -- cgit v1.2.3-54-g00ecf From 617550628b39f94e6cb11ec032eb18e6195bf64a Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Sun, 11 Sep 2011 15:48:09 -0500 Subject: Bump Django requirement to 1.3.1 Signed-off-by: Dan McGee --- requirements.txt | 2 +- requirements_prod.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 9be5d88e..27fda229 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -Django==1.3 +Django==1.3.1 Markdown==2.0.3 South==0.7.3 pytz>=2011c diff --git a/requirements_prod.txt b/requirements_prod.txt index babf65f9..9749a072 100644 --- a/requirements_prod.txt +++ b/requirements_prod.txt @@ -1,4 +1,4 @@ -Django==1.3 +Django==1.3.1 Markdown==2.0.3 MySQL-python==1.2.3 South==0.7.3 -- cgit v1.2.3-54-g00ecf From b893682f356ca2861d676a51c4ae1c937d4c7c44 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 14 Sep 2011 14:37:13 -0400 Subject: Ensure we have a mirror URL to return If our query returns zero results, try a slightly less exclusive query, and return a 404 only if that also comes up empty. Signed-off-by: Dan McGee --- packages/views.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/packages/views.py b/packages/views.py index 8c9c1b18..79793355 100644 --- a/packages/views.py +++ b/packages/views.py @@ -548,15 +548,21 @@ def flag_confirmed(request, name, repo, arch): def download(request, name, repo, arch): pkg = get_object_or_404(Package, pkgname=name, repo__name__iexact=repo, arch__name=arch) - mirrorurl = MirrorUrl.objects.filter(mirror__country='Any', + mirror_urls = MirrorUrl.objects.filter( mirror__public=True, mirror__active=True, - protocol__protocol__iexact='HTTP')[0] + protocol__protocol__iexact='HTTP') + # look first for an 'Any' URL, then fall back to any HTTP URL + filtered_urls = mirror_urls.filter(mirror__country='Any')[:1] + if not filtered_urls: + filtered_urls = mirror_urls[:1] + if not filtered_urls: + raise Http404 arch = pkg.arch.name if pkg.arch.agnostic: # grab the first non-any arch to fake the download path arch = Arch.objects.exclude(agnostic=True)[0].name values = { - 'host': mirrorurl.url, + 'host': filtered_urls[0].url, 'arch': arch, 'repo': pkg.repo.name.lower(), 'file': pkg.filename, -- cgit v1.2.3-54-g00ecf From 797185faed0555efb88a1e6a18e447548a9935fd Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Mon, 19 Sep 2011 08:44:22 -0500 Subject: Add some dev dashboard info regarding signed package count This adds a column similar to the flagged package count for the number of signed packages in a given architecture or repository. It is up to the user to do some simple math to figure out the number of unsigned packages.
Also, add 'signed' as a hidden search field option similar to what we did for packager. Signed-off-by: Dan McGee --- main/models.py | 4 ++++ packages/views.py | 8 ++++++++ templates/devel/index.html | 5 +++++ 3 files changed, 17 insertions(+) diff --git a/main/models.py b/main/models.py index b5cd8638..ec1f0d2c 100644 --- a/main/models.py +++ b/main/models.py @@ -98,6 +98,10 @@ def flagged(self): """Used by dev dashboard.""" return self.filter(flag_date__isnull=False) + def signed(self): + """Used by dev dashboard.""" + return self.filter(pgp_signature__isnull=False) + def normal(self): return self.select_related('arch', 'repo') diff --git a/packages/views.py b/packages/views.py index 79793355..61e4d290 100644 --- a/packages/views.py +++ b/packages/views.py @@ -199,6 +199,9 @@ class PackageSearchForm(forms.Form): flagged = forms.ChoiceField( choices=[('', 'All')] + make_choice(['Flagged', 'Not Flagged']), required=False) + signed = forms.ChoiceField( + choices=[('', 'All')] + make_choice(['Signed', 'Unsigned']), + required=False) limit = LimitTypedChoiceField( choices=make_choice([50, 100, 250]) + [('all', 'All')], coerce=coerce_limit_value, @@ -254,6 +257,11 @@ def search(request, page=None): elif form.cleaned_data['flagged'] == 'Not Flagged': packages = packages.filter(flag_date__isnull=True) + if form.cleaned_data['signed'] == 'Signed': + packages = packages.filter(pgp_signature__isnull=False) + elif form.cleaned_data['signed'] == 'Unsigned': + packages = packages.filter(pgp_signature__isnull=True) + if form.cleaned_data['q']: query = form.cleaned_data['q'] q = Q(pkgname__icontains=query) | Q(pkgdesc__icontains=query) diff --git a/templates/devel/index.html b/templates/devel/index.html index 0c818d36..b2da70cf 100644 --- a/templates/devel/index.html +++ b/templates/devel/index.html @@ -135,6 +135,7 @@

        Stats by Architecture

        Arch # Packages # Flagged + # Signed @@ -147,6 +148,8 @@

        Stats by Architecture

        {{ arch.packages.flagged.count }} packages + + {{ arch.packages.signed.count }} packages {% endfor %} @@ -165,6 +168,7 @@

        Stats by Repository

        Repository # Packages # Flagged + # Signed @@ -177,6 +181,7 @@

        Stats by Repository

        {{ repo.packages.flagged.count }} packages + {{ repo.packages.signed.count }} packages {% endfor %} -- cgit v1.2.3-54-g00ecf From 447c4c4bd56b6364a68068ccf691d048e6c2d65b Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Thu, 29 Sep 2011 21:50:58 -0500 Subject: JSLint suggested cleanups Signed-off-by: Dan McGee --- media/archweb.js | 49 +++++++++++++++++++++++++++++++++++-------------- 1 file changed, 35 insertions(+), 14 deletions(-) diff --git a/media/archweb.js b/media/archweb.js index 49f2a319..52f3cba2 100644 --- a/media/archweb.js +++ b/media/archweb.js @@ -6,7 +6,7 @@ if (typeof $.tablesorter !== 'undefined') { is: function(s) { return false; }, format: function(s) { var m = s.match(/\d+/); - return m ? parseInt(m[0]) : 0; + return m ? parseInt(m[0], 10) : 0; }, type: 'numeric' }); @@ -27,7 +27,9 @@ if (typeof $.tablesorter !== 'undefined') { return ($.inArray(s, this.special) > -1) || $.tablesorter.isDigit(s, c); }, format: function(s) { - if ($.inArray(s, this.special) > -1) return Number.MAX_VALUE; + if ($.inArray(s, this.special) > -1) { + return Number.MAX_VALUE; + } return $.tablesorter.formatFloat(s); }, type: 'numeric' @@ -41,9 +43,13 @@ if (typeof $.tablesorter !== 'undefined') { return ($.inArray(s, this.special) > -1) || this.re.test(s); }, format: function(s) { - if ($.inArray(s, this.special) > -1) return Number.MAX_VALUE; + if ($.inArray(s, this.special) > -1) { + return Number.MAX_VALUE; + } var matches = this.re.exec(s); - if (!matches) return Number.MAX_VALUE; + if (!matches) { + return Number.MAX_VALUE; + } return matches[1] * 60 + matches[2]; }, type: 'numeric' @@ -56,9 +62,13 @@ if (typeof $.tablesorter !== 'undefined') { }, format: function(s) { var matches = this.re.exec(s); - if (!matches) return 0; + if (!matches) { + return 0; + } /* skip group 6, group 7 is optional seconds */ - if (matches[7] == undefined) matches[7] = 0; + if (matches[7] === undefined) { + matches[7] = 0; + } /* The awesomeness of the JS date constructor. Month needs to be * between 0-11, because things have to be difficult. 
*/ var date = new Date(matches[1], matches[2] - 1, matches[3], @@ -75,7 +85,9 @@ if (typeof $.tablesorter !== 'undefined') { }, format: function(s) { var matches = this.re.exec(s); - if (!matches) return 0; + if (!matches) { + return 0; + } var size = parseFloat(matches[1]); var suffix = matches[2]; @@ -146,16 +158,24 @@ function filter_packages() { // all this just to get the split version out of the table cell var ver_a = cells.eq(2).find('span').text().match(pat); - if (!ver_a) return true; + if (!ver_a) { + return true; + } var ver_b = cells.eq(3).find('span').text().match(pat); - if (!ver_b) return true; + if (!ver_b) { + return true; + } // first check pkgver - if (ver_a[1] !== ver_b[1]) return true; + if (ver_a[1] !== ver_b[1]) { + return true; + } // pkgver matched, so see if rounded pkgrel matches - if (Math.floor(parseFloat(ver_a[2])) == - Math.floor(parseFloat(ver_b[2]))) return false; + if (Math.floor(parseFloat(ver_a[2])) === + Math.floor(parseFloat(ver_b[2]))) { + return false; + } // pkgrel didn't match, so keep the row return true; }); @@ -199,12 +219,13 @@ function signoff_package() { $(link).append(signoff); } /* update the approved column to reflect reality */ + var approved; if (data.approved) { - var approved = $(link).closest('tr').children('.signoff-no'); + approved = $(link).closest('tr').children('.signoff-no'); approved.text('Yes').addClass( 'signoff-yes').removeClass('signoff-no'); } else { - var approved = $(link).closest('tr').children('.signoff-yes'); + approved = $(link).closest('tr').children('.signoff-yes'); approved.text('No').addClass( 'signoff-no').removeClass('signoff-yes'); } -- cgit v1.2.3-54-g00ecf From 94b4a6a3a322045e16429fd6d61e8cd051b21dd1 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Thu, 29 Sep 2011 22:01:44 -0500 Subject: Slight rework of package size sort function Signed-off-by: Dan McGee --- media/archweb.js | 23 +++++++++++------------ 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/media/archweb.js b/media/archweb.js index 52f3cba2..2414331d 100644 --- a/media/archweb.js +++ b/media/archweb.js @@ -92,20 +92,19 @@ if (typeof $.tablesorter !== 'undefined') { var suffix = matches[2]; switch(suffix) { - case 'byte': - case 'bytes': - return size; - case 'KB': - return size * 1024; - case 'MB': - return size * 1024 * 1024; - case 'GB': - return size * 1024 * 1024 * 1024; - case 'TB': - return size * 1024 * 1024 * 1024 * 1024; + /* intentional fall-through at each level */ case 'PB': - return size * 1024 * 1024 * 1024 * 1024 * 1024; + size *= 1024; + case 'TB': + size *= 1024; + case 'GB': + size *= 1024; + case 'MB': + size *= 1024; + case 'KB': + size *= 1024; } + return size; }, type: 'numeric' }); -- cgit v1.2.3-54-g00ecf From 81884005d8496e12f9636e872eaedff33af77e30 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 5 Oct 2011 10:33:54 -0500 Subject: Allow developer index to work with a non-authenticated user This is not the normal case given the decorator on the view, but during testing and development it is sometimes useful so others don't have to log in over a non-secure connection to check things out. 
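A self-contained sketch of the guard described above: anonymous visitors fall through to an empty result set, so the dashboard queries that build on it keep working. The real diff below uses PackageRelation.objects.none(), which stays chainable as a QuerySet; the plain-Python names here are illustrative only:

    # Hedged sketch of the anonymous-user fallback; the actual view returns
    # PackageRelation.objects.none() so .values('pkgbase') still composes.
    def maintained_pkgbases(relations, is_authenticated):
        rows = relations if is_authenticated else []
        return [r['pkgbase'] for r in rows]

    rels = [{'pkgbase': 'linux'}, {'pkgbase': 'pacman'}]
    print(maintained_pkgbases(rels, True))   # -> ['linux', 'pacman']
    print(maintained_pkgbases(rels, False))  # -> []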
Signed-off-by: Dan McGee --- devel/views.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/devel/views.py b/devel/views.py index 79eef318..ebae3b32 100644 --- a/devel/views.py +++ b/devel/views.py @@ -31,7 +31,12 @@ @never_cache def index(request): '''the developer dashboard''' - inner_q = PackageRelation.objects.filter(user=request.user).values('pkgbase') + if(request.user.is_authenticated()): + inner_q = PackageRelation.objects.filter(user=request.user) + else: + inner_q = PackageRelation.objects.none() + inner_q = inner_q.values('pkgbase') + flagged = Package.objects.normal().filter( flag_date__isnull=False, pkgbase__in=inner_q).order_by('pkgname') -- cgit v1.2.3-54-g00ecf From e157f942e96ba827aebf08dd253c866fec88beaa Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 5 Oct 2011 10:48:26 -0500 Subject: Add D3 JavaScript visualization library https://github.com/mbostock/d3 http://mbostock.github.com/d3/ Signed-off-by: Dan McGee --- media/d3.js | 4042 ++++++++++++++++++++++++++++++++++++++++++++++++ media/d3.layout.js | 1883 ++++++++++++++++++++++ media/d3.layout.min.js | 1 + media/d3.min.js | 2 + 4 files changed, 5928 insertions(+) create mode 100644 media/d3.js create mode 100644 media/d3.layout.js create mode 100644 media/d3.layout.min.js create mode 100644 media/d3.min.js diff --git a/media/d3.js b/media/d3.js new file mode 100644 index 00000000..9b77bec1 --- /dev/null +++ b/media/d3.js @@ -0,0 +1,4042 @@ +(function(){if (!Date.now) Date.now = function() { + return +new Date; +}; +try { + document.createElement("div").style.setProperty("opacity", 0, ""); +} catch (error) { + var d3_style_prototype = CSSStyleDeclaration.prototype, + d3_style_setProperty = d3_style_prototype.setProperty; + d3_style_prototype.setProperty = function(name, value, priority) { + d3_style_setProperty.call(this, name, value + "", priority); + }; +} +d3 = {version: "2.3.0"}; // semver +var d3_array = d3_arraySlice; // conversion for NodeLists + +function d3_arrayCopy(pseudoarray) { + var i = -1, n = pseudoarray.length, array = []; + while (++i < n) array.push(pseudoarray[i]); + return array; +} + +function d3_arraySlice(pseudoarray) { + return Array.prototype.slice.call(pseudoarray); +} + +try { + d3_array(document.documentElement.childNodes)[0].nodeType; +} catch(e) { + d3_array = d3_arrayCopy; +} + +var d3_arraySubclass = [].__proto__? + +// Until ECMAScript supports array subclassing, prototype injection works well. +function(array, prototype) { + array.__proto__ = prototype; +}: + +// And if your browser doesn't support __proto__, we'll use direct extension. +function(array, prototype) { + for (var property in prototype) array[property] = prototype[property]; +}; +function d3_this() { + return this; +} +d3.functor = function(v) { + return typeof v === "function" ? v : function() { return v; }; +}; +// A getter-setter method that preserves the appropriate `this` context. +d3.rebind = function(object, method) { + return function() { + var x = method.apply(object, arguments); + return arguments.length ? object : x; + }; +}; +d3.ascending = function(a, b) { + return a < b ? -1 : a > b ? 1 : a >= b ? 0 : NaN; +}; +d3.descending = function(a, b) { + return b < a ? -1 : b > a ? 1 : b >= a ? 
0 : NaN; +}; +d3.min = function(array, f) { + var i = -1, + n = array.length, + a, + b; + if (arguments.length === 1) { + while (++i < n && ((a = array[i]) == null || a != a)) a = undefined; + while (++i < n) if ((b = array[i]) != null && a > b) a = b; + } else { + while (++i < n && ((a = f.call(array, array[i], i)) == null || a != a)) a = undefined; + while (++i < n) if ((b = f.call(array, array[i], i)) != null && a > b) a = b; + } + return a; +}; +d3.max = function(array, f) { + var i = -1, + n = array.length, + a, + b; + if (arguments.length === 1) { + while (++i < n && ((a = array[i]) == null || a != a)) a = undefined; + while (++i < n) if ((b = array[i]) != null && b > a) a = b; + } else { + while (++i < n && ((a = f.call(array, array[i], i)) == null || a != a)) a = undefined; + while (++i < n) if ((b = f.call(array, array[i], i)) != null && b > a) a = b; + } + return a; +}; +d3.sum = function(array, f) { + var s = 0, + n = array.length, + a, + i = -1; + + if (arguments.length === 1) { + while (++i < n) if (!isNaN(a = +array[i])) s += a; + } else { + while (++i < n) if (!isNaN(a = +f.call(array, array[i], i))) s += a; + } + + return s; +}; +// R-7 per +d3.quantile = function(values, p) { + var H = (values.length - 1) * p + 1, + h = Math.floor(H), + v = values[h - 1], + e = H - h; + return e ? v + e * (values[h] - v) : v; +}; +d3.zip = function() { + if (!(n = arguments.length)) return []; + for (var i = -1, m = d3.min(arguments, d3_zipLength), zips = new Array(m); ++i < m;) { + for (var j = -1, n, zip = zips[i] = new Array(n); ++j < n;) { + zip[j] = arguments[j][i]; + } + } + return zips; +}; + +function d3_zipLength(d) { + return d.length; +} +// Locate the insertion point for x in a to maintain sorted order. The +// arguments lo and hi may be used to specify a subset of the array which should +// be considered; by default the entire array is used. If x is already present +// in a, the insertion point will be before (to the left of) any existing +// entries. The return value is suitable for use as the first argument to +// `array.splice` assuming that a is already sorted. +// +// The returned insertion point i partitions the array a into two halves so that +// all v < x for v in a[lo:i] for the left side and all v >= x for v in a[i:hi] +// for the right side. +d3.bisectLeft = function(a, x, lo, hi) { + if (arguments.length < 3) lo = 0; + if (arguments.length < 4) hi = a.length; + while (lo < hi) { + var mid = (lo + hi) >> 1; + if (a[mid] < x) lo = mid + 1; + else hi = mid; + } + return lo; +}; + +// Similar to bisectLeft, but returns an insertion point which comes after (to +// the right of) any existing entries of x in a. +// +// The returned insertion point i partitions the array into two halves so that +// all v <= x for v in a[lo:i] for the left side and all v > x for v in a[i:hi] +// for the right side. 
+d3.bisect = +d3.bisectRight = function(a, x, lo, hi) { + if (arguments.length < 3) lo = 0; + if (arguments.length < 4) hi = a.length; + while (lo < hi) { + var mid = (lo + hi) >> 1; + if (x < a[mid]) hi = mid; + else lo = mid + 1; + } + return lo; +}; +d3.first = function(array, f) { + var i = 0, + n = array.length, + a = array[0], + b; + if (arguments.length === 1) f = d3.ascending; + while (++i < n) { + if (f.call(array, a, b = array[i]) > 0) { + a = b; + } + } + return a; +}; +d3.last = function(array, f) { + var i = 0, + n = array.length, + a = array[0], + b; + if (arguments.length === 1) f = d3.ascending; + while (++i < n) { + if (f.call(array, a, b = array[i]) <= 0) { + a = b; + } + } + return a; +}; +d3.nest = function() { + var nest = {}, + keys = [], + sortKeys = [], + sortValues, + rollup; + + function map(array, depth) { + if (depth >= keys.length) return rollup + ? rollup.call(nest, array) : (sortValues + ? array.sort(sortValues) + : array); + + var i = -1, + n = array.length, + key = keys[depth++], + keyValue, + object, + o = {}; + + while (++i < n) { + if ((keyValue = key(object = array[i])) in o) { + o[keyValue].push(object); + } else { + o[keyValue] = [object]; + } + } + + for (keyValue in o) { + o[keyValue] = map(o[keyValue], depth); + } + + return o; + } + + function entries(map, depth) { + if (depth >= keys.length) return map; + + var a = [], + sortKey = sortKeys[depth++], + key; + + for (key in map) { + a.push({key: key, values: entries(map[key], depth)}); + } + + if (sortKey) a.sort(function(a, b) { + return sortKey(a.key, b.key); + }); + + return a; + } + + nest.map = function(array) { + return map(array, 0); + }; + + nest.entries = function(array) { + return entries(map(array, 0), 0); + }; + + nest.key = function(d) { + keys.push(d); + return nest; + }; + + // Specifies the order for the most-recently specified key. + // Note: only applies to entries. Map keys are unordered! + nest.sortKeys = function(order) { + sortKeys[keys.length - 1] = order; + return nest; + }; + + // Specifies the order for leaf values. + // Applies to both maps and entries array. 
+ nest.sortValues = function(order) { + sortValues = order; + return nest; + }; + + nest.rollup = function(f) { + rollup = f; + return nest; + }; + + return nest; +}; +d3.keys = function(map) { + var keys = []; + for (var key in map) keys.push(key); + return keys; +}; +d3.values = function(map) { + var values = []; + for (var key in map) values.push(map[key]); + return values; +}; +d3.entries = function(map) { + var entries = []; + for (var key in map) entries.push({key: key, value: map[key]}); + return entries; +}; +d3.permute = function(array, indexes) { + var permutes = [], + i = -1, + n = indexes.length; + while (++i < n) permutes[i] = array[indexes[i]]; + return permutes; +}; +d3.merge = function(arrays) { + return Array.prototype.concat.apply([], arrays); +}; +d3.split = function(array, f) { + var arrays = [], + values = [], + value, + i = -1, + n = array.length; + if (arguments.length < 2) f = d3_splitter; + while (++i < n) { + if (f.call(values, value = array[i], i)) { + values = []; + } else { + if (!values.length) arrays.push(values); + values.push(value); + } + } + return arrays; +}; + +function d3_splitter(d) { + return d == null; +} +function d3_collapse(s) { + return s.replace(/(^\s+)|(\s+$)/g, "").replace(/\s+/g, " "); +} +/** + * @param {number} start + * @param {number=} stop + * @param {number=} step + */ +d3.range = function(start, stop, step) { + if (arguments.length < 3) { + step = 1; + if (arguments.length < 2) { + stop = start; + start = 0; + } + } + if ((stop - start) / step == Infinity) throw new Error("infinite range"); + var range = [], + i = -1, + j; + if (step < 0) while ((j = start + step * ++i) > stop) range.push(j); + else while ((j = start + step * ++i) < stop) range.push(j); + return range; +}; +d3.requote = function(s) { + return s.replace(d3_requote_re, "\\$&"); +}; + +var d3_requote_re = /[\\\^\$\*\+\?\|\[\]\(\)\.\{\}]/g; +d3.round = function(x, n) { + return n + ? Math.round(x * Math.pow(10, n)) * Math.pow(10, -n) + : Math.round(x); +}; +d3.xhr = function(url, mime, callback) { + var req = new XMLHttpRequest; + if (arguments.length < 3) callback = mime; + else if (mime && req.overrideMimeType) req.overrideMimeType(mime); + req.open("GET", url, true); + req.onreadystatechange = function() { + if (req.readyState === 4) callback(req.status < 300 ? req : null); + }; + req.send(null); +}; +d3.text = function(url, mime, callback) { + function ready(req) { + callback(req && req.responseText); + } + if (arguments.length < 3) { + callback = mime; + mime = null; + } + d3.xhr(url, mime, ready); +}; +d3.json = function(url, callback) { + d3.text(url, "application/json", function(text) { + callback(text ? JSON.parse(text) : null); + }); +}; +d3.html = function(url, callback) { + d3.text(url, "text/html", function(text) { + if (text != null) { // Treat empty string as valid HTML. + var range = document.createRange(); + range.selectNode(document.body); + text = range.createContextualFragment(text); + } + callback(text); + }); +}; +d3.xml = function(url, mime, callback) { + function ready(req) { + callback(req && req.responseXML); + } + if (arguments.length < 3) { + callback = mime; + mime = null; + } + d3.xhr(url, mime, ready); +}; +d3.ns = { + + prefix: { + svg: "http://www.w3.org/2000/svg", + xhtml: "http://www.w3.org/1999/xhtml", + xlink: "http://www.w3.org/1999/xlink", + xml: "http://www.w3.org/XML/1998/namespace", + xmlns: "http://www.w3.org/2000/xmlns/" + }, + + qualify: function(name) { + var i = name.indexOf(":"); + return i < 0 ? 
name : { + space: d3.ns.prefix[name.substring(0, i)], + local: name.substring(i + 1) + }; + } + +}; +/** @param {...string} types */ +d3.dispatch = function(types) { + var dispatch = {}, + type; + for (var i = 0, n = arguments.length; i < n; i++) { + type = arguments[i]; + dispatch[type] = d3_dispatch(type); + } + return dispatch; +}; + +function d3_dispatch(type) { + var dispatch = {}, + listeners = []; + + dispatch.add = function(listener) { + for (var i = 0; i < listeners.length; i++) { + if (listeners[i].listener == listener) return dispatch; // already registered + } + listeners.push({listener: listener, on: true}); + return dispatch; + }; + + dispatch.remove = function(listener) { + for (var i = 0; i < listeners.length; i++) { + var l = listeners[i]; + if (l.listener == listener) { + l.on = false; + listeners = listeners.slice(0, i).concat(listeners.slice(i + 1)); + break; + } + } + return dispatch; + }; + + dispatch.dispatch = function() { + var ls = listeners; // defensive reference + for (var i = 0, n = ls.length; i < n; i++) { + var l = ls[i]; + if (l.on) l.listener.apply(this, arguments); + } + }; + + return dispatch; +}; +// TODO align +d3.format = function(specifier) { + var match = d3_format_re.exec(specifier), + fill = match[1] || " ", + sign = match[3] || "", + zfill = match[5], + width = +match[6], + comma = match[7], + precision = match[8], + type = match[9], + percentage = false, + integer = false; + + if (precision) precision = precision.substring(1); + + if (zfill) { + fill = "0"; // TODO align = "="; + if (comma) width -= Math.floor((width - 1) / 4); + } + + switch (type) { + case "n": comma = true; type = "g"; break; + case "%": percentage = true; type = "f"; break; + case "p": percentage = true; type = "r"; break; + case "d": integer = true; precision = "0"; break; + } + + type = d3_format_types[type] || d3_format_typeDefault; + + return function(value) { + var number = percentage ? value * 100 : +value, + negative = (number < 0) && (number = -number) ? "\u2212" : sign; + + // Return the empty string for floats formatted as ints. + if (integer && (number % 1)) return ""; + + // Convert the input value to the desired precision. + value = type(number, precision); + + // If the fill character is 0, the sign and group is applied after the fill. + if (zfill) { + var length = value.length + negative.length; + if (length < width) value = new Array(width - length + 1).join(fill) + value; + if (comma) value = d3_format_group(value); + value = negative + value; + } + + // Otherwise (e.g., space-filling), the sign and group is applied before. + else { + if (comma) value = d3_format_group(value); + value = negative + value; + var length = value.length; + if (length < width) value = new Array(width - length + 1).join(fill) + value; + } + if (percentage) value += "%"; + + return value; + }; +}; + +// [[fill]align][sign][#][0][width][,][.precision][type] +var d3_format_re = /(?:([^{])?([<>=^]))?([+\- ])?(#)?(0)?([0-9]+)?(,)?(\.[0-9]+)?([a-zA-Z%])?/; + +var d3_format_types = { + g: function(x, p) { return x.toPrecision(p); }, + e: function(x, p) { return x.toExponential(p); }, + f: function(x, p) { return x.toFixed(p); }, + r: function(x, p) { + var n = x ? 1 + Math.floor(1e-15 + Math.log(x) / Math.LN10) : 1; + return d3.round(x, p - n).toFixed(Math.max(0, Math.min(20, p - n))); + } +}; + +function d3_format_typeDefault(x) { + return x + ""; +} + +// Apply comma grouping for thousands. +function d3_format_group(value) { + var i = value.lastIndexOf("."), + f = i >= 0 ? 
value.substring(i) : (i = value.length, ""), + t = []; + while (i > 0) t.push(value.substring(i -= 3, i + 3)); + return t.reverse().join(",") + f; +} +/* + * TERMS OF USE - EASING EQUATIONS + * + * Open source under the BSD License. + * + * Copyright 2001 Robert Penner + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * - Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * + * - Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * - Neither the name of the author nor the names of contributors may be used to + * endorse or promote products derived from this software without specific + * prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +var d3_ease_quad = d3_ease_poly(2), + d3_ease_cubic = d3_ease_poly(3); + +var d3_ease = { + linear: function() { return d3_ease_linear; }, + poly: d3_ease_poly, + quad: function() { return d3_ease_quad; }, + cubic: function() { return d3_ease_cubic; }, + sin: function() { return d3_ease_sin; }, + exp: function() { return d3_ease_exp; }, + circle: function() { return d3_ease_circle; }, + elastic: d3_ease_elastic, + back: d3_ease_back, + bounce: function() { return d3_ease_bounce; } +}; + +var d3_ease_mode = { + "in": function(f) { return f; }, + "out": d3_ease_reverse, + "in-out": d3_ease_reflect, + "out-in": function(f) { return d3_ease_reflect(d3_ease_reverse(f)); } +}; + +d3.ease = function(name) { + var i = name.indexOf("-"), + t = i >= 0 ? name.substring(0, i) : name, + m = i >= 0 ? name.substring(i + 1) : "in"; + return d3_ease_clamp(d3_ease_mode[m](d3_ease[t].apply(null, Array.prototype.slice.call(arguments, 1)))); +}; + +function d3_ease_clamp(f) { + return function(t) { + return t <= 0 ? 0 : t >= 1 ? 1 : f(t); + }; +} + +function d3_ease_reverse(f) { + return function(t) { + return 1 - f(1 - t); + }; +} + +function d3_ease_reflect(f) { + return function(t) { + return .5 * (t < .5 ? 
f(2 * t) : (2 - f(2 - 2 * t))); + }; +} + +function d3_ease_linear(t) { + return t; +} + +function d3_ease_poly(e) { + return function(t) { + return Math.pow(t, e); + } +} + +function d3_ease_sin(t) { + return 1 - Math.cos(t * Math.PI / 2); +} + +function d3_ease_exp(t) { + return Math.pow(2, 10 * (t - 1)); +} + +function d3_ease_circle(t) { + return 1 - Math.sqrt(1 - t * t); +} + +function d3_ease_elastic(a, p) { + var s; + if (arguments.length < 2) p = 0.45; + if (arguments.length < 1) { a = 1; s = p / 4; } + else s = p / (2 * Math.PI) * Math.asin(1 / a); + return function(t) { + return 1 + a * Math.pow(2, 10 * -t) * Math.sin((t - s) * 2 * Math.PI / p); + }; +} + +function d3_ease_back(s) { + if (!s) s = 1.70158; + return function(t) { + return t * t * ((s + 1) * t - s); + }; +} + +function d3_ease_bounce(t) { + return t < 1 / 2.75 ? 7.5625 * t * t + : t < 2 / 2.75 ? 7.5625 * (t -= 1.5 / 2.75) * t + .75 + : t < 2.5 / 2.75 ? 7.5625 * (t -= 2.25 / 2.75) * t + .9375 + : 7.5625 * (t -= 2.625 / 2.75) * t + .984375; +} +d3.event = null; +d3.interpolate = function(a, b) { + var i = d3.interpolators.length, f; + while (--i >= 0 && !(f = d3.interpolators[i](a, b))); + return f; +}; + +d3.interpolateNumber = function(a, b) { + b -= a; + return function(t) { return a + b * t; }; +}; + +d3.interpolateRound = function(a, b) { + b -= a; + return function(t) { return Math.round(a + b * t); }; +}; + +d3.interpolateString = function(a, b) { + var m, // current match + i, // current index + j, // current index (for coallescing) + s0 = 0, // start index of current string prefix + s1 = 0, // end index of current string prefix + s = [], // string constants and placeholders + q = [], // number interpolators + n, // q.length + o; + + // Reset our regular expression! + d3_interpolate_number.lastIndex = 0; + + // Find all numbers in b. + for (i = 0; m = d3_interpolate_number.exec(b); ++i) { + if (m.index) s.push(b.substring(s0, s1 = m.index)); + q.push({i: s.length, x: m[0]}); + s.push(null); + s0 = d3_interpolate_number.lastIndex; + } + if (s0 < b.length) s.push(b.substring(s0)); + + // Find all numbers in a. + for (i = 0, n = q.length; (m = d3_interpolate_number.exec(a)) && i < n; ++i) { + o = q[i]; + if (o.x == m[0]) { // The numbers match, so coallesce. + if (o.i) { + if (s[o.i + 1] == null) { // This match is followed by another number. + s[o.i - 1] += o.x; + s.splice(o.i, 1); + for (j = i + 1; j < n; ++j) q[j].i--; + } else { // This match is followed by a string, so coallesce twice. + s[o.i - 1] += o.x + s[o.i + 1]; + s.splice(o.i, 2); + for (j = i + 1; j < n; ++j) q[j].i -= 2; + } + } else { + if (s[o.i + 1] == null) { // This match is followed by another number. + s[o.i] = o.x; + } else { // This match is followed by a string, so coallesce twice. + s[o.i] = o.x + s[o.i + 1]; + s.splice(o.i + 1, 1); + for (j = i + 1; j < n; ++j) q[j].i--; + } + } + q.splice(i, 1); + n--; + i--; + } else { + o.x = d3.interpolateNumber(parseFloat(m[0]), parseFloat(o.x)); + } + } + + // Remove any numbers in b not found in a. + while (i < n) { + o = q.pop(); + if (s[o.i + 1] == null) { // This match is followed by another number. + s[o.i] = o.x; + } else { // This match is followed by a string, so coallesce twice. + s[o.i] = o.x + s[o.i + 1]; + s.splice(o.i + 1, 1); + } + n--; + } + + // Special optimization for only a single match. + if (s.length === 1) { + return s[0] == null ? q[0].x : function() { return b; }; + } + + // Otherwise, interpolate each of the numbers and rejoin the string. 
+ return function(t) { + for (i = 0; i < n; ++i) s[(o = q[i]).i] = o.x(t); + return s.join(""); + }; +}; + +d3.interpolateRgb = function(a, b) { + a = d3.rgb(a); + b = d3.rgb(b); + var ar = a.r, + ag = a.g, + ab = a.b, + br = b.r - ar, + bg = b.g - ag, + bb = b.b - ab; + return function(t) { + return "rgb(" + Math.round(ar + br * t) + + "," + Math.round(ag + bg * t) + + "," + Math.round(ab + bb * t) + + ")"; + }; +}; + +// interpolates HSL space, but outputs RGB string (for compatibility) +d3.interpolateHsl = function(a, b) { + a = d3.hsl(a); + b = d3.hsl(b); + var h0 = a.h, + s0 = a.s, + l0 = a.l, + h1 = b.h - h0, + s1 = b.s - s0, + l1 = b.l - l0; + return function(t) { + return d3_hsl_rgb(h0 + h1 * t, s0 + s1 * t, l0 + l1 * t).toString(); + }; +}; + +d3.interpolateArray = function(a, b) { + var x = [], + c = [], + na = a.length, + nb = b.length, + n0 = Math.min(a.length, b.length), + i; + for (i = 0; i < n0; ++i) x.push(d3.interpolate(a[i], b[i])); + for (; i < na; ++i) c[i] = a[i]; + for (; i < nb; ++i) c[i] = b[i]; + return function(t) { + for (i = 0; i < n0; ++i) c[i] = x[i](t); + return c; + }; +}; + +d3.interpolateObject = function(a, b) { + var i = {}, + c = {}, + k; + for (k in a) { + if (k in b) { + i[k] = d3_interpolateByName(k)(a[k], b[k]); + } else { + c[k] = a[k]; + } + } + for (k in b) { + if (!(k in a)) { + c[k] = b[k]; + } + } + return function(t) { + for (k in i) c[k] = i[k](t); + return c; + }; +} + +var d3_interpolate_number = /[-+]?(?:\d+\.\d+|\d+\.|\.\d+|\d+)(?:[eE][-]?\d+)?/g, + d3_interpolate_rgb = {background: 1, fill: 1, stroke: 1}; + +function d3_interpolateByName(n) { + return n in d3_interpolate_rgb || /\bcolor\b/.test(n) + ? d3.interpolateRgb + : d3.interpolate; +} + +d3.interpolators = [ + d3.interpolateObject, + function(a, b) { return (b instanceof Array) && d3.interpolateArray(a, b); }, + function(a, b) { return (typeof b === "string") && d3.interpolateString(String(a), b); }, + function(a, b) { return (typeof b === "string" ? b in d3_rgb_names || /^(#|rgb\(|hsl\()/.test(b) : b instanceof d3_Rgb || b instanceof d3_Hsl) && d3.interpolateRgb(String(a), b); }, + function(a, b) { return (typeof b === "number") && d3.interpolateNumber(+a, b); } +]; +function d3_uninterpolateNumber(a, b) { + b = b - (a = +a) ? 1 / (b - a) : 0; + return function(x) { return (x - a) * b; }; +} + +function d3_uninterpolateClamp(a, b) { + b = b - (a = +a) ? 1 / (b - a) : 0; + return function(x) { return Math.max(0, Math.min(1, (x - a) * b)); }; +} +d3.rgb = function(r, g, b) { + return arguments.length === 1 + ? d3_rgb_parse("" + r, d3_rgb, d3_hsl_rgb) + : d3_rgb(~~r, ~~g, ~~b); +}; + +function d3_rgb(r, g, b) { + return new d3_Rgb(r, g, b); +} + +function d3_Rgb(r, g, b) { + this.r = r; + this.g = g; + this.b = b; +} + +d3_Rgb.prototype.brighter = function(k) { + k = Math.pow(0.7, arguments.length ? k : 1); + var r = this.r, + g = this.g, + b = this.b, + i = 30; + if (!r && !g && !b) return d3_rgb(i, i, i); + if (r && r < i) r = i; + if (g && g < i) g = i; + if (b && b < i) b = i; + return d3_rgb( + Math.min(255, Math.floor(r / k)), + Math.min(255, Math.floor(g / k)), + Math.min(255, Math.floor(b / k))); +}; + +d3_Rgb.prototype.darker = function(k) { + k = Math.pow(0.7, arguments.length ? 
k : 1); + return d3_rgb( + Math.max(0, Math.floor(k * this.r)), + Math.max(0, Math.floor(k * this.g)), + Math.max(0, Math.floor(k * this.b))); +}; + +d3_Rgb.prototype.hsl = function() { + return d3_rgb_hsl(this.r, this.g, this.b); +}; + +d3_Rgb.prototype.toString = function() { + return "#" + d3_rgb_hex(this.r) + d3_rgb_hex(this.g) + d3_rgb_hex(this.b); +}; + +function d3_rgb_hex(v) { + return v < 0x10 ? "0" + v.toString(16) : v.toString(16); +} + +function d3_rgb_parse(format, rgb, hsl) { + var r = 0, // red channel; int in [0, 255] + g = 0, // green channel; int in [0, 255] + b = 0, // blue channel; int in [0, 255] + m1, // CSS color specification match + m2, // CSS color specification type (e.g., rgb) + name; + + /* Handle hsl, rgb. */ + m1 = /([a-z]+)\((.*)\)/i.exec(format); + if (m1) { + m2 = m1[2].split(","); + switch (m1[1]) { + case "hsl": { + return hsl( + parseFloat(m2[0]), // degrees + parseFloat(m2[1]) / 100, // percentage + parseFloat(m2[2]) / 100 // percentage + ); + } + case "rgb": { + return rgb( + d3_rgb_parseNumber(m2[0]), + d3_rgb_parseNumber(m2[1]), + d3_rgb_parseNumber(m2[2]) + ); + } + } + } + + /* Named colors. */ + if (name = d3_rgb_names[format]) return rgb(name.r, name.g, name.b); + + /* Hexadecimal colors: #rgb and #rrggbb. */ + if (format != null && format.charAt(0) === "#") { + if (format.length === 4) { + r = format.charAt(1); r += r; + g = format.charAt(2); g += g; + b = format.charAt(3); b += b; + } else if (format.length === 7) { + r = format.substring(1, 3); + g = format.substring(3, 5); + b = format.substring(5, 7); + } + r = parseInt(r, 16); + g = parseInt(g, 16); + b = parseInt(b, 16); + } + + return rgb(r, g, b); +} + +function d3_rgb_hsl(r, g, b) { + var min = Math.min(r /= 255, g /= 255, b /= 255), + max = Math.max(r, g, b), + d = max - min, + h, + s, + l = (max + min) / 2; + if (d) { + s = l < .5 ? d / (max + min) : d / (2 - max - min); + if (r == max) h = (g - b) / d + (g < b ? 6 : 0); + else if (g == max) h = (b - r) / d + 2; + else h = (r - g) / d + 4; + h *= 60; + } else { + s = h = 0; + } + return d3_hsl(h, s, l); +} + +function d3_rgb_parseNumber(c) { // either integer or percentage + var f = parseFloat(c); + return c.charAt(c.length - 1) === "%" ? 
Math.round(f * 2.55) : f; +} + +var d3_rgb_names = { + aliceblue: "#f0f8ff", + antiquewhite: "#faebd7", + aqua: "#00ffff", + aquamarine: "#7fffd4", + azure: "#f0ffff", + beige: "#f5f5dc", + bisque: "#ffe4c4", + black: "#000000", + blanchedalmond: "#ffebcd", + blue: "#0000ff", + blueviolet: "#8a2be2", + brown: "#a52a2a", + burlywood: "#deb887", + cadetblue: "#5f9ea0", + chartreuse: "#7fff00", + chocolate: "#d2691e", + coral: "#ff7f50", + cornflowerblue: "#6495ed", + cornsilk: "#fff8dc", + crimson: "#dc143c", + cyan: "#00ffff", + darkblue: "#00008b", + darkcyan: "#008b8b", + darkgoldenrod: "#b8860b", + darkgray: "#a9a9a9", + darkgreen: "#006400", + darkgrey: "#a9a9a9", + darkkhaki: "#bdb76b", + darkmagenta: "#8b008b", + darkolivegreen: "#556b2f", + darkorange: "#ff8c00", + darkorchid: "#9932cc", + darkred: "#8b0000", + darksalmon: "#e9967a", + darkseagreen: "#8fbc8f", + darkslateblue: "#483d8b", + darkslategray: "#2f4f4f", + darkslategrey: "#2f4f4f", + darkturquoise: "#00ced1", + darkviolet: "#9400d3", + deeppink: "#ff1493", + deepskyblue: "#00bfff", + dimgray: "#696969", + dimgrey: "#696969", + dodgerblue: "#1e90ff", + firebrick: "#b22222", + floralwhite: "#fffaf0", + forestgreen: "#228b22", + fuchsia: "#ff00ff", + gainsboro: "#dcdcdc", + ghostwhite: "#f8f8ff", + gold: "#ffd700", + goldenrod: "#daa520", + gray: "#808080", + green: "#008000", + greenyellow: "#adff2f", + grey: "#808080", + honeydew: "#f0fff0", + hotpink: "#ff69b4", + indianred: "#cd5c5c", + indigo: "#4b0082", + ivory: "#fffff0", + khaki: "#f0e68c", + lavender: "#e6e6fa", + lavenderblush: "#fff0f5", + lawngreen: "#7cfc00", + lemonchiffon: "#fffacd", + lightblue: "#add8e6", + lightcoral: "#f08080", + lightcyan: "#e0ffff", + lightgoldenrodyellow: "#fafad2", + lightgray: "#d3d3d3", + lightgreen: "#90ee90", + lightgrey: "#d3d3d3", + lightpink: "#ffb6c1", + lightsalmon: "#ffa07a", + lightseagreen: "#20b2aa", + lightskyblue: "#87cefa", + lightslategray: "#778899", + lightslategrey: "#778899", + lightsteelblue: "#b0c4de", + lightyellow: "#ffffe0", + lime: "#00ff00", + limegreen: "#32cd32", + linen: "#faf0e6", + magenta: "#ff00ff", + maroon: "#800000", + mediumaquamarine: "#66cdaa", + mediumblue: "#0000cd", + mediumorchid: "#ba55d3", + mediumpurple: "#9370db", + mediumseagreen: "#3cb371", + mediumslateblue: "#7b68ee", + mediumspringgreen: "#00fa9a", + mediumturquoise: "#48d1cc", + mediumvioletred: "#c71585", + midnightblue: "#191970", + mintcream: "#f5fffa", + mistyrose: "#ffe4e1", + moccasin: "#ffe4b5", + navajowhite: "#ffdead", + navy: "#000080", + oldlace: "#fdf5e6", + olive: "#808000", + olivedrab: "#6b8e23", + orange: "#ffa500", + orangered: "#ff4500", + orchid: "#da70d6", + palegoldenrod: "#eee8aa", + palegreen: "#98fb98", + paleturquoise: "#afeeee", + palevioletred: "#db7093", + papayawhip: "#ffefd5", + peachpuff: "#ffdab9", + peru: "#cd853f", + pink: "#ffc0cb", + plum: "#dda0dd", + powderblue: "#b0e0e6", + purple: "#800080", + red: "#ff0000", + rosybrown: "#bc8f8f", + royalblue: "#4169e1", + saddlebrown: "#8b4513", + salmon: "#fa8072", + sandybrown: "#f4a460", + seagreen: "#2e8b57", + seashell: "#fff5ee", + sienna: "#a0522d", + silver: "#c0c0c0", + skyblue: "#87ceeb", + slateblue: "#6a5acd", + slategray: "#708090", + slategrey: "#708090", + snow: "#fffafa", + springgreen: "#00ff7f", + steelblue: "#4682b4", + tan: "#d2b48c", + teal: "#008080", + thistle: "#d8bfd8", + tomato: "#ff6347", + turquoise: "#40e0d0", + violet: "#ee82ee", + wheat: "#f5deb3", + white: "#ffffff", + whitesmoke: "#f5f5f5", + yellow: "#ffff00", + 
yellowgreen: "#9acd32" +}; + +for (var d3_rgb_name in d3_rgb_names) { + d3_rgb_names[d3_rgb_name] = d3_rgb_parse( + d3_rgb_names[d3_rgb_name], + d3_rgb, + d3_hsl_rgb); +} +d3.hsl = function(h, s, l) { + return arguments.length === 1 + ? d3_rgb_parse("" + h, d3_rgb_hsl, d3_hsl) + : d3_hsl(+h, +s, +l); +}; + +function d3_hsl(h, s, l) { + return new d3_Hsl(h, s, l); +} + +function d3_Hsl(h, s, l) { + this.h = h; + this.s = s; + this.l = l; +} + +d3_Hsl.prototype.brighter = function(k) { + k = Math.pow(0.7, arguments.length ? k : 1); + return d3_hsl(this.h, this.s, this.l / k); +}; + +d3_Hsl.prototype.darker = function(k) { + k = Math.pow(0.7, arguments.length ? k : 1); + return d3_hsl(this.h, this.s, k * this.l); +}; + +d3_Hsl.prototype.rgb = function() { + return d3_hsl_rgb(this.h, this.s, this.l); +}; + +d3_Hsl.prototype.toString = function() { + return "hsl(" + this.h + "," + this.s * 100 + "%," + this.l * 100 + "%)"; +}; + +function d3_hsl_rgb(h, s, l) { + var m1, + m2; + + /* Some simple corrections for h, s and l. */ + h = h % 360; if (h < 0) h += 360; + s = s < 0 ? 0 : s > 1 ? 1 : s; + l = l < 0 ? 0 : l > 1 ? 1 : l; + + /* From FvD 13.37, CSS Color Module Level 3 */ + m2 = l <= .5 ? l * (1 + s) : l + s - l * s; + m1 = 2 * l - m2; + + function v(h) { + if (h > 360) h -= 360; + else if (h < 0) h += 360; + if (h < 60) return m1 + (m2 - m1) * h / 60; + if (h < 180) return m2; + if (h < 240) return m1 + (m2 - m1) * (240 - h) / 60; + return m1; + } + + function vv(h) { + return Math.round(v(h) * 255); + } + + return d3_rgb(vv(h + 120), vv(h), vv(h - 120)); +} +function d3_selection(groups) { + d3_arraySubclass(groups, d3_selectionPrototype); + return groups; +} + +var d3_select = function(s, n) { return n.querySelector(s); }, + d3_selectAll = function(s, n) { return n.querySelectorAll(s); }; + +// Prefer Sizzle, if available. 
+if (typeof Sizzle === "function") { + d3_select = function(s, n) { return Sizzle(s, n)[0]; }; + d3_selectAll = function(s, n) { return Sizzle.uniqueSort(Sizzle(s, n)); }; +} + +var d3_selectionPrototype = []; + +d3.selection = function() { + return d3_selectionRoot; +}; + +d3.selection.prototype = d3_selectionPrototype; +d3_selectionPrototype.select = function(selector) { + var subgroups = [], + subgroup, + subnode, + group, + node; + + if (typeof selector !== "function") selector = d3_selection_selector(selector); + + for (var j = -1, m = this.length; ++j < m;) { + subgroups.push(subgroup = []); + subgroup.parentNode = (group = this[j]).parentNode; + for (var i = -1, n = group.length; ++i < n;) { + if (node = group[i]) { + subgroup.push(subnode = selector.call(node, node.__data__, i)); + if (subnode && "__data__" in node) subnode.__data__ = node.__data__; + } else { + subgroup.push(null); + } + } + } + + return d3_selection(subgroups); +}; + +function d3_selection_selector(selector) { + return function() { + return d3_select(selector, this); + }; +} +d3_selectionPrototype.selectAll = function(selector) { + var subgroups = [], + subgroup, + node; + + if (typeof selector !== "function") selector = d3_selection_selectorAll(selector); + + for (var j = -1, m = this.length; ++j < m;) { + for (var group = this[j], i = -1, n = group.length; ++i < n;) { + if (node = group[i]) { + subgroups.push(subgroup = d3_array(selector.call(node, node.__data__, i))); + subgroup.parentNode = node; + } + } + } + + return d3_selection(subgroups); +}; + +function d3_selection_selectorAll(selector) { + return function() { + return d3_selectAll(selector, this); + }; +} +d3_selectionPrototype.attr = function(name, value) { + name = d3.ns.qualify(name); + + // If no value is specified, return the first value. + if (arguments.length < 2) { + var node = this.node(); + return name.local + ? node.getAttributeNS(name.space, name.local) + : node.getAttribute(name); + } + + function attrNull() { + this.removeAttribute(name); + } + + function attrNullNS() { + this.removeAttributeNS(name.space, name.local); + } + + function attrConstant() { + this.setAttribute(name, value); + } + + function attrConstantNS() { + this.setAttributeNS(name.space, name.local, value); + } + + function attrFunction() { + var x = value.apply(this, arguments); + if (x == null) this.removeAttribute(name); + else this.setAttribute(name, x); + } + + function attrFunctionNS() { + var x = value.apply(this, arguments); + if (x == null) this.removeAttributeNS(name.space, name.local); + else this.setAttributeNS(name.space, name.local, x); + } + + return this.each(value == null + ? (name.local ? attrNullNS : attrNull) : (typeof value === "function" + ? (name.local ? attrFunctionNS : attrFunction) + : (name.local ? attrConstantNS : attrConstant))); +}; +d3_selectionPrototype.classed = function(name, value) { + var re = new RegExp("(^|\\s+)" + d3.requote(name) + "(\\s+|$)", "g"); + + // If no value is specified, return the first value. + if (arguments.length < 2) { + var node = this.node(); + if (c = node.classList) return c.contains(name); + var c = node.className; + re.lastIndex = 0; + return re.test(c.baseVal != null ? c.baseVal : c); + } + + function classedAdd() { + if (c = this.classList) return c.add(name); + var c = this.className, + cb = c.baseVal != null, + cv = cb ? 
c.baseVal : c; + re.lastIndex = 0; + if (!re.test(cv)) { + cv = d3_collapse(cv + " " + name); + if (cb) c.baseVal = cv; + else this.className = cv; + } + } + + function classedRemove() { + if (c = this.classList) return c.remove(name); + var c = this.className, + cb = c.baseVal != null, + cv = cb ? c.baseVal : c; + cv = d3_collapse(cv.replace(re, " ")); + if (cb) c.baseVal = cv; + else this.className = cv; + } + + function classedFunction() { + (value.apply(this, arguments) + ? classedAdd + : classedRemove).call(this); + } + + return this.each(typeof value === "function" + ? classedFunction : value + ? classedAdd + : classedRemove); +}; +d3_selectionPrototype.style = function(name, value, priority) { + if (arguments.length < 3) priority = ""; + + // If no value is specified, return the first value. + if (arguments.length < 2) return window + .getComputedStyle(this.node(), null) + .getPropertyValue(name); + + function styleNull() { + this.style.removeProperty(name); + } + + function styleConstant() { + this.style.setProperty(name, value, priority); + } + + function styleFunction() { + var x = value.apply(this, arguments); + if (x == null) this.style.removeProperty(name); + else this.style.setProperty(name, x, priority); + } + + return this.each(value == null + ? styleNull : (typeof value === "function" + ? styleFunction : styleConstant)); +}; +d3_selectionPrototype.property = function(name, value) { + + // If no value is specified, return the first value. + if (arguments.length < 2) return this.node()[name]; + + function propertyNull() { + delete this[name]; + } + + function propertyConstant() { + this[name] = value; + } + + function propertyFunction() { + var x = value.apply(this, arguments); + if (x == null) delete this[name]; + else this[name] = x; + } + + return this.each(value == null + ? propertyNull : (typeof value === "function" + ? propertyFunction : propertyConstant)); +}; +d3_selectionPrototype.text = function(value) { + return arguments.length < 1 ? this.node().textContent + : (this.each(typeof value === "function" + ? function() { this.textContent = value.apply(this, arguments); } + : function() { this.textContent = value; })); +}; +d3_selectionPrototype.html = function(value) { + return arguments.length < 1 ? this.node().innerHTML + : (this.each(typeof value === "function" + ? function() { this.innerHTML = value.apply(this, arguments); } + : function() { this.innerHTML = value; })); +}; +// TODO append(node)? +// TODO append(function)? +d3_selectionPrototype.append = function(name) { + name = d3.ns.qualify(name); + + function append() { + return this.appendChild(document.createElement(name)); + } + + function appendNS() { + return this.appendChild(document.createElementNS(name.space, name.local)); + } + + return this.select(name.local ? appendNS : append); +}; +// TODO insert(node, function)? +// TODO insert(function, string)? +// TODO insert(function, function)? +d3_selectionPrototype.insert = function(name, before) { + name = d3.ns.qualify(name); + + function insert() { + return this.insertBefore( + document.createElement(name), + d3_select(before, this)); + } + + function insertNS() { + return this.insertBefore( + document.createElementNS(name.space, name.local), + d3_select(before, this)); + } + + return this.select(name.local ? insertNS : insert); +}; +// TODO remove(selector)? +// TODO remove(node)? +// TODO remove(function)? 
+d3_selectionPrototype.remove = function() { + return this.each(function() { + var parent = this.parentNode; + if (parent) parent.removeChild(this); + }); +}; +// TODO data(null) for clearing data? +d3_selectionPrototype.data = function(data, join) { + var enter = [], + update = [], + exit = []; + + function bind(group, groupData) { + var i, + n = group.length, + m = groupData.length, + n0 = Math.min(n, m), + n1 = Math.max(n, m), + updateNodes = [], + enterNodes = [], + exitNodes = [], + node, + nodeData; + + if (join) { + var nodeByKey = {}, + keys = [], + key, + j = groupData.length; + + for (i = -1; ++i < n;) { + key = join.call(node = group[i], node.__data__, i); + if (key in nodeByKey) { + exitNodes[j++] = node; // duplicate key + } else { + nodeByKey[key] = node; + } + keys.push(key); + } + + for (i = -1; ++i < m;) { + node = nodeByKey[key = join.call(groupData, nodeData = groupData[i], i)]; + if (node) { + node.__data__ = nodeData; + updateNodes[i] = node; + enterNodes[i] = exitNodes[i] = null; + } else { + enterNodes[i] = d3_selection_dataNode(nodeData); + updateNodes[i] = exitNodes[i] = null; + } + delete nodeByKey[key]; + } + + for (i = -1; ++i < n;) { + if (keys[i] in nodeByKey) { + exitNodes[i] = group[i]; + } + } + } else { + for (i = -1; ++i < n0;) { + node = group[i]; + nodeData = groupData[i]; + if (node) { + node.__data__ = nodeData; + updateNodes[i] = node; + enterNodes[i] = exitNodes[i] = null; + } else { + enterNodes[i] = d3_selection_dataNode(nodeData); + updateNodes[i] = exitNodes[i] = null; + } + } + for (; i < m; ++i) { + enterNodes[i] = d3_selection_dataNode(groupData[i]); + updateNodes[i] = exitNodes[i] = null; + } + for (; i < n1; ++i) { + exitNodes[i] = group[i]; + enterNodes[i] = updateNodes[i] = null; + } + } + + enterNodes.update + = updateNodes; + + enterNodes.parentNode + = updateNodes.parentNode + = exitNodes.parentNode + = group.parentNode; + + enter.push(enterNodes); + update.push(updateNodes); + exit.push(exitNodes); + } + + var i = -1, + n = this.length, + group; + if (typeof data === "function") { + while (++i < n) { + bind(group = this[i], data.call(group, group.parentNode.__data__, i)); + } + } else { + while (++i < n) { + bind(group = this[i], data); + } + } + + var selection = d3_selection(update); + selection.enter = function() { return d3_selection_enter(enter); }; + selection.exit = function() { return d3_selection(exit); }; + return selection; +}; + +function d3_selection_dataNode(data) { + return {__data__: data}; +} +function d3_selection_enter(selection) { + d3_arraySubclass(selection, d3_selection_enterPrototype); + return selection; +} + +var d3_selection_enterPrototype = []; + +d3_selection_enterPrototype.append = d3_selectionPrototype.append; +d3_selection_enterPrototype.insert = d3_selectionPrototype.insert; +d3_selection_enterPrototype.empty = d3_selectionPrototype.empty; +d3_selection_enterPrototype.select = function(selector) { + var subgroups = [], + subgroup, + subnode, + upgroup, + group, + node; + + for (var j = -1, m = this.length; ++j < m;) { + upgroup = (group = this[j]).update; + subgroups.push(subgroup = []); + subgroup.parentNode = group.parentNode; + for (var i = -1, n = group.length; ++i < n;) { + if (node = group[i]) { + subgroup.push(upgroup[i] = subnode = selector.call(group.parentNode, node.__data__, i)); + subnode.__data__ = node.__data__; + } else { + subgroup.push(null); + } + } + } + + return d3_selection(subgroups); +}; +// TODO preserve null elements to maintain index? 
+d3_selectionPrototype.filter = function(filter) { + var subgroups = [], + subgroup, + group, + node; + + for (var j = 0, m = this.length; j < m; j++) { + subgroups.push(subgroup = []); + subgroup.parentNode = (group = this[j]).parentNode; + for (var i = 0, n = group.length; i < n; i++) { + if ((node = group[i]) && filter.call(node, node.__data__, i)) { + subgroup.push(node); + } + } + } + + return d3_selection(subgroups); +}; +d3_selectionPrototype.map = function(map) { + return this.each(function() { + this.__data__ = map.apply(this, arguments); + }); +}; +d3_selectionPrototype.sort = function(comparator) { + comparator = d3_selection_sortComparator.apply(this, arguments); + for (var j = 0, m = this.length; j < m; j++) { + for (var group = this[j].sort(comparator), i = 1, n = group.length, prev = group[0]; i < n; i++) { + var node = group[i]; + if (node) { + if (prev) prev.parentNode.insertBefore(node, prev.nextSibling); + prev = node; + } + } + } + return this; +}; + +function d3_selection_sortComparator(comparator) { + if (!arguments.length) comparator = d3.ascending; + return function(a, b) { + return comparator(a && a.__data__, b && b.__data__); + }; +} +// type can be namespaced, e.g., "click.foo" +// listener can be null for removal +d3_selectionPrototype.on = function(type, listener, capture) { + if (arguments.length < 3) capture = false; + + // parse the type specifier + var name = "__on" + type, i = type.indexOf("."); + if (i > 0) type = type.substring(0, i); + + // if called with only one argument, return the current listener + if (arguments.length < 2) return (i = this.node()[name]) && i._; + + // remove the old event listener, and add the new event listener + return this.each(function(d, i) { + var node = this; + + if (node[name]) node.removeEventListener(type, node[name], capture); + if (listener) node.addEventListener(type, node[name] = l, capture); + + // wrapped event listener that preserves i + function l(e) { + var o = d3.event; // Events can be reentrant (e.g., focus). + d3.event = e; + try { + listener.call(node, node.__data__, i); + } finally { + d3.event = o; + } + } + + // stash the unwrapped listener for retrieval + l._ = listener; + }); +}; +d3_selectionPrototype.each = function(callback) { + for (var j = -1, m = this.length; ++j < m;) { + for (var group = this[j], i = -1, n = group.length; ++i < n;) { + var node = group[i]; + if (node) callback.call(node, node.__data__, i, j); + } + } + return this; +}; +// +// Note: assigning to the arguments array simultaneously changes the value of +// the corresponding argument! +// +// TODO The `this` argument probably shouldn't be the first argument to the +// callback, anyway, since it's redundant. However, that will require a major +// version bump due to backwards compatibility, so I'm not changing it right +// away. 
+// +d3_selectionPrototype.call = function(callback) { + callback.apply(this, (arguments[0] = this, arguments)); + return this; +}; +d3_selectionPrototype.empty = function() { + return !this.node(); +}; +d3_selectionPrototype.node = function(callback) { + for (var j = 0, m = this.length; j < m; j++) { + for (var group = this[j], i = 0, n = group.length; i < n; i++) { + var node = group[i]; + if (node) return node; + } + } + return null; +}; +d3_selectionPrototype.transition = function() { + var subgroups = [], + subgroup, + node; + + for (var j = -1, m = this.length; ++j < m;) { + subgroups.push(subgroup = []); + for (var group = this[j], i = -1, n = group.length; ++i < n;) { + subgroup.push((node = group[i]) ? {node: node, delay: 0, duration: 250} : null); + } + } + + return d3_transition(subgroups, d3_transitionInheritId || ++d3_transitionId); +}; +var d3_selectionRoot = d3_selection([[document]]); + +d3_selectionRoot[0].parentNode = document.documentElement; + +// TODO fast singleton implementation! +// TODO select(function) +d3.select = function(selector) { + return typeof selector === "string" + ? d3_selectionRoot.select(selector) + : d3_selection([[selector]]); // assume node +}; + +// TODO selectAll(function) +d3.selectAll = function(selector) { + return typeof selector === "string" + ? d3_selectionRoot.selectAll(selector) + : d3_selection([d3_array(selector)]); // assume node[] +}; +function d3_transition(groups, id) { + d3_arraySubclass(groups, d3_transitionPrototype); + + var tweens = {}, + event = d3.dispatch("start", "end"), + ease = d3_transitionEase, + then = Date.now(); + + groups.id = id; + + groups.tween = function(name, tween) { + if (arguments.length < 2) return tweens[name]; + if (tween == null) delete tweens[name]; + else tweens[name] = tween; + return groups; + }; + + groups.ease = function(value) { + if (!arguments.length) return ease; + ease = typeof value === "function" ? value : d3.ease.apply(d3, arguments); + return groups; + }; + + groups.each = function(type, listener) { + if (arguments.length < 2) return d3_transition_each.call(groups, type); + event[type].add(listener); + return groups; + }; + + d3.timer(function(elapsed) { + groups.each(function(d, i, j) { + var tweened = [], + node = this, + delay = groups[j][i].delay, + duration = groups[j][i].duration, + lock = node.__transition__ || (node.__transition__ = {active: 0, count: 0}); + + ++lock.count; + + delay <= elapsed ? start(elapsed) : d3.timer(start, delay, then); + + function start(elapsed) { + if (lock.active > id) return stop(); + lock.active = id; + + for (var tween in tweens) { + if (tween = tweens[tween].call(node, d, i)) { + tweened.push(tween); + } + } + + event.start.dispatch.call(node, d, i); + if (!tick(elapsed)) d3.timer(tick, 0, then); + return 1; + } + + function tick(elapsed) { + if (lock.active !== id) return stop(); + + var t = (elapsed - delay) / duration, + e = ease(t), + n = tweened.length; + + while (n > 0) { + tweened[--n].call(node, e); + } + + if (t >= 1) { + stop(); + d3_transitionInheritId = id; + event.end.dispatch.call(node, d, i); + d3_transitionInheritId = 0; + return 1; + } + } + + function stop() { + if (!--lock.count) delete node.__transition__; + return 1; + } + }); + return 1; + }, 0, then); + + return groups; +} + +function d3_transitionTween(b) { + return typeof b === "function" + ? 
function(d, i, a) { var v = b.call(this, d, i) + ""; return a != v && d3.interpolate(a, v); } + : (b = b + "", function(d, i, a) { return a != b && d3.interpolate(a, b); }); +} + +var d3_transitionPrototype = [], + d3_transitionId = 0, + d3_transitionInheritId = 0, + d3_transitionEase = d3.ease("cubic-in-out"); + +d3_transitionPrototype.call = d3_selectionPrototype.call; + +d3.transition = function() { + return d3_selectionRoot.transition(); +}; + +d3.transition.prototype = d3_transitionPrototype; +d3_transitionPrototype.select = function(selector) { + var subgroups = [], + subgroup, + subnode, + node; + + if (typeof selector !== "function") selector = d3_selection_selector(selector); + + for (var j = -1, m = this.length; ++j < m;) { + subgroups.push(subgroup = []); + for (var group = this[j], i = -1, n = group.length; ++i < n;) { + if ((node = group[i]) && (subnode = selector.call(node.node, node.node.__data__, i))) { + if ("__data__" in node.node) subnode.__data__ = node.node.__data__; + subgroup.push({node: subnode, delay: node.delay, duration: node.duration}); + } else { + subgroup.push(null); + } + } + } + + return d3_transition(subgroups, this.id).ease(this.ease()); +}; +d3_transitionPrototype.selectAll = function(selector) { + var subgroups = [], + subgroup, + node; + + if (typeof selector !== "function") selector = d3_selection_selectorAll(selector); + + for (var j = -1, m = this.length; ++j < m;) { + for (var group = this[j], i = -1, n = group.length; ++i < n;) { + if (node = group[i]) { + subgroups.push(subgroup = selector.call(node.node, node.node.__data__, i)); + for (var k = -1, o = subgroup.length; ++k < o;) { + subgroup[k] = {node: subgroup[k], delay: node.delay, duration: node.duration}; + } + } + } + } + + return d3_transition(subgroups, this.id).ease(this.ease()); +}; +d3_transitionPrototype.attr = function(name, value) { + return this.attrTween(name, d3_transitionTween(value)); +}; + +d3_transitionPrototype.attrTween = function(name, tween) { + name = d3.ns.qualify(name); + + function attrTween(d, i) { + var f = tween.call(this, d, i, this.getAttribute(name)); + return f && function(t) { + this.setAttribute(name, f(t)); + }; + } + + function attrTweenNS(d, i) { + var f = tween.call(this, d, i, this.getAttributeNS(name.space, name.local)); + return f && function(t) { + this.setAttributeNS(name.space, name.local, f(t)); + }; + } + + return this.tween("attr." + name, name.local ? attrTweenNS : attrTween); +}; +d3_transitionPrototype.style = function(name, value, priority) { + if (arguments.length < 3) priority = ""; + return this.styleTween(name, d3_transitionTween(value), priority); +}; + +d3_transitionPrototype.styleTween = function(name, tween, priority) { + if (arguments.length < 3) priority = ""; + return this.tween("style." + name, function(d, i) { + var f = tween.call(this, d, i, window.getComputedStyle(this, null).getPropertyValue(name)); + return f && function(t) { + this.style.setProperty(name, f(t), priority); + }; + }); +}; +d3_transitionPrototype.text = function(value) { + return this.tween("text", function(d, i) { + this.textContent = typeof value === "function" + ? value.call(this, d, i) + : value; + }); +}; +d3_transitionPrototype.remove = function() { + return this.each("end", function() { + var p; + if (!this.__transition__ && (p = this.parentNode)) p.removeChild(this); + }); +}; +d3_transitionPrototype.delay = function(value) { + var groups = this; + return groups.each(typeof value === "function" + ? 
function(d, i, j) { groups[j][i].delay = +value.apply(this, arguments); } + : (value = +value, function(d, i, j) { groups[j][i].delay = value; })); +}; +d3_transitionPrototype.duration = function(value) { + var groups = this; + return groups.each(typeof value === "function" + ? function(d, i, j) { groups[j][i].duration = +value.apply(this, arguments); } + : (value = +value, function(d, i, j) { groups[j][i].duration = value; })); +}; +function d3_transition_each(callback) { + for (var j = 0, m = this.length; j < m; j++) { + for (var group = this[j], i = 0, n = group.length; i < n; i++) { + var node = group[i]; + if (node) callback.call(node = node.node, node.__data__, i, j); + } + } + return this; +} +d3_transitionPrototype.transition = function() { + return this.select(d3_this); +}; +var d3_timer_queue = null, + d3_timer_interval, // is an interval (or frame) active? + d3_timer_timeout; // is a timeout active? + +// The timer will continue to fire until callback returns true. +d3.timer = function(callback, delay, then) { + var found = false, + t0, + t1 = d3_timer_queue; + + if (arguments.length < 3) { + if (arguments.length < 2) delay = 0; + else if (!isFinite(delay)) return; + then = Date.now(); + } + + // See if the callback's already in the queue. + while (t1) { + if (t1.callback === callback) { + t1.then = then; + t1.delay = delay; + found = true; + break; + } + t0 = t1; + t1 = t1.next; + } + + // Otherwise, add the callback to the queue. + if (!found) d3_timer_queue = { + callback: callback, + then: then, + delay: delay, + next: d3_timer_queue + }; + + // Start animatin'! + if (!d3_timer_interval) { + d3_timer_timeout = clearTimeout(d3_timer_timeout); + d3_timer_interval = 1; + d3_timer_frame(d3_timer_step); + } +} + +function d3_timer_step() { + var elapsed, + now = Date.now(), + t1 = d3_timer_queue; + + while (t1) { + elapsed = now - t1.then; + if (elapsed >= t1.delay) t1.flush = t1.callback(elapsed); + t1 = t1.next; + } + + var delay = d3_timer_flush() - now; + if (delay > 24) { + if (isFinite(delay)) { + clearTimeout(d3_timer_timeout); + d3_timer_timeout = setTimeout(d3_timer_step, delay); + } + d3_timer_interval = 0; + } else { + d3_timer_interval = 1; + d3_timer_frame(d3_timer_step); + } +} + +d3.timer.flush = function() { + var elapsed, + now = Date.now(), + t1 = d3_timer_queue; + + while (t1) { + elapsed = now - t1.then; + if (!t1.delay) t1.flush = t1.callback(elapsed); + t1 = t1.next; + } + + d3_timer_flush(); +}; + +// Flush after callbacks, to avoid concurrent queue modification. +function d3_timer_flush() { + var t0 = null, + t1 = d3_timer_queue, + then = Infinity; + while (t1) { + if (t1.flush) { + t1 = t0 ? t0.next = t1.next : d3_timer_queue = t1.next; + } else { + then = Math.min(then, t1.then + t1.delay); + t1 = (t0 = t1).next; + } + } + return then; +} + +var d3_timer_frame = window.requestAnimationFrame + || window.webkitRequestAnimationFrame + || window.mozRequestAnimationFrame + || window.oRequestAnimationFrame + || window.msRequestAnimationFrame + || function(callback) { setTimeout(callback, 17); }; +function d3_noop() {} +d3.scale = {}; + +function d3_scaleExtent(domain) { + var start = domain[0], stop = domain[domain.length - 1]; + return start < stop ? 
[start, stop] : [stop, start]; +} +function d3_scale_nice(domain, nice) { + var i0 = 0, + i1 = domain.length - 1, + x0 = domain[i0], + x1 = domain[i1], + dx; + + if (x1 < x0) { + dx = i0; i0 = i1; i1 = dx; + dx = x0; x0 = x1; x1 = dx; + } + + nice = nice(x1 - x0); + domain[i0] = nice.floor(x0); + domain[i1] = nice.ceil(x1); + return domain; +} + +function d3_scale_niceDefault() { + return Math; +} +d3.scale.linear = function() { + return d3_scale_linear([0, 1], [0, 1], d3.interpolate, false); +}; + +function d3_scale_linear(domain, range, interpolate, clamp) { + var output, + input; + + function rescale() { + var linear = domain.length == 2 ? d3_scale_bilinear : d3_scale_polylinear, + uninterpolate = clamp ? d3_uninterpolateClamp : d3_uninterpolateNumber; + output = linear(domain, range, uninterpolate, interpolate); + input = linear(range, domain, uninterpolate, d3.interpolate); + return scale; + } + + function scale(x) { + return output(x); + } + + // Note: requires range is coercible to number! + scale.invert = function(y) { + return input(y); + }; + + scale.domain = function(x) { + if (!arguments.length) return domain; + domain = x.map(Number); + return rescale(); + }; + + scale.range = function(x) { + if (!arguments.length) return range; + range = x; + return rescale(); + }; + + scale.rangeRound = function(x) { + return scale.range(x).interpolate(d3.interpolateRound); + }; + + scale.clamp = function(x) { + if (!arguments.length) return clamp; + clamp = x; + return rescale(); + }; + + scale.interpolate = function(x) { + if (!arguments.length) return interpolate; + interpolate = x; + return rescale(); + }; + + scale.ticks = function(m) { + return d3_scale_linearTicks(domain, m); + }; + + scale.tickFormat = function(m) { + return d3_scale_linearTickFormat(domain, m); + }; + + scale.nice = function() { + d3_scale_nice(domain, d3_scale_linearNice); + return rescale(); + }; + + scale.copy = function() { + return d3_scale_linear(domain, range, interpolate, clamp); + }; + + return rescale(); +}; + +function d3_scale_linearRebind(scale, linear) { + scale.range = d3.rebind(scale, linear.range); + scale.rangeRound = d3.rebind(scale, linear.rangeRound); + scale.interpolate = d3.rebind(scale, linear.interpolate); + scale.clamp = d3.rebind(scale, linear.clamp); + return scale; +} + +function d3_scale_linearNice(dx) { + dx = Math.pow(10, Math.round(Math.log(dx) / Math.LN10) - 1); + return { + floor: function(x) { return Math.floor(x / dx) * dx; }, + ceil: function(x) { return Math.ceil(x / dx) * dx; } + }; +} + +// TODO Dates? Ugh. +function d3_scale_linearTickRange(domain, m) { + var extent = d3_scaleExtent(domain), + span = extent[1] - extent[0], + step = Math.pow(10, Math.floor(Math.log(span / m) / Math.LN10)), + err = m / span * step; + + // Filter ticks to get closer to the desired count. + if (err <= .15) step *= 10; + else if (err <= .35) step *= 5; + else if (err <= .75) step *= 2; + + // Round start and stop values to step interval. + extent[0] = Math.ceil(extent[0] / step) * step; + extent[1] = Math.floor(extent[1] / step) * step + step * .5; // inclusive + extent[2] = step; + return extent; +} + +function d3_scale_linearTicks(domain, m) { + return d3.range.apply(d3, d3_scale_linearTickRange(domain, m)); +} + +function d3_scale_linearTickFormat(domain, m) { + return d3.format(",." 
+ Math.max(0, -Math.floor(Math.log(d3_scale_linearTickRange(domain, m)[2]) / Math.LN10 + .01)) + "f"); +} +function d3_scale_bilinear(domain, range, uninterpolate, interpolate) { + var u = uninterpolate(domain[0], domain[1]), + i = interpolate(range[0], range[1]); + return function(x) { + return i(u(x)); + }; +} +function d3_scale_polylinear(domain, range, uninterpolate, interpolate) { + var u = [], + i = [], + j = 0, + n = domain.length; + + while (++j < n) { + u.push(uninterpolate(domain[j - 1], domain[j])); + i.push(interpolate(range[j - 1], range[j])); + } + + return function(x) { + var j = d3.bisect(domain, x, 1, domain.length - 1) - 1; + return i[j](u[j](x)); + }; +} +d3.scale.log = function() { + return d3_scale_log(d3.scale.linear(), d3_scale_logp); +}; + +function d3_scale_log(linear, log) { + var pow = log.pow; + + function scale(x) { + return linear(log(x)); + } + + scale.invert = function(x) { + return pow(linear.invert(x)); + }; + + scale.domain = function(x) { + if (!arguments.length) return linear.domain().map(pow); + log = x[0] < 0 ? d3_scale_logn : d3_scale_logp; + pow = log.pow; + linear.domain(x.map(log)); + return scale; + }; + + scale.nice = function() { + linear.domain(d3_scale_nice(linear.domain(), d3_scale_niceDefault)); + return scale; + }; + + scale.ticks = function() { + var extent = d3_scaleExtent(linear.domain()), + ticks = []; + if (extent.every(isFinite)) { + var i = Math.floor(extent[0]), + j = Math.ceil(extent[1]), + u = Math.round(pow(extent[0])), + v = Math.round(pow(extent[1])); + if (log === d3_scale_logn) { + ticks.push(pow(i)); + for (; i++ < j;) for (var k = 9; k > 0; k--) ticks.push(pow(i) * k); + } else { + for (; i < j; i++) for (var k = 1; k < 10; k++) ticks.push(pow(i) * k); + ticks.push(pow(i)); + } + for (i = 0; ticks[i] < u; i++) {} // strip small values + for (j = ticks.length; ticks[j - 1] > v; j--) {} // strip big values + ticks = ticks.slice(i, j); + } + return ticks; + }; + + scale.tickFormat = function(n, format) { + if (arguments.length < 2) format = d3_scale_logFormat; + if (arguments.length < 1) return format; + var k = n / scale.ticks().length, + f = log === d3_scale_logn ? (e = -1e-15, Math.floor) : (e = 1e-15, Math.ceil), + e; + return function(d) { + return d / pow(f(log(d) + e)) < k ? 
format(d) : ""; + }; + }; + + scale.copy = function() { + return d3_scale_log(linear.copy(), log); + }; + + return d3_scale_linearRebind(scale, linear); +}; + +var d3_scale_logFormat = d3.format("e"); + +function d3_scale_logp(x) { + return Math.log(x) / Math.LN10; +} + +function d3_scale_logn(x) { + return -Math.log(-x) / Math.LN10; +} + +d3_scale_logp.pow = function(x) { + return Math.pow(10, x); +}; + +d3_scale_logn.pow = function(x) { + return -Math.pow(10, -x); +}; +d3.scale.pow = function() { + return d3_scale_pow(d3.scale.linear(), 1); +}; + +function d3_scale_pow(linear, exponent) { + var powp = d3_scale_powPow(exponent), + powb = d3_scale_powPow(1 / exponent); + + function scale(x) { + return linear(powp(x)); + } + + scale.invert = function(x) { + return powb(linear.invert(x)); + }; + + scale.domain = function(x) { + if (!arguments.length) return linear.domain().map(powb); + linear.domain(x.map(powp)); + return scale; + }; + + scale.ticks = function(m) { + return d3_scale_linearTicks(scale.domain(), m); + }; + + scale.tickFormat = function(m) { + return d3_scale_linearTickFormat(scale.domain(), m); + }; + + scale.nice = function() { + return scale.domain(d3_scale_nice(scale.domain(), d3_scale_linearNice)); + }; + + scale.exponent = function(x) { + if (!arguments.length) return exponent; + var domain = scale.domain(); + powp = d3_scale_powPow(exponent = x); + powb = d3_scale_powPow(1 / exponent); + return scale.domain(domain); + }; + + scale.copy = function() { + return d3_scale_pow(linear.copy(), exponent); + }; + + return d3_scale_linearRebind(scale, linear); +}; + +function d3_scale_powPow(e) { + return function(x) { + return x < 0 ? -Math.pow(-x, e) : Math.pow(x, e); + }; +} +d3.scale.sqrt = function() { + return d3.scale.pow().exponent(.5); +}; +d3.scale.ordinal = function() { + return d3_scale_ordinal([], {t: "range", x: []}); +}; + +function d3_scale_ordinal(domain, ranger) { + var index, + range, + rangeBand; + + function scale(x) { + return range[((index[x] || (index[x] = domain.push(x))) - 1) % range.length]; + } + + scale.domain = function(x) { + if (!arguments.length) return domain; + domain = []; + index = {}; + var i = -1, n = x.length, xi; + while (++i < n) if (!index[xi = x[i]]) index[xi] = domain.push(xi); + return scale[ranger.t](ranger.x, ranger.p); + }; + + scale.range = function(x) { + if (!arguments.length) return range; + range = x; + rangeBand = 0; + ranger = {t: "range", x: x}; + return scale; + }; + + scale.rangePoints = function(x, padding) { + if (arguments.length < 2) padding = 0; + var start = x[0], + stop = x[1], + step = (stop - start) / (domain.length - 1 + padding); + range = domain.length < 2 ? 
[(start + stop) / 2] : d3.range(start + step * padding / 2, stop + step / 2, step); + rangeBand = 0; + ranger = {t: "rangePoints", x: x, p: padding}; + return scale; + }; + + scale.rangeBands = function(x, padding) { + if (arguments.length < 2) padding = 0; + var start = x[0], + stop = x[1], + step = (stop - start) / (domain.length + padding); + range = d3.range(start + step * padding, stop, step); + rangeBand = step * (1 - padding); + ranger = {t: "rangeBands", x: x, p: padding}; + return scale; + }; + + scale.rangeRoundBands = function(x, padding) { + if (arguments.length < 2) padding = 0; + var start = x[0], + stop = x[1], + step = Math.floor((stop - start) / (domain.length + padding)), + err = stop - start - (domain.length - padding) * step; + range = d3.range(start + Math.round(err / 2), stop, step); + rangeBand = Math.round(step * (1 - padding)); + ranger = {t: "rangeRoundBands", x: x, p: padding}; + return scale; + }; + + scale.rangeBand = function() { + return rangeBand; + }; + + scale.copy = function() { + return d3_scale_ordinal(domain, ranger); + }; + + return scale.domain(domain); +}; +/* + * This product includes color specifications and designs developed by Cynthia + * Brewer (http://colorbrewer.org/). See lib/colorbrewer for more information. + */ + +d3.scale.category10 = function() { + return d3.scale.ordinal().range(d3_category10); +}; + +d3.scale.category20 = function() { + return d3.scale.ordinal().range(d3_category20); +}; + +d3.scale.category20b = function() { + return d3.scale.ordinal().range(d3_category20b); +}; + +d3.scale.category20c = function() { + return d3.scale.ordinal().range(d3_category20c); +}; + +var d3_category10 = [ + "#1f77b4", "#ff7f0e", "#2ca02c", "#d62728", "#9467bd", + "#8c564b", "#e377c2", "#7f7f7f", "#bcbd22", "#17becf" +]; + +var d3_category20 = [ + "#1f77b4", "#aec7e8", + "#ff7f0e", "#ffbb78", + "#2ca02c", "#98df8a", + "#d62728", "#ff9896", + "#9467bd", "#c5b0d5", + "#8c564b", "#c49c94", + "#e377c2", "#f7b6d2", + "#7f7f7f", "#c7c7c7", + "#bcbd22", "#dbdb8d", + "#17becf", "#9edae5" +]; + +var d3_category20b = [ + "#393b79", "#5254a3", "#6b6ecf", "#9c9ede", + "#637939", "#8ca252", "#b5cf6b", "#cedb9c", + "#8c6d31", "#bd9e39", "#e7ba52", "#e7cb94", + "#843c39", "#ad494a", "#d6616b", "#e7969c", + "#7b4173", "#a55194", "#ce6dbd", "#de9ed6" +]; + +var d3_category20c = [ + "#3182bd", "#6baed6", "#9ecae1", "#c6dbef", + "#e6550d", "#fd8d3c", "#fdae6b", "#fdd0a2", + "#31a354", "#74c476", "#a1d99b", "#c7e9c0", + "#756bb1", "#9e9ac8", "#bcbddc", "#dadaeb", + "#636363", "#969696", "#bdbdbd", "#d9d9d9" +]; +d3.scale.quantile = function() { + return d3_scale_quantile([], []); +}; + +function d3_scale_quantile(domain, range) { + var thresholds; + + function rescale() { + var k = 0, + n = domain.length, + q = range.length; + thresholds = []; + while (++k < q) thresholds[k - 1] = d3.quantile(domain, k / q); + return scale; + } + + function scale(x) { + if (isNaN(x = +x)) return NaN; + return range[d3.bisect(thresholds, x)]; + } + + scale.domain = function(x) { + if (!arguments.length) return domain; + domain = x.filter(function(d) { return !isNaN(d); }).sort(d3.ascending); + return rescale(); + }; + + scale.range = function(x) { + if (!arguments.length) return range; + range = x; + return rescale(); + }; + + scale.quantiles = function() { + return thresholds; + }; + + scale.copy = function() { + return d3_scale_quantile(domain, range); // copy on write! 
+ }; + + return rescale(); +}; +d3.scale.quantize = function() { + return d3_scale_quantize(0, 1, [0, 1]); +}; + +function d3_scale_quantize(x0, x1, range) { + var kx, i; + + function scale(x) { + return range[Math.max(0, Math.min(i, Math.floor(kx * (x - x0))))]; + } + + function rescale() { + kx = range.length / (x1 - x0); + i = range.length - 1; + return scale; + } + + scale.domain = function(x) { + if (!arguments.length) return [x0, x1]; + x0 = +x[0]; + x1 = +x[x.length - 1]; + return rescale(); + }; + + scale.range = function(x) { + if (!arguments.length) return range; + range = x; + return rescale(); + }; + + scale.copy = function() { + return d3_scale_quantize(x0, x1, range); // copy on write + }; + + return rescale(); +}; +d3.svg = {}; +d3.svg.arc = function() { + var innerRadius = d3_svg_arcInnerRadius, + outerRadius = d3_svg_arcOuterRadius, + startAngle = d3_svg_arcStartAngle, + endAngle = d3_svg_arcEndAngle; + + function arc() { + var r0 = innerRadius.apply(this, arguments), + r1 = outerRadius.apply(this, arguments), + a0 = startAngle.apply(this, arguments) + d3_svg_arcOffset, + a1 = endAngle.apply(this, arguments) + d3_svg_arcOffset, + da = (a1 < a0 && (da = a0, a0 = a1, a1 = da), a1 - a0), + df = da < Math.PI ? "0" : "1", + c0 = Math.cos(a0), + s0 = Math.sin(a0), + c1 = Math.cos(a1), + s1 = Math.sin(a1); + return da >= d3_svg_arcMax + ? (r0 + ? "M0," + r1 + + "A" + r1 + "," + r1 + " 0 1,1 0," + (-r1) + + "A" + r1 + "," + r1 + " 0 1,1 0," + r1 + + "M0," + r0 + + "A" + r0 + "," + r0 + " 0 1,0 0," + (-r0) + + "A" + r0 + "," + r0 + " 0 1,0 0," + r0 + + "Z" + : "M0," + r1 + + "A" + r1 + "," + r1 + " 0 1,1 0," + (-r1) + + "A" + r1 + "," + r1 + " 0 1,1 0," + r1 + + "Z") + : (r0 + ? "M" + r1 * c0 + "," + r1 * s0 + + "A" + r1 + "," + r1 + " 0 " + df + ",1 " + r1 * c1 + "," + r1 * s1 + + "L" + r0 * c1 + "," + r0 * s1 + + "A" + r0 + "," + r0 + " 0 " + df + ",0 " + r0 * c0 + "," + r0 * s0 + + "Z" + : "M" + r1 * c0 + "," + r1 * s0 + + "A" + r1 + "," + r1 + " 0 " + df + ",1 " + r1 * c1 + "," + r1 * s1 + + "L0,0" + + "Z"); + } + + arc.innerRadius = function(v) { + if (!arguments.length) return innerRadius; + innerRadius = d3.functor(v); + return arc; + }; + + arc.outerRadius = function(v) { + if (!arguments.length) return outerRadius; + outerRadius = d3.functor(v); + return arc; + }; + + arc.startAngle = function(v) { + if (!arguments.length) return startAngle; + startAngle = d3.functor(v); + return arc; + }; + + arc.endAngle = function(v) { + if (!arguments.length) return endAngle; + endAngle = d3.functor(v); + return arc; + }; + + arc.centroid = function() { + var r = (innerRadius.apply(this, arguments) + + outerRadius.apply(this, arguments)) / 2, + a = (startAngle.apply(this, arguments) + + endAngle.apply(this, arguments)) / 2 + d3_svg_arcOffset; + return [Math.cos(a) * r, Math.sin(a) * r]; + }; + + return arc; +}; + +var d3_svg_arcOffset = -Math.PI / 2, + d3_svg_arcMax = 2 * Math.PI - 1e-6; + +function d3_svg_arcInnerRadius(d) { + return d.innerRadius; +} + +function d3_svg_arcOuterRadius(d) { + return d.outerRadius; +} + +function d3_svg_arcStartAngle(d) { + return d.startAngle; +} + +function d3_svg_arcEndAngle(d) { + return d.endAngle; +} +function d3_svg_line(projection) { + var x = d3_svg_lineX, + y = d3_svg_lineY, + interpolate = "linear", + interpolator = d3_svg_lineInterpolators[interpolate], + tension = .7; + + function line(d) { + return d.length < 1 ? 
null : "M" + interpolator(projection(d3_svg_linePoints(this, d, x, y)), tension); + } + + line.x = function(v) { + if (!arguments.length) return x; + x = v; + return line; + }; + + line.y = function(v) { + if (!arguments.length) return y; + y = v; + return line; + }; + + line.interpolate = function(v) { + if (!arguments.length) return interpolate; + interpolator = d3_svg_lineInterpolators[interpolate = v]; + return line; + }; + + line.tension = function(v) { + if (!arguments.length) return tension; + tension = v; + return line; + }; + + return line; +} + +d3.svg.line = function() { + return d3_svg_line(Object); +}; + +// Converts the specified array of data into an array of points +// (x-y tuples), by evaluating the specified `x` and `y` functions on each +// data point. The `this` context of the evaluated functions is the specified +// "self" object; each function is passed the current datum and index. +function d3_svg_linePoints(self, d, x, y) { + var points = [], + i = -1, + n = d.length, + fx = typeof x === "function", + fy = typeof y === "function", + value; + if (fx && fy) { + while (++i < n) points.push([ + x.call(self, value = d[i], i), + y.call(self, value, i) + ]); + } else if (fx) { + while (++i < n) points.push([x.call(self, d[i], i), y]); + } else if (fy) { + while (++i < n) points.push([x, y.call(self, d[i], i)]); + } else { + while (++i < n) points.push([x, y]); + } + return points; +} + +// The default `x` property, which references d[0]. +function d3_svg_lineX(d) { + return d[0]; +} + +// The default `y` property, which references d[1]. +function d3_svg_lineY(d) { + return d[1]; +} + +// The various interpolators supported by the `line` class. +var d3_svg_lineInterpolators = { + "linear": d3_svg_lineLinear, + "step-before": d3_svg_lineStepBefore, + "step-after": d3_svg_lineStepAfter, + "basis": d3_svg_lineBasis, + "basis-open": d3_svg_lineBasisOpen, + "basis-closed": d3_svg_lineBasisClosed, + "bundle": d3_svg_lineBundle, + "cardinal": d3_svg_lineCardinal, + "cardinal-open": d3_svg_lineCardinalOpen, + "cardinal-closed": d3_svg_lineCardinalClosed, + "monotone": d3_svg_lineMonotone +}; + +// Linear interpolation; generates "L" commands. +function d3_svg_lineLinear(points) { + var i = 0, + n = points.length, + p = points[0], + path = [p[0], ",", p[1]]; + while (++i < n) path.push("L", (p = points[i])[0], ",", p[1]); + return path.join(""); +} + +// Step interpolation; generates "H" and "V" commands. +function d3_svg_lineStepBefore(points) { + var i = 0, + n = points.length, + p = points[0], + path = [p[0], ",", p[1]]; + while (++i < n) path.push("V", (p = points[i])[1], "H", p[0]); + return path.join(""); +} + +// Step interpolation; generates "H" and "V" commands. +function d3_svg_lineStepAfter(points) { + var i = 0, + n = points.length, + p = points[0], + path = [p[0], ",", p[1]]; + while (++i < n) path.push("H", (p = points[i])[0], "V", p[1]); + return path.join(""); +} + +// Open cardinal spline interpolation; generates "C" commands. +function d3_svg_lineCardinalOpen(points, tension) { + return points.length < 4 + ? d3_svg_lineLinear(points) + : points[1] + d3_svg_lineHermite(points.slice(1, points.length - 1), + d3_svg_lineCardinalTangents(points, tension)); +} + +// Closed cardinal spline interpolation; generates "C" commands. +function d3_svg_lineCardinalClosed(points, tension) { + return points.length < 3 + ? 
d3_svg_lineLinear(points) + : points[0] + d3_svg_lineHermite((points.push(points[0]), points), + d3_svg_lineCardinalTangents([points[points.length - 2]] + .concat(points, [points[1]]), tension)); +} + +// Cardinal spline interpolation; generates "C" commands. +function d3_svg_lineCardinal(points, tension, closed) { + return points.length < 3 + ? d3_svg_lineLinear(points) + : points[0] + d3_svg_lineHermite(points, + d3_svg_lineCardinalTangents(points, tension)); +} + +// Hermite spline construction; generates "C" commands. +function d3_svg_lineHermite(points, tangents) { + if (tangents.length < 1 + || (points.length != tangents.length + && points.length != tangents.length + 2)) { + return d3_svg_lineLinear(points); + } + + var quad = points.length != tangents.length, + path = "", + p0 = points[0], + p = points[1], + t0 = tangents[0], + t = t0, + pi = 1; + + if (quad) { + path += "Q" + (p[0] - t0[0] * 2 / 3) + "," + (p[1] - t0[1] * 2 / 3) + + "," + p[0] + "," + p[1]; + p0 = points[1]; + pi = 2; + } + + if (tangents.length > 1) { + t = tangents[1]; + p = points[pi]; + pi++; + path += "C" + (p0[0] + t0[0]) + "," + (p0[1] + t0[1]) + + "," + (p[0] - t[0]) + "," + (p[1] - t[1]) + + "," + p[0] + "," + p[1]; + for (var i = 2; i < tangents.length; i++, pi++) { + p = points[pi]; + t = tangents[i]; + path += "S" + (p[0] - t[0]) + "," + (p[1] - t[1]) + + "," + p[0] + "," + p[1]; + } + } + + if (quad) { + var lp = points[pi]; + path += "Q" + (p[0] + t[0] * 2 / 3) + "," + (p[1] + t[1] * 2 / 3) + + "," + lp[0] + "," + lp[1]; + } + + return path; +} + +// Generates tangents for a cardinal spline. +function d3_svg_lineCardinalTangents(points, tension) { + var tangents = [], + a = (1 - tension) / 2, + p0, + p1 = points[0], + p2 = points[1], + i = 1, + n = points.length; + while (++i < n) { + p0 = p1; + p1 = p2; + p2 = points[i]; + tangents.push([a * (p2[0] - p0[0]), a * (p2[1] - p0[1])]); + } + return tangents; +} + +// B-spline interpolation; generates "C" commands. +function d3_svg_lineBasis(points) { + if (points.length < 3) return d3_svg_lineLinear(points); + var i = 1, + n = points.length, + pi = points[0], + x0 = pi[0], + y0 = pi[1], + px = [x0, x0, x0, (pi = points[1])[0]], + py = [y0, y0, y0, pi[1]], + path = [x0, ",", y0]; + d3_svg_lineBasisBezier(path, px, py); + while (++i < n) { + pi = points[i]; + px.shift(); px.push(pi[0]); + py.shift(); py.push(pi[1]); + d3_svg_lineBasisBezier(path, px, py); + } + i = -1; + while (++i < 2) { + px.shift(); px.push(pi[0]); + py.shift(); py.push(pi[1]); + d3_svg_lineBasisBezier(path, px, py); + } + return path.join(""); +} + +// Open B-spline interpolation; generates "C" commands. +function d3_svg_lineBasisOpen(points) { + if (points.length < 4) return d3_svg_lineLinear(points); + var path = [], + i = -1, + n = points.length, + pi, + px = [0], + py = [0]; + while (++i < 3) { + pi = points[i]; + px.push(pi[0]); + py.push(pi[1]); + } + path.push(d3_svg_lineDot4(d3_svg_lineBasisBezier3, px) + + "," + d3_svg_lineDot4(d3_svg_lineBasisBezier3, py)); + --i; while (++i < n) { + pi = points[i]; + px.shift(); px.push(pi[0]); + py.shift(); py.push(pi[1]); + d3_svg_lineBasisBezier(path, px, py); + } + return path.join(""); +} + +// Closed B-spline interpolation; generates "C" commands. 
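+// As with the open variant above, this slides a four-point window over the
+// control points (wrapping via i % n) and emits one cubic Bézier segment per
+// window through the basis matrix rows defined below. Per-window sketch:
+//
+//   cp1 = dot([0, 2/3, 1/3, 0], window)   // first Bézier control point
+//   cp2 = dot([0, 1/3, 2/3, 0], window)   // second Bézier control point
+//   end = dot([0, 1/6, 2/3, 1/6], window) // segment endpoint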
+function d3_svg_lineBasisClosed(points) { + var path, + i = -1, + n = points.length, + m = n + 4, + pi, + px = [], + py = []; + while (++i < 4) { + pi = points[i % n]; + px.push(pi[0]); + py.push(pi[1]); + } + path = [ + d3_svg_lineDot4(d3_svg_lineBasisBezier3, px), ",", + d3_svg_lineDot4(d3_svg_lineBasisBezier3, py) + ]; + --i; while (++i < m) { + pi = points[i % n]; + px.shift(); px.push(pi[0]); + py.shift(); py.push(pi[1]); + d3_svg_lineBasisBezier(path, px, py); + } + return path.join(""); +} + +function d3_svg_lineBundle(points, tension) { + var n = points.length - 1, + x0 = points[0][0], + y0 = points[0][1], + dx = points[n][0] - x0, + dy = points[n][1] - y0, + i = -1, + p, + t; + while (++i <= n) { + p = points[i]; + t = i / n; + p[0] = tension * p[0] + (1 - tension) * (x0 + t * dx); + p[1] = tension * p[1] + (1 - tension) * (y0 + t * dy); + } + return d3_svg_lineBasis(points); +} + +// Returns the dot product of the given four-element vectors. +function d3_svg_lineDot4(a, b) { + return a[0] * b[0] + a[1] * b[1] + a[2] * b[2] + a[3] * b[3]; +} + +// Matrix to transform basis (b-spline) control points to bezier +// control points. Derived from FvD 11.2.8. +var d3_svg_lineBasisBezier1 = [0, 2/3, 1/3, 0], + d3_svg_lineBasisBezier2 = [0, 1/3, 2/3, 0], + d3_svg_lineBasisBezier3 = [0, 1/6, 2/3, 1/6]; + +// Pushes a "C" Bézier curve onto the specified path array, given the +// two specified four-element arrays which define the control points. +function d3_svg_lineBasisBezier(path, x, y) { + path.push( + "C", d3_svg_lineDot4(d3_svg_lineBasisBezier1, x), + ",", d3_svg_lineDot4(d3_svg_lineBasisBezier1, y), + ",", d3_svg_lineDot4(d3_svg_lineBasisBezier2, x), + ",", d3_svg_lineDot4(d3_svg_lineBasisBezier2, y), + ",", d3_svg_lineDot4(d3_svg_lineBasisBezier3, x), + ",", d3_svg_lineDot4(d3_svg_lineBasisBezier3, y)); +} + +// Computes the slope from points p0 to p1. +function d3_svg_lineSlope(p0, p1) { + return (p1[1] - p0[1]) / (p1[0] - p0[0]); +} + +// Compute three-point differences for the given points. +// http://en.wikipedia.org/wiki/Cubic_Hermite_spline#Finite_difference +function d3_svg_lineFiniteDifferences(points) { + var i = 0, + j = points.length - 1, + m = [], + p0 = points[0], + p1 = points[1], + d = m[0] = d3_svg_lineSlope(p0, p1); + while (++i < j) { + m[i] = d + (d = d3_svg_lineSlope(p0 = p1, p1 = points[i + 1])); + } + m[i] = d; + return m; +} + +// Interpolates the given points using Fritsch-Carlson Monotone cubic Hermite +// interpolation. Returns an array of tangent vectors. For details, see +// http://en.wikipedia.org/wiki/Monotone_cubic_interpolation +function d3_svg_lineMonotoneTangents(points) { + var tangents = [], + d, + a, + b, + s, + m = d3_svg_lineFiniteDifferences(points), + i = -1, + j = points.length - 1; + + // The first two steps are done by computing finite-differences: + // 1. Compute the slopes of the secant lines between successive points. + // 2. Initialize the tangents at every point as the average of the secants. + + // Then, for each segment… + while (++i < j) { + d = d3_svg_lineSlope(points[i], points[i + 1]); + + // 3. If two successive yk = y{k + 1} are equal (i.e., d is zero), then set + // mk = m{k + 1} = 0 as the spline connecting these points must be flat to + // preserve monotonicity. Ignore step 4 and 5 for those k. + + if (Math.abs(d) < 1e-6) { + m[i] = m[i + 1] = 0; + } else { + // 4. Let ak = mk / dk and bk = m{k + 1} / dk. + a = m[i] / d; + b = m[i + 1] / d; + + // 5. 
Prevent overshoot and ensure monotonicity by restricting the + // magnitude of vector to a circle of radius 3. + s = a * a + b * b; + if (s > 9) { + s = d * 3 / Math.sqrt(s); + m[i] = s * a; + m[i + 1] = s * b; + } + } + } + + // Compute the normalized tangent vector from the slopes. Note that if x is + // not monotonic, it's possible that the slope will be infinite, so we protect + // against NaN by setting the coordinate to zero. + i = -1; while (++i <= j) { + s = (points[Math.min(j, i + 1)][0] - points[Math.max(0, i - 1)][0]) + / (6 * (1 + m[i] * m[i])); + tangents.push([s || 0, m[i] * s || 0]); + } + + return tangents; +} + +function d3_svg_lineMonotone(points) { + return points.length < 3 + ? d3_svg_lineLinear(points) + : points[0] + + d3_svg_lineHermite(points, d3_svg_lineMonotoneTangents(points)); +} +d3.svg.line.radial = function() { + var line = d3_svg_line(d3_svg_lineRadial); + line.radius = line.x, delete line.x; + line.angle = line.y, delete line.y; + return line; +}; + +function d3_svg_lineRadial(points) { + var point, + i = -1, + n = points.length, + r, + a; + while (++i < n) { + point = points[i]; + r = point[0]; + a = point[1] + d3_svg_arcOffset; + point[0] = r * Math.cos(a); + point[1] = r * Math.sin(a); + } + return points; +} +function d3_svg_area(projection) { + var x0 = d3_svg_lineX, + x1 = d3_svg_lineX, + y0 = 0, + y1 = d3_svg_lineY, + interpolate = "linear", + interpolator = d3_svg_lineInterpolators[interpolate], + tension = .7; + + function area(d) { + if (d.length < 1) return null; + var points0 = d3_svg_linePoints(this, d, x0, y0), + points1 = d3_svg_linePoints(this, d, x0 === x1 ? d3_svg_areaX(points0) : x1, y0 === y1 ? d3_svg_areaY(points0) : y1); + return "M" + interpolator(projection(points1), tension) + + "L" + interpolator(projection(points0.reverse()), tension) + + "Z"; + } + + area.x = function(x) { + if (!arguments.length) return x1; + x0 = x1 = x; + return area; + }; + + area.x0 = function(x) { + if (!arguments.length) return x0; + x0 = x; + return area; + }; + + area.x1 = function(x) { + if (!arguments.length) return x1; + x1 = x; + return area; + }; + + area.y = function(y) { + if (!arguments.length) return y1; + y0 = y1 = y; + return area; + }; + + area.y0 = function(y) { + if (!arguments.length) return y0; + y0 = y; + return area; + }; + + area.y1 = function(y) { + if (!arguments.length) return y1; + y1 = y; + return area; + }; + + area.interpolate = function(x) { + if (!arguments.length) return interpolate; + interpolator = d3_svg_lineInterpolators[interpolate = x]; + return area; + }; + + area.tension = function(x) { + if (!arguments.length) return tension; + tension = x; + return area; + }; + + return area; +} + +d3.svg.area = function() { + return d3_svg_area(Object); +}; + +function d3_svg_areaX(points) { + return function(d, i) { + return points[i][0]; + }; +} + +function d3_svg_areaY(points) { + return function(d, i) { + return points[i][1]; + }; +} +d3.svg.area.radial = function() { + var area = d3_svg_area(d3_svg_lineRadial); + area.radius = area.x, delete area.x; + area.innerRadius = area.x0, delete area.x0; + area.outerRadius = area.x1, delete area.x1; + area.angle = area.y, delete area.y; + area.startAngle = area.y0, delete area.y0; + area.endAngle = area.y1, delete area.y1; + return area; +}; +d3.svg.chord = function() { + var source = d3_svg_chordSource, + target = d3_svg_chordTarget, + radius = d3_svg_chordRadius, + startAngle = d3_svg_arcStartAngle, + endAngle = d3_svg_arcEndAngle; + + // TODO Allow control point to be customized. 
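+  // Expected datum shape, per the default accessors below (a sketch): each
+  // link resembles {source: {startAngle, endAngle, radius}, target: {...}},
+  // with angles in radians. Illustrative configuration:
+  //
+  //   d3.svg.chord().radius(200) // fixed radius instead of d.radius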
+ + function chord(d, i) { + var s = subgroup(this, source, d, i), + t = subgroup(this, target, d, i); + return "M" + s.p0 + + arc(s.r, s.p1) + (equals(s, t) + ? curve(s.r, s.p1, s.r, s.p0) + : curve(s.r, s.p1, t.r, t.p0) + + arc(t.r, t.p1) + + curve(t.r, t.p1, s.r, s.p0)) + + "Z"; + } + + function subgroup(self, f, d, i) { + var subgroup = f.call(self, d, i), + r = radius.call(self, subgroup, i), + a0 = startAngle.call(self, subgroup, i) + d3_svg_arcOffset, + a1 = endAngle.call(self, subgroup, i) + d3_svg_arcOffset; + return { + r: r, + a0: a0, + a1: a1, + p0: [r * Math.cos(a0), r * Math.sin(a0)], + p1: [r * Math.cos(a1), r * Math.sin(a1)] + }; + } + + function equals(a, b) { + return a.a0 == b.a0 && a.a1 == b.a1; + } + + function arc(r, p) { + return "A" + r + "," + r + " 0 0,1 " + p; + } + + function curve(r0, p0, r1, p1) { + return "Q 0,0 " + p1; + } + + chord.radius = function(v) { + if (!arguments.length) return radius; + radius = d3.functor(v); + return chord; + }; + + chord.source = function(v) { + if (!arguments.length) return source; + source = d3.functor(v); + return chord; + }; + + chord.target = function(v) { + if (!arguments.length) return target; + target = d3.functor(v); + return chord; + }; + + chord.startAngle = function(v) { + if (!arguments.length) return startAngle; + startAngle = d3.functor(v); + return chord; + }; + + chord.endAngle = function(v) { + if (!arguments.length) return endAngle; + endAngle = d3.functor(v); + return chord; + }; + + return chord; +}; + +function d3_svg_chordSource(d) { + return d.source; +} + +function d3_svg_chordTarget(d) { + return d.target; +} + +function d3_svg_chordRadius(d) { + return d.radius; +} + +function d3_svg_chordStartAngle(d) { + return d.startAngle; +} + +function d3_svg_chordEndAngle(d) { + return d.endAngle; +} +d3.svg.diagonal = function() { + var source = d3_svg_chordSource, + target = d3_svg_chordTarget, + projection = d3_svg_diagonalProjection; + + function diagonal(d, i) { + var p0 = source.call(this, d, i), + p3 = target.call(this, d, i), + m = (p0.y + p3.y) / 2, + p = [p0, {x: p0.x, y: m}, {x: p3.x, y: m}, p3]; + p = p.map(projection); + return "M" + p[0] + "C" + p[1] + " " + p[2] + " " + p[3]; + } + + diagonal.source = function(x) { + if (!arguments.length) return source; + source = d3.functor(x); + return diagonal; + }; + + diagonal.target = function(x) { + if (!arguments.length) return target; + target = d3.functor(x); + return diagonal; + }; + + diagonal.projection = function(x) { + if (!arguments.length) return projection; + projection = x; + return diagonal; + }; + + return diagonal; +}; + +function d3_svg_diagonalProjection(d) { + return [d.x, d.y]; +} +d3.svg.diagonal.radial = function() { + var diagonal = d3.svg.diagonal(), + projection = d3_svg_diagonalProjection, + projection_ = diagonal.projection; + + diagonal.projection = function(x) { + return arguments.length + ? projection_(d3_svg_diagonalRadialProjection(projection = x)) + : projection; + }; + + return diagonal; +}; + +function d3_svg_diagonalRadialProjection(projection) { + return function() { + var d = projection.apply(this, arguments), + r = d[0], + a = d[1] + d3_svg_arcOffset; + return [r * Math.cos(a), r * Math.sin(a)]; + }; +} +d3.svg.mouse = function(container) { + return d3_svg_mousePoint(container, d3.event); +}; + +// https://bugs.webkit.org/show_bug.cgi?id=44083 +var d3_mouse_bug44083 = /WebKit/.test(navigator.userAgent) ? 
-1 : 0; + +function d3_svg_mousePoint(container, e) { + var point = (container.ownerSVGElement || container).createSVGPoint(); + if ((d3_mouse_bug44083 < 0) && (window.scrollX || window.scrollY)) { + var svg = d3.select(document.body) + .append("svg:svg") + .style("position", "absolute") + .style("top", 0) + .style("left", 0); + var ctm = svg[0][0].getScreenCTM(); + d3_mouse_bug44083 = !(ctm.f || ctm.e); + svg.remove(); + } + if (d3_mouse_bug44083) { + point.x = e.pageX; + point.y = e.pageY; + } else { + point.x = e.clientX; + point.y = e.clientY; + } + point = point.matrixTransform(container.getScreenCTM().inverse()); + return [point.x, point.y]; +}; +d3.svg.touches = function(container) { + var touches = d3.event.touches; + return touches ? d3_array(touches).map(function(touch) { + var point = d3_svg_mousePoint(container, touch); + point.identifier = touch.identifier; + return point; + }) : []; +}; +d3.svg.symbol = function() { + var type = d3_svg_symbolType, + size = d3_svg_symbolSize; + + function symbol(d, i) { + return (d3_svg_symbols[type.call(this, d, i)] + || d3_svg_symbols.circle) + (size.call(this, d, i)); + } + + symbol.type = function(x) { + if (!arguments.length) return type; + type = d3.functor(x); + return symbol; + }; + + // size of symbol in square pixels + symbol.size = function(x) { + if (!arguments.length) return size; + size = d3.functor(x); + return symbol; + }; + + return symbol; +}; + +function d3_svg_symbolSize() { + return 64; +} + +function d3_svg_symbolType() { + return "circle"; +} + +// TODO cross-diagonal? +var d3_svg_symbols = { + "circle": function(size) { + var r = Math.sqrt(size / Math.PI); + return "M0," + r + + "A" + r + "," + r + " 0 1,1 0," + (-r) + + "A" + r + "," + r + " 0 1,1 0," + r + + "Z"; + }, + "cross": function(size) { + var r = Math.sqrt(size / 5) / 2; + return "M" + -3 * r + "," + -r + + "H" + -r + + "V" + -3 * r + + "H" + r + + "V" + -r + + "H" + 3 * r + + "V" + r + + "H" + r + + "V" + 3 * r + + "H" + -r + + "V" + r + + "H" + -3 * r + + "Z"; + }, + "diamond": function(size) { + var ry = Math.sqrt(size / (2 * d3_svg_symbolTan30)), + rx = ry * d3_svg_symbolTan30; + return "M0," + -ry + + "L" + rx + ",0" + + " 0," + ry + + " " + -rx + ",0" + + "Z"; + }, + "square": function(size) { + var r = Math.sqrt(size) / 2; + return "M" + -r + "," + -r + + "L" + r + "," + -r + + " " + r + "," + r + + " " + -r + "," + r + + "Z"; + }, + "triangle-down": function(size) { + var rx = Math.sqrt(size / d3_svg_symbolSqrt3), + ry = rx * d3_svg_symbolSqrt3 / 2; + return "M0," + ry + + "L" + rx +"," + -ry + + " " + -rx + "," + -ry + + "Z"; + }, + "triangle-up": function(size) { + var rx = Math.sqrt(size / d3_svg_symbolSqrt3), + ry = rx * d3_svg_symbolSqrt3 / 2; + return "M0," + -ry + + "L" + rx +"," + ry + + " " + -rx + "," + ry + + "Z"; + } +}; + +d3.svg.symbolTypes = d3.keys(d3_svg_symbols); + +var d3_svg_symbolSqrt3 = Math.sqrt(3), + d3_svg_symbolTan30 = Math.tan(30 * Math.PI / 180); +d3.svg.axis = function() { + var scale = d3.scale.linear(), + orient = "bottom", + tickMajorSize = 6, + tickMinorSize = 6, + tickEndSize = 6, + tickPadding = 3, + tickArguments_ = [10], + tickFormat_, + tickSubdivide = 0; + + function axis(selection) { + selection.each(function(d, i, j) { + var g = d3.select(this); + + // Ticks. + var ticks = scale.ticks.apply(scale, tickArguments_), + tickFormat = tickFormat_ == null ? scale.tickFormat.apply(scale, tickArguments_) : tickFormat_; + + // Minor ticks. 
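+      // (tickSubdivide splits each major interval into evenly spaced minor
+      // ticks; see d3_svg_axisSubdivide below. Illustrative configuration:
+      // selection.call(d3.svg.axis().scale(x).orient("bottom").tickSubdivide(1)).)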
+ var subticks = d3_svg_axisSubdivide(scale, ticks, tickSubdivide), + subtick = g.selectAll(".minor").data(subticks, String), + subtickEnter = subtick.enter().insert("svg:line", "g").attr("class", "tick minor").style("opacity", 1e-6), + subtickExit = transition(subtick.exit()).style("opacity", 1e-6).remove(), + subtickUpdate = transition(subtick).style("opacity", 1); + + // Major ticks. + var tick = g.selectAll("g").data(ticks, String), + tickEnter = tick.enter().insert("svg:g", "path").style("opacity", 1e-6), + tickExit = transition(tick.exit()).style("opacity", 1e-6).remove(), + tickUpdate = transition(tick).style("opacity", 1), + tickTransform; + + // Domain. + var range = d3_scaleExtent(scale.range()), + path = g.selectAll(".domain").data([0]), + pathEnter = path.enter().append("svg:path").attr("class", "domain"), + pathUpdate = transition(path); + + // Stash the new scale and grab the old scale. + var scale0 = this.__chart__ || scale; + this.__chart__ = scale.copy(); + + tickEnter.append("svg:line").attr("class", "tick"); + tickEnter.append("svg:text"); + tickUpdate.select("text").text(tickFormat); + + switch (orient) { + case "bottom": { + tickTransform = d3_svg_axisX; + subtickUpdate.attr("y2", tickMinorSize); + tickEnter.select("text").attr("dy", ".71em").attr("text-anchor", "middle"); + tickUpdate.select("line").attr("y2", tickMajorSize); + tickUpdate.select("text").attr("y", Math.max(tickMajorSize, 0) + tickPadding); + pathUpdate.attr("d", "M" + range[0] + "," + tickEndSize + "V0H" + range[1] + "V" + tickEndSize); + break; + } + case "top": { + tickTransform = d3_svg_axisX; + subtickUpdate.attr("y2", -tickMinorSize); + tickEnter.select("text").attr("text-anchor", "middle"); + tickUpdate.select("line").attr("y2", -tickMajorSize); + tickUpdate.select("text").attr("y", -(Math.max(tickMajorSize, 0) + tickPadding)); + pathUpdate.attr("d", "M" + range[0] + "," + -tickEndSize + "V0H" + range[1] + "V" + -tickEndSize); + break; + } + case "left": { + tickTransform = d3_svg_axisY; + subtickUpdate.attr("x2", -tickMinorSize); + tickEnter.select("text").attr("dy", ".32em").attr("text-anchor", "end"); + tickUpdate.select("line").attr("x2", -tickMajorSize); + tickUpdate.select("text").attr("x", -(Math.max(tickMajorSize, 0) + tickPadding)); + pathUpdate.attr("d", "M" + -tickEndSize + "," + range[0] + "H0V" + range[1] + "H" + -tickEndSize); + break; + } + case "right": { + tickTransform = d3_svg_axisY; + subtickUpdate.attr("x2", tickMinorSize); + tickEnter.select("text").attr("dy", ".32em"); + tickUpdate.select("line").attr("x2", tickMajorSize); + tickUpdate.select("text").attr("x", Math.max(tickMajorSize, 0) + tickPadding); + pathUpdate.attr("d", "M" + tickEndSize + "," + range[0] + "H0V" + range[1] + "H" + tickEndSize); + break; + } + } + + tickEnter.call(tickTransform, scale0); + tickUpdate.call(tickTransform, scale); + tickExit.call(tickTransform, scale); + + subtickEnter.call(tickTransform, scale0); + subtickUpdate.call(tickTransform, scale); + subtickExit.call(tickTransform, scale); + + function transition(o) { + return selection.delay ? 
o.transition() + .delay(selection[j][i].delay) + .duration(selection[j][i].duration) + .ease(selection.ease()) : o; + } + }); + } + + axis.scale = function(x) { + if (!arguments.length) return scale; + scale = x; + return axis; + }; + + axis.orient = function(x) { + if (!arguments.length) return orient; + orient = x; + return axis; + }; + + axis.ticks = function() { + if (!arguments.length) return tickArguments_; + tickArguments_ = arguments; + return axis; + }; + + axis.tickFormat = function(x) { + if (!arguments.length) return tickFormat_; + tickFormat_ = x; + return axis; + }; + + axis.tickSize = function(x, y, z) { + if (!arguments.length) return tickMajorSize; + var n = arguments.length - 1; + tickMajorSize = +x; + tickMinorSize = n > 1 ? +y : tickMajorSize; + tickEndSize = n > 0 ? +arguments[n] : tickMajorSize; + return axis; + }; + + axis.tickPadding = function(x) { + if (!arguments.length) return tickPadding; + tickPadding = +x; + return axis; + }; + + axis.tickSubdivide = function(x) { + if (!arguments.length) return tickSubdivide; + tickSubdivide = +x; + return axis; + }; + + return axis; +}; + +function d3_svg_axisX(selection, x) { + selection.attr("transform", function(d) { return "translate(" + x(d) + ",0)"; }); +} + +function d3_svg_axisY(selection, y) { + selection.attr("transform", function(d) { return "translate(0," + y(d) + ")"; }); +} + +function d3_svg_axisSubdivide(scale, ticks, m) { + subticks = []; + if (m && ticks.length > 1) { + var extent = d3_scaleExtent(scale.domain()), + subticks, + i = -1, + n = ticks.length, + d = (ticks[1] - ticks[0]) / ++m, + j, + v; + while (++i < n) { + for (j = m; --j > 0;) { + if ((v = +ticks[i] - j * d) >= extent[0]) { + subticks.push(v); + } + } + } + for (--i, j = 0; ++j < m && (v = +ticks[i] + j * d) < extent[1];) { + subticks.push(v); + } + } + return subticks; +} +d3.behavior = {}; +d3.behavior.drag = function() { + var event = d3.dispatch("drag", "dragstart", "dragend"); + + function drag() { + this + .on("mousedown.drag", mousedown) + .on("touchstart.drag", mousedown); + + d3.select(window) + .on("mousemove.drag", d3_behavior_dragMove) + .on("touchmove.drag", d3_behavior_dragMove) + .on("mouseup.drag", d3_behavior_dragUp, true) + .on("touchend.drag", d3_behavior_dragUp, true) + .on("click.drag", d3_behavior_dragClick, true); + } + + // snapshot the local context for subsequent dispatch + function start() { + d3_behavior_dragEvent = event; + d3_behavior_dragEventTarget = d3.event.target; + d3_behavior_dragOffset = d3_behavior_dragPoint((d3_behavior_dragTarget = this).parentNode); + d3_behavior_dragMoved = 0; + d3_behavior_dragArguments = arguments; + } + + function mousedown() { + start.apply(this, arguments); + d3_behavior_dragDispatch("dragstart"); + } + + drag.on = function(type, listener) { + event[type].add(listener); + return drag; + }; + + return drag; +}; + +var d3_behavior_dragEvent, + d3_behavior_dragEventTarget, + d3_behavior_dragTarget, + d3_behavior_dragArguments, + d3_behavior_dragOffset, + d3_behavior_dragMoved, + d3_behavior_dragStopClick; + +function d3_behavior_dragDispatch(type) { + var o = d3.event, p = d3_behavior_dragTarget.parentNode, dx = 0, dy = 0; + + if (p) { + p = d3_behavior_dragPoint(p); + dx = p[0] - d3_behavior_dragOffset[0]; + dy = p[1] - d3_behavior_dragOffset[1]; + d3_behavior_dragOffset = p; + d3_behavior_dragMoved |= dx | dy; + } + + try { + d3.event = {dx: dx, dy: dy}; + d3_behavior_dragEvent[type].dispatch.apply(d3_behavior_dragTarget, d3_behavior_dragArguments); + } finally { + d3.event = 
o; + } + + o.preventDefault(); +} + +function d3_behavior_dragPoint(container) { + return d3.event.touches + ? d3.svg.touches(container)[0] + : d3.svg.mouse(container); +} + +function d3_behavior_dragMove() { + if (!d3_behavior_dragTarget) return; + var parent = d3_behavior_dragTarget.parentNode; + + // O NOES! The drag element was removed from the DOM. + if (!parent) return d3_behavior_dragUp(); + + d3_behavior_dragDispatch("drag"); + d3_behavior_dragCancel(); +} + +function d3_behavior_dragUp() { + if (!d3_behavior_dragTarget) return; + d3_behavior_dragDispatch("dragend"); + d3_behavior_dragTarget = null; + + // If the node was moved, prevent the mouseup from propagating. + // Also prevent the subsequent click from propagating (e.g., for anchors). + if (d3_behavior_dragMoved && d3_behavior_dragEventTarget === d3.event.target) { + d3_behavior_dragStopClick = true; + d3_behavior_dragCancel(); + } +} + +function d3_behavior_dragClick() { + if (d3_behavior_dragStopClick && d3_behavior_dragEventTarget === d3.event.target) { + d3_behavior_dragCancel(); + d3_behavior_dragStopClick = false; + d3_behavior_dragEventTarget = null; + } +} + +function d3_behavior_dragCancel() { + d3.event.stopPropagation(); + d3.event.preventDefault(); +} +// TODO unbind zoom behavior? +// TODO unbind listener? +d3.behavior.zoom = function() { + var xyz = [0, 0, 0], + event = d3.dispatch("zoom"); + + function zoom() { + this + .on("mousedown.zoom", mousedown) + .on("mousewheel.zoom", mousewheel) + .on("DOMMouseScroll.zoom", mousewheel) + .on("dblclick.zoom", dblclick) + .on("touchstart.zoom", touchstart); + + d3.select(window) + .on("mousemove.zoom", d3_behavior_zoomMousemove) + .on("mouseup.zoom", d3_behavior_zoomMouseup) + .on("touchmove.zoom", d3_behavior_zoomTouchmove) + .on("touchend.zoom", d3_behavior_zoomTouchup) + .on("click.zoom", d3_behavior_zoomClick, true); + } + + // snapshot the local context for subsequent dispatch + function start() { + d3_behavior_zoomXyz = xyz; + d3_behavior_zoomDispatch = event.zoom.dispatch; + d3_behavior_zoomEventTarget = d3.event.target; + d3_behavior_zoomTarget = this; + d3_behavior_zoomArguments = arguments; + } + + function mousedown() { + start.apply(this, arguments); + d3_behavior_zoomPanning = d3_behavior_zoomLocation(d3.svg.mouse(d3_behavior_zoomTarget)); + d3_behavior_zoomMoved = false; + d3.event.preventDefault(); + window.focus(); + } + + // store starting mouse location + function mousewheel() { + start.apply(this, arguments); + if (!d3_behavior_zoomZooming) d3_behavior_zoomZooming = d3_behavior_zoomLocation(d3.svg.mouse(d3_behavior_zoomTarget)); + d3_behavior_zoomTo(d3_behavior_zoomDelta() + xyz[2], d3.svg.mouse(d3_behavior_zoomTarget), d3_behavior_zoomZooming); + } + + function dblclick() { + start.apply(this, arguments); + var mouse = d3.svg.mouse(d3_behavior_zoomTarget); + d3_behavior_zoomTo(d3.event.shiftKey ? 
Math.ceil(xyz[2] - 1) : Math.floor(xyz[2] + 1), mouse, d3_behavior_zoomLocation(mouse)); + } + + // doubletap detection + function touchstart() { + start.apply(this, arguments); + var touches = d3_behavior_zoomTouchup(), + touch, + now = Date.now(); + if ((touches.length === 1) && (now - d3_behavior_zoomLast < 300)) { + d3_behavior_zoomTo(1 + Math.floor(xyz[2]), touch = touches[0], d3_behavior_zoomLocations[touch.identifier]); + } + d3_behavior_zoomLast = now; + } + + zoom.on = function(type, listener) { + event[type].add(listener); + return zoom; + }; + + return zoom; +}; + +var d3_behavior_zoomDiv, + d3_behavior_zoomPanning, + d3_behavior_zoomZooming, + d3_behavior_zoomLocations = {}, // identifier -> location + d3_behavior_zoomLast = 0, + d3_behavior_zoomXyz, + d3_behavior_zoomDispatch, + d3_behavior_zoomEventTarget, + d3_behavior_zoomTarget, + d3_behavior_zoomArguments, + d3_behavior_zoomMoved, + d3_behavior_zoomStopClick; + +function d3_behavior_zoomLocation(point) { + return [ + point[0] - d3_behavior_zoomXyz[0], + point[1] - d3_behavior_zoomXyz[1], + d3_behavior_zoomXyz[2] + ]; +} + +// detect the pixels that would be scrolled by this wheel event +function d3_behavior_zoomDelta() { + + // mousewheel events are totally broken! + // https://bugs.webkit.org/show_bug.cgi?id=40441 + // not only that, but Chrome and Safari differ in re. to acceleration! + if (!d3_behavior_zoomDiv) { + d3_behavior_zoomDiv = d3.select("body").append("div") + .style("visibility", "hidden") + .style("top", 0) + .style("height", 0) + .style("width", 0) + .style("overflow-y", "scroll") + .append("div") + .style("height", "2000px") + .node().parentNode; + } + + var e = d3.event, delta; + try { + d3_behavior_zoomDiv.scrollTop = 1000; + d3_behavior_zoomDiv.dispatchEvent(e); + delta = 1000 - d3_behavior_zoomDiv.scrollTop; + } catch (error) { + delta = e.wheelDelta || (-e.detail * 5); + } + + return delta * .005; +} + +// Note: Since we don't rotate, it's possible for the touches to become +// slightly detached from their original positions. Thus, we recompute the +// touch points on touchend as well as touchstart! 
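+// Touch locations are keyed by touch.identifier, so each finger pans and
+// zooms against its own recorded anchor point.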
+function d3_behavior_zoomTouchup() { + var touches = d3.svg.touches(d3_behavior_zoomTarget), + i = -1, + n = touches.length, + touch; + while (++i < n) d3_behavior_zoomLocations[(touch = touches[i]).identifier] = d3_behavior_zoomLocation(touch); + return touches; +} + +function d3_behavior_zoomTouchmove() { + var touches = d3.svg.touches(d3_behavior_zoomTarget); + switch (touches.length) { + + // single-touch pan + case 1: { + var touch = touches[0]; + d3_behavior_zoomTo(d3_behavior_zoomXyz[2], touch, d3_behavior_zoomLocations[touch.identifier]); + break; + } + + // double-touch pan + zoom + case 2: { + var p0 = touches[0], + p1 = touches[1], + p2 = [(p0[0] + p1[0]) / 2, (p0[1] + p1[1]) / 2], + l0 = d3_behavior_zoomLocations[p0.identifier], + l1 = d3_behavior_zoomLocations[p1.identifier], + l2 = [(l0[0] + l1[0]) / 2, (l0[1] + l1[1]) / 2, l0[2]]; + d3_behavior_zoomTo(Math.log(d3.event.scale) / Math.LN2 + l0[2], p2, l2); + break; + } + } +} + +function d3_behavior_zoomMousemove() { + d3_behavior_zoomZooming = null; + if (d3_behavior_zoomPanning) { + d3_behavior_zoomMoved = true; + d3_behavior_zoomTo(d3_behavior_zoomXyz[2], d3.svg.mouse(d3_behavior_zoomTarget), d3_behavior_zoomPanning); + } +} + +function d3_behavior_zoomMouseup() { + if (d3_behavior_zoomPanning) { + if (d3_behavior_zoomMoved && d3_behavior_zoomEventTarget === d3.event.target) { + d3_behavior_zoomStopClick = true; + } + d3_behavior_zoomMousemove(); + d3_behavior_zoomPanning = null; + } +} + +function d3_behavior_zoomClick() { + if (d3_behavior_zoomStopClick && d3_behavior_zoomEventTarget === d3.event.target) { + d3.event.stopPropagation(); + d3.event.preventDefault(); + d3_behavior_zoomStopClick = false; + d3_behavior_zoomEventTarget = null; + } +} + +function d3_behavior_zoomTo(z, x0, x1) { + var K = Math.pow(2, (d3_behavior_zoomXyz[2] = z) - x1[2]), + x = d3_behavior_zoomXyz[0] = x0[0] - K * x1[0], + y = d3_behavior_zoomXyz[1] = x0[1] - K * x1[1], + o = d3.event, // Events can be reentrant (e.g., focus). + k = Math.pow(2, z); + + d3.event = { + scale: k, + translate: [x, y], + transform: function(sx, sy) { + if (sx) transform(sx, x); + if (sy) transform(sy, y); + } + }; + + function transform(scale, o) { + var domain = scale.__domain || (scale.__domain = scale.domain()), + range = scale.range().map(function(v) { return (v - o) / k; }); + scale.domain(domain).domain(range.map(scale.invert)); + } + + try { + d3_behavior_zoomDispatch.apply(d3_behavior_zoomTarget, d3_behavior_zoomArguments); + } finally { + d3.event = o; + } + + o.preventDefault(); +} +})(); diff --git a/media/d3.layout.js b/media/d3.layout.js new file mode 100644 index 00000000..bcfddc04 --- /dev/null +++ b/media/d3.layout.js @@ -0,0 +1,1883 @@ +(function(){d3.layout = {}; +// Implements hierarchical edge bundling using Holten's algorithm. For each +// input link, a path is computed that travels through the tree, up the parent +// hierarchy to the least common ancestor, and then back down to the destination +// node. Each path is simply an array of nodes. 
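+//
+// Illustrative usage (not part of the library; assumes `links` whose source
+// and target are nodes produced by a d3 hierarchy layout such as
+// d3.layout.cluster):
+//
+//   var paths = d3.layout.bundle()(links);
+//   // one array of nodes per link: [source, ..., ancestor, ..., target]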
+d3.layout.bundle = function() { + return function(links) { + var paths = [], + i = -1, + n = links.length; + while (++i < n) paths.push(d3_layout_bundlePath(links[i])); + return paths; + }; +}; + +function d3_layout_bundlePath(link) { + var start = link.source, + end = link.target, + lca = d3_layout_bundleLeastCommonAncestor(start, end), + points = [start]; + while (start !== lca) { + start = start.parent; + points.push(start); + } + var k = points.length; + while (end !== lca) { + points.splice(k, 0, end); + end = end.parent; + } + return points; +} + +function d3_layout_bundleAncestors(node) { + var ancestors = [], + parent = node.parent; + while (parent != null) { + ancestors.push(node); + node = parent; + parent = parent.parent; + } + ancestors.push(node); + return ancestors; +} + +function d3_layout_bundleLeastCommonAncestor(a, b) { + if (a === b) return a; + var aNodes = d3_layout_bundleAncestors(a), + bNodes = d3_layout_bundleAncestors(b), + aNode = aNodes.pop(), + bNode = bNodes.pop(), + sharedNode = null; + while (aNode === bNode) { + sharedNode = aNode; + aNode = aNodes.pop(); + bNode = bNodes.pop(); + } + return sharedNode; +} +d3.layout.chord = function() { + var chord = {}, + chords, + groups, + matrix, + n, + padding = 0, + sortGroups, + sortSubgroups, + sortChords; + + function relayout() { + var subgroups = {}, + groupSums = [], + groupIndex = d3.range(n), + subgroupIndex = [], + k, + x, + x0, + i, + j; + + chords = []; + groups = []; + + // Compute the sum. + k = 0, i = -1; while (++i < n) { + x = 0, j = -1; while (++j < n) { + x += matrix[i][j]; + } + groupSums.push(x); + subgroupIndex.push(d3.range(n)); + k += x; + } + + // Sort groups… + if (sortGroups) { + groupIndex.sort(function(a, b) { + return sortGroups(groupSums[a], groupSums[b]); + }); + } + + // Sort subgroups… + if (sortSubgroups) { + subgroupIndex.forEach(function(d, i) { + d.sort(function(a, b) { + return sortSubgroups(matrix[i][a], matrix[i][b]); + }); + }); + } + + // Convert the sum to scaling factor for [0, 2pi]. + // TODO Allow start and end angle to be specified. + // TODO Allow padding to be specified as percentage? + k = (2 * Math.PI - padding * n) / k; + + // Compute the start and end angle for each group and subgroup. + x = 0, i = -1; while (++i < n) { + x0 = x, j = -1; while (++j < n) { + var di = groupIndex[i], + dj = subgroupIndex[i][j], + v = matrix[di][dj]; + subgroups[di + "-" + dj] = { + index: di, + subindex: dj, + startAngle: x, + endAngle: x += v * k, + value: v + }; + } + groups.push({ + index: di, + startAngle: x0, + endAngle: x, + value: (x - x0) / k + }); + x += padding; + } + + // Generate chords for each (non-empty) subgroup-subgroup link. + i = -1; while (++i < n) { + j = i - 1; while (++j < n) { + var source = subgroups[i + "-" + j], + target = subgroups[j + "-" + i]; + if (source.value || target.value) { + chords.push(source.value < target.value + ? 
{source: target, target: source} + : {source: source, target: target}); + } + } + } + + if (sortChords) resort(); + } + + function resort() { + chords.sort(function(a, b) { + return sortChords(a.target.value, b.target.value); + }); + } + + chord.matrix = function(x) { + if (!arguments.length) return matrix; + n = (matrix = x) && matrix.length; + chords = groups = null; + return chord; + }; + + chord.padding = function(x) { + if (!arguments.length) return padding; + padding = x; + chords = groups = null; + return chord; + }; + + chord.sortGroups = function(x) { + if (!arguments.length) return sortGroups; + sortGroups = x; + chords = groups = null; + return chord; + }; + + chord.sortSubgroups = function(x) { + if (!arguments.length) return sortSubgroups; + sortSubgroups = x; + chords = null; + return chord; + }; + + chord.sortChords = function(x) { + if (!arguments.length) return sortChords; + sortChords = x; + if (chords) resort(); + return chord; + }; + + chord.chords = function() { + if (!chords) relayout(); + return chords; + }; + + chord.groups = function() { + if (!groups) relayout(); + return groups; + }; + + return chord; +}; +// A rudimentary force layout using Gauss-Seidel. +d3.layout.force = function() { + var force = {}, + event = d3.dispatch("tick"), + size = [1, 1], + drag, + alpha, + friction = .9, + linkDistance = d3_layout_forceLinkDistance, + linkStrength = d3_layout_forceLinkStrength, + charge = -30, + gravity = .1, + theta = .8, + interval, + nodes = [], + links = [], + distances, + strengths, + charges; + + function repulse(node) { + return function(quad, x1, y1, x2, y2) { + if (quad.point !== node) { + var dx = quad.cx - node.x, + dy = quad.cy - node.y, + dn = 1 / Math.sqrt(dx * dx + dy * dy); + + /* Barnes-Hut criterion. */ + if ((x2 - x1) * dn < theta) { + var k = quad.charge * dn * dn; + node.px -= dx * k; + node.py -= dy * k; + return true; + } + + if (quad.point && isFinite(dn)) { + var k = quad.pointCharge * dn * dn; + node.px -= dx * k; + node.py -= dy * k; + } + } + return !quad.charge; + }; + } + + function tick() { + var n = nodes.length, + m = links.length, + q, + i, // current index + o, // current object + s, // current source + t, // current target + l, // current distance + k, // current force + x, // x-distance + y; // y-distance + + // gauss-seidel relaxation for links + for (i = 0; i < m; ++i) { + o = links[i]; + s = o.source; + t = o.target; + x = t.x - s.x; + y = t.y - s.y; + if (l = (x * x + y * y)) { + l = alpha * strengths[i] * ((l = Math.sqrt(l)) - distances[i]) / l; + x *= l; + y *= l; + t.x -= x * (k = s.weight / (t.weight + s.weight)); + t.y -= y * k; + s.x += x * (k = 1 - k); + s.y += y * k; + } + } + + // apply gravity forces + if (k = alpha * gravity) { + x = size[0] / 2; + y = size[1] / 2; + i = -1; if (k) while (++i < n) { + o = nodes[i]; + o.x += (x - o.x) * k; + o.y += (y - o.y) * k; + } + } + + // compute quadtree center of mass and apply charge forces + if (charge) { + d3_layout_forceAccumulate(q = d3.geom.quadtree(nodes), alpha, charges); + i = -1; while (++i < n) { + if (!(o = nodes[i]).fixed) { + q.visit(repulse(o)); + } + } + } + + // position verlet integration + i = -1; while (++i < n) { + o = nodes[i]; + if (o.fixed) { + o.x = o.px; + o.y = o.py; + } else { + o.x -= (o.px - (o.px = o.x)) * friction; + o.y -= (o.py - (o.py = o.y)) * friction; + } + } + + event.tick.dispatch({type: "tick", alpha: alpha}); + + // simulated annealing, basically + return (alpha *= .99) < .005; + } + + force.on = function(type, listener) { + 
event[type].add(listener); + return force; + }; + + force.nodes = function(x) { + if (!arguments.length) return nodes; + nodes = x; + return force; + }; + + force.links = function(x) { + if (!arguments.length) return links; + links = x; + return force; + }; + + force.size = function(x) { + if (!arguments.length) return size; + size = x; + return force; + }; + + force.linkDistance = function(x) { + if (!arguments.length) return linkDistance; + linkDistance = d3.functor(x); + return force; + }; + + // For backwards-compatibility. + force.distance = force.linkDistance; + + force.linkStrength = function(x) { + if (!arguments.length) return linkStrength; + linkStrength = d3.functor(x); + return force; + }; + + force.friction = function(x) { + if (!arguments.length) return friction; + friction = x; + return force; + }; + + force.charge = function(x) { + if (!arguments.length) return charge; + charge = typeof x === "function" ? x : +x; + return force; + }; + + force.gravity = function(x) { + if (!arguments.length) return gravity; + gravity = x; + return force; + }; + + force.theta = function(x) { + if (!arguments.length) return theta; + theta = x; + return force; + }; + + force.start = function() { + var i, + j, + n = nodes.length, + m = links.length, + w = size[0], + h = size[1], + neighbors, + o; + + for (i = 0; i < n; ++i) { + (o = nodes[i]).index = i; + o.weight = 0; + } + + distances = []; + strengths = []; + for (i = 0; i < m; ++i) { + o = links[i]; + if (typeof o.source == "number") o.source = nodes[o.source]; + if (typeof o.target == "number") o.target = nodes[o.target]; + distances[i] = linkDistance.call(this, o, i); + strengths[i] = linkStrength.call(this, o, i); + ++o.source.weight; + ++o.target.weight; + } + + for (i = 0; i < n; ++i) { + o = nodes[i]; + if (isNaN(o.x)) o.x = position("x", w); + if (isNaN(o.y)) o.y = position("y", h); + if (isNaN(o.px)) o.px = o.x; + if (isNaN(o.py)) o.py = o.y; + } + + charges = []; + if (typeof charge === "function") { + for (i = 0; i < n; ++i) { + charges[i] = +charge.call(this, nodes[i], i); + } + } else { + for (i = 0; i < n; ++i) { + charges[i] = charge; + } + } + + // initialize node position based on first neighbor + function position(dimension, size) { + var neighbors = neighbor(i), + j = -1, + m = neighbors.length, + x; + while (++j < m) if (!isNaN(x = neighbors[j][dimension])) return x; + return Math.random() * size; + } + + // initialize neighbors lazily + function neighbor() { + if (!neighbors) { + neighbors = []; + for (j = 0; j < n; ++j) { + neighbors[j] = []; + } + for (j = 0; j < m; ++j) { + var o = links[j]; + neighbors[o.source.index].push(o.target); + neighbors[o.target.index].push(o.source); + } + } + return neighbors[i]; + } + + return force.resume(); + }; + + force.resume = function() { + alpha = .1; + d3.timer(tick); + return force; + }; + + force.stop = function() { + alpha = 0; + return force; + }; + + // use `node.call(force.drag)` to make nodes draggable + force.drag = function() { + if (!drag) drag = d3.behavior.drag() + .on("dragstart", dragstart) + .on("drag", d3_layout_forceDrag) + .on("dragend", d3_layout_forceDragEnd); + + this.on("mouseover.force", d3_layout_forceDragOver) + .on("mouseout.force", d3_layout_forceDragOut) + .call(drag); + }; + + function dragstart(d) { + d3_layout_forceDragOver(d3_layout_forceDragNode = d); + d3_layout_forceDragForce = force; + } + + return force; +}; + +var d3_layout_forceDragForce, + d3_layout_forceDragNode; + +function d3_layout_forceDragOver(d) { + d.fixed |= 2; +} + +function 
d3_layout_forceDragOut(d) { + if (d !== d3_layout_forceDragNode) d.fixed &= 1; +} + +function d3_layout_forceDragEnd() { + d3_layout_forceDrag(); + d3_layout_forceDragNode.fixed &= 1; + d3_layout_forceDragForce = d3_layout_forceDragNode = null; +} + +function d3_layout_forceDrag() { + d3_layout_forceDragNode.px += d3.event.dx; + d3_layout_forceDragNode.py += d3.event.dy; + d3_layout_forceDragForce.resume(); // restart annealing +} + +function d3_layout_forceAccumulate(quad, alpha, charges) { + var cx = 0, + cy = 0; + quad.charge = 0; + if (!quad.leaf) { + var nodes = quad.nodes, + n = nodes.length, + i = -1, + c; + while (++i < n) { + c = nodes[i]; + if (c == null) continue; + d3_layout_forceAccumulate(c, alpha, charges); + quad.charge += c.charge; + cx += c.charge * c.cx; + cy += c.charge * c.cy; + } + } + if (quad.point) { + // jitter internal nodes that are coincident + if (!quad.leaf) { + quad.point.x += Math.random() - .5; + quad.point.y += Math.random() - .5; + } + var k = alpha * charges[quad.point.index]; + quad.charge += quad.pointCharge = k; + cx += k * quad.point.x; + cy += k * quad.point.y; + } + quad.cx = cx / quad.charge; + quad.cy = cy / quad.charge; +} + +function d3_layout_forceLinkDistance(link) { + return 20; +} + +function d3_layout_forceLinkStrength(link) { + return 1; +} +d3.layout.partition = function() { + var hierarchy = d3.layout.hierarchy(), + size = [1, 1]; // width, height + + function position(node, x, dx, dy) { + var children = node.children; + node.x = x; + node.y = node.depth * dy; + node.dx = dx; + node.dy = dy; + if (children) { + var i = -1, + n = children.length, + c, + d; + dx = node.value ? dx / node.value : 0; + while (++i < n) { + position(c = children[i], x, d = c.value * dx, dy); + x += d; + } + } + } + + function depth(node) { + var children = node.children, + d = 0; + if (children) { + var i = -1, + n = children.length; + while (++i < n) d = Math.max(d, depth(children[i])); + } + return 1 + d; + } + + function partition(d, i) { + var nodes = hierarchy.call(this, d, i); + position(nodes[0], 0, size[0], size[1] / depth(nodes[0])); + return nodes; + } + + partition.size = function(x) { + if (!arguments.length) return size; + size = x; + return partition; + }; + + return d3_layout_hierarchyRebind(partition, hierarchy); +}; +d3.layout.pie = function() { + var value = Number, + sort = null, + startAngle = 0, + endAngle = 2 * Math.PI; + + function pie(data, i) { + + // Compute the start angle. + var a = +(typeof startAngle === "function" + ? startAngle.apply(this, arguments) + : startAngle); + + // Compute the angular range (end - start). + var k = (typeof endAngle === "function" + ? endAngle.apply(this, arguments) + : endAngle) - startAngle; + + // Optionally sort the data. + var index = d3.range(data.length); + if (sort != null) index.sort(function(i, j) { + return sort(data[i], data[j]); + }); + + // Compute the numeric values for each data element. + var values = data.map(value); + + // Convert k into a scale factor from value to angle, using the sum. + k /= values.reduce(function(p, d) { return p + d; }, 0); + + // Compute the arcs! + var arcs = index.map(function(i) { + return { + data: data[i], + value: d = values[i], + startAngle: a, + endAngle: a += d * k + }; + }); + + // Return the arcs in the original data's order. + return data.map(function(d, i) { + return arcs[index[i]]; + }); + } + + /** + * Specifies the value function *x*, which returns a nonnegative numeric value + * for each datum. The default value function is `Number`. 
The value function + * is passed two arguments: the current datum and the current index. + */ + pie.value = function(x) { + if (!arguments.length) return value; + value = x; + return pie; + }; + + /** + * Specifies a sort comparison operator *x*. The comparator is passed two data + * elements from the data array, a and b; it returns a negative value if a is + * less than b, a positive value if a is greater than b, and zero if a equals + * b. + */ + pie.sort = function(x) { + if (!arguments.length) return sort; + sort = x; + return pie; + }; + + /** + * Specifies the overall start angle of the pie chart. Defaults to 0. The + * start angle can be specified either as a constant or as a function; in the + * case of a function, it is evaluated once per array (as opposed to per + * element). + */ + pie.startAngle = function(x) { + if (!arguments.length) return startAngle; + startAngle = x; + return pie; + }; + + /** + * Specifies the overall end angle of the pie chart. Defaults to 2π. The + * end angle can be specified either as a constant or as a function; in the + * case of a function, it is evaluated once per array (as opposed to per + * element). + */ + pie.endAngle = function(x) { + if (!arguments.length) return endAngle; + endAngle = x; + return pie; + }; + + return pie; +}; +// data is two-dimensional array of x,y; we populate y0 +d3.layout.stack = function() { + var values = Object, + order = d3_layout_stackOrders["default"], + offset = d3_layout_stackOffsets["zero"], + out = d3_layout_stackOut, + x = d3_layout_stackX, + y = d3_layout_stackY; + + function stack(data, index) { + + // Convert series to canonical two-dimensional representation. + var series = data.map(function(d, i) { + return values.call(stack, d, i); + }); + + // Convert each series to canonical [[x,y]] representation. + var points = series.map(function(d, i) { + return d.map(function(v, i) { + return [x.call(stack, v, i), y.call(stack, v, i)]; + }); + }); + + // Compute the order of series, and permute them. + var orders = order.call(stack, points, index); + series = d3.permute(series, orders); + points = d3.permute(points, orders); + + // Compute the baseline… + var offsets = offset.call(stack, points, index); + + // And propagate it to other series. + var n = series.length, + m = series[0].length, + i, + j, + o; + for (j = 0; j < m; ++j) { + out.call(stack, series[0][j], o = offsets[j], points[0][j][1]); + for (i = 1; i < n; ++i) { + out.call(stack, series[i][j], o += points[i - 1][j][1], points[i][j][1]); + } + } + + return data; + } + + stack.values = function(x) { + if (!arguments.length) return values; + values = x; + return stack; + }; + + stack.order = function(x) { + if (!arguments.length) return order; + order = typeof x === "function" ? x : d3_layout_stackOrders[x]; + return stack; + }; + + stack.offset = function(x) { + if (!arguments.length) return offset; + offset = typeof x === "function" ? 
x : d3_layout_stackOffsets[x]; + return stack; + }; + + stack.x = function(z) { + if (!arguments.length) return x; + x = z; + return stack; + }; + + stack.y = function(z) { + if (!arguments.length) return y; + y = z; + return stack; + }; + + stack.out = function(z) { + if (!arguments.length) return out; + out = z; + return stack; + }; + + return stack; +} + +function d3_layout_stackX(d) { + return d.x; +} + +function d3_layout_stackY(d) { + return d.y; +} + +function d3_layout_stackOut(d, y0, y) { + d.y0 = y0; + d.y = y; +} + +var d3_layout_stackOrders = { + + "inside-out": function(data) { + var n = data.length, + i, + j, + max = data.map(d3_layout_stackMaxIndex), + sums = data.map(d3_layout_stackReduceSum), + index = d3.range(n).sort(function(a, b) { return max[a] - max[b]; }), + top = 0, + bottom = 0, + tops = [], + bottoms = []; + for (i = 0; i < n; ++i) { + j = index[i]; + if (top < bottom) { + top += sums[j]; + tops.push(j); + } else { + bottom += sums[j]; + bottoms.push(j); + } + } + return bottoms.reverse().concat(tops); + }, + + "reverse": function(data) { + return d3.range(data.length).reverse(); + }, + + "default": function(data) { + return d3.range(data.length); + } + +}; + +var d3_layout_stackOffsets = { + + "silhouette": function(data) { + var n = data.length, + m = data[0].length, + sums = [], + max = 0, + i, + j, + o, + y0 = []; + for (j = 0; j < m; ++j) { + for (i = 0, o = 0; i < n; i++) o += data[i][j][1]; + if (o > max) max = o; + sums.push(o); + } + for (j = 0; j < m; ++j) { + y0[j] = (max - sums[j]) / 2; + } + return y0; + }, + + "wiggle": function(data) { + var n = data.length, + x = data[0], + m = x.length, + max = 0, + i, + j, + k, + s1, + s2, + s3, + dx, + o, + o0, + y0 = []; + y0[0] = o = o0 = 0; + for (j = 1; j < m; ++j) { + for (i = 0, s1 = 0; i < n; ++i) s1 += data[i][j][1]; + for (i = 0, s2 = 0, dx = x[j][0] - x[j - 1][0]; i < n; ++i) { + for (k = 0, s3 = (data[i][j][1] - data[i][j - 1][1]) / (2 * dx); k < i; ++k) { + s3 += (data[k][j][1] - data[k][j - 1][1]) / dx; + } + s2 += s3 * data[i][j][1]; + } + y0[j] = o -= s1 ? s2 / s1 * dx : 0; + if (o < o0) o0 = o; + } + for (j = 0; j < m; ++j) y0[j] -= o0; + return y0; + }, + + "expand": function(data) { + var n = data.length, + m = data[0].length, + k = 1 / n, + i, + j, + o, + y0 = []; + for (j = 0; j < m; ++j) { + for (i = 0, o = 0; i < n; i++) o += data[i][j][1]; + if (o) for (i = 0; i < n; i++) data[i][j][1] /= o; + else for (i = 0; i < n; i++) data[i][j][1] = k; + } + for (j = 0; j < m; ++j) y0[j] = 0; + return y0; + }, + + "zero": function(data) { + var j = -1, + m = data[0].length, + y0 = []; + while (++j < m) y0[j] = 0; + return y0; + } + +}; + +function d3_layout_stackMaxIndex(array) { + var i = 1, + j = 0, + v = array[0][1], + k, + n = array.length; + for (; i < n; ++i) { + if ((k = array[i][1]) > v) { + j = i; + v = k; + } + } + return j; +} + +function d3_layout_stackReduceSum(d) { + return d.reduce(d3_layout_stackSum, 0); +} + +function d3_layout_stackSum(p, d) { + return p + d[1]; +} +d3.layout.histogram = function() { + var frequency = true, + valuer = Number, + ranger = d3_layout_histogramRange, + binner = d3_layout_histogramBinSturges; + + function histogram(data, i) { + var bins = [], + values = data.map(valuer, this), + range = ranger.call(this, values, i), + thresholds = binner.call(this, range, values, i), + bin, + i = -1, + n = values.length, + m = thresholds.length - 1, + k = frequency ? 1 : 1 / n, + x; + + // Initialize the bins. 
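+    // Each bin is an array collecting the matching data, annotated with
+    // x (lower threshold), dx (bin width), and y (count, or probability when
+    // frequency is false).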
+ while (++i < m) { + bin = bins[i] = []; + bin.dx = thresholds[i + 1] - (bin.x = thresholds[i]); + bin.y = 0; + } + + // Fill the bins, ignoring values outside the range. + i = -1; while(++i < n) { + x = values[i]; + if ((x >= range[0]) && (x <= range[1])) { + bin = bins[d3.bisect(thresholds, x, 1, m) - 1]; + bin.y += k; + bin.push(data[i]); + } + } + + return bins; + } + + // Specifies how to extract a value from the associated data. The default + // value function is `Number`, which is equivalent to the identity function. + histogram.value = function(x) { + if (!arguments.length) return valuer; + valuer = x; + return histogram; + }; + + // Specifies the range of the histogram. Values outside the specified range + // will be ignored. The argument `x` may be specified either as a two-element + // array representing the minimum and maximum value of the range, or as a + // function that returns the range given the array of values and the current + // index `i`. The default range is the extent (minimum and maximum) of the + // values. + histogram.range = function(x) { + if (!arguments.length) return ranger; + ranger = d3.functor(x); + return histogram; + }; + + // Specifies how to bin values in the histogram. The argument `x` may be + // specified as a number, in which case the range of values will be split + // uniformly into the given number of bins. Or, `x` may be an array of + // threshold values, defining the bins; the specified array must contain the + // rightmost (upper) value, thus specifying n + 1 values for n bins. Or, `x` + // may be a function which is evaluated, being passed the range, the array of + // values, and the current index `i`, returning an array of thresholds. The + // default bin function will divide the values into uniform bins using + // Sturges' formula. + histogram.bins = function(x) { + if (!arguments.length) return binner; + binner = typeof x === "number" + ? function(range) { return d3_layout_histogramBinFixed(range, x); } + : d3.functor(x); + return histogram; + }; + + // Specifies whether the histogram's `y` value is a count (frequency) or a + // probability (density). The default value is true. + histogram.frequency = function(x) { + if (!arguments.length) return frequency; + frequency = !!x; + return histogram; + }; + + return histogram; +}; + +function d3_layout_histogramBinSturges(range, values) { + return d3_layout_histogramBinFixed(range, Math.ceil(Math.log(values.length) / Math.LN2 + 1)); +} + +function d3_layout_histogramBinFixed(range, n) { + var x = -1, + b = +range[0], + m = (range[1] - b) / n, + f = []; + while (++x <= n) f[x] = m * x + b; + return f; +} + +function d3_layout_histogramRange(values) { + return [d3.min(values), d3.max(values)]; +} +d3.layout.hierarchy = function() { + var sort = d3_layout_hierarchySort, + children = d3_layout_hierarchyChildren, + value = d3_layout_hierarchyValue; + + // Recursively compute the node depth and value. + // Also converts the data representation into a standard hierarchy structure. + function recurse(data, depth, nodes) { + var childs = children.call(hierarchy, data, depth), + node = d3_layout_hierarchyInline ? 
data : {data: data};
+    node.depth = depth;
+    nodes.push(node);
+    if (childs && (n = childs.length)) {
+      var i = -1,
+          n,
+          c = node.children = [],
+          v = 0,
+          j = depth + 1,
+          d; // scratch reference to the child node
+      while (++i < n) {
+        d = recurse(childs[i], j, nodes);
+        d.parent = node;
+        c.push(d);
+        v += d.value;
+      }
+      if (sort) c.sort(sort);
+      if (value) node.value = v;
+    } else if (value) {
+      node.value = +value.call(hierarchy, data, depth) || 0;
+    }
+    return node;
+  }
+
+  // Recursively re-evaluates the node value.
+  function revalue(node, depth) {
+    var children = node.children,
+        v = 0;
+    if (children && (n = children.length)) {
+      var i = -1,
+          n,
+          j = depth + 1;
+      while (++i < n) v += revalue(children[i], j);
+    } else if (value) {
+      v = +value.call(hierarchy, d3_layout_hierarchyInline ? node : node.data, depth) || 0;
+    }
+    if (value) node.value = v;
+    return v;
+  }
+
+  function hierarchy(d) {
+    var nodes = [];
+    recurse(d, 0, nodes);
+    return nodes;
+  }
+
+  hierarchy.sort = function(x) {
+    if (!arguments.length) return sort;
+    sort = x;
+    return hierarchy;
+  };
+
+  hierarchy.children = function(x) {
+    if (!arguments.length) return children;
+    children = x;
+    return hierarchy;
+  };
+
+  hierarchy.value = function(x) {
+    if (!arguments.length) return value;
+    value = x;
+    return hierarchy;
+  };
+
+  // Re-evaluates the `value` property for the specified hierarchy.
+  hierarchy.revalue = function(root) {
+    revalue(root, 0);
+    return root;
+  };
+
+  return hierarchy;
+};
+
+// A method assignment helper for hierarchy subclasses.
+function d3_layout_hierarchyRebind(object, hierarchy) {
+  object.sort = d3.rebind(object, hierarchy.sort);
+  object.children = d3.rebind(object, hierarchy.children);
+  object.links = d3_layout_hierarchyLinks;
+  object.value = d3.rebind(object, hierarchy.value);
+
+  // If the new API is used, enable inlining.
+  object.nodes = function(d) {
+    d3_layout_hierarchyInline = true;
+    return (object.nodes = object)(d);
+  };
+
+  return object;
+}
+
+function d3_layout_hierarchyChildren(d) {
+  return d.children;
+}
+
+function d3_layout_hierarchyValue(d) {
+  return d.value;
+}
+
+function d3_layout_hierarchySort(a, b) {
+  return b.value - a.value;
+}
+
+// Returns an array of source+target objects for the specified nodes.
+function d3_layout_hierarchyLinks(nodes) {
+  return d3.merge(nodes.map(function(parent) {
+    return (parent.children || []).map(function(child) {
+      return {source: parent, target: child};
+    });
+  }));
+}
+
+// For backwards-compatibility, don't enable inlining by default.
+var d3_layout_hierarchyInline = false;
+d3.layout.pack = function() {
+  var hierarchy = d3.layout.hierarchy().sort(d3_layout_packSort),
+      size = [1, 1];
+
+  function pack(d, i) {
+    var nodes = hierarchy.call(this, d, i),
+        root = nodes[0];
+
+    // Recursively compute the layout.
+    root.x = 0;
+    root.y = 0;
+    d3_layout_packTree(root);
+
+    // Scale the layout to fit the requested size.
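+    // The scale factor k maps the root circle's diameter (2 * root.r) onto
+    // the smaller of the requested width and height.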
+ var w = size[0], + h = size[1], + k = 1 / Math.max(2 * root.r / w, 2 * root.r / h); + d3_layout_packTransform(root, w / 2, h / 2, k); + + return nodes; + } + + pack.size = function(x) { + if (!arguments.length) return size; + size = x; + return pack; + }; + + return d3_layout_hierarchyRebind(pack, hierarchy); +}; + +function d3_layout_packSort(a, b) { + return a.value - b.value; +} + +function d3_layout_packInsert(a, b) { + var c = a._pack_next; + a._pack_next = b; + b._pack_prev = a; + b._pack_next = c; + c._pack_prev = b; +} + +function d3_layout_packSplice(a, b) { + a._pack_next = b; + b._pack_prev = a; +} + +function d3_layout_packIntersects(a, b) { + var dx = b.x - a.x, + dy = b.y - a.y, + dr = a.r + b.r; + return (dr * dr - dx * dx - dy * dy) > .001; // within epsilon +} + +function d3_layout_packCircle(nodes) { + var xMin = Infinity, + xMax = -Infinity, + yMin = Infinity, + yMax = -Infinity, + n = nodes.length, + a, b, c, j, k; + + function bound(node) { + xMin = Math.min(node.x - node.r, xMin); + xMax = Math.max(node.x + node.r, xMax); + yMin = Math.min(node.y - node.r, yMin); + yMax = Math.max(node.y + node.r, yMax); + } + + // Create node links. + nodes.forEach(d3_layout_packLink); + + // Create first node. + a = nodes[0]; + a.x = -a.r; + a.y = 0; + bound(a); + + // Create second node. + if (n > 1) { + b = nodes[1]; + b.x = b.r; + b.y = 0; + bound(b); + + // Create third node and build chain. + if (n > 2) { + c = nodes[2]; + d3_layout_packPlace(a, b, c); + bound(c); + d3_layout_packInsert(a, c); + a._pack_prev = c; + d3_layout_packInsert(c, b); + b = a._pack_next; + + // Now iterate through the rest. + for (var i = 3; i < n; i++) { + d3_layout_packPlace(a, b, c = nodes[i]); + + // Search for the closest intersection. + var isect = 0, s1 = 1, s2 = 1; + for (j = b._pack_next; j !== b; j = j._pack_next, s1++) { + if (d3_layout_packIntersects(j, c)) { + isect = 1; + break; + } + } + if (isect == 1) { + for (k = a._pack_prev; k !== j._pack_prev; k = k._pack_prev, s2++) { + if (d3_layout_packIntersects(k, c)) { + if (s2 < s1) { + isect = -1; + j = k; + } + break; + } + } + } + + // Update node chain. + if (isect == 0) { + d3_layout_packInsert(a, c); + b = c; + bound(c); + } else if (isect > 0) { + d3_layout_packSplice(a, j); + b = j; + i--; + } else { // isect < 0 + d3_layout_packSplice(j, b); + a = j; + i--; + } + } + } + } + + // Re-center the circles and return the encompassing radius. + var cx = (xMin + xMax) / 2, + cy = (yMin + yMax) / 2, + cr = 0; + for (var i = 0; i < n; i++) { + var node = nodes[i]; + node.x -= cx; + node.y -= cy; + cr = Math.max(cr, node.r + Math.sqrt(node.x * node.x + node.y * node.y)); + } + + // Remove node links. 
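+  // The _pack_next/_pack_prev links are placement-only state; unlink them so
+  // they do not leak into the caller's node objects.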
+  nodes.forEach(d3_layout_packUnlink);
+
+  return cr;
+}
+
+function d3_layout_packLink(node) {
+  node._pack_next = node._pack_prev = node;
+}
+
+function d3_layout_packUnlink(node) {
+  delete node._pack_next;
+  delete node._pack_prev;
+}
+
+function d3_layout_packTree(node) {
+  var children = node.children;
+  if (children && children.length) {
+    children.forEach(d3_layout_packTree);
+    node.r = d3_layout_packCircle(children);
+  } else {
+    node.r = Math.sqrt(node.value);
+  }
+}
+
+function d3_layout_packTransform(node, x, y, k) {
+  var children = node.children;
+  node.x = (x += k * node.x);
+  node.y = (y += k * node.y);
+  node.r *= k;
+  if (children) {
+    var i = -1, n = children.length;
+    while (++i < n) d3_layout_packTransform(children[i], x, y, k);
+  }
+}
+
+function d3_layout_packPlace(a, b, c) {
+  var db = a.r + c.r,
+      dx = b.x - a.x,
+      dy = b.y - a.y;
+  if (db && (dx || dy)) {
+    var da = b.r + c.r,
+        dc = Math.sqrt(dx * dx + dy * dy),
+        cos = Math.max(-1, Math.min(1, (db * db + dc * dc - da * da) / (2 * db * dc))),
+        theta = Math.acos(cos),
+        x = cos * (db /= dc),
+        y = Math.sin(theta) * db;
+    c.x = a.x + x * dx + y * dy;
+    c.y = a.y + x * dy - y * dx;
+  } else {
+    c.x = a.x + db;
+    c.y = a.y;
+  }
+}
+// Implements a hierarchical layout using the cluster (or dendrogram) algorithm.
+d3.layout.cluster = function() {
+  var hierarchy = d3.layout.hierarchy().sort(null).value(null),
+      separation = d3_layout_treeSeparation,
+      size = [1, 1]; // width, height
+
+  function cluster(d, i) {
+    var nodes = hierarchy.call(this, d, i),
+        root = nodes[0],
+        previousNode,
+        x = 0,
+        kx,
+        ky;
+
+    // First walk, computing the initial x & y values.
+    d3_layout_treeVisitAfter(root, function(node) {
+      var children = node.children;
+      if (children && children.length) {
+        node.x = d3_layout_clusterX(children);
+        node.y = d3_layout_clusterY(children);
+      } else {
+        node.x = previousNode ? x += separation(node, previousNode) : 0;
+        node.y = 0;
+        previousNode = node;
+      }
+    });
+
+    // Compute the left-most, right-most, and depth-most nodes for extents.
+    var left = d3_layout_clusterLeft(root),
+        right = d3_layout_clusterRight(root),
+        x0 = left.x - separation(left, right) / 2,
+        x1 = right.x + separation(right, left) / 2;
+
+    // Second walk, normalizing x & y to the desired size.
+    d3_layout_treeVisitAfter(root, function(node) {
+      node.x = (node.x - x0) / (x1 - x0) * size[0];
+      node.y = (1 - node.y / root.y) * size[1];
+    });
+
+    return nodes;
+  }
+
+  cluster.separation = function(x) {
+    if (!arguments.length) return separation;
+    separation = x;
+    return cluster;
+  };
+
+  cluster.size = function(x) {
+    if (!arguments.length) return size;
+    size = x;
+    return cluster;
+  };
+
+  return d3_layout_hierarchyRebind(cluster, hierarchy);
+};
+
+function d3_layout_clusterY(children) {
+  return 1 + d3.max(children, function(child) {
+    return child.y;
+  });
+}
+
+function d3_layout_clusterX(children) {
+  return children.reduce(function(x, child) {
+    return x + child.x;
+  }, 0) / children.length;
+}
+
+function d3_layout_clusterLeft(node) {
+  var children = node.children;
+  return children && children.length ? d3_layout_clusterLeft(children[0]) : node;
+}
+
+function d3_layout_clusterRight(node) {
+  var children = node.children, n;
+  return children && (n = children.length) ?
d3_layout_clusterRight(children[n - 1]) : node; +} +// Node-link tree diagram using the Reingold-Tilford "tidy" algorithm +d3.layout.tree = function() { + var hierarchy = d3.layout.hierarchy().sort(null).value(null), + separation = d3_layout_treeSeparation, + size = [1, 1]; // width, height + + function tree(d, i) { + var nodes = hierarchy.call(this, d, i), + root = nodes[0]; + + function firstWalk(node, previousSibling) { + var children = node.children, + layout = node._tree; + if (children && (n = children.length)) { + var n, + firstChild = children[0], + previousChild, + ancestor = firstChild, + child, + i = -1; + while (++i < n) { + child = children[i]; + firstWalk(child, previousChild); + ancestor = apportion(child, previousChild, ancestor); + previousChild = child; + } + d3_layout_treeShift(node); + var midpoint = .5 * (firstChild._tree.prelim + child._tree.prelim); + if (previousSibling) { + layout.prelim = previousSibling._tree.prelim + separation(node, previousSibling); + layout.mod = layout.prelim - midpoint; + } else { + layout.prelim = midpoint; + } + } else { + if (previousSibling) { + layout.prelim = previousSibling._tree.prelim + separation(node, previousSibling); + } + } + } + + function secondWalk(node, x) { + node.x = node._tree.prelim + x; + var children = node.children; + if (children) { + var i = -1, + n = children.length; + x += node._tree.mod; + while (++i < n) { + secondWalk(children[i], x); + } + } + } + + function apportion(node, previousSibling, ancestor) { + if (previousSibling) { + var vip = node, + vop = node, + vim = previousSibling, + vom = node.parent.children[0], + sip = vip._tree.mod, + sop = vop._tree.mod, + sim = vim._tree.mod, + som = vom._tree.mod, + shift; + while (vim = d3_layout_treeRight(vim), vip = d3_layout_treeLeft(vip), vim && vip) { + vom = d3_layout_treeLeft(vom); + vop = d3_layout_treeRight(vop); + vop._tree.ancestor = node; + shift = vim._tree.prelim + sim - vip._tree.prelim - sip + separation(vim, vip); + if (shift > 0) { + d3_layout_treeMove(d3_layout_treeAncestor(vim, node, ancestor), node, shift); + sip += shift; + sop += shift; + } + sim += vim._tree.mod; + sip += vip._tree.mod; + som += vom._tree.mod; + sop += vop._tree.mod; + } + if (vim && !d3_layout_treeRight(vop)) { + vop._tree.thread = vim; + vop._tree.mod += sim - sop; + } + if (vip && !d3_layout_treeLeft(vom)) { + vom._tree.thread = vip; + vom._tree.mod += sip - som; + ancestor = node; + } + } + return ancestor; + } + + // Initialize temporary layout variables. + d3_layout_treeVisitAfter(root, function(node, previousSibling) { + node._tree = { + ancestor: node, + prelim: 0, + mod: 0, + change: 0, + shift: 0, + number: previousSibling ? previousSibling._tree.number + 1 : 0 + }; + }); + + // Compute the layout using Buchheim et al.'s algorithm. + firstWalk(root); + secondWalk(root, -root._tree.prelim); + + // Compute the left-most, right-most, and depth-most nodes for extents. + var left = d3_layout_treeSearch(root, d3_layout_treeLeftmost), + right = d3_layout_treeSearch(root, d3_layout_treeRightmost), + deep = d3_layout_treeSearch(root, d3_layout_treeDeepest), + x0 = left.x - separation(left, right) / 2, + x1 = right.x + separation(right, left) / 2, + y1 = deep.depth || 1; + + // Clear temporary layout variables; transform x and y. 
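+    // x is normalized from [x0, x1] onto [0, size[0]]; y maps node depth onto
+    // [0, size[1]], with the deepest node (y1) as the denominator.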
+ d3_layout_treeVisitAfter(root, function(node) { + node.x = (node.x - x0) / (x1 - x0) * size[0]; + node.y = node.depth / y1 * size[1]; + delete node._tree; + }); + + return nodes; + } + + tree.separation = function(x) { + if (!arguments.length) return separation; + separation = x; + return tree; + }; + + tree.size = function(x) { + if (!arguments.length) return size; + size = x; + return tree; + }; + + return d3_layout_hierarchyRebind(tree, hierarchy); +}; + +function d3_layout_treeSeparation(a, b) { + return a.parent == b.parent ? 1 : 2; +} + +// function d3_layout_treeSeparationRadial(a, b) { +// return (a.parent == b.parent ? 1 : 2) / a.depth; +// } + +function d3_layout_treeLeft(node) { + return node.children ? node.children[0] : node._tree.thread; +} + +function d3_layout_treeRight(node) { + return node.children ? node.children[node.children.length - 1] : node._tree.thread; +} + +function d3_layout_treeSearch(node, compare) { + var children = node.children; + if (children) { + var child, + n = children.length, + i = -1; + while (++i < n) { + if (compare(child = d3_layout_treeSearch(children[i], compare), node) > 0) { + node = child; + } + } + } + return node; +} + +function d3_layout_treeRightmost(a, b) { + return a.x - b.x; +} + +function d3_layout_treeLeftmost(a, b) { + return b.x - a.x; +} + +function d3_layout_treeDeepest(a, b) { + return a.depth - b.depth; +} + +function d3_layout_treeVisitAfter(node, callback) { + function visit(node, previousSibling) { + var children = node.children; + if (children) { + var child, + previousChild = null, + i = -1, + n = children.length; + while (++i < n) { + child = children[i]; + visit(child, previousChild); + previousChild = child; + } + } + callback(node, previousSibling); + } + visit(node, null); +} + +function d3_layout_treeShift(node) { + var shift = 0, + change = 0, + children = node.children, + i = children.length, + child; + while (--i >= 0) { + child = children[i]._tree; + child.prelim += shift; + child.mod += shift; + shift += child.shift + (change += child.change); + } +} + +function d3_layout_treeMove(ancestor, node, shift) { + ancestor = ancestor._tree; + node = node._tree; + var change = shift / (node.number - ancestor.number); + ancestor.change += change; + node.change -= change; + node.shift += shift; + node.prelim += shift; + node.mod += shift; +} + +function d3_layout_treeAncestor(vim, node, ancestor) { + return vim._tree.ancestor.parent == node.parent + ? vim._tree.ancestor + : ancestor; +} +// Squarified Treemaps by Mark Bruls, Kees Huizing, and Jarke J. van Wijk +// Modified to support a target aspect ratio by Jeff Heer +d3.layout.treemap = function() { + var hierarchy = d3.layout.hierarchy(), + round = Math.round, + size = [1, 1], // width, height + padding = null, + pad = d3_layout_treemapPadNull, + sticky = false, + stickies, + ratio = 0.5 * (1 + Math.sqrt(5)); // golden ratio + + // Compute the area for each child based on value & scale. + function scale(children, k) { + var i = -1, + n = children.length, + child, + area; + while (++i < n) { + area = (child = children[i]).value * (k < 0 ? 0 : k); + child.area = isNaN(area) || area <= 0 ? 0 : area; + } + } + + // Recursively arranges the specified node's children into squarified rows. 
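+  // A row keeps accepting children while its worst aspect ratio (per worst())
+  // does not get worse; once a child would worsen it, the row is laid out via
+  // position() and a new row begins in the remaining rectangle.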
+ function squarify(node) { + if (!node.children) return; + var rect = pad(node), + row = [], + children = node.children.slice(), // copy-on-write + child, + best = Infinity, // the best row score so far + score, // the current row score + u = Math.min(rect.dx, rect.dy), // initial orientation + n; + scale(children, rect.dx * rect.dy / node.value); + row.area = 0; + while ((n = children.length) > 0) { + row.push(child = children[n - 1]); + row.area += child.area; + if ((score = worst(row, u)) <= best) { // continue with this orientation + children.pop(); + best = score; + } else { // abort, and try a different orientation + row.area -= row.pop().area; + position(row, u, rect, false); + u = Math.min(rect.dx, rect.dy); + row.length = row.area = 0; + best = Infinity; + } + } + if (row.length) { + position(row, u, rect, true); + row.length = row.area = 0; + } + node.children.forEach(squarify); + } + + // Recursively resizes the specified node's children into existing rows. + // Preserves the existing layout! + function stickify(node) { + if (!node.children) return; + var rect = pad(node), + children = node.children.slice(), // copy-on-write + child, + row = []; + scale(children, rect.dx * rect.dy / node.value); + row.area = 0; + while (child = children.pop()) { + row.push(child); + row.area += child.area; + if (child.z != null) { + position(row, child.z ? rect.dx : rect.dy, rect, !children.length); + row.length = row.area = 0; + } + } + node.children.forEach(stickify); + } + + // Computes the score for the specified row, as the worst aspect ratio. + function worst(row, u) { + var s = row.area, + r, + rmax = 0, + rmin = Infinity, + i = -1, + n = row.length; + while (++i < n) { + if (!(r = row[i].area)) continue; + if (r < rmin) rmin = r; + if (r > rmax) rmax = r; + } + s *= s; + u *= u; + return s + ? Math.max((u * rmax * ratio) / s, s / (u * rmin * ratio)) + : Infinity; + } + + // Positions the specified row of nodes. Modifies `rect`. + function position(row, u, rect, flush) { + var i = -1, + n = row.length, + x = rect.x, + y = rect.y, + v = u ? round(row.area / u) : 0, + o; + if (u == rect.dx) { // horizontal subdivision + if (flush || v > rect.dy) v = v ? rect.dy : 0; // over+underflow + while (++i < n) { + o = row[i]; + o.x = x; + o.y = y; + o.dy = v; + x += o.dx = v ? round(o.area / v) : 0; + } + o.z = true; + o.dx += rect.x + rect.dx - x; // rounding error + rect.y += v; + rect.dy -= v; + } else { // vertical subdivision + if (flush || v > rect.dx) v = v ? rect.dx : 0; // over+underflow + while (++i < n) { + o = row[i]; + o.x = x; + o.y = y; + o.dx = v; + y += o.dy = v ? round(o.area / v) : 0; + } + o.z = false; + o.dy += rect.y + rect.dy - y; // rounding error + rect.x += v; + rect.dx -= v; + } + } + + function treemap(d) { + var nodes = stickies || hierarchy(d), + root = nodes[0]; + root.x = 0; + root.y = 0; + root.dx = size[0]; + root.dy = size[1]; + if (stickies) hierarchy.revalue(root); + scale([root], root.dx * root.dy / root.value); + (stickies ? stickify : squarify)(root); + if (sticky) stickies = nodes; + return nodes; + } + + treemap.size = function(x) { + if (!arguments.length) return size; + size = x; + return treemap; + }; + + treemap.padding = function(x) { + if (!arguments.length) return padding; + + function padFunction(node) { + var p = x.call(treemap, node, node.depth); + return p == null + ? d3_layout_treemapPadNull(node) + : d3_layout_treemapPad(node, typeof p === "number" ? 
[p, p, p, p] : p); + } + + function padConstant(node) { + return d3_layout_treemapPad(node, x); + } + + var type; + pad = (padding = x) == null ? d3_layout_treemapPadNull + : (type = typeof x) === "function" ? padFunction + : type === "number" ? (x = [x, x, x, x], padConstant) + : padConstant; + return treemap; + }; + + treemap.round = function(x) { + if (!arguments.length) return round != Number; + round = x ? Math.round : Number; + return treemap; + }; + + treemap.sticky = function(x) { + if (!arguments.length) return sticky; + sticky = x; + stickies = null; + return treemap; + }; + + treemap.ratio = function(x) { + if (!arguments.length) return ratio; + ratio = x; + return treemap; + }; + + return d3_layout_hierarchyRebind(treemap, hierarchy); +}; + +function d3_layout_treemapPadNull(node) { + return {x: node.x, y: node.y, dx: node.dx, dy: node.dy}; +} + +function d3_layout_treemapPad(node, padding) { + var x = node.x + padding[3], + y = node.y + padding[0], + dx = node.dx - padding[1] - padding[3], + dy = node.dy - padding[0] - padding[2]; + if (dx < 0) { x += dx / 2; dx = 0; } + if (dy < 0) { y += dy / 2; dy = 0; } + return {x: x, y: y, dx: dx, dy: dy}; +} +})(); diff --git a/media/d3.layout.min.js b/media/d3.layout.min.js new file mode 100644 index 00000000..e49c8c71 --- /dev/null +++ b/media/d3.layout.min.js @@ -0,0 +1 @@ +(function(){function bc(a,b){var c=a.x+b[3],d=a.y+b[0],e=a.dx-b[1]-b[3],f=a.dy-b[0]-b[2];e<0&&(c+=e/2,e=0),f<0&&(d+=f/2,f=0);return{x:c,y:d,dx:e,dy:f}}function bb(a){return{x:a.x,y:a.y,dx:a.dx,dy:a.dy}}function ba(a,b,c){return a._tree.ancestor.parent==b.parent?a._tree.ancestor:c}function _(a,b,c){a=a._tree,b=b._tree;var d=c/(b.number-a.number);a.change+=d,b.change-=d,b.shift+=c,b.prelim+=c,b.mod+=c}function $(a){var b=0,c=0,d=a.children,e=d.length,f;while(--e>=0)f=d[e]._tree,f.prelim+=b,f.mod+=b,b+=f.shift+(c+=f.change)}function Z(a,b){function c(a,d){var e=a.children;if(e){var f,g=null,h=-1,i=e.length;while(++h0&&(a=d)}return a}function U(a){return a.children?a.children[a.children.length-1]:a._tree.thread}function T(a){return a.children?a.children[0]:a._tree.thread}function S(a,b){return a.parent==b.parent?1:2}function R(a){var b=a.children,c;return b&&(c=b.length)?R(b[c-1]):a}function Q(a){var b=a.children;return b&&b.length?Q(b[0]):a}function P(a){return a.reduce(function(a,b){return a+b.x},0)/a.length}function O(a){return 1+d3.max(a,function(a){return a.y})}function N(a,b,c){var d=a.r+c.r,e=b.x-a.x,f=b.y-a.y;if(d&&(e||f)){var g=b.r+c.r,h=Math.sqrt(e*e+f*f),i=Math.max(-1,Math.min(1,(d*d+h*h-g*g)/(2*d*h))),j=Math.acos(i),k=i*(d/=h),l=Math.sin(j)*d;c.x=a.x+k*e+l*f,c.y=a.y+k*f-l*e}else c.x=a.x+d,c.y=a.y}function M(a,b,c,d){var e=a.children;a.x=b+=d*a.x,a.y=c+=d*a.y,a.r*=d;if(e){var f=-1,g=e.length;while(++f1){h=a[1],h.x=h.r,h.y=0,l(h);if(f>2){i=a[2],N(g,h,i),l(i),F(g,i),g._pack_prev=i,F(i,h),h=g._pack_next;for(var m=3;m0?(G(g,j),h=j,m--):(G(j,h),g=j,m--)}}}var q=(b+c)/2,r=(d+e)/2,s=0;for(var m=0;m.001}function G(a,b){a._pack_next=b,b._pack_prev=a}function F(a,b){var c=a._pack_next;a._pack_next=b,b._pack_prev=a,b._pack_next=c,c._pack_prev=b}function E(a,b){return a.value-b.value}function C(a){return d3.merge(a.map(function(a){return(a.children||[]).map(function(b){return{source:a,target:b}})}))}function B(a,b){return b.value-a.value}function A(a){return a.value}function z(a){return a.children}function y(a,b){a.sort=d3.rebind(a,b.sort),a.children=d3.rebind(a,b.children),a.links=C,a.value=d3.rebind(a,b.value),a.nodes=function(b){D=!0;return(a.nodes=a)(b)};return 
a}function x(a){return[d3.min(a),d3.max(a)]}function w(a,b){var c=-1,d=+a[0],e=(a[1]-d)/b,f=[];while(++c<=b)f[c]=e*c+d;return f}function v(a,b){return w(a,Math.ceil(Math.log(b.length)/Math.LN2+1))}function u(a,b){return a+b[1]}function t(a){return a.reduce(u,0)}function s(a){var b=1,c=0,d=a[0][1],e,f=a.length;for(;bd&&(c=b,d=e);return c}function p(a,b,c){a.y0=b,a.y=c}function o(a){return a.y}function n(a){return a.x}function m(a){return 1}function l(a){return 20}function k(a,b,c){var d=0,e=0;a.charge=0;if(!a.leaf){var f=a.nodes,g=f.length,h=-1,i;while(++he&&(e=h),d.push(h)}for(g=0;g=i[0]&&o<=i[1]&&(k=g[d3.bisect(j,o,1,m)-1],k.y+=n,k.push(e[f]));return g}var a=!0,b=Number,c=x,d=v;e.value=function(a){if(!arguments.length)return b;b=a;return e},e.range=function(a){if(!arguments.length)return c;c=d3.functor(a);return e},e.bins=function(a){if(!arguments.length)return d;d=typeof a=="number"?function(b){return w(b,a)}:d3.functor(a);return e},e.frequency=function(b){if(!arguments.length)return a;a=!!b;return e};return e},d3.layout.hierarchy=function(){function g(a){var b=[];e(a,0,b);return b}function f(a,b){var d=a.children,e=0;if(d&&(i=d.length)){var h=-1,i,j=b+1;while(++h0&&(_(ba(g,a,d),a,m),i+=m,j+=m),k+=g._tree.mod,i+=e._tree.mod,l+=h._tree.mod,j+=f._tree.mod;g&&!U(f)&&(f._tree.thread=g,f._tree.mod+=k-j),e&&!T(h)&&(h._tree.thread=e,h._tree.mod+=i-l,d=a)}return d}function i(a,b){a.x=a._tree.prelim+b;var c=a.children;if(c){var d=-1,e=c.length;b+=a._tree.mod;while(++dd.dy)j=j?d.dy:0;while(++fd.dx)j=j?d.dx:0;while(++fe&&(e=d)}c*=c,b*=b;return c?Math.max(b*e*h/c,c/(b*f*h)):Infinity}function k(a){if(!!a.children){var b=e(a),c=a.children.slice(),d,f=[];i(c,b.dx*b.dy/a.value),f.area=0;while(d=c.pop())f.push(d),f.area+=d.area,d.z!=null&&(m(f,d.z?b.dx:b.dy,b,!c.length),f.length=f.area=0);a.children.forEach(k)}}function j(a){if(!!a.children){var b=e(a),c=[],d=a.children.slice(),f,g=Infinity,h,k=Math.min(b.dx,b.dy),n;i(d,b.dx*b.dy/a.value),c.area=0;while((n=d.length)>0)c.push(f=d[n-1]),c.area+=f.area,(h=l(c,k))<=g?(d.pop(),g=h):(c.area-=c.pop().area,m(c,k,b,!1),k=Math.min(b.dx,b.dy),c.length=c.area=0,g=Infinity);c.length&&(m(c,k,b,!0),c.length=c.area=0),a.children.forEach(j)}}function i(a,b){var c=-1,d=a.length,e,f;while(++c1){var d=bu(a.domain()),e,f=-1,g=b.length,h=(b[1]-b[0])/++c,i,j;while(++f0;)(j=+b[f]-i*h)>=d[0]&&e.push(j);for(--f,i=0;++i9&&(f=c*3/Math.sqrt(f),g[h]=f*d,g[h+1]=f*e));h=-1;while(++h<=i)f=(a[Math.min(i,h+1)][0]-a[Math.max(0,h-1)][0])/(6*(1+g[h]*g[h])),b.push([f||0,g[h]*f||0]);return b}function ct(a){var b=0,c=a.length-1,d=[],e=a[0],f=a[1],g=d[0]=cs(e,f);while(++b1){h=b[1],f=a[i],i++,d+="C"+(e[0]+g[0])+","+(e[1]+g[1])+","+(f[0]-h[0])+","+(f[1]-h[1])+","+f[0]+","+f[1];for(var j=2;j0;j--)e.push(c(f)*j)}else{for(;fi;g--);e=e.slice(f,g)}return e},d.tickFormat=function(a,e){arguments.length<2&&(e=bG);if(arguments.length<1)return e;var f=a/d.ticks().length,g=b===bI?(h=-1e-15,Math.floor):(h=1e-15,Math.ceil),h;return function(a){return a/c(g(b(a)+h))=c.delay&&(c.flush=c.callback(a)),c=c.next;var d=br()-b;d>24?(isFinite(d)&&(clearTimeout(bp),bp=setTimeout(bq,d)),bo=0):(bo=1,bs(bq))}function bm(a){for(var b=0,c=this.length;b0)k[--g].call(l,f);if(c>=1){r(),bk=b,d.end.dispatch.call(l,h,i),bk=0;return 1}}function p(a){if(o.active>b)return r();o.active=b;for(var e in c)(e=c[e].call(l,h,i))&&k.push(e);d.start.dispatch.call(l,h,i),q(a)||d3.timer(q,0,f);return 1}var 
k=[],l=this,m=a[j][i].delay,n=a[j][i].duration,o=l.__transition__||(l.__transition__={active:0,count:0});++o.count,m<=g?p(g):d3.timer(p,m,f)});return 1},0,f);return a}function be(a){arguments.length||(a=d3.ascending);return function(b,c){return a(b&&b.__data__,c&&c.__data__)}}function bc(a){h(a,bd);return a}function bb(a){return{__data__:a}}function ba(a){return function(){return Z(a,this)}}function _(a){return function(){return Y(a,this)}}function X(a){h(a,$);return a}function W(a,b,c){function g(a){return Math.round(f(a)*255)}function f(a){a>360?a-=360:a<0&&(a+=360);return a<60?d+(e-d)*a/60:a<180?e:a<240?d+(e-d)*(240-a)/60:d}var d,e;a=a%360,a<0&&(a+=360),b=b<0?0:b>1?1:b,c=c<0?0:c>1?1:c,e=c<=.5?c*(1+b):c+b-c*b,d=2*c-e;return M(g(a+120),g(a),g(a-120))}function V(a,b,c){this.h=a,this.s=b,this.l=c}function U(a,b,c){return new V(a,b,c)}function R(a){var b=parseFloat(a);return a.charAt(a.length-1)==="%"?Math.round(b*2.55):b}function Q(a,b,c){var d=Math.min(a/=255,b/=255,c/=255),e=Math.max(a,b,c),f=e-d,g,h,i=(e+d)/2;f?(h=i<.5?f/(e+d):f/(2-e-d),a==e?g=(b-c)/f+(b=1?1:a(b)}}function r(a){var b=a.lastIndexOf("."),c=b>=0?a.substring(b):(b=a.length,""),d=[];while(b>0)d.push(a.substring(b-=3,b+3));return d.reverse().join(",")+c}function q(a){return a+""}function n(a){var b={},c=[];b.add=function(a){for(var d=0;db?1:a>=b?0:NaN},d3.descending=function(a,b){return ba?1:b>=a?0:NaN},d3.min=function(a,b){var c=-1,d=a.length,e,f;if(arguments.length===1){while(++cf&&(e=f)}else{while(++cf&&(e=f)}return e},d3.max=function(a,b){var c=-1,d=a.length,e,f;if(arguments.length===1){while(++ce&&(e=f)}else{while(++ce&&(e=f)}return e},d3.sum=function(a,b){var c=0,d=a.length,e,f=-1;if(arguments.length===1)while(++f>1;a[e]>1;b0&&(e=f);return e},d3.last=function(a,b){var c=0,d=a.length,e=a[0],f;arguments.length===1&&(b=d3.ascending);while(++c=b.length)return a;var e=[],f=c[d++],h;for(h in a)e.push({key:h,values:g(a[h],d)});f&&e.sort(function(a,b){return f(a.key,b.key)});return e}function f(c,g){if(g>=b.length)return e?e.call(a,c):d?c.sort(d):c;var h=-1,i=c.length,j=b[g++],k,l,m={};while(++hb)d.push(f);else while((f=a+c*++e)=^]))?([+\- ])?(#)?(0)?([0-9]+)?(,)?(\.[0-9]+)?([a-zA-Z%])?/,p={g:function(a,b){return a.toPrecision(b)},e:function(a,b){return a.toExponential(b)},f:function(a,b){return a.toFixed(b)},r:function(a,b){var c=a?1+Math.floor(1e-15+Math.log(a)/Math.LN10):1;return d3.round(a,b-c).toFixed(Math.max(0,Math.min(20,b-c)))}},s=A(2),t=A(3),u={linear:function(){return z},poly:A,quad:function(){return s},cubic:function(){return t},sin:function(){return B},exp:function(){return C},circle:function(){return D},elastic:E,back:F,bounce:function(){return G}},v={"in":function(a){return a},out:x,"in-out":y,"out-in":function(a){return y(x(a))}};d3.ease=function(a){var b=a.indexOf("-"),c=b>=0?a.substring(0,b):a,d=b>=0?a.substring(b+1):"in";return w(v[d](u[c].apply(null,Array.prototype.slice.call(arguments,1))))},d3.event=null,d3.interpolate=function(a,b){var c=d3.interpolators.length,d;while(--c>=0&&!(d=d3.interpolators[c](a,b)));return d},d3.interpolateNumber=function(a,b){b-=a;return function(c){return a+b*c}},d3.interpolateRound=function(a,b){b-=a;return function(c){return Math.round(a+b*c)}},d3.interpolateString=function(a,b){var c,d,e,f=0,g=0,h=[],i=[],j,k;H.lastIndex=0;for(d=0;c=H.exec(b);++d)c.index&&h.push(b.substring(f,g=c.index)),i.push({i:h.length,x:c[0]}),h.push(null),f=H.lastIndex;f0&&(a=a.substring(0,e));return arguments.length<2?(e=this.node()[d])&&e._:this.each(function(e,f){function h(a){var 
c=d3.event;d3.event=a;try{b.call(g,g.__data__,f)}finally{d3.event=c}}var g=this;g[d]&&g.removeEventListener(a,g[d],c),b&&g.addEventListener(a,g[d]=h,c),h._=b})},$.each=function(a){for(var b=-1,c=this.length;++b=bT?e?"M0,"+f+"A"+f+","+f+" 0 1,1 0,"+ -f+"A"+f+","+f+" 0 1,1 0,"+f+"M0,"+e+"A"+e+","+e+" 0 1,0 0,"+ -e+"A"+e+","+e+" 0 1,0 0,"+e+"Z":"M0,"+f+"A"+f+","+f+" 0 1,1 0,"+ -f+"A"+f+","+f+" 0 1,1 0,"+f+"Z":e?"M"+f*k+","+f*l+"A"+f+","+f+" 0 "+j+",1 "+f*m+","+f*n+"L"+e*m+","+e*n+"A"+e+","+e+" 0 "+j+",0 "+e*k+","+e*l+"Z":"M"+f*k+","+f*l+"A"+f+","+f+" 0 "+j+",1 "+f*m+","+f*n+"L0,0"+"Z"}var a=bU,b=bV,c=bW,d=bX;e.innerRadius=function(b){if(!arguments.length)return a;a=d3.functor(b);return e},e.outerRadius=function(a){if(!arguments.length)return b;b=d3.functor(a);return e},e.startAngle=function(a){if(!arguments.length)return c;c=d3.functor(a);return e},e.endAngle=function(a){if(!arguments.length)return d;d=d3.functor(a);return e},e.centroid=function(){var e=(a.apply(this,arguments)+b.apply(this,arguments))/2,f=(c.apply(this,arguments)+d.apply(this,arguments))/2+bS;return[Math.cos(f)*e,Math.sin(f)*e]};return e};var bS=-Math.PI/2,bT=2*Math.PI-1e-6;d3.svg.line=function(){return bY(Object)};var ca={linear:cb,"step-before":cc,"step-after":cd,basis:cj,"basis-open":ck,"basis-closed":cl,bundle:cm,cardinal:cg,"cardinal-open":ce,"cardinal-closed":cf,monotone:cv},co=[0,2/3,1/3,0],cp=[0,1/3,2/3,0],cq=[0,1/6,2/3,1/6];d3.svg.line.radial=function(){var a=bY(cw);a.radius=a.x,delete a.x,a.angle=a.y,delete a.y;return a},d3.svg.area=function(){return cx(Object)},d3.svg.area.radial=function(){var a=cx(cw);a.radius=a.x,delete a.x,a.innerRadius=a.x0,delete a.x0,a.outerRadius=a.x1,delete a.x1,a.angle=a.y,delete a.y,a.startAngle=a.y0,delete a.y0,a.endAngle=a.y1,delete a.y1;return a},d3.svg.chord=function(){function j(a,b,c,d){return"Q 0,0 "+d}function i(a,b){return"A"+a+","+a+" 0 0,1 "+b}function h(a,b){return a.a0==b.a0&&a.a1==b.a1}function g(a,b,f,g){var h=b.call(a,f,g),i=c.call(a,h,g),j=d.call(a,h,g)+bS,k=e.call(a,h,g)+bS;return{r:i,a0:j,a1:k,p0:[i*Math.cos(j),i*Math.sin(j)],p1:[i*Math.cos(k),i*Math.sin(k)]}}function f(c,d){var e=g(this,a,c,d),f=g(this,b,c,d);return"M"+e.p0+i(e.r,e.p1)+(h(e,f)?j(e.r,e.p1,e.r,e.p0):j(e.r,e.p1,f.r,f.p0)+i(f.r,f.p1)+j(f.r,f.p1,e.r,e.p0))+"Z"}var a=cA,b=cB,c=cC,d=bW,e=bX;f.radius=function(a){if(!arguments.length)return c;c=d3.functor(a);return f},f.source=function(b){if(!arguments.length)return a;a=d3.functor(b);return f},f.target=function(a){if(!arguments.length)return b;b=d3.functor(a);return f},f.startAngle=function(a){if(!arguments.length)return d;d=d3.functor(a);return f},f.endAngle=function(a){if(!arguments.length)return e;e=d3.functor(a);return f};return f},d3.svg.diagonal=function(){function d(d,e){var f=a.call(this,d,e),g=b.call(this,d,e),h=(f.y+g.y)/2,i=[f,{x:f.x,y:h},{x:g.x,y:h},g];i=i.map(c);return"M"+i[0]+"C"+i[1]+" "+i[2]+" "+i[3]}var a=cA,b=cB,c=cF;d.source=function(b){if(!arguments.length)return a;a=d3.functor(b);return d},d.target=function(a){if(!arguments.length)return b;b=d3.functor(a);return d},d.projection=function(a){if(!arguments.length)return c;c=a;return d};return d},d3.svg.diagonal.radial=function(){var a=d3.svg.diagonal(),b=cF,c=a.projection;a.projection=function(a){return arguments.length?c(cG(b=a)):b};return a},d3.svg.mouse=function(a){return cI(a,d3.event)};var cH=/WebKit/.test(navigator.userAgent)?-1:0;d3.svg.touches=function(a){var b=d3.event.touches;return b?d(b).map(function(b){var c=cI(a,b);c.identifier=b.identifier;return 
c}):[]},d3.svg.symbol=function(){function c(c,d){return(cL[a.call(this,c,d)]||cL.circle)(b.call(this,c,d))}var a=cK,b=cJ;c.type=function(b){if(!arguments.length)return a;a=d3.functor(b);return c},c.size=function(a){if(!arguments.length)return b;b=d3.functor(a);return c};return c};var cL={circle:function(a){var b=Math.sqrt(a/Math.PI);return"M0,"+b+"A"+b+","+b+" 0 1,1 0,"+ -b+"A"+b+","+b+" 0 1,1 0,"+b+"Z"},cross:function(a){var b=Math.sqrt(a/5)/2;return"M"+ -3*b+","+ -b+"H"+ -b+"V"+ -3*b+"H"+b+"V"+ -b+"H"+3*b+"V"+b+"H"+b+"V"+3*b+"H"+ -b+"V"+b+"H"+ -3*b+"Z"},diamond:function(a){var b=Math.sqrt(a/(2*cN)),c=b*cN;return"M0,"+ -b+"L"+c+",0"+" 0,"+b+" "+ -c+",0"+"Z"},square:function(a){var b=Math.sqrt(a)/2;return"M"+ -b+","+ -b+"L"+b+","+ -b+" "+b+","+b+" "+ -b+","+b+"Z"},"triangle-down":function(a){var b=Math.sqrt(a/cM),c=b*cM/2;return"M0,"+c+"L"+b+","+ -c+" "+ -b+","+ -c+"Z"},"triangle-up":function(a){var b=Math.sqrt(a/cM),c=b*cM/2;return"M0,"+ -c+"L"+b+","+c+" "+ -b+","+c+"Z"}};d3.svg.symbolTypes=d3.keys(cL);var cM=Math.sqrt(3),cN=Math.tan(30*Math.PI/180);d3.svg.axis=function(){function j(j){j.each(function(k,l,m){function F(a){return j.delay?a.transition().delay(j[m][l].delay).duration(j[m][l].duration).ease(j.ease()):a}var n=d3.select(this),o=a.ticks.apply(a,g),p=h==null?a.tickFormat.apply(a,g):h,q=cQ(a,o,i),r=n.selectAll(".minor").data(q,String),s=r.enter().insert("svg:line","g").attr("class","tick minor").style("opacity",1e-6),t=F(r.exit()).style("opacity",1e-6).remove(),u=F(r).style("opacity",1),v=n.selectAll("g").data(o,String),w=v.enter().insert("svg:g","path").style("opacity",1e-6),x=F(v.exit()).style("opacity",1e-6).remove(),y=F(v).style("opacity",1),z,A=bu(a.range()),B=n.selectAll(".domain").data([0]),C=B.enter().append("svg:path").attr("class","domain"),D=F(B),E=this.__chart__||a;this.__chart__=a.copy(),w.append("svg:line").attr("class","tick"),w.append("svg:text"),y.select("text").text(p);switch(b){case"bottom":z=cO,u.attr("y2",d),w.select("text").attr("dy",".71em").attr("text-anchor","middle"),y.select("line").attr("y2",c),y.select("text").attr("y",Math.max(c,0)+f),D.attr("d","M"+A[0]+","+e+"V0H"+A[1]+"V"+e);break;case"top":z=cO,u.attr("y2",-d),w.select("text").attr("text-anchor","middle"),y.select("line").attr("y2",-c),y.select("text").attr("y",-(Math.max(c,0)+f)),D.attr("d","M"+A[0]+","+ -e+"V0H"+A[1]+"V"+ -e);break;case"left":z=cP,u.attr("x2",-d),w.select("text").attr("dy",".32em").attr("text-anchor","end"),y.select("line").attr("x2",-c),y.select("text").attr("x",-(Math.max(c,0)+f)),D.attr("d","M"+ -e+","+A[0]+"H0V"+A[1]+"H"+ -e);break;case"right":z=cP,u.attr("x2",d),w.select("text").attr("dy",".32em"),y.select("line").attr("x2",c),y.select("text").attr("x",Math.max(c,0)+f),D.attr("d","M"+e+","+A[0]+"H0V"+A[1]+"H"+e)}w.call(z,E),y.call(z,a),x.call(z,a),s.call(z,E),u.call(z,a),t.call(z,a)})}var a=d3.scale.linear(),b="bottom",c=6,d=6,e=6,f=3,g=[10],h,i=0;j.scale=function(b){if(!arguments.length)return a;a=b;return j},j.orient=function(a){if(!arguments.length)return b;b=a;return j},j.ticks=function(){if(!arguments.length)return g;g=arguments;return j},j.tickFormat=function(a){if(!arguments.length)return h;h=a;return j},j.tickSize=function(a,b,f){if(!arguments.length)return c;var g=arguments.length-1;c=+a,d=g>1?+b:c,e=g>0?+arguments[g]:c;return j},j.tickPadding=function(a){if(!arguments.length)return f;f=+a;return j},j.tickSubdivide=function(a){if(!arguments.length)return i;i=+a;return j};return j},d3.behavior={},d3.behavior.drag=function(){function 
d(){c.apply(this,arguments),cY("dragstart")}function c(){cR=a,cS=d3.event.target,cV=cZ((cT=this).parentNode),cW=0,cU=arguments}function b(){this.on("mousedown.drag",d).on("touchstart.drag",d),d3.select(window).on("mousemove.drag",c$).on("touchmove.drag",c$).on("mouseup.drag",c_,!0).on("touchend.drag",c_,!0).on("click.drag",da,!0)}var a=d3.dispatch("drag","dragstart","dragend");b.on=function(c,d){a[c].add(d);return b};return b};var cR,cS,cT,cU,cV,cW,cX;d3.behavior.zoom=function(){function h(){d.apply(this,arguments);var b=dr(),c,e=Date.now();b.length===1&&e-dg<300&&dw(1+Math.floor(a[2]),c=b[0],df[c.identifier]),dg=e}function g(){d.apply(this,arguments);var b=d3.svg.mouse(dk);dw(d3.event.shiftKey?Math.ceil(a[2]-1):Math.floor(a[2]+1),b,dp(b))}function f(){d.apply(this,arguments),de||(de=dp(d3.svg.mouse(dk))),dw(dq()+a[2],d3.svg.mouse(dk),de)}function e(){d.apply(this,arguments),dd=dp(d3.svg.mouse(dk)),dm=!1,d3.event.preventDefault(),window.focus()}function d(){dh=a,di=b.zoom.dispatch,dj=d3.event.target,dk=this,dl=arguments}function c(){this.on("mousedown.zoom",e).on("mousewheel.zoom",f).on("DOMMouseScroll.zoom",f).on("dblclick.zoom",g).on("touchstart.zoom",h),d3.select(window).on("mousemove.zoom",dt).on("mouseup.zoom",du).on("touchmove.zoom",ds).on("touchend.zoom",dr).on("click.zoom",dv,!0)}var a=[0,0,0],b=d3.dispatch("zoom");c.on=function(a,d){b[a].add(d);return c};return c};var dc,dd,de,df={},dg=0,dh,di,dj,dk,dl,dm,dn})() \ No newline at end of file -- cgit v1.2.3-54-g00ecf From d5063bd1d2cae79df7ce6e826c7413fed61ff9db Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 5 Oct 2011 15:45:44 -0500 Subject: Add package visualizations page Why the hell not? Have fun clicking all the pretty buttons. Signed-off-by: Dan McGee --- media/archweb.css | 27 ++++++++++ media/visualize.js | 112 +++++++++++++++++++++++++++++++++++++++++ settings.py | 1 + templates/public/index.html | 8 +-- templates/visualize/index.html | 43 ++++++++++++++++ urls.py | 1 + visualize/__init__.py | 0 visualize/models.py | 0 visualize/tests.py | 0 visualize/urls.py | 9 ++++ visualize/views.py | 59 ++++++++++++++++++++++ 11 files changed, 256 insertions(+), 4 deletions(-) create mode 100644 media/visualize.js create mode 100644 templates/visualize/index.html create mode 100644 visualize/__init__.py create mode 100644 visualize/models.py create mode 100644 visualize/tests.py create mode 100644 visualize/urls.py create mode 100644 visualize/views.py diff --git a/media/archweb.css b/media/archweb.css index 0cadd7a7..eb0f0ca1 100644 --- a/media/archweb.css +++ b/media/archweb.css @@ -948,3 +948,30 @@ ul.admin-actions { #archnavbar.anb-download ul li#anb-download a { color: white !important; } + +/* visualizations page */ +.visualize-buttons { + margin: 0.5em 0.33em; +} + + .visualize-buttons button.active { + depressed: true; + } + +.visualize-chart { + position: relative; + height: 500px; + margin: 0.33em; +} + +#visualize-archrepo .treemap-cell { + border: solid 1px white; + overflow: hidden; + position: absolute; +} + + #visualize-archrepo .treemap-cell span { + padding: 3px; + font-size: 0.85em; + line-height: 1em; + } diff --git a/media/visualize.js b/media/visualize.js new file mode 100644 index 00000000..c1ea598b --- /dev/null +++ b/media/visualize.js @@ -0,0 +1,112 @@ +function packages_treemap(chart_id, orderings, default_order) { + var jq_div = $(chart_id), + color = d3.scale.category20(); + key_func = function(d) { return d.key; }, + value_package_count = function(d) { return d.count; }; + + var treemap = 
d3.layout.treemap() + .size([jq_div.width(), jq_div.height()]) + /*.sticky(true)*/ + .value(value_package_count) + .sort(function(a, b) { return a.key < b.key; }) + .children(function(d) { return d.data; }); + + var cell_html = function(d) { + if (d.children) { + return ""; + } + return "" + d.name + ": " + treemap.value()(d) + ""; + }; + + var d3_div = d3.select(jq_div.get(0)); + + var prop_px = function(prop, offset) { + return function(d) { + var dist = d[prop] + offset; + if (dist > 0) return dist + "px"; + else return "0px"; + }; + }; + + var cell = function() { + /* the -1 offset comes from the border width we use in the CSS */ + this.style("left", prop_px("x", 0)).style("top", prop_px("y", 0)) + .style("width", prop_px("dx", -1)).style("height", prop_px("dy", -1)); + }; + + var fetch_for_ordering = function(order) { + d3.json(order.url, function(json) { + var nodes = d3_div.data([json]).selectAll("div").data(treemap.nodes, key_func); + /* start out new nodes in the center of the picture area */ + var w_center = jq_div.width() / 2; + var h_center = jq_div.height() / 2; + nodes.enter().append("div") + .attr("class", "treemap-cell") + .attr("title", function(d) { return d.name; }) + .style("left", w_center + "px").style("top", h_center + "px") + .style("width", "0px").style("height", "0px") + .style("display", function(d) { return d.children ? "none" : null; }) + .html(cell_html); + nodes.transition().duration(1500) + .style("background-color", function(d) { return d.children ? null : color(d[order.color_attr]); }) + .call(cell); + nodes.exit().transition().duration(1500).remove(); + }); + }; + + /* start the callback for the default order */ + fetch_for_ordering(orderings[default_order]); + + var make_scale_button = function(name, valuefunc) { + var button_id = chart_id + "-" + name; + /* upon button click, attach new value function and redraw all boxes + * accordingly */ + d3.select(button_id).on("click", function() { + d3_div.selectAll("div") + .data(treemap.value(valuefunc), key_func) + .html(cell_html) + .transition().duration(1500).call(cell); + + /* drop off the '#' sign to convert id to a class prefix */ + d3.selectAll("." + chart_id.substring(1) + "-scaleby") + .classed("active", false); + d3.select(button_id).classed("active", true); + }); + }; + + /* each scale button tweaks our value, e.g. net size function */ + make_scale_button("count", value_package_count); + make_scale_button("flagged", function(d) { return d.flagged; }); + make_scale_button("csize", function(d) { return d.csize; }); + make_scale_button("isize", function(d) { return d.isize; }); + + var make_group_button = function(name, order) { + var button_id = chart_id + "-" + name; + d3.select(button_id).on("click", function() { + fetch_for_ordering(order); + + /* drop off the '#' sign to convert id to a class prefix */ + d3.selectAll("." 
+ chart_id.substring(1) + "-groupby") + .classed("active", false); + d3.select(button_id).classed("active", true); + }); + }; + + $.each(orderings, function(k, v) { + make_group_button(k, v); + }); + + var resize_timeout = null; + var real_resize = function() { + resize_timeout = null; + d3_div.selectAll("div") + .data(treemap.size([jq_div.width(), jq_div.height()]), key_func) + .call(cell); + }; + $(window).resize(function() { + if (resize_timeout) { + clearTimeout(resize_timeout); + } + resize_timeout = setTimeout(real_resize, 200); + }); +} diff --git a/settings.py b/settings.py index 18437098..51f9fcf6 100644 --- a/settings.py +++ b/settings.py @@ -109,6 +109,7 @@ 'public', 'south', # database migration support 'releng', + 'visualize', ) PGP_SERVER = 'pgp.mit.edu:11371' diff --git a/templates/public/index.html b/templates/public/index.html index bea19e0f..b63876ac 100644 --- a/templates/public/index.html +++ b/templates/public/index.html @@ -26,10 +26,10 @@

        A simple, lightweight distribution

        Our strong community is diverse and helpful, and we pride ourselves on
        the range of skillsets and uses for Arch that stem from it. Please
-       check out our forums
+       check out our forums and mailing lists
        to get your feet wet. Also glance through our wiki if you want to
        learn more about Arch.

        @@ -174,8 +174,8 @@

        Development

        Packages
      • Package Groups
      • Bug Tracker
+     • Visualizations
      • SVN Repositories

diff --git a/templates/visualize/index.html b/templates/visualize/index.html
new file mode 100644
--- /dev/null
+++ b/templates/visualize/index.html
+       Visualizations of Packaging Data
+
+       Package Treemap
+
+       Scale Using: [four buttons: package count, flagged count,
+                     compressed size, installed size]
+       Group By: [two buttons: architecture, repository]
+       [treemap chart container, styled as #visualize-archrepo in archweb.css]

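A minimal sketch of how the template presumably wires the chart up: the script
tags below load jQuery via the {% jquery %} tag and, presumably, d3.js,
d3.layout.js and visualize.js, then make a call along these lines. The chart id
matches the #visualize-archrepo rules added to archweb.css and the URLs match
visualize/urls.py; the ordering keys, the color_attr values and the default
grouping are assumptions:

    $(document).ready(function() {
        var orderings = {
            "repo": { url: "/visualize/by_repo/", color_attr: "arch" },
            "arch": { url: "/visualize/by_arch/", color_attr: "repo" }
        };
        /* packages_treemap() derives button ids from the chart id, so this
           assumes buttons such as #visualize-archrepo-count (scale by package
           count) and #visualize-archrepo-repo (group by repository) exist in
           the markup above */
        packages_treemap("#visualize-archrepo", orderings, "repo");
    });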
    + +{% load cdn %}{% jquery %} + + + + + +{% endblock %} diff --git a/urls.py b/urls.py index c9faf165..cdae51bf 100644 --- a/urls.py +++ b/urls.py @@ -76,6 +76,7 @@ (r'^packages/', include('packages.urls')), (r'^releng/', include('releng.urls')), (r'^todo/', include('todolists.urls')), + (r'^visualize/', include('visualize.urls')), (r'^opensearch/packages/$', 'packages.views.opensearch', {}, 'opensearch-packages'), (r'^todolists/$','todolists.views.public_list'), diff --git a/visualize/__init__.py b/visualize/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/visualize/models.py b/visualize/models.py new file mode 100644 index 00000000..e69de29b diff --git a/visualize/tests.py b/visualize/tests.py new file mode 100644 index 00000000..e69de29b diff --git a/visualize/urls.py b/visualize/urls.py new file mode 100644 index 00000000..57ee0626 --- /dev/null +++ b/visualize/urls.py @@ -0,0 +1,9 @@ +from django.conf.urls.defaults import patterns + +urlpatterns = patterns('visualize.views', + (r'^$', 'index', {}, 'visualize-index'), + (r'^by_arch/$', 'by_arch', {}, 'visualize-byarch'), + (r'^by_repo/$', 'by_repo', {}, 'visualize-byrepo'), +) + +# vim: set ts=4 sw=4 et: diff --git a/visualize/views.py b/visualize/views.py new file mode 100644 index 00000000..68f5d4a5 --- /dev/null +++ b/visualize/views.py @@ -0,0 +1,59 @@ +from django.db.models import Count, Sum +from django.http import HttpResponse +from django.utils import simplejson +from django.views.decorators.cache import cache_page +from django.views.generic.simple import direct_to_template + +from main.models import Package, Arch, Repo + +def index(request): + return direct_to_template(request, 'visualize/index.html', {}) + +def arch_repo_data(): + qs = Package.objects.select_related().values( + 'arch__name', 'repo__name').annotate( + count=Count('pk'), csize=Sum('compressed_size'), + isize=Sum('installed_size'), + flagged=Count('flag_date')).order_by() + arches = Arch.objects.values_list('name', flat=True) + repos = Repo.objects.values_list('name', flat=True) + + # now transform these results into two mappings: one ordered (repo, arch), + # and one ordered (arch, repo). 
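+    # (e.g. a hypothetical row {'arch__name': 'i686', 'repo__name': 'core',
+    # 'count': 2000, 'flagged': 150, ...} is appended below to both
+    # arch_groups['i686']['data'] and repo_groups['core']['data'])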
+ arch_groups = dict((a, { 'name': a, 'key': ':%s' % a, 'arch': a, 'repo': None, 'data': [] }) for a in arches) + repo_groups = dict((r, { 'name': r, 'key': '%s:' % r, 'arch': None, 'repo': r, 'data': [] }) for r in repos) + for row in qs: + arch = row['arch__name'] + repo = row['repo__name'] + values = { + 'arch': arch, + 'repo': repo, + 'name': '%s (%s)' % (repo, arch), + 'key': '%s:%s' % (repo, arch), + 'csize': row['csize'], + 'isize': row['isize'], + 'count': row['count'], + 'flagged': row['flagged'], + } + arch_groups[arch]['data'].append(values) + repo_groups[repo]['data'].append(values) + + data = { + 'by_arch': { 'name': 'Architectures', 'data': arch_groups.values() }, + 'by_repo': { 'name': 'Repositories', 'data': repo_groups.values() }, + } + return data + +@cache_page(1800) +def by_arch(request): + data = arch_repo_data() + to_json = simplejson.dumps(data['by_arch'], ensure_ascii=False) + return HttpResponse(to_json, mimetype='application/json') + +@cache_page(1800) +def by_repo(request): + data = arch_repo_data() + to_json = simplejson.dumps(data['by_repo'], ensure_ascii=False) + return HttpResponse(to_json, mimetype='application/json') + +# vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From f867f9469f3baead53dbf8dc7547a318541efece Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 5 Oct 2011 15:51:06 -0500 Subject: JSLint cleanups Signed-off-by: Dan McGee --- media/visualize.js | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/media/visualize.js b/media/visualize.js index c1ea598b..c47033ab 100644 --- a/media/visualize.js +++ b/media/visualize.js @@ -1,8 +1,8 @@ function packages_treemap(chart_id, orderings, default_order) { var jq_div = $(chart_id), color = d3.scale.category20(); - key_func = function(d) { return d.key; }, - value_package_count = function(d) { return d.count; }; + var key_func = function(d) { return d.key; }; + var value_package_count = function(d) { return d.count; }; var treemap = d3.layout.treemap() .size([jq_div.width(), jq_div.height()]) @@ -23,8 +23,12 @@ function packages_treemap(chart_id, orderings, default_order) { var prop_px = function(prop, offset) { return function(d) { var dist = d[prop] + offset; - if (dist > 0) return dist + "px"; - else return "0px"; + if (dist > 0) { + return dist + "px"; + } + else { + return "0px"; + } }; }; @@ -36,10 +40,11 @@ function packages_treemap(chart_id, orderings, default_order) { var fetch_for_ordering = function(order) { d3.json(order.url, function(json) { - var nodes = d3_div.data([json]).selectAll("div").data(treemap.nodes, key_func); + var nodes = d3_div.data([json]).selectAll("div") + .data(treemap.nodes, key_func); /* start out new nodes in the center of the picture area */ var w_center = jq_div.width() / 2; - var h_center = jq_div.height() / 2; + h_center = jq_div.height() / 2; nodes.enter().append("div") .attr("class", "treemap-cell") .attr("title", function(d) { return d.name; }) -- cgit v1.2.3-54-g00ecf From f6527810b0d28db8a07a26e84148d1229b18d29e Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Thu, 6 Oct 2011 10:20:30 -0500 Subject: Make border style and color consistent on different pages Use dotted and #bbb where appropriate as opposed to the varying styles we had before. Addresses FS#25834. 
Signed-off-by: Dan McGee --- media/archweb.css | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/media/archweb.css b/media/archweb.css index eb0f0ca1..ea2f3fb5 100644 --- a/media/archweb.css +++ b/media/archweb.css @@ -273,7 +273,7 @@ dl { margin-bottom: 4px; padding: 8px 0px 4px; font-weight: bold; - border-top: 1px solid #888; + border-top: 1px dotted #bbb; } dl dt { @@ -568,7 +568,7 @@ div#donor-list ul { /* download page */ #arch-downloads h3 { - border-bottom: 1px dotted #aaa; + border-bottom: 1px dotted #bbb; } table#download-torrents .cpu-arch { @@ -840,7 +840,7 @@ form#dash-pkg-notify { padding: 1em 0 0; margin-top: 1em; font-size: 0.85em; - border-top: 1px dotted #aaa; + border-top: 1px dotted #bbb; } form#dash-pkg-notify label { -- cgit v1.2.3-54-g00ecf From a01a11cfad84bd44c1f5aeffcc0665bd93889e1d Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Fri, 7 Oct 2011 11:31:55 -0500 Subject: Cache static /jsi18n/ resource for one week Rather than the default middleware page cache length of only minutes. This will save clients a lot of requests when browsing packages. Signed-off-by: Dan McGee --- urls.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/urls.py b/urls.py index cdae51bf..edd51b52 100644 --- a/urls.py +++ b/urls.py @@ -5,6 +5,8 @@ from django.contrib import admin from django.views.generic import TemplateView +from django.views.decorators.cache import cache_page +from django.views.i18n import null_javascript_catalog from feeds import PackageFeed, NewsFeed import sitemaps @@ -65,7 +67,8 @@ # Includes and other remaining stuff urlpatterns += patterns('', - (r'^jsi18n/$', 'django.views.i18n.null_javascript_catalog'), + # cache this static JS resource for 1 week rather than default 5 minutes + (r'^jsi18n/$', cache_page(604800)(null_javascript_catalog)), (r'^admin/', include(admin.site.urls)), (r'^devel/', include('devel.urls')), (r'^feeds/', include(feeds_patterns)), -- cgit v1.2.3-54-g00ecf From 71e57570c262fffb11ca6e0dc97342119198f740 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Tue, 11 Oct 2011 19:29:15 -0500 Subject: Pylint suggested and other cleanups Signed-off-by: Dan McGee --- main/middleware.py | 2 +- mirrors/admin.py | 5 +++-- news/views.py | 2 +- packages/models.py | 2 +- packages/views.py | 4 ++-- public/utils.py | 2 +- public/views.py | 11 +++++------ urls.py | 15 ++++++++------- visualize/views.py | 14 ++++++++++++-- 9 files changed, 34 insertions(+), 23 deletions(-) diff --git a/main/middleware.py b/main/middleware.py index f893c795..f417b545 100644 --- a/main/middleware.py +++ b/main/middleware.py @@ -4,7 +4,7 @@ from django.conf import settings from django.core.cache import cache -from django.utils.cache import get_cache_key, learn_cache_key, patch_response_headers, get_max_age +from django.utils.cache import learn_cache_key, patch_response_headers, get_max_age class UpdateCacheMiddleware(object): """ diff --git a/mirrors/admin.py b/mirrors/admin.py index 0632872d..3786d8d2 100644 --- a/mirrors/admin.py +++ b/mirrors/admin.py @@ -33,14 +33,15 @@ class MirrorUrlInlineAdmin(admin.TabularInline): extra = 3 # ripped off from django.forms.fields, adding netmask ability -ipv4nm_re = re.compile(r'^(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}(/(\d|[1-2]\d|3[0-2])){0,1}$') +IPV4NM_RE = re.compile(r'^(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}(/(\d|[1-2]\d|3[0-2])){0,1}$') + class IPAddressNetmaskField(forms.fields.RegexField): default_error_messages = { 'invalid': u'Enter a valid 
IPv4 address, possibly including netmask.', } def __init__(self, *args, **kwargs): - super(IPAddressNetmaskField, self).__init__(ipv4nm_re, *args, **kwargs) + super(IPAddressNetmaskField, self).__init__(IPV4NM_RE, *args, **kwargs) class MirrorRsyncForm(forms.ModelForm): class Meta: diff --git a/news/views.py b/news/views.py index 990ee154..7ac009ba 100644 --- a/news/views.py +++ b/news/views.py @@ -32,7 +32,7 @@ def news_list(request): class NewsForm(forms.ModelForm): class Meta: model = News - exclude=('id', 'slug', 'author', 'postdate') + exclude = ('id', 'slug', 'author', 'postdate') def find_unique_slug(newsitem): '''Attempt to find a unique slug for this news item.''' diff --git a/packages/models.py b/packages/models.py index d2fe1878..4cd3b1b5 100644 --- a/packages/models.py +++ b/packages/models.py @@ -62,7 +62,7 @@ def packages(self): # TODO: delayed import to avoid circular reference from main.models import Package return Package.objects.normal().filter(pkgbase=self.pkgbase, - pkgver=self.pkgver, pkgrel=self.pkgrel, epoch=pkg.epoch, + pkgver=self.pkgver, pkgrel=self.pkgrel, epoch=self.epoch, arch=self.arch, repo=self.repo) @property diff --git a/packages/views.py b/packages/views.py index 61e4d290..a8216c7a 100644 --- a/packages/views.py +++ b/packages/views.py @@ -19,7 +19,7 @@ from datetime import datetime from operator import attrgetter -import string +from string import Template from urllib import urlencode from main.models import Package, PackageFile, Arch, Repo @@ -575,7 +575,7 @@ def download(request, name, repo, arch): 'repo': pkg.repo.name.lower(), 'file': pkg.filename, } - url = string.Template('${host}${repo}/os/${arch}/${file}').substitute(values) + url = Template('${host}${repo}/os/${arch}/${file}').substitute(values) return redirect(url) def arch_differences(request): diff --git a/public/utils.py b/public/utils.py index 5900c674..30c76ac1 100644 --- a/public/utils.py +++ b/public/utils.py @@ -1,6 +1,6 @@ from operator import attrgetter -from main.models import Arch, Package, Repo +from main.models import Arch, Package from main.utils import cache_function, groupby_preserve_order, PackageStandin class RecentUpdate(object): diff --git a/public/views.py b/public/views.py index 821e4d5c..14dd6353 100644 --- a/public/views.py +++ b/public/views.py @@ -5,7 +5,6 @@ from django.conf import settings from django.contrib.auth.models import User -from django.db.models import Q from django.http import Http404 from django.views.generic import list_detail from django.views.generic.simple import direct_to_template @@ -34,18 +33,18 @@ def index(request): }, } -def userlist(request, type='devs'): +def userlist(request, user_type='devs'): users = User.objects.order_by('username').select_related('userprofile') - if type == 'devs': + if user_type == 'devs': users = users.filter(is_active=True, groups__name="Developers") - elif type == 'tus': + elif user_type == 'tus': users = users.filter(is_active=True, groups__name="Trusted Users") - elif type == 'fellows': + elif user_type == 'fellows': users = users.filter(is_active=False, groups__name__in=["Developers", "Trusted Users"]) else: raise Http404 - context = USER_LISTS[type].copy() + context = USER_LISTS[user_type].copy() context['users'] = users return direct_to_template(request, 'public/userlist.html', context) diff --git a/urls.py b/urls.py index edd51b52..575910ea 100644 --- a/urls.py +++ b/urls.py @@ -1,6 +1,7 @@ import os.path -from django.conf.urls.defaults import * +# Stupid Django. 
Don't remove these "unused" handler imports +from django.conf.urls.defaults import handler500, handler404, include, patterns from django.conf import settings from django.contrib import admin @@ -11,7 +12,7 @@ from feeds import PackageFeed, NewsFeed import sitemaps -sitemaps = { +our_sitemaps = { 'base': sitemaps.BaseSitemap, 'news': sitemaps.NewsSitemap, 'packages': sitemaps.PackagesSitemap, @@ -36,9 +37,9 @@ # Sitemaps urlpatterns += patterns('django.contrib.sitemaps.views', (r'^sitemap.xml$', 'index', - {'sitemaps': sitemaps}), + {'sitemaps': our_sitemaps}), (r'^sitemap-(?P
    .+)\.xml$', 'sitemap', - {'sitemaps': sitemaps}), + {'sitemaps': our_sitemaps}), ) # Authentication / Admin @@ -58,9 +59,9 @@ {}, 'page-art'), (r'^svn/$', TemplateView.as_view(template_name='public/svn.html'), {}, 'page-svn'), - (r'^developers/$', 'userlist', { 'type':'devs' }, 'page-devs'), - (r'^trustedusers/$', 'userlist', { 'type':'tus' }, 'page-tus'), - (r'^fellows/$', 'userlist', { 'type':'fellows' }, 'page-fellows'), + (r'^developers/$', 'userlist', { 'user_type':'devs' }, 'page-devs'), + (r'^trustedusers/$', 'userlist', { 'user_type':'tus' }, 'page-tus'), + (r'^fellows/$', 'userlist', { 'user_type':'fellows' }, 'page-fellows'), (r'^donate/$', 'donate', {}, 'page-donate'), (r'^download/$', 'download', {}, 'page-download'), ) diff --git a/visualize/views.py b/visualize/views.py index 68f5d4a5..f2b1d63b 100644 --- a/visualize/views.py +++ b/visualize/views.py @@ -18,10 +18,20 @@ def arch_repo_data(): arches = Arch.objects.values_list('name', flat=True) repos = Repo.objects.values_list('name', flat=True) + def build_map(name, arch, repo): + key = '%s:%s' % (repo or '', arch or '') + return { + 'key': key, + 'name': name, + 'arch': arch, + 'repo': repo, + 'data': [], + } + # now transform these results into two mappings: one ordered (repo, arch), # and one ordered (arch, repo). - arch_groups = dict((a, { 'name': a, 'key': ':%s' % a, 'arch': a, 'repo': None, 'data': [] }) for a in arches) - repo_groups = dict((r, { 'name': r, 'key': '%s:' % r, 'arch': None, 'repo': r, 'data': [] }) for r in repos) + arch_groups = dict((a, build_map(a, a, None)) for a in arches) + repo_groups = dict((r, build_map(r, None, r)) for r in repos) for row in qs: arch = row['arch__name'] repo = row['repo__name'] -- cgit v1.2.3-54-g00ecf From 0d693fa1fb788a61359415f56dc487f4aa504a55 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 12 Oct 2011 08:47:26 -0500 Subject: Add hidden name and desc fields to package search Not linked from anywhere just yet, but they are available if you know they exist and can be used in the standard query string. 
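
For instance, a (hypothetical) query combining both new fields with the
existing sort parameter:

    /packages/?name=gcc&desc=compiler&sort=-last_update
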
Signed-off-by: Dan McGee --- packages/views.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/packages/views.py b/packages/views.py index a8216c7a..a740e689 100644 --- a/packages/views.py +++ b/packages/views.py @@ -191,6 +191,8 @@ def valid_value(self, value): class PackageSearchForm(forms.Form): repo = forms.MultipleChoiceField(required=False) arch = forms.MultipleChoiceField(required=False) + name = forms.CharField(required=False) + desc = forms.CharField(required=False) q = forms.CharField(required=False) maintainer = forms.ChoiceField(required=False) packager = forms.ChoiceField(required=False) @@ -262,15 +264,24 @@ def search(request, page=None): elif form.cleaned_data['signed'] == 'Unsigned': packages = packages.filter(pgp_signature__isnull=True) - if form.cleaned_data['q']: - query = form.cleaned_data['q'] - q = Q(pkgname__icontains=query) | Q(pkgdesc__icontains=query) - packages = packages.filter(q) if form.cleaned_data['last_update']: lu = form.cleaned_data['last_update'] packages = packages.filter(last_update__gte= datetime(lu.year, lu.month, lu.day, 0, 0)) + if form.cleaned_data['name']: + name = form.cleaned_data['name'] + packages = packages.filter(pkgname__icontains=name) + + if form.cleaned_data['desc']: + desc = form.cleaned_data['desc'] + packages = packages.filter(pkgdesc__icontains=desc) + + if form.cleaned_data['q']: + query = form.cleaned_data['q'] + q = Q(pkgname__icontains=query) | Q(pkgdesc__icontains=query) + packages = packages.filter(q) + asked_limit = form.cleaned_data['limit'] if asked_limit and asked_limit < 0: limit = None -- cgit v1.2.3-54-g00ecf From 21a8fec980eb0613d3cce8aae7d6bfe6680c038a Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 12 Oct 2011 08:54:51 -0500 Subject: Package search and sort code cleanup Move initializations closer to where they are actually needed, and remove the sorting on multiple columns when a sort field is passed in. We don't do this for the default sort, so let's not do it here either. Signed-off-by: Dan McGee --- packages/views.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/packages/views.py b/packages/views.py index a740e689..dab06919 100644 --- a/packages/views.py +++ b/packages/views.py @@ -293,24 +293,22 @@ def search(request, page=None): else: form = PackageSearchForm() - current_query = request.GET.urlencode() - page_dict = { - 'search_form': form, - 'current_query': current_query - } allowed_sort = ["arch", "repo", "pkgname", "pkgbase", "compressed_size", "installed_size", "build_date", "last_update", "flag_date"] allowed_sort += ["-" + s for s in allowed_sort] sort = request.GET.get('sort', None) - # TODO: sorting by multiple fields makes using a DB index much harder if sort in allowed_sort: - packages = packages.order_by( - request.GET['sort'], 'repo', 'arch', 'pkgname') + packages = packages.order_by(sort) page_dict['sort'] = sort else: packages = packages.order_by('pkgname') + current_query = request.GET.urlencode() + page_dict = { + 'search_form': form, + 'current_query': current_query + } return list_detail.object_list(request, packages, template_name="packages/search.html", page=page, -- cgit v1.2.3-54-g00ecf From 16cac1ad88cb2fb653cf7abcb91e6f51c7510e27 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 12 Oct 2011 09:03:28 -0500 Subject: Revert movement of search initialization code I'm stupid and didn't realize it was referenced before the location I moved it to. 
Signed-off-by: Dan McGee --- packages/views.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/views.py b/packages/views.py index dab06919..5114c87f 100644 --- a/packages/views.py +++ b/packages/views.py @@ -293,6 +293,11 @@ def search(request, page=None): else: form = PackageSearchForm() + current_query = request.GET.urlencode() + page_dict = { + 'search_form': form, + 'current_query': current_query + } allowed_sort = ["arch", "repo", "pkgname", "pkgbase", "compressed_size", "installed_size", "build_date", "last_update", "flag_date"] @@ -304,11 +309,6 @@ def search(request, page=None): else: packages = packages.order_by('pkgname') - current_query = request.GET.urlencode() - page_dict = { - 'search_form': form, - 'current_query': current_query - } return list_detail.object_list(request, packages, template_name="packages/search.html", page=page, -- cgit v1.2.3-54-g00ecf From 1cde4d45da91abae2525785c4d93e9ec648ec416 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Wed, 12 Oct 2011 09:08:28 -0500 Subject: Fix invalid markup on dev dashboard Signed-off-by: Dan McGee --- templates/devel/index.html | 1 - 1 file changed, 1 deletion(-) diff --git a/templates/devel/index.html b/templates/devel/index.html index b2da70cf..d3f7ec3b 100644 --- a/templates/devel/index.html +++ b/templates/devel/index.html @@ -78,7 +78,6 @@

    Package Todo Lists

    Package Count Incomplete Count - {% for todo in todos %} -- cgit v1.2.3-54-g00ecf From d9abb069ec5976414982a2c344d7e55413224a22 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Fri, 14 Oct 2011 10:03:06 -0500 Subject: Update d3 JS resources to latest version This fixes an issue I discovered in attribute transition values in IE. Signed-off-by: Dan McGee --- media/d3.js | 270 ++++++++++++++++++++++++++++++++++--------------- media/d3.layout.js | 113 +++++++++++---------- media/d3.layout.min.js | 2 +- media/d3.min.js | 4 +- 4 files changed, 251 insertions(+), 138 deletions(-) diff --git a/media/d3.js b/media/d3.js index 9b77bec1..23edb6b1 100644 --- a/media/d3.js +++ b/media/d3.js @@ -10,7 +10,7 @@ try { d3_style_setProperty.call(this, name, value + "", priority); }; } -d3 = {version: "2.3.0"}; // semver +d3 = {version: "2.4.3"}; // semver var d3_array = d3_arraySlice; // conversion for NodeLists function d3_arrayCopy(pseudoarray) { @@ -59,6 +59,24 @@ d3.ascending = function(a, b) { d3.descending = function(a, b) { return b < a ? -1 : b > a ? 1 : b >= a ? 0 : NaN; }; +d3.mean = function(array, f) { + var n = array.length, + a, + m = 0, + i = -1, + j = 0; + if (arguments.length === 1) { + while (++i < n) if (d3_number(a = array[i])) m += (a - m) / ++j; + } else { + while (++i < n) if (d3_number(a = f.call(array, array[i], i))) m += (a - m) / ++j; + } + return j ? m : undefined; +}; +d3.median = function(array, f) { + if (arguments.length > 1) array = array.map(f); + array = array.filter(d3_number); + return array.length ? d3.quantile(array.sort(d3.ascending), .5) : undefined; +}; d3.min = function(array, f) { var i = -1, n = array.length, @@ -87,6 +105,9 @@ d3.max = function(array, f) { } return a; }; +function d3_number(x) { + return x != null && !isNaN(x); +} d3.sum = function(array, f) { var s = 0, n = array.length, @@ -472,10 +493,11 @@ d3.format = function(specifier) { comma = match[7], precision = match[8], type = match[9], - percentage = false, + scale = 1, + suffix = "", integer = false; - if (precision) precision = precision.substring(1); + if (precision) precision = +precision.substring(1); if (zfill) { fill = "0"; // TODO align = "="; @@ -484,22 +506,36 @@ d3.format = function(specifier) { switch (type) { case "n": comma = true; type = "g"; break; - case "%": percentage = true; type = "f"; break; - case "p": percentage = true; type = "r"; break; - case "d": integer = true; precision = "0"; break; + case "%": scale = 100; suffix = "%"; type = "f"; break; + case "p": scale = 100; suffix = "%"; type = "r"; break; + case "d": integer = true; precision = 0; break; + case "s": scale = -1; type = "r"; break; } + // If no precision is specified for r, fallback to general notation. + if (type == "r" && !precision) type = "g"; + type = d3_format_types[type] || d3_format_typeDefault; return function(value) { - var number = percentage ? value * 100 : +value, - negative = (number < 0) && (number = -number) ? "\u2212" : sign; // Return the empty string for floats formatted as ints. - if (integer && (number % 1)) return ""; + if (integer && (value % 1)) return ""; - // Convert the input value to the desired precision. - value = type(number, precision); + // Convert negative to positive, and record the sign prefix. + var negative = (value < 0) && (value = -value) ? "\u2212" : sign; + + // Apply the scale, computing it from the value's exponent for si format. 
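+      // (illustration: with type "s" and precision 3, 1234567 is scaled by
+      // the "M" prefix to 1.23 and the symbol is appended, giving "1.23M")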
+ if (scale < 0) { + var prefix = d3.formatPrefix(value, precision); + value *= prefix.scale; + suffix = prefix.symbol; + } else { + value *= scale; + } + + // Convert to the desired precision. + value = type(value, precision); // If the fill character is 0, the sign and group is applied after the fill. if (zfill) { @@ -516,9 +552,8 @@ d3.format = function(specifier) { var length = value.length; if (length < width) value = new Array(width - length + 1).join(fill) + value; } - if (percentage) value += "%"; - return value; + return value + suffix; }; }; @@ -529,12 +564,13 @@ var d3_format_types = { g: function(x, p) { return x.toPrecision(p); }, e: function(x, p) { return x.toExponential(p); }, f: function(x, p) { return x.toFixed(p); }, - r: function(x, p) { - var n = x ? 1 + Math.floor(1e-15 + Math.log(x) / Math.LN10) : 1; - return d3.round(x, p - n).toFixed(Math.max(0, Math.min(20, p - n))); - } + r: function(x, p) { return d3.round(x, p = d3_format_precision(x, p)).toFixed(Math.max(0, Math.min(20, p))); } }; +function d3_format_precision(x, p) { + return p - (x ? 1 + Math.floor(Math.log(x + Math.pow(10, 1 + Math.floor(Math.log(x) / Math.LN10) - p)) / Math.LN10) : 1); +} + function d3_format_typeDefault(x) { return x + ""; } @@ -547,6 +583,26 @@ function d3_format_group(value) { while (i > 0) t.push(value.substring(i -= 3, i + 3)); return t.reverse().join(",") + f; } +var d3_formatPrefixes = ["y","z","a","f","p","n","μ","m","","k","M","G","T","P","E","Z","Y"].map(d3_formatPrefix); + +d3.formatPrefix = function(value, precision) { + var i = 0; + if (value) { + if (value < 0) value *= -1; + if (precision) value = d3.round(value, d3_format_precision(value, precision)); + i = 1 + Math.floor(1e-12 + Math.log(value) / Math.LN10); + i = Math.max(-24, Math.min(24, Math.floor((i <= 0 ? i + 1 : i - 1) / 3) * 3)); + } + return d3_formatPrefixes[8 + i / 3]; +}; + +function d3_formatPrefix(d, i) { + return { + scale: Math.pow(10, (8 - i) * 3), + symbol: d + }; +} + /* * TERMS OF USE - EASING EQUATIONS * @@ -780,10 +836,10 @@ d3.interpolateRgb = function(a, b) { bg = b.g - ag, bb = b.b - ab; return function(t) { - return "rgb(" + Math.round(ar + br * t) - + "," + Math.round(ag + bg * t) - + "," + Math.round(ab + bb * t) - + ")"; + return "#" + + d3_rgb_hex(Math.round(ar + br * t)) + + d3_rgb_hex(Math.round(ag + bg * t)) + + d3_rgb_hex(Math.round(ab + bb * t)); }; }; @@ -867,7 +923,8 @@ function d3_uninterpolateClamp(a, b) { } d3.rgb = function(r, g, b) { return arguments.length === 1 - ? d3_rgb_parse("" + r, d3_rgb, d3_hsl_rgb) + ? (r instanceof d3_Rgb ? d3_rgb(r.r, r.g, r.b) + : d3_rgb_parse("" + r, d3_rgb, d3_hsl_rgb)) : d3_rgb(~~r, ~~g, ~~b); }; @@ -892,17 +949,17 @@ d3_Rgb.prototype.brighter = function(k) { if (g && g < i) g = i; if (b && b < i) b = i; return d3_rgb( - Math.min(255, Math.floor(r / k)), - Math.min(255, Math.floor(g / k)), - Math.min(255, Math.floor(b / k))); + Math.min(255, Math.floor(r / k)), + Math.min(255, Math.floor(g / k)), + Math.min(255, Math.floor(b / k))); }; d3_Rgb.prototype.darker = function(k) { k = Math.pow(0.7, arguments.length ? k : 1); return d3_rgb( - Math.max(0, Math.floor(k * this.r)), - Math.max(0, Math.floor(k * this.g)), - Math.max(0, Math.floor(k * this.b))); + Math.floor(k * this.r), + Math.floor(k * this.g), + Math.floor(k * this.b)); }; d3_Rgb.prototype.hsl = function() { @@ -914,7 +971,9 @@ d3_Rgb.prototype.toString = function() { }; function d3_rgb_hex(v) { - return v < 0x10 ? "0" + v.toString(16) : v.toString(16); + return v < 0x10 + ? 
"0" + Math.max(0, v).toString(16) + : Math.min(255, v).toString(16); } function d3_rgb_parse(format, rgb, hsl) { @@ -1151,7 +1210,8 @@ for (var d3_rgb_name in d3_rgb_names) { } d3.hsl = function(h, s, l) { return arguments.length === 1 - ? d3_rgb_parse("" + h, d3_rgb_hsl, d3_hsl) + ? (h instanceof d3_Hsl ? d3_hsl(h.h, h.s, h.l) + : d3_rgb_parse("" + h, d3_rgb_hsl, d3_hsl)) : d3_hsl(+h, +s, +l); }; @@ -1180,7 +1240,7 @@ d3_Hsl.prototype.rgb = function() { }; d3_Hsl.prototype.toString = function() { - return "hsl(" + this.h + "," + this.s * 100 + "%," + this.l * 100 + "%)"; + return this.rgb().toString(); }; function d3_hsl_rgb(h, s, l) { @@ -1331,6 +1391,21 @@ d3_selectionPrototype.attr = function(name, value) { : (name.local ? attrConstantNS : attrConstant))); }; d3_selectionPrototype.classed = function(name, value) { + var names = name.split(d3_selection_classedWhitespace), + n = names.length, + i = -1; + if (arguments.length > 1) { + while (++i < n) d3_selection_classed.call(this, names[i], value); + return this; + } else { + while (++i < n) if (!d3_selection_classed.call(this, names[i])) return false; + return true; + } +}; + +var d3_selection_classedWhitespace = /\s+/g; + +function d3_selection_classed(name, value) { var re = new RegExp("(^|\\s+)" + d3.requote(name) + "(\\s+|$)", "g"); // If no value is specified, return the first value. @@ -1375,7 +1450,7 @@ d3_selectionPrototype.classed = function(name, value) { ? classedFunction : value ? classedAdd : classedRemove); -}; +} d3_selectionPrototype.style = function(name, value, priority) { if (arguments.length < 3) priority = ""; @@ -1856,10 +1931,28 @@ function d3_transition(groups, id) { return groups; } +var d3_transitionRemove = {}; + +function d3_transitionNull(d, i, a) { + return a != "" && d3_transitionRemove; +} + function d3_transitionTween(b) { - return typeof b === "function" - ? function(d, i, a) { var v = b.call(this, d, i) + ""; return a != v && d3.interpolate(a, v); } - : (b = b + "", function(d, i, a) { return a != b && d3.interpolate(a, b); }); + + function transitionFunction(d, i, a) { + var v = b.call(this, d, i); + return v == null + ? a != "" && d3_transitionRemove + : a != v && d3.interpolate(a, v); + } + + function transitionString(d, i, a) { + return a != b && d3.interpolate(a, b); + } + + return typeof b === "function" ? transitionFunction + : b == null ? 
d3_transitionNull + : (b += "", transitionString); } var d3_transitionPrototype = [], @@ -1899,6 +1992,7 @@ d3_transitionPrototype.select = function(selector) { d3_transitionPrototype.selectAll = function(selector) { var subgroups = [], subgroup, + subnodes, node; if (typeof selector !== "function") selector = d3_selection_selectorAll(selector); @@ -1906,9 +2000,10 @@ d3_transitionPrototype.selectAll = function(selector) { for (var j = -1, m = this.length; ++j < m;) { for (var group = this[j], i = -1, n = group.length; ++i < n;) { if (node = group[i]) { - subgroups.push(subgroup = selector.call(node.node, node.node.__data__, i)); - for (var k = -1, o = subgroup.length; ++k < o;) { - subgroup[k] = {node: subgroup[k], delay: node.delay, duration: node.duration}; + subnodes = selector.call(node.node, node.node.__data__, i); + subgroups.push(subgroup = []); + for (var k = -1, o = subnodes.length; ++k < o;) { + subgroup.push({node: subnodes[k], delay: node.delay, duration: node.duration}); } } } @@ -1920,24 +2015,24 @@ d3_transitionPrototype.attr = function(name, value) { return this.attrTween(name, d3_transitionTween(value)); }; -d3_transitionPrototype.attrTween = function(name, tween) { - name = d3.ns.qualify(name); +d3_transitionPrototype.attrTween = function(nameNS, tween) { + var name = d3.ns.qualify(nameNS); function attrTween(d, i) { var f = tween.call(this, d, i, this.getAttribute(name)); - return f && function(t) { - this.setAttribute(name, f(t)); - }; + return f === d3_transitionRemove + ? (this.removeAttribute(name), null) + : f && function(t) { this.setAttribute(name, f(t)); }; } function attrTweenNS(d, i) { var f = tween.call(this, d, i, this.getAttributeNS(name.space, name.local)); - return f && function(t) { - this.setAttributeNS(name.space, name.local, f(t)); - }; + return f === d3_transitionRemove + ? (this.removeAttributeNS(name.space, name.local), null) + : f && function(t) { this.setAttributeNS(name.space, name.local, f(t)); }; } - return this.tween("attr." + name, name.local ? attrTweenNS : attrTween); + return this.tween("attr." + nameNS, name.local ? attrTweenNS : attrTween); }; d3_transitionPrototype.style = function(name, value, priority) { if (arguments.length < 3) priority = ""; @@ -1948,9 +2043,9 @@ d3_transitionPrototype.styleTween = function(name, tween, priority) { if (arguments.length < 3) priority = ""; return this.tween("style." + name, function(d, i) { var f = tween.call(this, d, i, window.getComputedStyle(this, null).getPropertyValue(name)); - return f && function(t) { - this.style.setProperty(name, f(t), priority); - }; + return f === d3_transitionRemove + ? (this.style.removeProperty(name), null) + : f && function(t) { this.style.setProperty(name, f(t), priority); }; }); }; d3_transitionPrototype.text = function(value) { @@ -2113,9 +2208,12 @@ function d3_scale_nice(domain, nice) { dx = x0; x0 = x1; x1 = dx; } - nice = nice(x1 - x0); - domain[i0] = nice.floor(x0); - domain[i1] = nice.ceil(x1); + if (dx = x1 - x0) { + nice = nice(dx); + domain[i0] = nice.floor(x0); + domain[i1] = nice.ceil(x1); + } + return domain; } @@ -3147,16 +3245,17 @@ function d3_svg_area(projection) { x1 = d3_svg_lineX, y0 = 0, y1 = d3_svg_lineY, - interpolate = "linear", - interpolator = d3_svg_lineInterpolators[interpolate], + interpolate, + i0, + i1, tension = .7; function area(d) { if (d.length < 1) return null; var points0 = d3_svg_linePoints(this, d, x0, y0), points1 = d3_svg_linePoints(this, d, x0 === x1 ? d3_svg_areaX(points0) : x1, y0 === y1 ? 
d3_svg_areaY(points0) : y1); - return "M" + interpolator(projection(points1), tension) - + "L" + interpolator(projection(points0.reverse()), tension) + return "M" + i0(projection(points1), tension) + + "L" + i1(projection(points0.reverse()), tension) + "Z"; } @@ -3198,7 +3297,8 @@ function d3_svg_area(projection) { area.interpolate = function(x) { if (!arguments.length) return interpolate; - interpolator = d3_svg_lineInterpolators[interpolate = x]; + i0 = d3_svg_lineInterpolators[interpolate = x]; + i1 = i0.reverse || i0; return area; }; @@ -3208,9 +3308,12 @@ function d3_svg_area(projection) { return area; }; - return area; + return area.interpolate("linear"); } +d3_svg_lineStepBefore.reverse = d3_svg_lineStepAfter; +d3_svg_lineStepAfter.reverse = d3_svg_lineStepBefore; + d3.svg.area = function() { return d3_svg_area(Object); }; @@ -3545,6 +3648,20 @@ d3.svg.axis = function() { selection.each(function(d, i, j) { var g = d3.select(this); + // If selection is a transition, create subtransitions. + var transition = selection.delay ? function(o) { + var id = d3_transitionInheritId; + try { + d3_transitionInheritId = selection.id; + return o.transition() + .delay(selection[j][i].delay) + .duration(selection[j][i].duration) + .ease(selection.ease()); + } finally { + d3_transitionInheritId = id; + } + } : Object; + // Ticks. var ticks = scale.ticks.apply(scale, tickArguments_), tickFormat = tickFormat_ == null ? scale.tickFormat.apply(scale, tickArguments_) : tickFormat_; @@ -3580,37 +3697,33 @@ d3.svg.axis = function() { switch (orient) { case "bottom": { tickTransform = d3_svg_axisX; - subtickUpdate.attr("y2", tickMinorSize); - tickEnter.select("text").attr("dy", ".71em").attr("text-anchor", "middle"); - tickUpdate.select("line").attr("y2", tickMajorSize); - tickUpdate.select("text").attr("y", Math.max(tickMajorSize, 0) + tickPadding); + subtickUpdate.attr("x2", 0).attr("y2", tickMinorSize); + tickUpdate.select("line").attr("x2", 0).attr("y2", tickMajorSize); + tickUpdate.select("text").attr("x", 0).attr("y", Math.max(tickMajorSize, 0) + tickPadding).attr("dy", ".71em").attr("text-anchor", "middle"); pathUpdate.attr("d", "M" + range[0] + "," + tickEndSize + "V0H" + range[1] + "V" + tickEndSize); break; } case "top": { tickTransform = d3_svg_axisX; - subtickUpdate.attr("y2", -tickMinorSize); - tickEnter.select("text").attr("text-anchor", "middle"); - tickUpdate.select("line").attr("y2", -tickMajorSize); - tickUpdate.select("text").attr("y", -(Math.max(tickMajorSize, 0) + tickPadding)); + subtickUpdate.attr("x2", 0).attr("y2", -tickMinorSize); + tickUpdate.select("line").attr("x2", 0).attr("y2", -tickMajorSize); + tickUpdate.select("text").attr("x", 0).attr("y", -(Math.max(tickMajorSize, 0) + tickPadding)).attr("dy", "0em").attr("text-anchor", "middle"); pathUpdate.attr("d", "M" + range[0] + "," + -tickEndSize + "V0H" + range[1] + "V" + -tickEndSize); break; } case "left": { tickTransform = d3_svg_axisY; - subtickUpdate.attr("x2", -tickMinorSize); - tickEnter.select("text").attr("dy", ".32em").attr("text-anchor", "end"); - tickUpdate.select("line").attr("x2", -tickMajorSize); - tickUpdate.select("text").attr("x", -(Math.max(tickMajorSize, 0) + tickPadding)); + subtickUpdate.attr("x2", -tickMinorSize).attr("y2", 0); + tickUpdate.select("line").attr("x2", -tickMajorSize).attr("y2", 0); + tickUpdate.select("text").attr("x", -(Math.max(tickMajorSize, 0) + tickPadding)).attr("y", 0).attr("dy", ".32em").attr("text-anchor", "end"); pathUpdate.attr("d", "M" + -tickEndSize + "," + range[0] + "H0V" + 
range[1] + "H" + -tickEndSize); break; } case "right": { tickTransform = d3_svg_axisY; - subtickUpdate.attr("x2", tickMinorSize); - tickEnter.select("text").attr("dy", ".32em"); - tickUpdate.select("line").attr("x2", tickMajorSize); - tickUpdate.select("text").attr("x", Math.max(tickMajorSize, 0) + tickPadding); + subtickUpdate.attr("x2", tickMinorSize).attr("y2", 0); + tickUpdate.select("line").attr("x2", tickMajorSize).attr("y2", 0); + tickUpdate.select("text").attr("x", Math.max(tickMajorSize, 0) + tickPadding).attr("y", 0).attr("dy", ".32em").attr("text-anchor", "start"); pathUpdate.attr("d", "M" + tickEndSize + "," + range[0] + "H0V" + range[1] + "H" + tickEndSize); break; } @@ -3623,13 +3736,6 @@ d3.svg.axis = function() { subtickEnter.call(tickTransform, scale0); subtickUpdate.call(tickTransform, scale); subtickExit.call(tickTransform, scale); - - function transition(o) { - return selection.delay ? o.transition() - .delay(selection[j][i].delay) - .duration(selection[j][i].duration) - .ease(selection.ease()) : o; - } }); } diff --git a/media/d3.layout.js b/media/d3.layout.js index bcfddc04..2bfb9d32 100644 --- a/media/d3.layout.js +++ b/media/d3.layout.js @@ -560,9 +560,9 @@ d3.layout.partition = function() { node.y = node.depth * dy; node.dx = dx; node.dy = dy; - if (children) { + if (children && (n = children.length)) { var i = -1, - n = children.length, + n, c, d; dx = node.value ? dx / node.value : 0; @@ -576,9 +576,9 @@ d3.layout.partition = function() { function depth(node) { var children = node.children, d = 0; - if (children) { + if (children && (n = children.length)) { var i = -1, - n = children.length; + n; while (++i < n) d = Math.max(d, depth(children[i])); } return 1 + d; @@ -1477,9 +1477,9 @@ d3.layout.tree = function() { function secondWalk(node, x) { node.x = node._tree.prelim + x; var children = node.children; - if (children) { + if (children && (n = children.length)) { var i = -1, - n = children.length; + n; x += node._tree.mod; while (++i < n) { secondWalk(children[i], x); @@ -1584,18 +1584,21 @@ function d3_layout_treeSeparation(a, b) { // } function d3_layout_treeLeft(node) { - return node.children ? node.children[0] : node._tree.thread; + var children = node.children; + return children && children.length ? children[0] : node._tree.thread; } function d3_layout_treeRight(node) { - return node.children ? node.children[node.children.length - 1] : node._tree.thread; + var children = node.children, + n; + return children && (n = children.length) ? children[n - 1] : node._tree.thread; } function d3_layout_treeSearch(node, compare) { var children = node.children; - if (children) { + if (children && (n = children.length)) { var child, - n = children.length, + n, i = -1; while (++i < n) { if (compare(child = d3_layout_treeSearch(children[i], compare), node) > 0) { @@ -1621,11 +1624,11 @@ function d3_layout_treeDeepest(a, b) { function d3_layout_treeVisitAfter(node, callback) { function visit(node, previousSibling) { var children = node.children; - if (children) { + if (children && (n = children.length)) { var child, previousChild = null, i = -1, - n = children.length; + n; while (++i < n) { child = children[i]; visit(child, previousChild); @@ -1693,57 +1696,61 @@ d3.layout.treemap = function() { // Recursively arranges the specified node's children into squarified rows. 
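  // (the "squarified" idea: children accumulate into the current row while
  // the row's worst aspect ratio keeps improving; once the next child would
  // worsen it, the row is positioned and a new row starts along the shorter
  // side of the remaining rectangle)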
function squarify(node) { - if (!node.children) return; - var rect = pad(node), - row = [], - children = node.children.slice(), // copy-on-write - child, - best = Infinity, // the best row score so far - score, // the current row score - u = Math.min(rect.dx, rect.dy), // initial orientation - n; - scale(children, rect.dx * rect.dy / node.value); - row.area = 0; - while ((n = children.length) > 0) { - row.push(child = children[n - 1]); - row.area += child.area; - if ((score = worst(row, u)) <= best) { // continue with this orientation - children.pop(); - best = score; - } else { // abort, and try a different orientation - row.area -= row.pop().area; - position(row, u, rect, false); - u = Math.min(rect.dx, rect.dy); + var children = node.children; + if (children && children.length) { + var rect = pad(node), + row = [], + remaining = children.slice(), // copy-on-write + child, + best = Infinity, // the best row score so far + score, // the current row score + u = Math.min(rect.dx, rect.dy), // initial orientation + n; + scale(remaining, rect.dx * rect.dy / node.value); + row.area = 0; + while ((n = remaining.length) > 0) { + row.push(child = remaining[n - 1]); + row.area += child.area; + if ((score = worst(row, u)) <= best) { // continue with this orientation + remaining.pop(); + best = score; + } else { // abort, and try a different orientation + row.area -= row.pop().area; + position(row, u, rect, false); + u = Math.min(rect.dx, rect.dy); + row.length = row.area = 0; + best = Infinity; + } + } + if (row.length) { + position(row, u, rect, true); row.length = row.area = 0; - best = Infinity; } + children.forEach(squarify); } - if (row.length) { - position(row, u, rect, true); - row.length = row.area = 0; - } - node.children.forEach(squarify); } // Recursively resizes the specified node's children into existing rows. // Preserves the existing layout! function stickify(node) { - if (!node.children) return; - var rect = pad(node), - children = node.children.slice(), // copy-on-write - child, - row = []; - scale(children, rect.dx * rect.dy / node.value); - row.area = 0; - while (child = children.pop()) { - row.push(child); - row.area += child.area; - if (child.z != null) { - position(row, child.z ? rect.dx : rect.dy, rect, !children.length); - row.length = row.area = 0; + var children = node.children; + if (children && children.length) { + var rect = pad(node), + remaining = children.slice(), // copy-on-write + child, + row = []; + scale(remaining, rect.dx * rect.dy / node.value); + row.area = 0; + while (child = remaining.pop()) { + row.push(child); + row.area += child.area; + if (child.z != null) { + position(row, child.z ? rect.dx : rect.dy, rect, !remaining.length); + row.length = row.area = 0; + } } + children.forEach(stickify); } - node.children.forEach(stickify); } // Computes the score for the specified row, as the worst aspect ratio. 
diff --git a/media/d3.layout.min.js b/media/d3.layout.min.js index e49c8c71..c7016b5c 100644 --- a/media/d3.layout.min.js +++ b/media/d3.layout.min.js @@ -1 +1 @@ -(function(){function bc(a,b){var c=a.x+b[3],d=a.y+b[0],e=a.dx-b[1]-b[3],f=a.dy-b[0]-b[2];e<0&&(c+=e/2,e=0),f<0&&(d+=f/2,f=0);return{x:c,y:d,dx:e,dy:f}}function bb(a){return{x:a.x,y:a.y,dx:a.dx,dy:a.dy}}function ba(a,b,c){return a._tree.ancestor.parent==b.parent?a._tree.ancestor:c}function _(a,b,c){a=a._tree,b=b._tree;var d=c/(b.number-a.number);a.change+=d,b.change-=d,b.shift+=c,b.prelim+=c,b.mod+=c}function $(a){var b=0,c=0,d=a.children,e=d.length,f;while(--e>=0)f=d[e]._tree,f.prelim+=b,f.mod+=b,b+=f.shift+(c+=f.change)}function Z(a,b){function c(a,d){var e=a.children;if(e){var f,g=null,h=-1,i=e.length;while(++h0&&(a=d)}return a}function U(a){return a.children?a.children[a.children.length-1]:a._tree.thread}function T(a){return a.children?a.children[0]:a._tree.thread}function S(a,b){return a.parent==b.parent?1:2}function R(a){var b=a.children,c;return b&&(c=b.length)?R(b[c-1]):a}function Q(a){var b=a.children;return b&&b.length?Q(b[0]):a}function P(a){return a.reduce(function(a,b){return a+b.x},0)/a.length}function O(a){return 1+d3.max(a,function(a){return a.y})}function N(a,b,c){var d=a.r+c.r,e=b.x-a.x,f=b.y-a.y;if(d&&(e||f)){var g=b.r+c.r,h=Math.sqrt(e*e+f*f),i=Math.max(-1,Math.min(1,(d*d+h*h-g*g)/(2*d*h))),j=Math.acos(i),k=i*(d/=h),l=Math.sin(j)*d;c.x=a.x+k*e+l*f,c.y=a.y+k*f-l*e}else c.x=a.x+d,c.y=a.y}function M(a,b,c,d){var e=a.children;a.x=b+=d*a.x,a.y=c+=d*a.y,a.r*=d;if(e){var f=-1,g=e.length;while(++f1){h=a[1],h.x=h.r,h.y=0,l(h);if(f>2){i=a[2],N(g,h,i),l(i),F(g,i),g._pack_prev=i,F(i,h),h=g._pack_next;for(var m=3;m0?(G(g,j),h=j,m--):(G(j,h),g=j,m--)}}}var q=(b+c)/2,r=(d+e)/2,s=0;for(var m=0;m.001}function G(a,b){a._pack_next=b,b._pack_prev=a}function F(a,b){var c=a._pack_next;a._pack_next=b,b._pack_prev=a,b._pack_next=c,c._pack_prev=b}function E(a,b){return a.value-b.value}function C(a){return d3.merge(a.map(function(a){return(a.children||[]).map(function(b){return{source:a,target:b}})}))}function B(a,b){return b.value-a.value}function A(a){return a.value}function z(a){return a.children}function y(a,b){a.sort=d3.rebind(a,b.sort),a.children=d3.rebind(a,b.children),a.links=C,a.value=d3.rebind(a,b.value),a.nodes=function(b){D=!0;return(a.nodes=a)(b)};return a}function x(a){return[d3.min(a),d3.max(a)]}function w(a,b){var c=-1,d=+a[0],e=(a[1]-d)/b,f=[];while(++c<=b)f[c]=e*c+d;return f}function v(a,b){return w(a,Math.ceil(Math.log(b.length)/Math.LN2+1))}function u(a,b){return a+b[1]}function t(a){return a.reduce(u,0)}function s(a){var b=1,c=0,d=a[0][1],e,f=a.length;for(;bd&&(c=b,d=e);return c}function p(a,b,c){a.y0=b,a.y=c}function o(a){return a.y}function n(a){return a.x}function m(a){return 1}function l(a){return 20}function k(a,b,c){var d=0,e=0;a.charge=0;if(!a.leaf){var f=a.nodes,g=f.length,h=-1,i;while(++he&&(e=h),d.push(h)}for(g=0;g=i[0]&&o<=i[1]&&(k=g[d3.bisect(j,o,1,m)-1],k.y+=n,k.push(e[f]));return g}var a=!0,b=Number,c=x,d=v;e.value=function(a){if(!arguments.length)return b;b=a;return e},e.range=function(a){if(!arguments.length)return c;c=d3.functor(a);return e},e.bins=function(a){if(!arguments.length)return d;d=typeof a=="number"?function(b){return w(b,a)}:d3.functor(a);return e},e.frequency=function(b){if(!arguments.length)return a;a=!!b;return e};return e},d3.layout.hierarchy=function(){function g(a){var b=[];e(a,0,b);return b}function f(a,b){var d=a.children,e=0;if(d&&(i=d.length)){var 
h=-1,i,j=b+1;while(++h0&&(_(ba(g,a,d),a,m),i+=m,j+=m),k+=g._tree.mod,i+=e._tree.mod,l+=h._tree.mod,j+=f._tree.mod;g&&!U(f)&&(f._tree.thread=g,f._tree.mod+=k-j),e&&!T(h)&&(h._tree.thread=e,h._tree.mod+=i-l,d=a)}return d}function i(a,b){a.x=a._tree.prelim+b;var c=a.children;if(c){var d=-1,e=c.length;b+=a._tree.mod;while(++dd.dy)j=j?d.dy:0;while(++fd.dx)j=j?d.dx:0;while(++fe&&(e=d)}c*=c,b*=b;return c?Math.max(b*e*h/c,c/(b*f*h)):Infinity}function k(a){if(!!a.children){var b=e(a),c=a.children.slice(),d,f=[];i(c,b.dx*b.dy/a.value),f.area=0;while(d=c.pop())f.push(d),f.area+=d.area,d.z!=null&&(m(f,d.z?b.dx:b.dy,b,!c.length),f.length=f.area=0);a.children.forEach(k)}}function j(a){if(!!a.children){var b=e(a),c=[],d=a.children.slice(),f,g=Infinity,h,k=Math.min(b.dx,b.dy),n;i(d,b.dx*b.dy/a.value),c.area=0;while((n=d.length)>0)c.push(f=d[n-1]),c.area+=f.area,(h=l(c,k))<=g?(d.pop(),g=h):(c.area-=c.pop().area,m(c,k,b,!1),k=Math.min(b.dx,b.dy),c.length=c.area=0,g=Infinity);c.length&&(m(c,k,b,!0),c.length=c.area=0),a.children.forEach(j)}}function i(a,b){var c=-1,d=a.length,e,f;while(++c=0)f=d[e]._tree,f.prelim+=b,f.mod+=b,b+=f.shift+(c+=f.change)}function Z(a,b){function c(a,d){var e=a.children;if(e&&(i=e.length)){var f,g=null,h=-1,i;while(++h0&&(a=d)}return a}function U(a){var b=a.children,c;return b&&(c=b.length)?b[c-1]:a._tree.thread}function T(a){var b=a.children;return b&&b.length?b[0]:a._tree.thread}function S(a,b){return a.parent==b.parent?1:2}function R(a){var b=a.children,c;return b&&(c=b.length)?R(b[c-1]):a}function Q(a){var b=a.children;return b&&b.length?Q(b[0]):a}function P(a){return a.reduce(function(a,b){return a+b.x},0)/a.length}function O(a){return 1+d3.max(a,function(a){return a.y})}function N(a,b,c){var d=a.r+c.r,e=b.x-a.x,f=b.y-a.y;if(d&&(e||f)){var g=b.r+c.r,h=Math.sqrt(e*e+f*f),i=Math.max(-1,Math.min(1,(d*d+h*h-g*g)/(2*d*h))),j=Math.acos(i),k=i*(d/=h),l=Math.sin(j)*d;c.x=a.x+k*e+l*f,c.y=a.y+k*f-l*e}else c.x=a.x+d,c.y=a.y}function M(a,b,c,d){var e=a.children;a.x=b+=d*a.x,a.y=c+=d*a.y,a.r*=d;if(e){var f=-1,g=e.length;while(++f1){h=a[1],h.x=h.r,h.y=0,l(h);if(f>2){i=a[2],N(g,h,i),l(i),F(g,i),g._pack_prev=i,F(i,h),h=g._pack_next;for(var m=3;m0?(G(g,j),h=j,m--):(G(j,h),g=j,m--)}}}var q=(b+c)/2,r=(d+e)/2,s=0;for(var m=0;m.001}function G(a,b){a._pack_next=b,b._pack_prev=a}function F(a,b){var c=a._pack_next;a._pack_next=b,b._pack_prev=a,b._pack_next=c,c._pack_prev=b}function E(a,b){return a.value-b.value}function C(a){return d3.merge(a.map(function(a){return(a.children||[]).map(function(b){return{source:a,target:b}})}))}function B(a,b){return b.value-a.value}function A(a){return a.value}function z(a){return a.children}function y(a,b){a.sort=d3.rebind(a,b.sort),a.children=d3.rebind(a,b.children),a.links=C,a.value=d3.rebind(a,b.value),a.nodes=function(b){D=!0;return(a.nodes=a)(b)};return a}function x(a){return[d3.min(a),d3.max(a)]}function w(a,b){var c=-1,d=+a[0],e=(a[1]-d)/b,f=[];while(++c<=b)f[c]=e*c+d;return f}function v(a,b){return w(a,Math.ceil(Math.log(b.length)/Math.LN2+1))}function u(a,b){return a+b[1]}function t(a){return a.reduce(u,0)}function s(a){var b=1,c=0,d=a[0][1],e,f=a.length;for(;bd&&(c=b,d=e);return c}function p(a,b,c){a.y0=b,a.y=c}function o(a){return a.y}function n(a){return a.x}function m(a){return 1}function l(a){return 20}function k(a,b,c){var d=0,e=0;a.charge=0;if(!a.leaf){var f=a.nodes,g=f.length,h=-1,i;while(++he&&(e=h),d.push(h)}for(g=0;g=i[0]&&o<=i[1]&&(k=g[d3.bisect(j,o,1,m)-1],k.y+=n,k.push(e[f]));return g}var 
a=!0,b=Number,c=x,d=v;e.value=function(a){if(!arguments.length)return b;b=a;return e},e.range=function(a){if(!arguments.length)return c;c=d3.functor(a);return e},e.bins=function(a){if(!arguments.length)return d;d=typeof a=="number"?function(b){return w(b,a)}:d3.functor(a);return e},e.frequency=function(b){if(!arguments.length)return a;a=!!b;return e};return e},d3.layout.hierarchy=function(){function g(a){var b=[];e(a,0,b);return b}function f(a,b){var d=a.children,e=0;if(d&&(i=d.length)){var h=-1,i,j=b+1;while(++h0&&(_(ba(g,a,d),a,m),i+=m,j+=m),k+=g._tree.mod,i+=e._tree.mod,l+=h._tree.mod,j+=f._tree.mod;g&&!U(f)&&(f._tree.thread=g,f._tree.mod+=k-j),e&&!T(h)&&(h._tree.thread=e,h._tree.mod+=i-l,d=a)}return d}function i(a,b){a.x=a._tree.prelim+b;var c=a.children;if(c&&(e=c.length)){var d=-1,e;b+=a._tree.mod;while(++dd.dy)j=j?d.dy:0;while(++fd.dx)j=j?d.dx:0;while(++fe&&(e=d)}c*=c,b*=b;return c?Math.max(b*e*h/c,c/(b*f*h)):Infinity}function k(a){var b=a.children;if(b&&b.length){var c=e(a),d=b.slice(),f,g=[];i(d,c.dx*c.dy/a.value),g.area=0;while(f=d.pop())g.push(f),g.area+=f.area,f.z!=null&&(m(g,f.z?c.dx:c.dy,c,!d.length),g.length=g.area=0);b.forEach(k)}}function j(a){var b=a.children;if(b&&b.length){var c=e(a),d=[],f=b.slice(),g,h=Infinity,k,n=Math.min(c.dx,c.dy),o;i(f,c.dx*c.dy/a.value),d.area=0;while((o=f.length)>0)d.push(g=f[o-1]),d.area+=g.area,(k=l(d,n))<=h?(f.pop(),h=k):(d.area-=d.pop().area,m(d,n,c,!1),n=Math.min(c.dx,c.dy),d.length=d.area=0,h=Infinity);d.length&&(m(d,n,c,!0),d.length=d.area=0),b.forEach(j)}}function i(a,b){var c=-1,d=a.length,e,f;while(++c1){var d=bu(a.domain()),e,f=-1,g=b.length,h=(b[1]-b[0])/++c,i,j;while(++f0;)(j=+b[f]-i*h)>=d[0]&&e.push(j);for(--f,i=0;++i9&&(f=c*3/Math.sqrt(f),g[h]=f*d,g[h+1]=f*e));h=-1;while(++h<=i)f=(a[Math.min(i,h+1)][0]-a[Math.max(0,h-1)][0])/(6*(1+g[h]*g[h])),b.push([f||0,g[h]*f||0]);return b}function ct(a){var b=0,c=a.length-1,d=[],e=a[0],f=a[1],g=d[0]=cs(e,f);while(++b1){h=b[1],f=a[i],i++,d+="C"+(e[0]+g[0])+","+(e[1]+g[1])+","+(f[0]-h[0])+","+(f[1]-h[1])+","+f[0]+","+f[1];for(var j=2;j0;j--)e.push(c(f)*j)}else{for(;fi;g--);e=e.slice(f,g)}return e},d.tickFormat=function(a,e){arguments.length<2&&(e=bG);if(arguments.length<1)return e;var f=a/d.ticks().length,g=b===bI?(h=-1e-15,Math.floor):(h=1e-15,Math.ceil),h;return function(a){return a/c(g(b(a)+h))=c.delay&&(c.flush=c.callback(a)),c=c.next;var d=br()-b;d>24?(isFinite(d)&&(clearTimeout(bp),bp=setTimeout(bq,d)),bo=0):(bo=1,bs(bq))}function bm(a){for(var b=0,c=this.length;b0)k[--g].call(l,f);if(c>=1){r(),bk=b,d.end.dispatch.call(l,h,i),bk=0;return 1}}function p(a){if(o.active>b)return r();o.active=b;for(var e in c)(e=c[e].call(l,h,i))&&k.push(e);d.start.dispatch.call(l,h,i),q(a)||d3.timer(q,0,f);return 1}var k=[],l=this,m=a[j][i].delay,n=a[j][i].duration,o=l.__transition__||(l.__transition__={active:0,count:0});++o.count,m<=g?p(g):d3.timer(p,m,f)});return 1},0,f);return a}function be(a){arguments.length||(a=d3.ascending);return function(b,c){return a(b&&b.__data__,c&&c.__data__)}}function bc(a){h(a,bd);return a}function bb(a){return{__data__:a}}function ba(a){return function(){return Z(a,this)}}function _(a){return function(){return Y(a,this)}}function X(a){h(a,$);return a}function W(a,b,c){function g(a){return Math.round(f(a)*255)}function f(a){a>360?a-=360:a<0&&(a+=360);return a<60?d+(e-d)*a/60:a<180?e:a<240?d+(e-d)*(240-a)/60:d}var d,e;a=a%360,a<0&&(a+=360),b=b<0?0:b>1?1:b,c=c<0?0:c>1?1:c,e=c<=.5?c*(1+b):c+b-c*b,d=2*c-e;return M(g(a+120),g(a),g(a-120))}function 
V(a,b,c){this.h=a,this.s=b,this.l=c}function U(a,b,c){return new V(a,b,c)}function R(a){var b=parseFloat(a);return a.charAt(a.length-1)==="%"?Math.round(b*2.55):b}function Q(a,b,c){var d=Math.min(a/=255,b/=255,c/=255),e=Math.max(a,b,c),f=e-d,g,h,i=(e+d)/2;f?(h=i<.5?f/(e+d):f/(2-e-d),a==e?g=(b-c)/f+(b=1?1:a(b)}}function r(a){var b=a.lastIndexOf("."),c=b>=0?a.substring(b):(b=a.length,""),d=[];while(b>0)d.push(a.substring(b-=3,b+3));return d.reverse().join(",")+c}function q(a){return a+""}function n(a){var b={},c=[];b.add=function(a){for(var d=0;db?1:a>=b?0:NaN},d3.descending=function(a,b){return ba?1:b>=a?0:NaN},d3.min=function(a,b){var c=-1,d=a.length,e,f;if(arguments.length===1){while(++cf&&(e=f)}else{while(++cf&&(e=f)}return e},d3.max=function(a,b){var c=-1,d=a.length,e,f;if(arguments.length===1){while(++ce&&(e=f)}else{while(++ce&&(e=f)}return e},d3.sum=function(a,b){var c=0,d=a.length,e,f=-1;if(arguments.length===1)while(++f>1;a[e]>1;b0&&(e=f);return e},d3.last=function(a,b){var c=0,d=a.length,e=a[0],f;arguments.length===1&&(b=d3.ascending);while(++c=b.length)return a;var e=[],f=c[d++],h;for(h in a)e.push({key:h,values:g(a[h],d)});f&&e.sort(function(a,b){return f(a.key,b.key)});return e}function f(c,g){if(g>=b.length)return e?e.call(a,c):d?c.sort(d):c;var h=-1,i=c.length,j=b[g++],k,l,m={};while(++hb)d.push(f);else while((f=a+c*++e)=^]))?([+\- ])?(#)?(0)?([0-9]+)?(,)?(\.[0-9]+)?([a-zA-Z%])?/,p={g:function(a,b){return a.toPrecision(b)},e:function(a,b){return a.toExponential(b)},f:function(a,b){return a.toFixed(b)},r:function(a,b){var c=a?1+Math.floor(1e-15+Math.log(a)/Math.LN10):1;return d3.round(a,b-c).toFixed(Math.max(0,Math.min(20,b-c)))}},s=A(2),t=A(3),u={linear:function(){return z},poly:A,quad:function(){return s},cubic:function(){return t},sin:function(){return B},exp:function(){return C},circle:function(){return D},elastic:E,back:F,bounce:function(){return G}},v={"in":function(a){return a},out:x,"in-out":y,"out-in":function(a){return y(x(a))}};d3.ease=function(a){var b=a.indexOf("-"),c=b>=0?a.substring(0,b):a,d=b>=0?a.substring(b+1):"in";return w(v[d](u[c].apply(null,Array.prototype.slice.call(arguments,1))))},d3.event=null,d3.interpolate=function(a,b){var c=d3.interpolators.length,d;while(--c>=0&&!(d=d3.interpolators[c](a,b)));return d},d3.interpolateNumber=function(a,b){b-=a;return function(c){return a+b*c}},d3.interpolateRound=function(a,b){b-=a;return function(c){return Math.round(a+b*c)}},d3.interpolateString=function(a,b){var c,d,e,f=0,g=0,h=[],i=[],j,k;H.lastIndex=0;for(d=0;c=H.exec(b);++d)c.index&&h.push(b.substring(f,g=c.index)),i.push({i:h.length,x:c[0]}),h.push(null),f=H.lastIndex;f0&&(a=a.substring(0,e));return arguments.length<2?(e=this.node()[d])&&e._:this.each(function(e,f){function h(a){var c=d3.event;d3.event=a;try{b.call(g,g.__data__,f)}finally{d3.event=c}}var g=this;g[d]&&g.removeEventListener(a,g[d],c),b&&g.addEventListener(a,g[d]=h,c),h._=b})},$.each=function(a){for(var b=-1,c=this.length;++b=bT?e?"M0,"+f+"A"+f+","+f+" 0 1,1 0,"+ -f+"A"+f+","+f+" 0 1,1 0,"+f+"M0,"+e+"A"+e+","+e+" 0 1,0 0,"+ -e+"A"+e+","+e+" 0 1,0 0,"+e+"Z":"M0,"+f+"A"+f+","+f+" 0 1,1 0,"+ -f+"A"+f+","+f+" 0 1,1 0,"+f+"Z":e?"M"+f*k+","+f*l+"A"+f+","+f+" 0 "+j+",1 "+f*m+","+f*n+"L"+e*m+","+e*n+"A"+e+","+e+" 0 "+j+",0 "+e*k+","+e*l+"Z":"M"+f*k+","+f*l+"A"+f+","+f+" 0 "+j+",1 "+f*m+","+f*n+"L0,0"+"Z"}var a=bU,b=bV,c=bW,d=bX;e.innerRadius=function(b){if(!arguments.length)return a;a=d3.functor(b);return e},e.outerRadius=function(a){if(!arguments.length)return b;b=d3.functor(a);return 
e},e.startAngle=function(a){if(!arguments.length)return c;c=d3.functor(a);return e},e.endAngle=function(a){if(!arguments.length)return d;d=d3.functor(a);return e},e.centroid=function(){var e=(a.apply(this,arguments)+b.apply(this,arguments))/2,f=(c.apply(this,arguments)+d.apply(this,arguments))/2+bS;return[Math.cos(f)*e,Math.sin(f)*e]};return e};var bS=-Math.PI/2,bT=2*Math.PI-1e-6;d3.svg.line=function(){return bY(Object)};var ca={linear:cb,"step-before":cc,"step-after":cd,basis:cj,"basis-open":ck,"basis-closed":cl,bundle:cm,cardinal:cg,"cardinal-open":ce,"cardinal-closed":cf,monotone:cv},co=[0,2/3,1/3,0],cp=[0,1/3,2/3,0],cq=[0,1/6,2/3,1/6];d3.svg.line.radial=function(){var a=bY(cw);a.radius=a.x,delete a.x,a.angle=a.y,delete a.y;return a},d3.svg.area=function(){return cx(Object)},d3.svg.area.radial=function(){var a=cx(cw);a.radius=a.x,delete a.x,a.innerRadius=a.x0,delete a.x0,a.outerRadius=a.x1,delete a.x1,a.angle=a.y,delete a.y,a.startAngle=a.y0,delete a.y0,a.endAngle=a.y1,delete a.y1;return a},d3.svg.chord=function(){function j(a,b,c,d){return"Q 0,0 "+d}function i(a,b){return"A"+a+","+a+" 0 0,1 "+b}function h(a,b){return a.a0==b.a0&&a.a1==b.a1}function g(a,b,f,g){var h=b.call(a,f,g),i=c.call(a,h,g),j=d.call(a,h,g)+bS,k=e.call(a,h,g)+bS;return{r:i,a0:j,a1:k,p0:[i*Math.cos(j),i*Math.sin(j)],p1:[i*Math.cos(k),i*Math.sin(k)]}}function f(c,d){var e=g(this,a,c,d),f=g(this,b,c,d);return"M"+e.p0+i(e.r,e.p1)+(h(e,f)?j(e.r,e.p1,e.r,e.p0):j(e.r,e.p1,f.r,f.p0)+i(f.r,f.p1)+j(f.r,f.p1,e.r,e.p0))+"Z"}var a=cA,b=cB,c=cC,d=bW,e=bX;f.radius=function(a){if(!arguments.length)return c;c=d3.functor(a);return f},f.source=function(b){if(!arguments.length)return a;a=d3.functor(b);return f},f.target=function(a){if(!arguments.length)return b;b=d3.functor(a);return f},f.startAngle=function(a){if(!arguments.length)return d;d=d3.functor(a);return f},f.endAngle=function(a){if(!arguments.length)return e;e=d3.functor(a);return f};return f},d3.svg.diagonal=function(){function d(d,e){var f=a.call(this,d,e),g=b.call(this,d,e),h=(f.y+g.y)/2,i=[f,{x:f.x,y:h},{x:g.x,y:h},g];i=i.map(c);return"M"+i[0]+"C"+i[1]+" "+i[2]+" "+i[3]}var a=cA,b=cB,c=cF;d.source=function(b){if(!arguments.length)return a;a=d3.functor(b);return d},d.target=function(a){if(!arguments.length)return b;b=d3.functor(a);return d},d.projection=function(a){if(!arguments.length)return c;c=a;return d};return d},d3.svg.diagonal.radial=function(){var a=d3.svg.diagonal(),b=cF,c=a.projection;a.projection=function(a){return arguments.length?c(cG(b=a)):b};return a},d3.svg.mouse=function(a){return cI(a,d3.event)};var cH=/WebKit/.test(navigator.userAgent)?-1:0;d3.svg.touches=function(a){var b=d3.event.touches;return b?d(b).map(function(b){var c=cI(a,b);c.identifier=b.identifier;return c}):[]},d3.svg.symbol=function(){function c(c,d){return(cL[a.call(this,c,d)]||cL.circle)(b.call(this,c,d))}var a=cK,b=cJ;c.type=function(b){if(!arguments.length)return a;a=d3.functor(b);return c},c.size=function(a){if(!arguments.length)return b;b=d3.functor(a);return c};return c};var cL={circle:function(a){var b=Math.sqrt(a/Math.PI);return"M0,"+b+"A"+b+","+b+" 0 1,1 0,"+ -b+"A"+b+","+b+" 0 1,1 0,"+b+"Z"},cross:function(a){var b=Math.sqrt(a/5)/2;return"M"+ -3*b+","+ -b+"H"+ -b+"V"+ -3*b+"H"+b+"V"+ -b+"H"+3*b+"V"+b+"H"+b+"V"+3*b+"H"+ -b+"V"+b+"H"+ -3*b+"Z"},diamond:function(a){var b=Math.sqrt(a/(2*cN)),c=b*cN;return"M0,"+ -b+"L"+c+",0"+" 0,"+b+" "+ -c+",0"+"Z"},square:function(a){var b=Math.sqrt(a)/2;return"M"+ -b+","+ -b+"L"+b+","+ -b+" "+b+","+b+" "+ 
-b+","+b+"Z"},"triangle-down":function(a){var b=Math.sqrt(a/cM),c=b*cM/2;return"M0,"+c+"L"+b+","+ -c+" "+ -b+","+ -c+"Z"},"triangle-up":function(a){var b=Math.sqrt(a/cM),c=b*cM/2;return"M0,"+ -c+"L"+b+","+c+" "+ -b+","+c+"Z"}};d3.svg.symbolTypes=d3.keys(cL);var cM=Math.sqrt(3),cN=Math.tan(30*Math.PI/180);d3.svg.axis=function(){function j(j){j.each(function(k,l,m){function F(a){return j.delay?a.transition().delay(j[m][l].delay).duration(j[m][l].duration).ease(j.ease()):a}var n=d3.select(this),o=a.ticks.apply(a,g),p=h==null?a.tickFormat.apply(a,g):h,q=cQ(a,o,i),r=n.selectAll(".minor").data(q,String),s=r.enter().insert("svg:line","g").attr("class","tick minor").style("opacity",1e-6),t=F(r.exit()).style("opacity",1e-6).remove(),u=F(r).style("opacity",1),v=n.selectAll("g").data(o,String),w=v.enter().insert("svg:g","path").style("opacity",1e-6),x=F(v.exit()).style("opacity",1e-6).remove(),y=F(v).style("opacity",1),z,A=bu(a.range()),B=n.selectAll(".domain").data([0]),C=B.enter().append("svg:path").attr("class","domain"),D=F(B),E=this.__chart__||a;this.__chart__=a.copy(),w.append("svg:line").attr("class","tick"),w.append("svg:text"),y.select("text").text(p);switch(b){case"bottom":z=cO,u.attr("y2",d),w.select("text").attr("dy",".71em").attr("text-anchor","middle"),y.select("line").attr("y2",c),y.select("text").attr("y",Math.max(c,0)+f),D.attr("d","M"+A[0]+","+e+"V0H"+A[1]+"V"+e);break;case"top":z=cO,u.attr("y2",-d),w.select("text").attr("text-anchor","middle"),y.select("line").attr("y2",-c),y.select("text").attr("y",-(Math.max(c,0)+f)),D.attr("d","M"+A[0]+","+ -e+"V0H"+A[1]+"V"+ -e);break;case"left":z=cP,u.attr("x2",-d),w.select("text").attr("dy",".32em").attr("text-anchor","end"),y.select("line").attr("x2",-c),y.select("text").attr("x",-(Math.max(c,0)+f)),D.attr("d","M"+ -e+","+A[0]+"H0V"+A[1]+"H"+ -e);break;case"right":z=cP,u.attr("x2",d),w.select("text").attr("dy",".32em"),y.select("line").attr("x2",c),y.select("text").attr("x",Math.max(c,0)+f),D.attr("d","M"+e+","+A[0]+"H0V"+A[1]+"H"+e)}w.call(z,E),y.call(z,a),x.call(z,a),s.call(z,E),u.call(z,a),t.call(z,a)})}var a=d3.scale.linear(),b="bottom",c=6,d=6,e=6,f=3,g=[10],h,i=0;j.scale=function(b){if(!arguments.length)return a;a=b;return j},j.orient=function(a){if(!arguments.length)return b;b=a;return j},j.ticks=function(){if(!arguments.length)return g;g=arguments;return j},j.tickFormat=function(a){if(!arguments.length)return h;h=a;return j},j.tickSize=function(a,b,f){if(!arguments.length)return c;var g=arguments.length-1;c=+a,d=g>1?+b:c,e=g>0?+arguments[g]:c;return j},j.tickPadding=function(a){if(!arguments.length)return f;f=+a;return j},j.tickSubdivide=function(a){if(!arguments.length)return i;i=+a;return j};return j},d3.behavior={},d3.behavior.drag=function(){function d(){c.apply(this,arguments),cY("dragstart")}function c(){cR=a,cS=d3.event.target,cV=cZ((cT=this).parentNode),cW=0,cU=arguments}function b(){this.on("mousedown.drag",d).on("touchstart.drag",d),d3.select(window).on("mousemove.drag",c$).on("touchmove.drag",c$).on("mouseup.drag",c_,!0).on("touchend.drag",c_,!0).on("click.drag",da,!0)}var a=d3.dispatch("drag","dragstart","dragend");b.on=function(c,d){a[c].add(d);return b};return b};var cR,cS,cT,cU,cV,cW,cX;d3.behavior.zoom=function(){function h(){d.apply(this,arguments);var b=dr(),c,e=Date.now();b.length===1&&e-dg<300&&dw(1+Math.floor(a[2]),c=b[0],df[c.identifier]),dg=e}function g(){d.apply(this,arguments);var b=d3.svg.mouse(dk);dw(d3.event.shiftKey?Math.ceil(a[2]-1):Math.floor(a[2]+1),b,dp(b))}function 
f(){d.apply(this,arguments),de||(de=dp(d3.svg.mouse(dk))),dw(dq()+a[2],d3.svg.mouse(dk),de)}function e(){d.apply(this,arguments),dd=dp(d3.svg.mouse(dk)),dm=!1,d3.event.preventDefault(),window.focus()}function d(){dh=a,di=b.zoom.dispatch,dj=d3.event.target,dk=this,dl=arguments}function c(){this.on("mousedown.zoom",e).on("mousewheel.zoom",f).on("DOMMouseScroll.zoom",f).on("dblclick.zoom",g).on("touchstart.zoom",h),d3.select(window).on("mousemove.zoom",dt).on("mouseup.zoom",du).on("touchmove.zoom",ds).on("touchend.zoom",dr).on("click.zoom",dv,!0)}var a=[0,0,0],b=d3.dispatch("zoom");c.on=function(a,d){b[a].add(d);return c};return c};var dc,dd,de,df={},dg=0,dh,di,dj,dk,dl,dm,dn})() \ No newline at end of file +(function(){function dE(a,b,c){function i(a,b){var c=a.__domain||(a.__domain=a.domain()),d=a.range().map(function(a){return(a-b)/h});a.domain(c).domain(d.map(a.invert))}var d=Math.pow(2,(dq[2]=a)-c[2]),e=dq[0]=b[0]-d*c[0],f=dq[1]=b[1]-d*c[1],g=d3.event,h=Math.pow(2,a);d3.event={scale:h,translate:[e,f],transform:function(a,b){a&&i(a,e),b&&i(b,f)}};try{dr.apply(dt,du)}finally{d3.event=g}g.preventDefault()}function dD(){dw&&ds===d3.event.target&&(d3.event.stopPropagation(),d3.event.preventDefault(),dw=!1,ds=null)}function dC(){dl&&(dv&&ds===d3.event.target&&(dw=!0),dB(),dl=null)}function dB(){dm=null,dl&&(dv=!0,dE(dq[2],d3.svg.mouse(dt),dl))}function dA(){var a=d3.svg.touches(dt);switch(a.length){case 1:var b=a[0];dE(dq[2],b,dn[b.identifier]);break;case 2:var c=a[0],d=a[1],e=[(c[0]+d[0])/2,(c[1]+d[1])/2],f=dn[c.identifier],g=dn[d.identifier],h=[(f[0]+g[0])/2,(f[1]+g[1])/2,f[2]];dE(Math.log(d3.event.scale)/Math.LN2+f[2],e,h)}}function dz(){var a=d3.svg.touches(dt),b=-1,c=a.length,d;while(++b1){var d=bC(a.domain()),e,f=-1,g=b.length,h=(b[1]-b[0])/++c,i,j;while(++f0;)(j=+b[f]-i*h)>=d[0]&&e.push(j);for(--f,i=0;++i9&&(f=c*3/Math.sqrt(f),g[h]=f*d,g[h+1]=f*e));h=-1;while(++h<=i)f=(a[Math.min(i,h+1)][0]-a[Math.max(0,h-1)][0])/(6*(1+g[h]*g[h])),b.push([f||0,g[h]*f||0]);return b}function cB(a){var b=0,c=a.length-1,d=[],e=a[0],f=a[1],g=d[0]=cA(e,f);while(++b1){h=b[1],f=a[i],i++,d+="C"+(e[0]+g[0])+","+(e[1]+g[1])+","+(f[0]-h[0])+","+(f[1]-h[1])+","+f[0]+","+f[1];for(var j=2;j0;j--)e.push(c(f)*j)}else{for(;fi;g--);e=e.slice(f,g)}return e},d.tickFormat=function(a,e){arguments.length<2&&(e=bO);if(arguments.length<1)return e;var f=a/d.ticks().length,g=b===bQ?(h=-1e-15,Math.floor):(h=1e-15,Math.ceil),h;return function(a){return a/c(g(b(a)+h))=c.delay&&(c.flush=c.callback(a)),c=c.next;var d=bz()-b;d>24?(isFinite(d)&&(clearTimeout(bx),bx=setTimeout(by,d)),bw=0):(bw=1,bA(by))}function bu(a){for(var b=0,c=this.length;b0)k[--g].call(l,f);if(c>=1){r(),bs=b,d.end.dispatch.call(l,h,i),bs=0;return 1}}function p(a){if(o.active>b)return r();o.active=b;for(var e in c)(e=c[e].call(l,h,i))&&k.push(e);d.start.dispatch.call(l,h,i),q(a)||d3.timer(q,0,f);return 1}var k=[],l=this,m=a[j][i].delay,n=a[j][i].duration,o=l.__transition__||(l.__transition__={active:0,count:0});++o.count,m<=g?p(g):d3.timer(p,m,f)});return 1},0,f);return a}function bk(a){arguments.length||(a=d3.ascending);return function(b,c){return a(b&&b.__data__,c&&c.__data__)}}function bi(a){h(a,bj);return a}function bh(a){return{__data__:a}}function bg(a,b){function h(){(b.apply(this,arguments)?f:g).call(this)}function g(){if(b=this.classList)return b.remove(a);var b=this.className,d=b.baseVal!=null,e=d?b.baseVal:b;e=m(e.replace(c," ")),d?b.baseVal=e:this.className=e}function f(){if(b=this.classList)return b.add(a);var 
b=this.className,d=b.baseVal!=null,e=d?b.baseVal:b;c.lastIndex=0,c.test(e)||(e=m(e+" "+a),d?b.baseVal=e:this.className=e)}var c=new RegExp("(^|\\s+)"+d3.requote(a)+"(\\s+|$)","g");if(arguments.length<2){var d=this.node();if(e=d.classList)return e.contains(a);var e=d.className;c.lastIndex=0;return c.test(e.baseVal!=null?e.baseVal:e)}return this.each(typeof b=="function"?h:b?f:g)}function be(a){return function(){return bb(a,this)}}function bd(a){return function(){return ba(a,this)}}function _(a){h(a,bc);return a}function $(a,b,c){function g(a){return Math.round(f(a)*255)}function f(a){a>360?a-=360:a<0&&(a+=360);return a<60?d+(e-d)*a/60:a<180?e:a<240?d+(e-d)*(240-a)/60:d}var d,e;a=a%360,a<0&&(a+=360),b=b<0?0:b>1?1:b,c=c<0?0:c>1?1:c,e=c<=.5?c*(1+b):c+b-c*b,d=2*c-e;return Q(g(a+120),g(a),g(a-120))}function Z(a,b,c){this.h=a,this.s=b,this.l=c}function Y(a,b,c){return new Z(a,b,c)}function V(a){var b=parseFloat(a);return a.charAt(a.length-1)==="%"?Math.round(b*2.55):b}function U(a,b,c){var d=Math.min(a/=255,b/=255,c/=255),e=Math.max(a,b,c),f=e-d,g,h,i=(e+d)/2;f?(h=i<.5?f/(e+d):f/(2-e-d),a==e?g=(b-c)/f+(b=1?1:a(b)}}function v(a,b){return{scale:Math.pow(10,(8-b)*3),symbol:a}}function t(a){var b=a.lastIndexOf("."),c=b>=0?a.substring(b):(b=a.length,""),d=[];while(b>0)d.push(a.substring(b-=3,b+3));return d.reverse().join(",")+c}function s(a){return a+""}function r(a,b){return b-(a?1+Math.floor(Math.log(a+Math.pow(10,1+Math.floor(Math.log(a)/Math.LN10)-b))/Math.LN10):1)}function o(a){var b={},c=[];b.add=function(a){for(var d=0;db?1:a>=b?0:NaN},d3.descending=function(a,b){return ba?1:b>=a?0:NaN},d3.mean=function(a,b){var c=a.length,d,e=0,f=-1,g=0;if(arguments.length===1)while(++f1&&(a=a.map(b)),a=a.filter(j);return a.length?d3.quantile(a.sort(d3.ascending),.5):undefined},d3.min=function(a,b){var c=-1,d=a.length,e,f;if(arguments.length===1){while(++cf&&(e=f)}else{while(++cf&&(e=f)}return e},d3.max=function(a,b){var c=-1,d=a.length,e,f;if(arguments.length===1){while(++ce&&(e=f)}else{while(++ce&&(e=f)}return e},d3.sum=function(a,b){var c=0,d=a.length,e,f=-1;if(arguments.length===1)while(++f>1;a[e]>1;b0&&(e=f);return e},d3.last=function(a,b){var c=0,d=a.length,e=a[0],f;arguments.length===1&&(b=d3.ascending);while(++c=b.length)return a;var e=[],f=c[d++],h;for(h in a)e.push({key:h,values:g(a[h],d)});f&&e.sort(function(a,b){return f(a.key,b.key)});return e}function f(c,g){if(g>=b.length)return e?e.call(a,c):d?c.sort(d):c;var h=-1,i=c.length,j=b[g++],k,l,m={};while(++hb)d.push(f);else while((f=a+c*++e)=^]))?([+\- ])?(#)?(0)?([0-9]+)?(,)?(\.[0-9]+)?([a-zA-Z%])?/,q={g:function(a,b){return a.toPrecision(b)},e:function(a,b){return a.toExponential(b)},f:function(a,b){return a.toFixed(b)},r:function(a,b){return d3.round(a,b=r(a,b)).toFixed(Math.max(0,Math.min(20,b)))}},u=["y","z","a","f","p","n","μ","m","","k","M","G","T","P","E","Z","Y"].map(v);d3.formatPrefix=function(a,b){var c=0;a&&(a<0&&(a*=-1),b&&(a=d3.round(a,r(a,b))),c=1+Math.floor(1e-12+Math.log(a)/Math.LN10),c=Math.max(-24,Math.min(24,Math.floor((c<=0?c+1:c-1)/3)*3)));return u[8+c/3]};var w=E(2),x=E(3),y={linear:function(){return D},poly:E,quad:function(){return w},cubic:function(){return x},sin:function(){return F},exp:function(){return G},circle:function(){return H},elastic:I,back:J,bounce:function(){return K}},z={"in":function(a){return a},out:B,"in-out":C,"out-in":function(a){return C(B(a))}};d3.ease=function(a){var b=a.indexOf("-"),c=b>=0?a.substring(0,b):a,d=b>=0?a.substring(b+1):"in";return 
A(z[d](y[c].apply(null,Array.prototype.slice.call(arguments,1))))},d3.event=null,d3.interpolate=function(a,b){var c=d3.interpolators.length,d;while(--c>=0&&!(d=d3.interpolators[c](a,b)));return d},d3.interpolateNumber=function(a,b){b-=a;return function(c){return a+b*c}},d3.interpolateRound=function(a,b){b-=a;return function(c){return Math.round(a+b*c)}},d3.interpolateString=function(a,b){var c,d,e,f=0,g=0,h=[],i=[],j,k;L.lastIndex=0;for(d=0;c=L.exec(b);++d)c.index&&h.push(b.substring(f,g=c.index)),i.push({i:h.length,x:c[0]}),h.push(null),f=L.lastIndex;f1){while(++e0&&(a=a.substring(0,e));return arguments.length<2?(e=this.node()[d])&&e._:this.each(function(e,f){function h(a){var c=d3.event;d3.event=a;try{b.call(g,g.__data__,f)}finally{d3.event=c}}var g=this;g[d]&&g.removeEventListener(a,g[d],c),b&&g.addEventListener(a,g[d]=h,c),h._=b})},bc.each=function(a){for(var b=-1,c=this.length;++b=b_?e?"M0,"+f+"A"+f+","+f+" 0 1,1 0,"+ -f+"A"+f+","+f+" 0 1,1 0,"+f+"M0,"+e+"A"+e+","+e+" 0 1,0 0,"+ -e+"A"+e+","+e+" 0 1,0 0,"+e+"Z":"M0,"+f+"A"+f+","+f+" 0 1,1 0,"+ -f+"A"+f+","+f+" 0 1,1 0,"+f+"Z":e?"M"+f*k+","+f*l+"A"+f+","+f+" 0 "+j+",1 "+f*m+","+f*n+"L"+e*m+","+e*n+"A"+e+","+e+" 0 "+j+",0 "+e*k+","+e*l+"Z":"M"+f*k+","+f*l+"A"+f+","+f+" 0 "+j+",1 "+f*m+","+f*n+"L0,0"+"Z"}var a=ca,b=cb,c=cc,d=cd;e.innerRadius=function(b){if(!arguments.length)return a;a=d3.functor(b);return e},e.outerRadius=function(a){if(!arguments.length)return b;b=d3.functor(a);return e},e.startAngle=function(a){if(!arguments.length)return c;c=d3.functor(a);return e},e.endAngle=function(a){if(!arguments.length)return d;d=d3.functor(a);return e},e.centroid=function(){var e=(a.apply(this,arguments)+b.apply(this,arguments))/2,f=(c.apply(this,arguments)+d.apply(this,arguments))/2+b$;return[Math.cos(f)*e,Math.sin(f)*e]};return e};var b$=-Math.PI/2,b_=2*Math.PI-1e-6;d3.svg.line=function(){return ce(Object)};var ci={linear:cj,"step-before":ck,"step-after":cl,basis:cr,"basis-open":cs,"basis-closed":ct,bundle:cu,cardinal:co,"cardinal-open":cm,"cardinal-closed":cn,monotone:cD},cw=[0,2/3,1/3,0],cx=[0,1/3,2/3,0],cy=[0,1/6,2/3,1/6];d3.svg.line.radial=function(){var a=ce(cE);a.radius=a.x,delete a.x,a.angle=a.y,delete a.y;return a},ck.reverse=cl,cl.reverse=ck,d3.svg.area=function(){return cF(Object)},d3.svg.area.radial=function(){var a=cF(cE);a.radius=a.x,delete a.x,a.innerRadius=a.x0,delete a.x0,a.outerRadius=a.x1,delete a.x1,a.angle=a.y,delete a.y,a.startAngle=a.y0,delete a.y0,a.endAngle=a.y1,delete a.y1;return a},d3.svg.chord=function(){function j(a,b,c,d){return"Q 0,0 "+d}function i(a,b){return"A"+a+","+a+" 0 0,1 "+b}function h(a,b){return a.a0==b.a0&&a.a1==b.a1}function g(a,b,f,g){var h=b.call(a,f,g),i=c.call(a,h,g),j=d.call(a,h,g)+b$,k=e.call(a,h,g)+b$;return{r:i,a0:j,a1:k,p0:[i*Math.cos(j),i*Math.sin(j)],p1:[i*Math.cos(k),i*Math.sin(k)]}}function f(c,d){var e=g(this,a,c,d),f=g(this,b,c,d);return"M"+e.p0+i(e.r,e.p1)+(h(e,f)?j(e.r,e.p1,e.r,e.p0):j(e.r,e.p1,f.r,f.p0)+i(f.r,f.p1)+j(f.r,f.p1,e.r,e.p0))+"Z"}var a=cI,b=cJ,c=cK,d=cc,e=cd;f.radius=function(a){if(!arguments.length)return c;c=d3.functor(a);return f},f.source=function(b){if(!arguments.length)return a;a=d3.functor(b);return f},f.target=function(a){if(!arguments.length)return b;b=d3.functor(a);return f},f.startAngle=function(a){if(!arguments.length)return d;d=d3.functor(a);return f},f.endAngle=function(a){if(!arguments.length)return e;e=d3.functor(a);return f};return f},d3.svg.diagonal=function(){function d(d,e){var 
f=a.call(this,d,e),g=b.call(this,d,e),h=(f.y+g.y)/2,i=[f,{x:f.x,y:h},{x:g.x,y:h},g];i=i.map(c);return"M"+i[0]+"C"+i[1]+" "+i[2]+" "+i[3]}var a=cI,b=cJ,c=cN;d.source=function(b){if(!arguments.length)return a;a=d3.functor(b);return d},d.target=function(a){if(!arguments.length)return b;b=d3.functor(a);return d},d.projection=function(a){if(!arguments.length)return c;c=a;return d};return d},d3.svg.diagonal.radial=function(){var a=d3.svg.diagonal(),b=cN,c=a.projection;a.projection=function(a){return arguments.length?c(cO(b=a)):b};return a},d3.svg.mouse=function(a){return cQ(a,d3.event)};var cP=/WebKit/.test(navigator.userAgent)?-1:0;d3.svg.touches=function(a){var b=d3.event.touches;return b?d(b).map(function(b){var c=cQ(a,b);c.identifier=b.identifier;return c}):[]},d3.svg.symbol=function(){function c(c,d){return(cT[a.call(this,c,d)]||cT.circle)(b.call(this,c,d))}var a=cS,b=cR;c.type=function(b){if(!arguments.length)return a;a=d3.functor(b);return c},c.size=function(a){if(!arguments.length)return b;b=d3.functor(a);return c};return c};var cT={circle:function(a){var b=Math.sqrt(a/Math.PI);return"M0,"+b+"A"+b+","+b+" 0 1,1 0,"+ -b+"A"+b+","+b+" 0 1,1 0,"+b+"Z"},cross:function(a){var b=Math.sqrt(a/5)/2;return"M"+ -3*b+","+ -b+"H"+ -b+"V"+ -3*b+"H"+b+"V"+ -b+"H"+3*b+"V"+b+"H"+b+"V"+3*b+"H"+ -b+"V"+b+"H"+ -3*b+"Z"},diamond:function(a){var b=Math.sqrt(a/(2*cV)),c=b*cV;return"M0,"+ -b+"L"+c+",0"+" 0,"+b+" "+ -c+",0"+"Z"},square:function(a){var b=Math.sqrt(a)/2;return"M"+ -b+","+ -b+"L"+b+","+ -b+" "+b+","+b+" "+ -b+","+b+"Z"},"triangle-down":function(a){var b=Math.sqrt(a/cU),c=b*cU/2;return"M0,"+c+"L"+b+","+ -c+" "+ -b+","+ -c+"Z"},"triangle-up":function(a){var b=Math.sqrt(a/cU),c=b*cU/2;return"M0,"+ -c+"L"+b+","+c+" "+ -b+","+c+"Z"}};d3.svg.symbolTypes=d3.keys(cT);var cU=Math.sqrt(3),cV=Math.tan(30*Math.PI/180);d3.svg.axis=function(){function j(j){j.each(function(k,l,m){var n=d3.select(this),o=j.delay?function(a){var b=bs;try{bs=j.id;return a.transition().delay(j[m][l].delay).duration(j[m][l].duration).ease(j.ease())}finally{bs=b}}:Object,p=a.ticks.apply(a,g),q=h==null?a.tickFormat.apply(a,g):h,r=cY(a,p,i),s=n.selectAll(".minor").data(r,String),t=s.enter().insert("svg:line","g").attr("class","tick minor").style("opacity",1e-6),u=o(s.exit()).style("opacity",1e-6).remove(),v=o(s).style("opacity",1),w=n.selectAll("g").data(p,String),x=w.enter().insert("svg:g","path").style("opacity",1e-6),y=o(w.exit()).style("opacity",1e-6).remove(),z=o(w).style("opacity",1),A,B=bC(a.range()),C=n.selectAll(".domain").data([0]),D=C.enter().append("svg:path").attr("class","domain"),E=o(C),F=this.__chart__||a;this.__chart__=a.copy(),x.append("svg:line").attr("class","tick"),x.append("svg:text"),z.select("text").text(q);switch(b){case"bottom":A=cW,v.attr("x2",0).attr("y2",d),z.select("line").attr("x2",0).attr("y2",c),z.select("text").attr("x",0).attr("y",Math.max(c,0)+f).attr("dy",".71em").attr("text-anchor","middle"),E.attr("d","M"+B[0]+","+e+"V0H"+B[1]+"V"+e);break;case"top":A=cW,v.attr("x2",0).attr("y2",-d),z.select("line").attr("x2",0).attr("y2",-c),z.select("text").attr("x",0).attr("y",-(Math.max(c,0)+f)).attr("dy","0em").attr("text-anchor","middle"),E.attr("d","M"+B[0]+","+ -e+"V0H"+B[1]+"V"+ -e);break;case"left":A=cX,v.attr("x2",-d).attr("y2",0),z.select("line").attr("x2",-c).attr("y2",0),z.select("text").attr("x",-(Math.max(c,0)+f)).attr("y",0).attr("dy",".32em").attr("text-anchor","end"),E.attr("d","M"+ -e+","+B[0]+"H0V"+B[1]+"H"+ 
-e);break;case"right":A=cX,v.attr("x2",d).attr("y2",0),z.select("line").attr("x2",c).attr("y2",0),z.select("text").attr("x",Math.max(c,0)+f).attr("y",0).attr("dy",".32em").attr("text-anchor","start"),E.attr("d","M"+e+","+B[0]+"H0V"+B[1]+"H"+e)}x.call(A,F),z.call(A,a),y.call(A,a),t.call(A,F),v.call(A,a),u.call(A,a)})}var a=d3.scale.linear(),b="bottom",c=6,d=6,e=6,f=3,g=[10],h,i=0;j.scale=function(b){if(!arguments.length)return a;a=b;return j},j.orient=function(a){if(!arguments.length)return b;b=a;return j},j.ticks=function(){if(!arguments.length)return g;g=arguments;return j},j.tickFormat=function(a){if(!arguments.length)return h;h=a;return j},j.tickSize=function(a,b,f){if(!arguments.length)return c;var g=arguments.length-1;c=+a,d=g>1?+b:c,e=g>0?+arguments[g]:c;return j},j.tickPadding=function(a){if(!arguments.length)return f;f=+a;return j},j.tickSubdivide=function(a){if(!arguments.length)return i;i=+a;return j};return j},d3.behavior={},d3.behavior.drag=function(){function d(){c.apply(this,arguments),de("dragstart")}function c(){cZ=a,c$=d3.event.target,db=df((c_=this).parentNode),dc=0,da=arguments}function b(){this.on("mousedown.drag",d).on("touchstart.drag",d),d3.select(window).on("mousemove.drag",dg).on("touchmove.drag",dg).on("mouseup.drag",dh,!0).on("touchend.drag",dh,!0).on("click.drag",di,!0)}var a=d3.dispatch("drag","dragstart","dragend");b.on=function(c,d){a[c].add(d);return b};return b};var cZ,c$,c_,da,db,dc,dd;d3.behavior.zoom=function(){function h(){d.apply(this,arguments);var b=dz(),c,e=Date.now();b.length===1&&e-dp<300&&dE(1+Math.floor(a[2]),c=b[0],dn[c.identifier]),dp=e}function g(){d.apply(this,arguments);var b=d3.svg.mouse(dt);dE(d3.event.shiftKey?Math.ceil(a[2]-1):Math.floor(a[2]+1),b,dx(b))}function f(){d.apply(this,arguments),dm||(dm=dx(d3.svg.mouse(dt))),dE(dy()+a[2],d3.svg.mouse(dt),dm)}function e(){d.apply(this,arguments),dl=dx(d3.svg.mouse(dt)),dv=!1,d3.event.preventDefault(),window.focus()}function d(){dq=a,dr=b.zoom.dispatch,ds=d3.event.target,dt=this,du=arguments}function c(){this.on("mousedown.zoom",e).on("mousewheel.zoom",f).on("DOMMouseScroll.zoom",f).on("dblclick.zoom",g).on("touchstart.zoom",h),d3.select(window).on("mousemove.zoom",dB).on("mouseup.zoom",dC).on("touchmove.zoom",dA).on("touchend.zoom",dz).on("click.zoom",dD,!0)}var a=[0,0,0],b=d3.dispatch("zoom");c.on=function(a,d){b[a].add(d);return c};return c};var dk,dl,dm,dn={},dp=0,dq,dr,ds,dt,du,dv,dw})() \ No newline at end of file -- cgit v1.2.3-54-g00ecf From fa38fa74d4d4186647b6e8f5c0e9661ad65d2957 Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Tue, 18 Oct 2011 10:55:06 -0500 Subject: Add architecture to package details and files page titles And remove the not totally necessary "Package Details" text as that seems like a reasonable assumption for the base page. 
Signed-off-by: Dan McGee --- templates/packages/details.html | 2 +- templates/packages/files.html | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/templates/packages/details.html b/templates/packages/details.html index 1016b43a..2998592f 100644 --- a/templates/packages/details.html +++ b/templates/packages/details.html @@ -2,7 +2,7 @@ {% load cache %} {% load package_extras %} -{% block title %}Arch Linux - {{ pkg.pkgname }} {{ pkg.full_version }} - Package Details{% endblock %} +{% block title %}Arch Linux - {{ pkg.pkgname }} {{ pkg.full_version }} ({{ pkg.arch.name }}){% endblock %} {% block navbarclass %}anb-packages{% endblock %} {% load package_extras %} diff --git a/templates/packages/files.html b/templates/packages/files.html index 362e62cd..50ab6e83 100644 --- a/templates/packages/files.html +++ b/templates/packages/files.html @@ -1,5 +1,5 @@ {% extends "base.html" %} -{% block title %}Arch Linux - {{ pkg.pkgname }} {{ pkg.full_version }} - Package File List{% endblock %} +{% block title %}Arch Linux - {{ pkg.pkgname }} {{ pkg.full_version }} ({{ pkg.arch.name }}) - File List{% endblock %} {% block navbarclass %}anb-packages{% endblock %} {% block content %} -- cgit v1.2.3-54-g00ecf From 23020b33a0f8bf48c76d82726f143a09fca0fd1f Mon Sep 17 00:00:00 2001 From: Dan McGee Date: Tue, 18 Oct 2011 10:59:14 -0500 Subject: Refresh title and other aspects of flag package templates * Update to contain version and architecture * Update some page text to be more descriptive * Add a meta tag to not directly index these pages in search engines Signed-off-by: Dan McGee <dan@archlinux.org> --- templates/packages/flag.html | 7 ++++--- templates/packages/flag_confirmed.html | 6 ++++-- templates/packages/flagged.html | 5 +++-- 3 files changed, 11 insertions(+), 7 deletions(-) diff --git a/templates/packages/flag.html b/templates/packages/flag.html index 34dfe34e..5fc9c91d 100644 --- a/templates/packages/flag.html +++ b/templates/packages/flag.html @@ -1,18 +1,19 @@ {% extends "base.html" %} {% load package_extras %} -{% block title %}Arch Linux - Flag Package - {{ package.pkgname }}{% endblock %} +{% block title %}Arch Linux - Flag Package - {{ package.pkgname }} {{ package.full_version }} ({{ package.arch.name }}){% endblock %} +{% block head %}<meta name="robots" content="noindex"/>{% endblock %} {% block navbarclass %}anb-packages{% endblock %} {% block content %} <div id="pkg-flag" class="box"> - <h2>Flag Package: {{ package.pkgname }}</h2> + <h2>Flag Package: {{ package.pkgname }} {{ package.full_version }} ({{ package.arch.name }})</h2> <p>If you notice a package is out-of-date (i.e., there is a newer <strong>stable</strong> release available), then please notify us using the form below. 
Do <em>not</em> report bugs via this form!</p> - <p>Note that all of the following packages will be marked out of date:</p> + <p>Note that the following {{ packages|length }} package{{ packages|pluralize }} will be marked out of date:</p> <ul> {% for pkg in packages %} <li>{% pkg_details_link pkg %} {{ pkg.full_version }} [{{ pkg.repo.name|lower }}] ({{ pkg.arch.name }})</li> diff --git a/templates/packages/flag_confirmed.html b/templates/packages/flag_confirmed.html index 398212f8..6274adbb 100644 --- a/templates/packages/flag_confirmed.html +++ b/templates/packages/flag_confirmed.html @@ -1,14 +1,16 @@ {% extends "base.html" %} {% load package_extras %} -{% block title %}Arch Linux - Package Flagged - {{ package.pkgname }}{% endblock %} +{% block title %}Arch Linux - Package Flagged - {{ package.pkgname }} {{ package.full_version }} ({{ package.arch.name }}){% endblock %} +{% block head %}<meta name="robots" content="noindex"/>{% endblock %} {% block navbarclass %}anb-packages{% endblock %} {% block content %} <div id="pkg-flag" class="box"> <h2>Package Flagged - {{ package.pkgname }}</h2> - <p>Thank you, the maintainers have been notified the following packages are out-of-date:</p> + <p>Thank you, the maintainers have been notified the following + {{ packages|length }} package{{ packages|pluralize }} are out-of-date:</p> <ul> {% for pkg in packages %} <li>{% pkg_details_link pkg %} {{ pkg.full_version }} [{{ pkg.repo.name|lower }}] ({{ pkg.arch.name }})</li> diff --git a/templates/packages/flagged.html b/templates/packages/flagged.html index a99a6924..bbe0fad5 100644 --- a/templates/packages/flagged.html +++ b/templates/packages/flagged.html @@ -1,12 +1,13 @@ {% extends "base.html" %} {% load package_extras %} -{% block title %}Arch Linux - Flag Package - {{ pkg.pkgname }}{% endblock %} +{% block title %}Arch Linux - Flag Package - {{ pkg.pkgname }} {{ pkg.full_version }} ({{ pkg.arch.name }}){% endblock %} +{% block head %}<meta name="robots" content="noindex"/>{% endblock %} {% block navbarclass %}anb-packages{% endblock %} {% block content %} <div id="pkg-flagged-error" class="box"> - <h2>Error: Package already flagged</h2> + <h2>Package {{ pkg.pkgname }} {{ pkg.full_version }} ({{ pkg.arch.name }}) already flagged</h2> <p><strong>{{pkg.pkgname}}</strong> has already been flagged out-of-date.</p> -- cgit v1.2.3-54-g00ecf From e31d7f864ddfbce49eda91aa01654b76dcd009b9 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Wed, 19 Oct 2011 11:04:42 -0500 Subject: Use admin_media_prefix helper everywhere in search template Signed-off-by: Dan McGee <dan@archlinux.org> --- templates/packages/search.html | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/templates/packages/search.html b/templates/packages/search.html index 4744aa88..800b883d 100644 --- a/templates/packages/search.html +++ b/templates/packages/search.html @@ -1,10 +1,12 @@ {% extends "base.html" %} {% load package_extras %} +{% load adminmedia %} + {% block title %}Arch Linux - Package Database{% endblock %} {% block navbarclass %}anb-packages{% endblock %} {% block head %} -<link rel="stylesheet" type="text/css" href="/media/admin_media/css/widgets.css" /> +<link rel="stylesheet" type="text/css" href="{% admin_media_prefix %}css/widgets.css" /> {% endblock %} {% block content %} @@ -170,7 +172,7 @@ <h3>Package Search</h3> </div> <script type="text/javascript" src="/jsi18n/"></script> -{% load adminmedia %}<script type="text/javascript" src="{% admin_media_prefix %}js/core.js"></script> 
<script type="text/javascript">window.__admin_media_prefix__ = "{% filter escapejs %}{% admin_media_prefix %}{% endfilter %}";</script> +<script type="text/javascript" src="{% admin_media_prefix %}js/core.js"></script> {{search_form.media}} {% endblock %} -- cgit v1.2.3-54-g00ecf From 002574cce1d9756ba28a87a038f6906b566f2e2a Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Fri, 21 Oct 2011 18:16:49 -0500 Subject: Accept 40 hex char PGP key signatures only Signed-off-by: Dan McGee <dan@archlinux.org> --- main/models.py | 17 ++++++----------- main/templatetags/pgp.py | 5 ++--- 2 files changed, 8 insertions(+), 14 deletions(-) diff --git a/main/models.py b/main/models.py index ec1f0d2c..bdb84624 100644 --- a/main/models.py +++ b/main/models.py @@ -36,18 +36,13 @@ def to_python(self, value): if value.startswith('0x'): value = value[2:] value = value.split('/')[-1] - return value + # make all (hex letters) uppercase + return value.upper() def formfield(self, **kwargs): # override so we don't set max_length form field attribute return models.Field.formfield(self, **kwargs) -def validate_pgp_key_length(value): - if len(value) not in (8, 16, 40): - raise ValidationError( - u'Ensure this value has 8, 16, or 40 characters (it has %d).' % len(value), - 'pgp_key_value') - class UserProfile(models.Model): notify = models.BooleanField( "Send notifications", @@ -66,10 +61,10 @@ class UserProfile(models.Model): help_text="Required field") other_contact = models.CharField(max_length=100, null=True, blank=True) pgp_key = PGPKeyField(max_length=40, null=True, blank=True, - verbose_name="PGP key", validators=[RegexValidator(r'^[0-9A-F]+$', - "Ensure this value consists of only hex characters.", 'hex_char'), - validate_pgp_key_length], - help_text="PGP Key ID or fingerprint (8, 16, or 40 hex digits)") + verbose_name="PGP key fingerprint", + validators=[RegexValidator(r'^[0-9A-F]{40}$', + "Ensure this value consists of 40 hex characters.", 'hex_char')], + help_text="consists of 40 hex digits; use `gpg --fingerprint`") website = models.CharField(max_length=200, null=True, blank=True) yob = models.IntegerField("Year of birth", null=True, blank=True) location = models.CharField(max_length=50, null=True, blank=True) diff --git a/main/templatetags/pgp.py b/main/templatetags/pgp.py index 956de892..f875c11e 100644 --- a/main/templatetags/pgp.py +++ b/main/templatetags/pgp.py @@ -4,7 +4,6 @@ register = template.Library() def format_key(key_id): - print len(key_id) if len(key_id) in (8, 20): return u'0x%s' % key_id elif len(key_id) == 40: @@ -24,7 +23,7 @@ def pgp_key_link(key_id): return format_key(key_id) url = 'http://%s/pks/lookup?op=vindex&fingerprint=on&exact=on&search=0x%s' % \ (pgp_server, key_id) - values = (url, key_id, format_key(key_id)) - return '<a href="%s" title="PGP key search for 0x%s">%s</a>' % values + values = (url, format_key(key_id), key_id[-8:]) + return '<a href="%s" title="PGP key search for %s">0x%s</a>' % values # vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From 2d5777b11d229d115a31a6c82236570002c2dd57 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Fri, 21 Oct 2011 18:49:00 -0500 Subject: Add a generate_keyring command This grabs all the PGP keys from the developer profiles and adds them to the keyrings. Obviously we may want to do more in the future such as filter by groups, active status, etc. but this is just a first iteration. 
Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/management/commands/generate_keyring.py | 59 +++++++++++++++++++++++++++ devel/management/commands/rematch_packager.py | 2 +- main/models.py | 2 +- 3 files changed, 61 insertions(+), 2 deletions(-) create mode 100644 devel/management/commands/generate_keyring.py diff --git a/devel/management/commands/generate_keyring.py b/devel/management/commands/generate_keyring.py new file mode 100644 index 00000000..b95d5a8e --- /dev/null +++ b/devel/management/commands/generate_keyring.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +""" +generate_keyring command + +Assemble a GPG keyring with all known developer keys. + +Usage: ./manage.py generate_keyring <keyserver> <keyring_path> +""" + +from django.core.management.base import BaseCommand, CommandError +from django.db.models import Q + +import logging +import subprocess +import sys + +from main.models import UserProfile + +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s -> %(levelname)s: %(message)s', + datefmt='%Y-%m-%d %H:%M:%S', + stream=sys.stderr) +logger = logging.getLogger() + +class Command(BaseCommand): + args = "<keyserver> <keyring_path>" + help = "Assemble a GPG keyring with all known developer keys." + + def handle(self, *args, **options): + v = int(options.get('verbosity', None)) + if v == 0: + logger.level = logging.ERROR + elif v == 1: + logger.level = logging.INFO + elif v == 2: + logger.level = logging.DEBUG + + if len(args) != 2: + raise CommandError("keyserver and keyring_path must be provided") + + return generate_keyring(args[0], args[1]) + +def generate_keyring(keyserver, keyring): + logger.info("getting all known key IDs") + + exclude = Q(pgp_key__isnull=True) & Q(pgp_key__exact="") + key_ids = UserProfile.objects.exclude( + exclude).values_list("pgp_key", flat=True) + logger.info("%d keys fetched from user profiles", len(key_ids)) + + gpg_cmd = ["gpg", "--no-default-keyring", "--keyring", keyring, + "--keyserver", keyserver, "--recv-keys"] + logger.info("running command: %r", gpg_cmd) + gpg_cmd.extend(key_ids) + subprocess.check_call(gpg_cmd) + logger.info("keyring at %s successfully updated", keyring) + +# vim: set ts=4 sw=4 et: diff --git a/devel/management/commands/rematch_packager.py b/devel/management/commands/rematch_packager.py index ba6e6a54..461d83ab 100644 --- a/devel/management/commands/rematch_packager.py +++ b/devel/management/commands/rematch_packager.py @@ -24,7 +24,7 @@ logger = logging.getLogger() class Command(NoArgsCommand): - help = "Runs a check on all active mirror URLs to determine if they are reachable via IPv4 and/or v6." + help = "Match all packages with a packager_str but NULL packager_id to a packager if we can find one." def handle_noargs(self, **options): v = int(options.get('verbosity', None)) diff --git a/main/models.py b/main/models.py index bdb84624..440201da 100644 --- a/main/models.py +++ b/main/models.py @@ -27,7 +27,7 @@ class PGPKeyField(models.CharField): _south_introspects = True def to_python(self, value): - if value == '': + if value == '' or value is None: return None value = super(PGPKeyField, self).to_python(value) # remove all spaces -- cgit v1.2.3-54-g00ecf From 30b0c7b1a990ffbb244d103ac3e6022d4be60ec5 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Mon, 24 Oct 2011 16:28:13 -0500 Subject: Add icons for Apple iOS web application bookmarks These will be used for an icon if adding a bookmark as a Home Screen icon on iOS devices. 
We add the three recommended sizes for old iPhone screen size, the new iPhone resolution, and the iPad. Signed-off-by: Dan McGee <dan@archlinux.org> --- media/logos/apple-touch-icon-114x114.png | Bin 0 -> 3240 bytes media/logos/apple-touch-icon-57x57.png | Bin 0 -> 1638 bytes media/logos/apple-touch-icon-72x72.png | Bin 0 -> 2076 bytes templates/base.html | 3 +++ 4 files changed, 3 insertions(+) create mode 100644 media/logos/apple-touch-icon-114x114.png create mode 100644 media/logos/apple-touch-icon-57x57.png create mode 100644 media/logos/apple-touch-icon-72x72.png diff --git a/media/logos/apple-touch-icon-114x114.png b/media/logos/apple-touch-icon-114x114.png new file mode 100644 index 00000000..e6365ee2 Binary files /dev/null and b/media/logos/apple-touch-icon-114x114.png differ diff --git a/media/logos/apple-touch-icon-57x57.png b/media/logos/apple-touch-icon-57x57.png new file mode 100644 index 00000000..d2d78262 Binary files /dev/null and b/media/logos/apple-touch-icon-57x57.png differ diff --git a/media/logos/apple-touch-icon-72x72.png b/media/logos/apple-touch-icon-72x72.png new file mode 100644 index 00000000..170656e0 Binary files /dev/null and b/media/logos/apple-touch-icon-72x72.png differ diff --git a/templates/base.html b/templates/base.html index 1a217505..3b509070 100644 --- a/templates/base.html +++ b/templates/base.html @@ -8,6 +8,9 @@ <link rel="stylesheet" type="text/css" href="/media/archweb-print.css" media="print" /> <link rel="icon" type="image/x-icon" href="/media/favicon.ico" /> <link rel="shortcut icon" type="image/x-icon" href="/media/favicon.ico" /> + <link rel="apple-touch-icon" href="/media/logos/apple-touch-icon-57x57.png" /> + <link rel="apple-touch-icon" sizes="72x72" href="/media/logos/apple-touch-icon-72x72.png" /> + <link rel="apple-touch-icon" sizes="114x114" href="/media/logos/apple-touch-icon-114x114.png" /> <link rel="search" type="application/opensearchdescription+xml" href="{% url opensearch-packages as osp %}{{ osp }}" title="Arch Linux Packages" /> {% block head %}{% endblock %} </head> -- cgit v1.2.3-54-g00ecf From d9a1bfea827e90b88927b2eb598ad7e0355a4ee9 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Mon, 24 Oct 2011 16:30:12 -0500 Subject: Add a logo-only SVG file Signed-off-by: Dan McGee <dan@archlinux.org> --- media/logos/archlinux-logo-only.svg | 76 +++++++++++++++++++++++++++++++++++++ 1 file changed, 76 insertions(+) create mode 100644 media/logos/archlinux-logo-only.svg diff --git a/media/logos/archlinux-logo-only.svg b/media/logos/archlinux-logo-only.svg new file mode 100644 index 00000000..82a19110 --- /dev/null +++ b/media/logos/archlinux-logo-only.svg @@ -0,0 +1,76 @@ +<?xml version="1.0" encoding="UTF-8" standalone="no"?> +<!-- Created with Inkscape (http://www.inkscape.org/) --> + +<svg + xmlns:dc="http://purl.org/dc/elements/1.1/" + xmlns:cc="http://creativecommons.org/ns#" + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns:svg="http://www.w3.org/2000/svg" + xmlns="http://www.w3.org/2000/svg" + xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" + xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" + version="1.0" + width="200" + height="200" + id="svg2424" + inkscape:version="0.48.2 r9819" + sodipodi:docname="archlinux-logo-only.svg" + inkscape:export-filename="/home/dmcgee/projects/archweb/media/logos/apple-touch-icon-72x72.png" + inkscape:export-xdpi="32.400002" + inkscape:export-ydpi="32.400002"> + <metadata + id="metadata3206"> + <rdf:RDF> + <cc:Work + 
rdf:about=""> + <dc:format>image/svg+xml</dc:format> + <dc:type + rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> + <dc:title></dc:title> + </cc:Work> + </rdf:RDF> + </metadata> + <sodipodi:namedview + pagecolor="#ffffff" + bordercolor="#666666" + borderopacity="1" + objecttolerance="10" + gridtolerance="10" + guidetolerance="10" + inkscape:pageopacity="0" + inkscape:pageshadow="2" + inkscape:window-width="1366" + inkscape:window-height="716" + id="namedview3204" + showgrid="false" + units="px" + inkscape:zoom="2.0066667" + inkscape:cx="234.71761" + inkscape:cy="99.708458" + inkscape:window-x="0" + inkscape:window-y="256" + inkscape:window-maximized="1" + inkscape:current-layer="svg2424" /> + <defs + id="defs2426" /> + <path + d="M 99.982978,9.4299079 C 91.920693,29.201751 87.057917,42.128656 78.081625,61.311918 83.585216,67.143195 90.340614,73.933635 101.31135,81.606952 89.516733,76.756627 81.471355,71.884513 75.458912,66.827107 63.970969,90.7953 45.972667,124.94368 9.4483145,190.57009 38.155229,173.99182 60.408321,163.77892 81.147131,159.87687 c -0.890533,-3.82576 -1.396835,-7.9676 -1.362448,-12.29473 l 0.03406,-0.91557 c 0.455512,-18.39852 10.022891,-32.53528 21.356367,-31.57611 11.33348,0.95916 20.14288,16.65456 19.68737,35.05308 -0.0857,3.45517 -0.47603,6.79044 -1.15808,9.87502 20.51365,4.01105 42.52879,14.20216 70.84729,30.55153 -5.58386,-10.27831 -10.56793,-19.54295 -15.32754,-28.37161 -7.49716,-5.80948 -15.31706,-13.37379 -31.26818,-21.5594 10.96388,2.84479 18.81388,6.13649 24.9328,9.80965 C 120.49649,60.352755 116.57776,48.374114 99.982978,9.4299079 z" + id="path2518" + style="fill:#1793d1;fill-opacity:1;fill-rule:evenodd;stroke:none" + inkscape:connector-curvature="0" /> + <g + id="text2638" + style="font-size:8.25130367px;font-style:normal;font-weight:normal;fill:#1793d1;fill-opacity:1;stroke:none;font-family:DejaVu Sans Mono" + transform="matrix(0.8746356,0,0,0.8746356,-26.046795,-109.83508)"> + <path + d="m 239.84053,313.69965 0,-5.20945 -1.94598,0 0,-0.697 4.68164,0 0,0.697 -1.95404,0 0,5.20945 -0.78162,0" + id="path3940" + style="fill:#1793d1;fill-opacity:1" + inkscape:connector-curvature="0" /> + <path + d="m 243.39004,313.69965 0,-5.90645 1.17646,0 1.39805,4.18205 c 0.12892,0.38947 0.22293,0.6809 0.28202,0.87429 0.0671,-0.21488 0.1719,-0.53048 0.31426,-0.94681 l 1.41417,-4.10953 1.05155,0 0,5.90645 -0.75341,0 0,-4.94353 -1.71634,4.94353 -0.70506,0 -1.70828,-5.02814 0,5.02814 -0.75342,0" + id="path3942" + style="fill:#1793d1;fill-opacity:1" + inkscape:connector-curvature="0" /> + </g> +</svg> -- cgit v1.2.3-54-g00ecf From a8f8f199ab7f6186d6c7b63a8d99cfc946cd9b9c Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Tue, 25 Oct 2011 08:56:40 -0500 Subject: Save SVG as plain, not Inkscape Signed-off-by: Dan McGee <dan@archlinux.org> --- media/logos/archlinux-logo-only.svg | 43 ++++++------------------------------- 1 file changed, 6 insertions(+), 37 deletions(-) diff --git a/media/logos/archlinux-logo-only.svg b/media/logos/archlinux-logo-only.svg index 82a19110..09be94a7 100644 --- a/media/logos/archlinux-logo-only.svg +++ b/media/logos/archlinux-logo-only.svg @@ -7,17 +7,10 @@ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" - xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" - xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" version="1.0" width="200" height="200" - id="svg2424" - inkscape:version="0.48.2 r9819" - 
sodipodi:docname="archlinux-logo-only.svg" - inkscape:export-filename="/home/dmcgee/projects/archweb/media/logos/apple-touch-icon-72x72.png" - inkscape:export-xdpi="32.400002" - inkscape:export-ydpi="32.400002"> + id="svg2424"> <metadata id="metadata3206"> <rdf:RDF> @@ -30,47 +23,23 @@ </cc:Work> </rdf:RDF> </metadata> - <sodipodi:namedview - pagecolor="#ffffff" - bordercolor="#666666" - borderopacity="1" - objecttolerance="10" - gridtolerance="10" - guidetolerance="10" - inkscape:pageopacity="0" - inkscape:pageshadow="2" - inkscape:window-width="1366" - inkscape:window-height="716" - id="namedview3204" - showgrid="false" - units="px" - inkscape:zoom="2.0066667" - inkscape:cx="234.71761" - inkscape:cy="99.708458" - inkscape:window-x="0" - inkscape:window-y="256" - inkscape:window-maximized="1" - inkscape:current-layer="svg2424" /> <defs id="defs2426" /> <path d="M 99.982978,9.4299079 C 91.920693,29.201751 87.057917,42.128656 78.081625,61.311918 83.585216,67.143195 90.340614,73.933635 101.31135,81.606952 89.516733,76.756627 81.471355,71.884513 75.458912,66.827107 63.970969,90.7953 45.972667,124.94368 9.4483145,190.57009 38.155229,173.99182 60.408321,163.77892 81.147131,159.87687 c -0.890533,-3.82576 -1.396835,-7.9676 -1.362448,-12.29473 l 0.03406,-0.91557 c 0.455512,-18.39852 10.022891,-32.53528 21.356367,-31.57611 11.33348,0.95916 20.14288,16.65456 19.68737,35.05308 -0.0857,3.45517 -0.47603,6.79044 -1.15808,9.87502 20.51365,4.01105 42.52879,14.20216 70.84729,30.55153 -5.58386,-10.27831 -10.56793,-19.54295 -15.32754,-28.37161 -7.49716,-5.80948 -15.31706,-13.37379 -31.26818,-21.5594 10.96388,2.84479 18.81388,6.13649 24.9328,9.80965 C 120.49649,60.352755 116.57776,48.374114 99.982978,9.4299079 z" id="path2518" - style="fill:#1793d1;fill-opacity:1;fill-rule:evenodd;stroke:none" - inkscape:connector-curvature="0" /> + style="fill:#1793d1;fill-opacity:1;fill-rule:evenodd;stroke:none" /> <g + transform="matrix(0.8746356,0,0,0.8746356,-26.046795,-109.83508)" id="text2638" - style="font-size:8.25130367px;font-style:normal;font-weight:normal;fill:#1793d1;fill-opacity:1;stroke:none;font-family:DejaVu Sans Mono" - transform="matrix(0.8746356,0,0,0.8746356,-26.046795,-109.83508)"> + style="font-size:8.25130367px;font-style:normal;font-weight:normal;fill:#1793d1;fill-opacity:1;stroke:none;font-family:DejaVu Sans Mono"> <path d="m 239.84053,313.69965 0,-5.20945 -1.94598,0 0,-0.697 4.68164,0 0,0.697 -1.95404,0 0,5.20945 -0.78162,0" id="path3940" - style="fill:#1793d1;fill-opacity:1" - inkscape:connector-curvature="0" /> + style="fill:#1793d1;fill-opacity:1" /> <path d="m 243.39004,313.69965 0,-5.90645 1.17646,0 1.39805,4.18205 c 0.12892,0.38947 0.22293,0.6809 0.28202,0.87429 0.0671,-0.21488 0.1719,-0.53048 0.31426,-0.94681 l 1.41417,-4.10953 1.05155,0 0,5.90645 -0.75341,0 0,-4.94353 -1.71634,4.94353 -0.70506,0 -1.70828,-5.02814 0,5.02814 -0.75342,0" id="path3942" - style="fill:#1793d1;fill-opacity:1" - inkscape:connector-curvature="0" /> + style="fill:#1793d1;fill-opacity:1" /> </g> </svg> -- cgit v1.2.3-54-g00ecf From 1b83844b30d3271b8fb50757d827c7b8fe8b5585 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Wed, 26 Oct 2011 03:25:15 -0500 Subject: Ensure PGP signature values are not trimmed This makes them totally unusable for any real purpose down the road. 
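If trimmed, a signature can never verify again: the pgpsig value in a repo database entry is base64-encoded binary data, usually far longer than the 254-character slice applied to the ordinary 'bare' fields, so cutting it corrupts it irrecoverably. A minimal sketch of the failure mode (illustrative only, with a stand-in value; not part of the patch):

    import base64

    # stand-in for a real pgpsig value: 400 signature bytes encode to a
    # 536-character base64 string, comparable to real-world sizes
    sig_b64 = base64.b64encode(b'\x01' * 400)

    base64.b64decode(sig_b64)  # the full value round-trips fine
    try:
        # sliced to 254 characters like the other bare fields
        base64.b64decode(sig_b64[:254])
    except Exception as err:
        # a 254-char slice fails with an incorrect-padding error; slices
        # landing on a 4-char boundary decode silently into a useless blob
        print('truncated signature no longer decodes:', err)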
Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/management/commands/reporead.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py index cf597577..a8e3219e 100644 --- a/devel/management/commands/reporead.py +++ b/devel/management/commands/reporead.py @@ -73,7 +73,7 @@ def handle(self, arch=None, filename=None, **options): class Pkg(object): """An interim 'container' object for holding Arch package data.""" bare = ( 'name', 'base', 'arch', 'desc', 'filename', - 'md5sum', 'sha256sum', 'pgpsig', 'url', 'packager' ) + 'md5sum', 'sha256sum', 'url', 'packager' ) number = ( 'csize', 'isize' ) collections = ( 'depends', 'optdepends', 'conflicts', 'provides', 'replaces', 'groups', 'license', 'files' ) @@ -85,6 +85,7 @@ def __init__(self, repo): self.ver = None self.rel = None self.epoch = 0 + self.pgpsig = None for k in self.bare + self.number: setattr(self, k, None) for k in self.collections: @@ -99,6 +100,9 @@ def populate(self, values): setattr(self, k, v[0][:254]) elif k in self.number: setattr(self, k, long(v[0])) + elif k == 'pgpsig': + # do NOT prune this value at all + setattr(self, k, v[0]) elif k == 'version': match = self.version_re.match(v[0]) self.ver = match.group(3) -- cgit v1.2.3-54-g00ecf From c692c96ff57272a2e59b7c2173dfb78cfa11f3ab Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Wed, 26 Oct 2011 19:18:26 -0500 Subject: Don't require mirrorlist URLs to have ISOs Signed-off-by: Dan McGee <dan@archlinux.org> --- mirrors/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mirrors/views.py b/mirrors/views.py index 6135cee3..417e26ee 100644 --- a/mirrors/views.py +++ b/mirrors/views.py @@ -57,7 +57,7 @@ def find_mirrors(request, countries=None, protocols=None, use_status=False, is_download=True).values_list('protocol', flat=True) qset = MirrorUrl.objects.select_related().filter( protocol__protocol__in=protocols, - mirror__public=True, mirror__active=True, mirror__isos=True + mirror__public=True, mirror__active=True, ) if countries and 'all' not in countries: qset = qset.filter(Q(country__in=countries) | -- cgit v1.2.3-54-g00ecf From 2c8b7ad07b63a3048089be78c26c1574f15dd582 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 27 Oct 2011 00:25:40 -0500 Subject: Add some legacy URL redirects Amazing that we still see hits on these URLs... 
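These mappings are easy to exercise with Django's test client; a hypothetical spot check (test name and asserted pairs are illustrative, not part of the patch):

    from django.test import Client

    def test_legacy_redirects():
        client = Client()
        for old, new in (('/about.php', '/about/'), ('/news.php', '/news/')):
            response = client.get(old)
            # redirect_to issues permanent redirects by default
            assert response.status_code == 301
            assert response['Location'].endswith(new)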
Signed-off-by: Dan McGee <dan@archlinux.org> --- urls.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/urls.py b/urls.py index 575910ea..adbc8870 100644 --- a/urls.py +++ b/urls.py @@ -36,6 +36,8 @@ # Sitemaps urlpatterns += patterns('django.contrib.sitemaps.views', + # Thanks Django, we can't cache these longer because of + # https://code.djangoproject.com/ticket/2713 (r'^sitemap.xml$', 'index', {'sitemaps': our_sitemaps}), (r'^sitemap-(?P<section>.+)\.xml$', 'sitemap', @@ -86,6 +88,19 @@ (r'^todolists/$','todolists.views.public_list'), ) +legacy_urls = ( + ('^about.php', '/about/'), + ('^changelog.php', '/packages/?sort=-last_update'), + ('^download.php', '/download/'), + ('^index.php', '/'), + ('^logos.php', '/art/'), + ('^news.php', '/news/'), +) + +for old_url, new_url in legacy_urls: + urlpatterns += patterns('django.views.generic.simple', + (old_url, 'redirect_to', {'url': new_url})) + if settings.DEBUG == True: urlpatterns += patterns('', (r'^media/(.*)$', 'django.views.static.serve', -- cgit v1.2.3-54-g00ecf From f5c7b419cdda049ea8d9bfb0137fb53296ceef55 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 27 Oct 2011 12:06:47 -0500 Subject: Prettify filesizes in package visualization chart Add a general purpose formatter and mark up each value function with an 'is_size' attribute so we can add additional display formatting if asked for. Signed-off-by: Dan McGee <dan@archlinux.org> --- media/archweb.js | 17 +++++++++++++++++ media/visualize.js | 22 +++++++++++++++++----- 2 files changed, 34 insertions(+), 5 deletions(-) diff --git a/media/archweb.js b/media/archweb.js index 2414331d..a51ae460 100644 --- a/media/archweb.js +++ b/media/archweb.js @@ -232,3 +232,20 @@ function signoff_package() { }); return false; } + +/* visualizations */ +function format_filesize(size, decimals) { + /*var labels = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB'];*/ + var labels = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']; + var label = 0; + + while (size > 2048.0 && label < labels.length - 1) { + label++; + size /= 1024.0; + } + if (decimals === undefined) { + decimals = 2; + } + + return size.toFixed(decimals) + ' ' + labels[label]; +} diff --git a/media/visualize.js b/media/visualize.js index c47033ab..d9196d4d 100644 --- a/media/visualize.js +++ b/media/visualize.js @@ -2,7 +2,14 @@ function packages_treemap(chart_id, orderings, default_order) { var jq_div = $(chart_id), color = d3.scale.category20(); var key_func = function(d) { return d.key; }; - var value_package_count = function(d) { return d.count; }; + var value_package_count = function(d) { return d.count; }, + value_flagged_count = function(d) { return d.flagged; }, + value_compressed_size = function(d) { return d.csize; }, + value_installed_size = function(d) { return d.isize; }; + + /* tag the function so when we display, we can format filesizes */ + value_package_count.is_size = value_flagged_count.is_size = false; + value_compressed_size.is_size = value_installed_size.is_size = true; var treemap = d3.layout.treemap() .size([jq_div.width(), jq_div.height()]) @@ -15,7 +22,12 @@ function packages_treemap(chart_id, orderings, default_order) { if (d.children) { return ""; } - return "<span>" + d.name + ": " + treemap.value()(d) + "</span>"; + var valuefunc = treemap.value(); + var value = valuefunc(d); + if (valuefunc.is_size && value !== undefined) { + value = format_filesize(value); + } + return "<span>" + d.name + ": " + value + "</span>"; }; var d3_div = 
d3.select(jq_div.get(0)); @@ -81,9 +93,9 @@ function packages_treemap(chart_id, orderings, default_order) { /* each scale button tweaks our value, e.g. net size function */ make_scale_button("count", value_package_count); - make_scale_button("flagged", function(d) { return d.flagged; }); - make_scale_button("csize", function(d) { return d.csize; }); - make_scale_button("isize", function(d) { return d.isize; }); + make_scale_button("flagged", value_flagged_count); + make_scale_button("csize", value_compressed_size); + make_scale_button("isize", value_installed_size); var make_group_button = function(name, order) { var button_id = chart_id + "-" + name; -- cgit v1.2.3-54-g00ecf From 09ea244420a4f8687c9b0122d867dabc5d7d577c Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Tue, 1 Nov 2011 16:45:12 -0500 Subject: Update donation text and links We are finally set up through SPI and Click&Pledge. Add the necessary text, buttons, and links for our new donation home. Signed-off-by: Dan McGee <dan@archlinux.org> --- media/donate.gif | Bin 2951 -> 0 bytes templates/public/donate.html | 37 ++++++++++--------------------------- templates/public/index.html | 25 ++++--------------------- 3 files changed, 14 insertions(+), 48 deletions(-) delete mode 100644 media/donate.gif diff --git a/media/donate.gif b/media/donate.gif deleted file mode 100644 index d637428b..00000000 Binary files a/media/donate.gif and /dev/null differ diff --git a/templates/public/donate.html b/templates/public/donate.html index b5fc43df..0dcfcf2a 100644 --- a/templates/public/donate.html +++ b/templates/public/donate.html @@ -23,33 +23,16 @@ <h2>Donate to Arch Linux</h2> <h3>Monetary donations</h3> -{% comment %} - <p>Financial contributions are accepted via <a title="Click to donate now" - href="https://www.paypal.com/cgi-bin/webscr?cmd=_xclick&business=aaronmgriffin@gmail.com&currency_code=USD">PayPal</a>. - Funds are used for server hardware upgrades, conventions, schwag giveaways and more.</p> - - <div id="paypal-button"> - <!-- paypal code --> - <form id="paypal-form" name="_xclick" action="https://www.paypal.com/cgi-bin/webscr" method="post"> - <p><input type="hidden" name="cmd" value="_xclick"/></p> - <p><input type="hidden" name="business" value="aaronmgriffin@gmail.com"/></p> - <p><input type="hidden" name="currency_code" value="USD"/></p> - <p><input type="hidden" name="tax" value="0"/></p> - <p><input type="hidden" name="lc" value="US"/></p> - <p><input type="hidden" name="bn" value="PP-DonationsBF"/></p> - <p><input type="hidden" name="item_name" value="Arch Linux Donation"/></p> - <p><input type="hidden" name="no_shipping" value="1"/></p> - <p><input type="hidden" name="cn" value="Suggestions/Comments"/></p> - <p><input type="hidden" name="no_note" value="1"/></p> - <p><input type="image" src="/media/donate.gif" name="submit" - title="Make a PayPal donation to the Arch Linux project" - alt="Make a PayPal donation to the Arch Linux project" - style="background:transparent;border:none;" /></p> - </form> - </div> -{% endcomment %} - <p>At the moment, we are currently not taking monetary donations, but we - may accept them again in the future.</p> + <p>Financial contributions are accepted via <a href="https://co.clickandpledge.com/Default.aspx?WID=47294" title="Donate via Click&Pledge to Arch Linux"/>Click&Pledge</a>. + Arch Linux is a member project of the + <a href="http://www.spi-inc.org/">Software in the Public Interest, Inc.</a> + non-profit corporation. 
Funds are used for hosting costs, server hardware + upgrades, and more. You are encouraged to learn more about the SPI, as well + as <a href="http://www.spi-inc.org/donations/">how donations work</a>.</p> + + <a href="https://co.clickandpledge.com/Default.aspx?WID=47294"> + <img src="http://images.clickandpledge.com/flair/buttons/210x34/CP_EN_BK_S_001.gif" alt="Online donation system by ClickandPledge" title="Online donation system by ClickandPledge"/> + </a> <h3>Commercial sponsors and contributions</h3> diff --git a/templates/public/index.html b/templates/public/index.html index b63876ac..06a6a0c3 100644 --- a/templates/public/index.html +++ b/templates/public/index.html @@ -206,28 +206,11 @@ <h4>About</h4> </div><!-- #nav-sidebar --> -{% comment %} -<div id="home-paypal-button" class="widget"> - - <form id="paypal-form" name="_xclick" action="https://www.paypal.com/cgi-bin/webscr" method="post"> - <p><input type="hidden" name="cmd" value="_xclick"/></p> - <p><input type="hidden" name="business" value="aaronmgriffin@gmail.com"/></p> - <p><input type="hidden" name="currency_code" value="USD"/></p> - <p><input type="hidden" name="tax" value="0"/></p> - <p><input type="hidden" name="lc" value="US"/></p> - <p><input type="hidden" name="bn" value="PP-DonationsBF"/></p> - <p><input type="hidden" name="item_name" value="Arch Linux Donation"/></p> - <p><input type="hidden" name="no_shipping" value="1"/></p> - <p><input type="hidden" name="cn" value="Suggestions/Comments"/></p> - <p><input type="hidden" name="no_note" value="1"/></p> - <p><input type="image" src="/media/donate.gif" name="submit" - title="Make a PayPal donation to the Arch Linux project" - alt="Make a PayPal donation to the Arch Linux project" - style="background:transparent;border:none;" /></p> - </form> - +<div id="home-donate-button" class="widget"> + <a href="https://co.clickandpledge.com/Default.aspx?WID=47294"> + <img src="http://images.clickandpledge.com/flair/buttons/210x34/CP_EN_BK_S_001.gif" alt="Donate via Click&Pledge to Arch Linux" title="Donate via Click&Pledge to Arch Linux"/> + </a> </div> -{% endcomment %} <div id="arch-sponsors" class="widget"> -- cgit v1.2.3-54-g00ecf From ade2c08899abf77f6d836432c9988ad3f9652a95 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Tue, 1 Nov 2011 16:47:37 -0500 Subject: Really ensure we don't catch any NULL or blank values Fuck you too, Django. 
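Spelled out, the failure being fixed: the old code built Q(pgp_key__isnull=True) & Q(pgp_key__exact="") and handed it to exclude(), but no row can be NULL and the empty string at once, so the exclusion matched nothing and blank keys leaked through to gpg. For comparison, a sketch of a pure-ORM alternative (chained excludes, equivalent on most database backends to the raw pgp_key != '' used below):

    from main.models import UserProfile

    # NOT(NULL) AND NOT('') is NOT(NULL OR ''): only real keys survive
    key_ids = (UserProfile.objects
            .filter(user__is_active=True)
            .exclude(pgp_key__isnull=True)
            .exclude(pgp_key='')
            .values_list('pgp_key', flat=True))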
Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/management/commands/generate_keyring.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/devel/management/commands/generate_keyring.py b/devel/management/commands/generate_keyring.py index b95d5a8e..35ab8874 100644 --- a/devel/management/commands/generate_keyring.py +++ b/devel/management/commands/generate_keyring.py @@ -8,7 +8,6 @@ """ from django.core.management.base import BaseCommand, CommandError -from django.db.models import Q import logging import subprocess @@ -44,9 +43,10 @@ def handle(self, *args, **options): def generate_keyring(keyserver, keyring): logger.info("getting all known key IDs") - exclude = Q(pgp_key__isnull=True) & Q(pgp_key__exact="") - key_ids = UserProfile.objects.exclude( - exclude).values_list("pgp_key", flat=True) + # Screw you Django, for not letting one natively do value != <empty string> + key_ids = UserProfile.objects.filter(user__is_active=True, + pgp_key__isnull=False).extra(where=["pgp_key != ''"]).values_list( + "pgp_key", flat=True) logger.info("%d keys fetched from user profiles", len(key_ids)) gpg_cmd = ["gpg", "--no-default-keyring", "--keyring", keyring, -- cgit v1.2.3-54-g00ecf From bc0e9d3e85e01e327ee5c58ecbbda8c93894caa6 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Tue, 1 Nov 2011 16:56:40 -0500 Subject: Add created column to Donor model Signed-off-by: Dan McGee <dan@archlinux.org> --- main/admin.py | 5 +- .../0054_auto__add_field_donor_created.py | 157 +++++++++++++++++++++ main/models.py | 11 +- 3 files changed, 169 insertions(+), 4 deletions(-) create mode 100644 main/migrations/0054_auto__add_field_donor_created.py diff --git a/main/admin.py b/main/admin.py index e86e5cab..e5da9fb9 100644 --- a/main/admin.py +++ b/main/admin.py @@ -4,9 +4,10 @@ from main.models import Arch, Donor, Package, Repo, Todolist, UserProfile class DonorAdmin(admin.ModelAdmin): - list_display = ('name', 'visible') - list_filter = ('visible',) + list_display = ('name', 'visible', 'created') + list_filter = ('visible', 'created') search_fields = ('name',) + exclude = ('created',) class ArchAdmin(admin.ModelAdmin): list_display = ('name', 'agnostic') diff --git a/main/migrations/0054_auto__add_field_donor_created.py b/main/migrations/0054_auto__add_field_donor_created.py new file mode 100644 index 00000000..f4d5b157 --- /dev/null +++ b/main/migrations/0054_auto__add_field_donor_created.py @@ -0,0 +1,157 @@ +# encoding: utf-8 +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +class Migration(SchemaMigration): + + def forwards(self, orm): + # Adding field 'Donor.created' + db.add_column('donors', 'created', self.gf('django.db.models.fields.DateTimeField')(default=datetime.date(2000, 1, 1)), keep_default=False) + + + def backwards(self, orm): + # Deleting field 'Donor.created' + db.delete_column('donors', 'created') + + + models = { + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': 
('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'main.arch': { + 'Meta': {'ordering': "['name']", 'object_name': 'Arch', 'db_table': "'arches'"}, + 'agnostic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) + }, + 'main.donor': { + 'Meta': {'ordering': "('name',)", 'object_name': 'Donor', 'db_table': "'donors'"}, + 'created': ('django.db.models.fields.DateTimeField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'}) + }, + 'main.package': { + 'Meta': {'ordering': "('pkgname',)", 'object_name': 'Package', 'db_table': "'packages'"}, + 'arch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Arch']"}), + 'build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'compressed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), + 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'files_last_update': 
('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'flag_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'installed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'last_update': ('django.db.models.fields.DateTimeField', [], {}), + 'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}), + 'packager_str': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pgp_signature': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgdesc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), + 'pkgname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'repo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Repo']"}), + 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}) + }, + 'main.packagedepend': { + 'Meta': {'object_name': 'PackageDepend', 'db_table': "'package_depends'"}, + 'depname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'depvcmp': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}), + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'optional': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.packagefile': { + 'Meta': {'object_name': 'PackageFile', 'db_table': "'package_files'"}, + 'directory': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_directory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.repo': { + 'Meta': {'ordering': "['name']", 'object_name': 'Repo', 'db_table': "'repos'"}, + 'bugs_category': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), + 'bugs_project': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'staging': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'svn_root': ('django.db.models.fields.CharField', [], {'max_length': '64'}), + 'testing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) + }, + 'main.todolist': { + 'Meta': {'object_name': 'Todolist', 'db_table': "'todolists'"}, + 'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), + 'date_added': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}), + 'description': ('django.db.models.fields.TextField', [], {}), + 'id': 
('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'main.todolistpkg': { + 'Meta': {'unique_together': "(('list', 'pkg'),)", 'object_name': 'TodolistPkg', 'db_table': "'todolist_pkgs'"}, + 'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'list': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Todolist']"}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.userprofile': { + 'Meta': {'object_name': 'UserProfile', 'db_table': "'user_profiles'"}, + 'alias': ('django.db.models.fields.CharField', [], {'max_length': '50'}), + 'allowed_repos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Repo']", 'symmetrical': 'False', 'blank': 'True'}), + 'favorite_distros': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'interests': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'languages': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'location': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'notify': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'occupation': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'other_contact': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), + 'pgp_key': ('main.models.PGPKeyField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'picture': ('django.db.models.fields.files.FileField', [], {'default': "'devs/silhouette.png'", 'max_length': '100'}), + 'public_email': ('django.db.models.fields.CharField', [], {'max_length': '50'}), + 'roles': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'time_zone': ('django.db.models.fields.CharField', [], {'default': "'UTC'", 'max_length': '100'}), + 'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'userprofile'", 'unique': 'True', 'to': "orm['auth.User']"}), + 'website': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), + 'yob': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}) + } + } + + complete_apps = ['main'] diff --git a/main/models.py b/main/models.py index 440201da..780453c0 100644 --- a/main/models.py +++ b/main/models.py @@ -1,10 +1,11 @@ from django.db import models +from django.db.models.signals import pre_save from django.core.validators import RegexValidator from django.contrib.auth.models import User from django.contrib.sites.models import Site from django.forms import ValidationError -from main.utils import cache_function, make_choice +from main.utils import cache_function, make_choice, set_created_field from packages.models import PackageRelation from packages.models import Signoff as PackageSignoff @@ -104,13 +105,15 @@ class Donor(models.Model): name = models.CharField(max_length=255, unique=True) visible = models.BooleanField(default=True, help_text="Should we show this donor on the public page?") + created = models.DateTimeField() def 
__unicode__(self): return self.name class Meta: db_table = 'donors' - ordering = ['name'] + ordering = ('name',) + get_latest_by = 'created' class Arch(models.Model): name = models.CharField(max_length=255, unique=True) @@ -177,6 +180,7 @@ class Package(models.Model): flag_date = models.DateTimeField(null=True) objects = PackageManager() + class Meta: db_table = 'packages' ordering = ('pkgname',) @@ -481,6 +485,7 @@ class TodolistPkg(models.Model): list = models.ForeignKey(Todolist) pkg = models.ForeignKey(Package) complete = models.BooleanField(default=False) + class Meta: db_table = 'todolist_pkgs' unique_together = (('list','pkg'),) @@ -498,5 +503,7 @@ def set_todolist_fields(sender, **kwargs): dispatch_uid="main.models") pre_save.connect(set_todolist_fields, sender=Todolist, dispatch_uid="main.models") +pre_save.connect(set_created_field, sender=Donor, + dispatch_uid="main.models") # vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From 6932102850bba29557d9bf6208b0ffd8a31d3081 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Tue, 1 Nov 2011 17:15:28 -0500 Subject: Add Click&Pledge image locally Signed-off-by: Dan McGee <dan@archlinux.org> --- media/CP_EN_BK_S_001.gif | Bin 0 -> 3036 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 media/CP_EN_BK_S_001.gif diff --git a/media/CP_EN_BK_S_001.gif b/media/CP_EN_BK_S_001.gif new file mode 100644 index 00000000..41cf0885 Binary files /dev/null and b/media/CP_EN_BK_S_001.gif differ -- cgit v1.2.3-54-g00ecf From f26dcbf7285c62f0baa1c64ee813ad0ceff5c288 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Tue, 1 Nov 2011 17:19:22 -0500 Subject: Use copy of donate image in /media/ This ensures it gets served over HTTPS if the user was on a secure session to begin with.
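The {% cdnprefix %} tag used in the new markup exists elsewhere in the project and is not shown in this series; a plausible minimal implementation, assuming a CDN_PREFIX setting that defaults to empty, might look like:

    from django import template
    from django.conf import settings

    register = template.Library()

    @register.simple_tag
    def cdnprefix():
        # empty by default, so media paths stay relative to the current
        # host and scheme; a deployment can point them at a CDN instead
        return getattr(settings, 'CDN_PREFIX', '')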
Signed-off-by: Dan McGee <dan@archlinux.org> --- templates/public/donate.html | 2 +- templates/public/index.html | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/templates/public/donate.html b/templates/public/donate.html index 0dcfcf2a..514c1430 100644 --- a/templates/public/donate.html +++ b/templates/public/donate.html @@ -31,7 +31,7 @@ <h3>Monetary donations</h3> as <a href="http://www.spi-inc.org/donations/">how donations work</a>.</p> <a href="https://co.clickandpledge.com/Default.aspx?WID=47294"> - <img src="http://images.clickandpledge.com/flair/buttons/210x34/CP_EN_BK_S_001.gif" alt="Online donation system by ClickandPledge" title="Online donation system by ClickandPledge"/> + <img src="{% cdnprefix %}/media/CP_EN_BK_S_001.gif" alt="Donate via Click&Pledge to Arch Linux" title="Donate via Click&Pledge to Arch Linux"/> </a> <h3>Commercial sponsors and contributions</h3> diff --git a/templates/public/index.html b/templates/public/index.html index 06a6a0c3..854bd447 100644 --- a/templates/public/index.html +++ b/templates/public/index.html @@ -208,7 +208,7 @@ <h4>About</h4> <div id="home-donate-button" class="widget"> <a href="https://co.clickandpledge.com/Default.aspx?WID=47294"> - <img src="http://images.clickandpledge.com/flair/buttons/210x34/CP_EN_BK_S_001.gif" alt="Donate via Click&Pledge to Arch Linux" title="Donate via Click&Pledge to Arch Linux"/> + <img src="{% cdnprefix %}/media/CP_EN_BK_S_001.gif" alt="Donate via Click&Pledge to Arch Linux" title="Donate via Click&Pledge to Arch Linux"/> </a> </div> -- cgit v1.2.3-54-g00ecf From d8e34919811728149a12e30d438318a3c1036a83 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 3 Nov 2011 15:50:06 -0500 Subject: Use UTC now everywhere Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/views.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/devel/views.py b/devel/views.py index ebae3b32..694ed6dc 100644 --- a/devel/views.py +++ b/devel/views.py @@ -80,9 +80,9 @@ def clock(request): devs = User.objects.filter(is_active=True).order_by( 'username').select_related('userprofile') - # now annotate each dev object with their current time now = datetime.now() utc_now = datetime.utcnow().replace(tzinfo=pytz.utc) + # now annotate each dev object with their current time for dev in devs: tz = pytz.timezone(dev.userprofile.time_zone) dev.current_time = utc_now.astimezone(tz) @@ -147,12 +147,12 @@ def report(request, report, username=None): if report == 'old': title = 'Packages last built more than two years ago' - cutoff = datetime.now() - timedelta(days=365 * 2) + cutoff = datetime.utcnow() - timedelta(days=365 * 2) packages = packages.filter( build_date__lt=cutoff).order_by('build_date') elif report == 'long-out-of-date': title = 'Packages marked out-of-date more than 90 days ago' - cutoff = datetime.now() - timedelta(days=90) + cutoff = datetime.utcnow() - timedelta(days=90) packages = packages.filter( flag_date__lt=cutoff).order_by('flag_date') elif report == 'big': -- cgit v1.2.3-54-g00ecf From ac2278423a3d449fdfe8c813f1f2d391ef9aff08 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 3 Nov 2011 14:59:00 -0500 Subject: Many signoff page improvements Add a new 'SignoffSpecification' model which will capture metadata regarding a specific package if it differs from the norm- e.g. more or less than 2 required signoffs, is known to be bad, a comment from the maintainer, etc. 
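In practice the specification reduces to a per-package approval threshold; distilled from the view code below, the rule is roughly (a sketch, defaults mirroring DEFAULT_SIGNOFF_SPEC):

    def approved_by_signoffs(signoffs, required=2):
        # approved once the non-revoked signoffs reach the required count
        # (2 unless a SignoffSpecification overrides it)
        good_signoffs = sum(1 for s in signoffs if not s.revoked)
        return good_signoffs >= required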
The groundwork is laid here; much of this will still need to be wired up in the future. Enhance the view with a lot more JS prettiness and add revoking of signoffs. The signoff page can be filtered and the links and all the fun stuff are totally dynamic now. Signed-off-by: Dan McGee <dan@archlinux.org> --- main/models.py | 11 -- media/archweb.css | 1 - media/archweb.js | 72 ++++++-- .../0010_auto__add_signoffspecification.py | 183 +++++++++++++++++++++ packages/models.py | 45 ++++- packages/urls.py | 1 + packages/views.py | 71 +++++--- templates/packages/differences.html | 2 +- templates/packages/signoff_cell.html | 12 ++ templates/packages/signoffs.html | 50 +++--- 10 files changed, 379 insertions(+), 69 deletions(-) create mode 100644 packages/migrations/0010_auto__add_signoffspecification.py create mode 100644 templates/packages/signoff_cell.html diff --git a/main/models.py b/main/models.py index 780453c0..d55a9673 100644 --- a/main/models.py +++ b/main/models.py @@ -7,7 +7,6 @@ from main.utils import cache_function, make_choice, set_created_field from packages.models import PackageRelation -from packages.models import Signoff as PackageSignoff from datetime import datetime from itertools import groupby @@ -213,16 +212,6 @@ def maintainers(self): package_relations__pkgbase=self.pkgbase, package_relations__type=PackageRelation.MAINTAINER) - @property - def signoffs(self): - return PackageSignoff.objects.select_related('user').filter( - pkgbase=self.pkgbase, pkgver=self.pkgver, pkgrel=self.pkgrel, - epoch=self.epoch, arch=self.arch, repo=self.repo) - - def approved_for_signoff(self): - count = self.signoffs.filter(revoked__isnull=True).count() - return count >= PackageSignoff.REQUIRED - @cache_function(300) def applicable_arches(self): '''The list of (this arch) + (available agnostic arches).''' diff --git a/media/archweb.css b/media/archweb.css index ea2f3fb5..62dc4fbc 100644 --- a/media/archweb.css +++ b/media/archweb.css @@ -912,7 +912,6 @@ ul.admin-actions { #dev-signoffs .signed-username { color: #888; - margin-left: 0.5em; } /* iso testing feedback form */ diff --git a/media/archweb.js b/media/archweb.js index a51ae460..43812b33 100644 --- a/media/archweb.js +++ b/media/archweb.js @@ -139,7 +139,7 @@ function ajaxifyFiles() { /* packages/differences.html */ function filter_packages() { - // start with all rows, and then remove ones we shouldn't show + /* start with all rows, and then remove ones we shouldn't show */ var rows = $('#tbody_differences').children(); var all_rows = rows; if (!$('#id_multilib').is(':checked')) { @@ -150,12 +150,12 @@ function filter_packages() { rows = rows.filter('.' 
+ arch); } if (!$('#id_minor').is(':checked')) { - // this check is done last because it is the most expensive + /* this check is done last because it is the most expensive */ var pat = /(.*)-(.+)/; rows = rows.filter(function(index) { var cells = $(this).children('td'); - // all this just to get the split version out of the table cell + /* all this just to get the split version out of the table cell */ var ver_a = cells.eq(2).find('span').text().match(pat); if (!ver_a) { return true; @@ -166,26 +166,26 @@ function filter_packages() { return true; } - // first check pkgver + /* first check pkgver */ if (ver_a[1] !== ver_b[1]) { return true; } - // pkgver matched, so see if rounded pkgrel matches + /* pkgver matched, so see if rounded pkgrel matches */ if (Math.floor(parseFloat(ver_a[2])) === Math.floor(parseFloat(ver_b[2]))) { return false; } - // pkgrel didn't match, so keep the row + /* pkgrel didn't match, so keep the row */ return true; }); } - // hide all rows, then show the set we care about + /* hide all rows, then show the set we care about */ all_rows.hide(); rows.show(); - // make sure we update the odd/even styling from sorting + /* make sure we update the odd/even styling from sorting */ $('.results').trigger('applyWidgets'); } -function filter_reset() { +function filter_packages_reset() { $('#id_archonly').val('both'); $('#id_multilib').removeAttr('checked'); $('#id_minor').removeAttr('checked'); @@ -213,26 +213,72 @@ function todolist_flag() { function signoff_package() { var link = this; $.getJSON(link.href, function(data) { + link = $(link); + var signoff = null; if (data.created) { - var signoff = $('<li>').addClass('signed-username').text(data.user); - $(link).append(signoff); + signoff = $('<li>').addClass('signed-username').text(data.user); + link.closest('td').children('ul').append(signoff); + } else if(data.user) { + signoff = link.closest('td').find('li').filter(function(index) { + return $(this).text() == data.user; + }); + } + console.log(signoff, data.revoked, data.user); + if (signoff && data.revoked) { + signoff.text(signoff.text() + ' (revoked)'); } /* update the approved column to reflect reality */ var approved; if (data.approved) { - approved = $(link).closest('tr').children('.signoff-no'); + approved = link.closest('tr').children('.signoff-no'); approved.text('Yes').addClass( 'signoff-yes').removeClass('signoff-no'); } else { - approved = $(link).closest('tr').children('.signoff-yes'); + approved = link.closest('tr').children('.signoff-yes'); approved.text('No').addClass( 'signoff-no').removeClass('signoff-yes'); } + link.removeAttr('title'); + /* Form our new link. The current will be something like + * '/packages/repo/arch/package/...' */ + var base_href = link.attr('href').split('/').slice(0, 5).join('/'); + if (data.revoked) { + link.text('Signoff'); + link.attr('href', base_href + '/signoff/'); + } else { + link.text('Revoke Signoff'); + link.attr('href', base_href + '/signoff/revoke/'); + } $('.results').trigger('updateCell', approved); }); return false; } +function filter_signoffs() { + /* start with all rows, and then remove ones we shouldn't show */ + var rows = $('#tbody_signoffs').children(); + var all_rows = rows; + $('#signoffs_filter .arch_filter').each(function() { + if (!$(this).is(':checked')) { + console.log($(this).val()); + rows = rows.not('.' 
+ $(this).val()); + } + }); + if ($('#id_pending').is(':checked')) { + rows = rows.has('td.signoff-no'); + } + /* hide all rows, then show the set we care about */ + all_rows.hide(); + rows.show(); + /* make sure we update the odd/even styling from sorting */ + $('.results').trigger('applyWidgets'); +} +function filter_signoffs_reset() { + $('#signoffs_filter .arch_filter').attr('checked', 'checked'); + $('#id_pending').removeAttr('checked'); + filter_signoffs(); +} + /* visualizations */ function format_filesize(size, decimals) { /*var labels = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB'];*/ diff --git a/packages/migrations/0010_auto__add_signoffspecification.py b/packages/migrations/0010_auto__add_signoffspecification.py new file mode 100644 index 00000000..da24824e --- /dev/null +++ b/packages/migrations/0010_auto__add_signoffspecification.py @@ -0,0 +1,183 @@ +# encoding: utf-8 +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +class Migration(SchemaMigration): + + def forwards(self, orm): + db.create_table('packages_signoffspecification', ( + ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('pkgbase', self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True)), + ('pkgver', self.gf('django.db.models.fields.CharField')(max_length=255)), + ('pkgrel', self.gf('django.db.models.fields.CharField')(max_length=255)), + ('epoch', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)), + ('arch', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Arch'])), + ('repo', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.Repo'])), + ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])), + ('created', self.gf('django.db.models.fields.DateTimeField')()), + ('required', self.gf('django.db.models.fields.PositiveIntegerField')(default=2)), + ('enabled', self.gf('django.db.models.fields.BooleanField')(default=True)), + ('known_bad', self.gf('django.db.models.fields.BooleanField')(default=False)), + ('comments', self.gf('django.db.models.fields.TextField')(null=True, blank=True)), + )) + db.send_create_signal('packages', ['SignoffSpecification']) + + + def backwards(self, orm): + db.delete_table('packages_signoffspecification') + + + models = { + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': 
('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'main.arch': { + 'Meta': {'ordering': "['name']", 'object_name': 'Arch', 'db_table': "'arches'"}, + 'agnostic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) + }, + 'main.package': { + 'Meta': {'ordering': "('pkgname',)", 'object_name': 'Package', 'db_table': "'packages'"}, + 'arch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Arch']"}), + 'build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'compressed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), + 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'files_last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'flag_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'installed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'last_update': ('django.db.models.fields.DateTimeField', [], {}), + 'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}), + 'packager_str': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pgp_signature': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgdesc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), + 'pkgname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgver': 
('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'repo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Repo']"}), + 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}) + }, + 'main.repo': { + 'Meta': {'ordering': "['name']", 'object_name': 'Repo', 'db_table': "'repos'"}, + 'bugs_category': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), + 'bugs_project': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'staging': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'svn_root': ('django.db.models.fields.CharField', [], {'max_length': '64'}), + 'testing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) + }, + 'packages.conflict': { + 'Meta': {'ordering': "['name']", 'object_name': 'Conflict'}, + 'comparison': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'conflicts'", 'to': "orm['main.Package']"}), + 'version': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}) + }, + 'packages.license': { + 'Meta': {'ordering': "['name']", 'object_name': 'License'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'licenses'", 'to': "orm['main.Package']"}) + }, + 'packages.packagegroup': { + 'Meta': {'object_name': 'PackageGroup'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'groups'", 'to': "orm['main.Package']"}) + }, + 'packages.packagerelation': { + 'Meta': {'unique_together': "(('pkgbase', 'user', 'type'),)", 'object_name': 'PackageRelation'}, + 'created': ('django.db.models.fields.DateTimeField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'type': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_relations'", 'to': "orm['auth.User']"}) + }, + 'packages.provision': { + 'Meta': {'ordering': "['name']", 'object_name': 'Provision'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'provides'", 'to': "orm['main.Package']"}), + 'version': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}) + }, + 'packages.replacement': { + 'Meta': {'ordering': "['name']", 'object_name': 'Replacement'}, + 'comparison': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}), + 'id': 
('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'replaces'", 'to': "orm['main.Package']"}), + 'version': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}) + }, + 'packages.signoff': { + 'Meta': {'object_name': 'Signoff'}, + 'arch': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Arch']"}), + 'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'created': ('django.db.models.fields.DateTimeField', [], {}), + 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'repo': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Repo']"}), + 'revoked': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_signoffs'", 'to': "orm['auth.User']"}) + }, + 'packages.signoffspecification': { + 'Meta': {'object_name': 'SignoffSpecification'}, + 'arch': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Arch']"}), + 'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'created': ('django.db.models.fields.DateTimeField', [], {}), + 'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'known_bad': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'repo': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Repo']"}), + 'required': ('django.db.models.fields.PositiveIntegerField', [], {'default': '2'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) + } + } + + complete_apps = ['packages'] diff --git a/packages/models.py b/packages/models.py index 4cd3b1b5..ad082501 100644 --- a/packages/models.py +++ b/packages/models.py @@ -38,6 +38,49 @@ def __unicode__(self): class Meta: unique_together = (('pkgbase', 'user', 'type'),) +class SignoffSpecification(models.Model): + ''' + A specification for the signoff policy for this particular revision of a + package. The default is requiring two signoffs for a given package. These + are created only if necessary; e.g., if one wanted to override the + required=2 attribute, otherwise a sane default object is used.
+ ''' + pkgbase = models.CharField(max_length=255, db_index=True) + pkgver = models.CharField(max_length=255) + pkgrel = models.CharField(max_length=255) + epoch = models.PositiveIntegerField(default=0) + arch = models.ForeignKey('main.Arch') + repo = models.ForeignKey('main.Repo') + user = models.ForeignKey(User) + created = models.DateTimeField(editable=False) + required = models.PositiveIntegerField(default=2) + enabled = models.BooleanField(default=True) + known_bad = models.BooleanField(default=False) + comments = models.TextField(null=True, blank=True) + +class SignoffManager(models.Manager): + def get_from_package(self, pkg, user, revoked=False): + '''Utility method to pull all relevant name-version fields from a + package and create a matching signoff.''' + not_revoked = not revoked + return Signoff.objects.get( + pkgbase=pkg.pkgbase, pkgver=pkg.pkgver, pkgrel=pkg.pkgrel, + epoch=pkg.epoch, arch=pkg.arch, repo=pkg.repo, + revoked__isnull=not_revoked, user=user) + + def get_or_create_from_package(self, pkg, user): + '''Utility method to pull all relevant name-version fields from a + package and create a matching signoff.''' + return Signoff.objects.get_or_create( + pkgbase=pkg.pkgbase, pkgver=pkg.pkgver, pkgrel=pkg.pkgrel, + epoch=pkg.epoch, arch=pkg.arch, repo=pkg.repo, + revoked=None, user=user) + + def for_package(self, pkg): + return self.select_related('user').filter( + pkgbase=pkg.pkgbase, pkgver=pkg.pkgver, pkgrel=pkg.pkgrel, + epoch=pkg.epoch, arch=pkg.arch, repo=pkg.repo) + class Signoff(models.Model): ''' A signoff for a package (by pkgbase) at a given point in time. These are @@ -55,7 +98,7 @@ class Signoff(models.Model): revoked = models.DateTimeField(null=True) comments = models.TextField(null=True, blank=True) - REQUIRED = 2 + objects = SignoffManager() @property def packages(self): diff --git a/packages/urls.py b/packages/urls.py index d7d01170..576e3279 100644 --- a/packages/urls.py +++ b/packages/urls.py @@ -10,6 +10,7 @@ (r'^unflag/$', 'unflag'), (r'^unflag/all/$', 'unflag_all'), (r'^signoff/$', 'signoff_package'), + (r'^signoff/revoke/$', 'signoff_package', {'revoke': True}), (r'^download/$', 'download'), ) diff --git a/packages/views.py b/packages/views.py index 5114c87f..035d51cb 100644 --- a/packages/views.py +++ b/packages/views.py @@ -25,7 +25,7 @@ from main.models import Package, PackageFile, Arch, Repo from main.utils import make_choice, groupby_preserve_order, PackageStandin from mirrors.models import MirrorUrl -from .models import PackageRelation, PackageGroup, Signoff +from .models import PackageRelation, PackageGroup, SignoffSpecification, Signoff from .utils import (get_group_info, get_differences_info, get_wrong_permissions, get_current_signoffs) @@ -369,14 +369,24 @@ def unflag_all(request, name, repo, arch): pkgs.update(flag_date=None) return redirect(pkg) +DEFAULT_SIGNOFF_SPEC = SignoffSpecification(required=2) + +def approved_by_signoffs(signoffs, spec=DEFAULT_SIGNOFF_SPEC): + if signoffs: + good_signoffs = sum(1 for s in signoffs if not s.revoked) + return good_signoffs >= spec.required + return False + class PackageSignoffGroup(object): '''Encompasses all packages in testing with the same pkgbase.''' - def __init__(self, packages, target_repo=None, signoffs=None): + def __init__(self, packages, user=None): if len(packages) == 0: raise Exception self.packages = packages - self.target_repo = target_repo - self.signoffs = signoffs + self.user = user + self.target_repo = None + self.signoffs = set() + self.specification = DEFAULT_SIGNOFF_SPEC first 
= packages[0] self.pkgbase = first.pkgbase @@ -406,21 +416,24 @@ def package(self): def find_signoffs(self, all_signoffs): '''Look through a list of Signoff objects for ones matching this particular group and store them on the object.''' - if self.signoffs is None: - self.signoffs = [] for s in all_signoffs: if s.pkgbase != self.pkgbase: continue if self.version and not s.full_version == self.version: continue if s.arch_id == self.arch.id and s.repo_id == self.repo.id: - self.signoffs.append(s) + self.signoffs.add(s) def approved(self): - if self.signoffs: - good_signoffs = [s for s in self.signoffs if not s.revoked] - return len(good_signoffs) >= Signoff.REQUIRED - return False + return approved_by_signoffs(self.signoffs, self.specification) + + def user_signed_off(self, user=None): + '''Did a given user signoff on this package? user can be passed as an + argument, or attached to the group object itself so this can be called + from a template.''' + if user is None: + user = self.user + return user in (s.user for s in self.signoffs if not s.revoked) @permission_required('main.change_package') @never_cache @@ -443,7 +456,7 @@ def signoffs(request): grouped = groupby_preserve_order(packages, same_pkgbase_key) signoff_groups = [] for group in grouped: - signoff_group = PackageSignoffGroup(group) + signoff_group = PackageSignoffGroup(group, user=request.user) signoff_group.target_repo = pkgtorepo.get(signoff_group.pkgbase, "Unknown") signoff_group.find_signoffs(signoffs) @@ -451,27 +464,43 @@ def signoffs(request): signoff_groups.sort(key=attrgetter('pkgbase')) - return direct_to_template(request, 'packages/signoffs.html', - {'signoff_groups': signoff_groups}) + context = { + 'signoff_groups': signoff_groups, + 'arches': Arch.objects.all(), + } + return direct_to_template(request, 'packages/signoffs.html', context) @permission_required('main.change_package') @never_cache -def signoff_package(request, name, repo, arch): +def signoff_package(request, name, repo, arch, revoke=False): packages = get_list_or_404(Package, pkgbase=name, arch__name=arch, repo__name__iexact=repo, repo__testing=True) - pkg = packages[0] - signoff, created = Signoff.objects.get_or_create( - pkgbase=pkg.pkgbase, pkgver=pkg.pkgver, pkgrel=pkg.pkgrel, - epoch=pkg.epoch, arch=pkg.arch, repo=pkg.repo, user=request.user) + package = packages[0] + + if revoke: + try: + signoff = Signoff.objects.get_from_package( + package, request.user, False) + except Signoff.DoesNotExist: + raise Http404 + signoff.revoked = datetime.utcnow() + signoff.save() + created = False + else: + signoff, created = Signoff.objects.get_or_create_from_package( + package, request.user) + + all_signoffs = Signoff.objects.for_package(package) if request.is_ajax(): data = { 'created': created, - 'approved': pkg.approved_for_signoff(), + 'revoked': bool(signoff.revoked), + 'approved': approved_by_signoffs(all_signoffs), 'user': str(request.user), } - return HttpResponse(simplejson.dumps(data), + return HttpResponse(simplejson.dumps(data, ensure_ascii=False), mimetype='application/json') return redirect('package-signoffs') diff --git a/templates/packages/differences.html b/templates/packages/differences.html index dd1046bc..0400ea37 100644 --- a/templates/packages/differences.html +++ b/templates/packages/differences.html @@ -65,7 +65,7 @@ <h3>Filter Differences View</h3> $('.results').tablesorter({widgets: ['zebra'], sortList: [[1,0], [0,0]]}); $('#diff_filter select').change(filter_packages); $('#diff_filter input').change(filter_packages); - 
$('#criteria_reset').click(filter_reset); + $('#criteria_reset').click(filter_differences_reset); // fire function on page load to ensure the current form selections take effect filter_packages(); }); diff --git a/templates/packages/signoff_cell.html b/templates/packages/signoff_cell.html new file mode 100644 index 00000000..fce5d551 --- /dev/null +++ b/templates/packages/signoff_cell.html @@ -0,0 +1,12 @@ +<ul> + {% for signoff in group.signoffs %} + <li class="signed-username" title="Signed off by {{ signoff.user }}">{{ signoff.user }}{% if signoff.revoked %} (revoked){% endif %}</li> + {% endfor %} +</ul> +{% if group.user_signed_off %} +<div><a class="signoff-link" href="{{ group.package.get_absolute_url }}signoff/revoke/" + title="Revoke signoff {{ group.package.pkgname }} for {{ group.package.arch }}">Revoke Signoff</a></div> +{% else %} +<div><a class="signoff-link" href="{{ group.package.get_absolute_url }}signoff/" + title="Signoff {{ group.package.pkgname }} for {{ group.package.arch }}">Signoff</a></div> +{% endif %} diff --git a/templates/packages/signoffs.html b/templates/packages/signoffs.html index a8aa4de2..4a2f6c99 100644 --- a/templates/packages/signoffs.html +++ b/templates/packages/signoffs.html @@ -12,42 +12,46 @@ <h2>Package Signoffs</h2> <p>{{ signoff_groups|length }} signoff group{{ signoff_groups|pluralize }} found. A "signoff group" consists of packages grouped by pkgbase, architecture, and repository.</p> + <div class="box filter-criteria"> + <h3>Filter Displayed Signoffs</h3> + <form id="signoffs_filter" method="post" action="."> + <fieldset> + <legend>Select filter criteria</legend> + {% for arch in arches %} + <div><label for="id_arch_{{ arch.name }}" title="Architecture {{ arch.name }}">Arch {{ arch.name }}</label> + <input type="checkbox" name="arch_{{ arch.name }}" id="id_arch_{{ arch.name }}" class="arch_filter" value="{{ arch.name }}" checked="checked"/></div> + {% endfor %} + <div><label for="id_pending" title="Packages with not enough signoffs">Only Pending Approval</label> + <input type="checkbox" name="pending" id="id_pending" value="pending"/></div> + <div ><label> </label><input title="Reset search criteria" type="button" id="criteria_reset" value="Reset"/></div> + </fieldset> + </form> + </div> + <table id="signoffs" class="results"> <thead> <tr> + <th>Package Base/Version</th> <th>Arch</th> - <th>Package Base</th> + <th>Target Repo</th> <th># of Packages</th> - <th>Version</th> <th>Last Updated</th> - <th>Target Repo</th> <th>Approved</th> - <th>Signoff</th> + <th>Signoffs</th> </tr> </thead> - <tbody> + <tbody id="tbody_signoffs"> {% for group in signoff_groups %} {% with group.package as pkg %} - <tr class="{% cycle 'odd' 'even' %}"> + <tr class="{% cycle 'odd' 'even' %} {{ pkg.arch.name }}"> + <td>{% pkg_details_link pkg %} {{ pkg.full_version }}</td> <td>{{ pkg.arch.name }}</td> - <td>{% pkg_details_link pkg %}</td> + <td>{{ group.target_repo }}</td> <td>{{ group.packages|length }}</td> - <td>{{ pkg.full_version }}</td> <td>{{ pkg.last_update|date }}</td> - <td>{{ group.target_repo }}</td> <td class="signoff-{{ group.approved|yesno }}"> {{ group.approved|yesno|capfirst }}</td> - <td> - <ul> - <li><a class="signoff-link" href="{{ pkg.get_absolute_url }}signoff/" - title="Signoff {{ pkg.pkgname }} for {{ pkg.arch }}">Signoff</a> - </li> - {% for signoff in group.signoffs %} - <li class="signed-username" title="Signed off by {{ signoff.user }}"> - {{ signoff.user }}{% if signoff.revoked %} (revoked){% endif %}</li> - {% endfor %} - </ul> - </td> 
+ <td>{% include "packages/signoff_cell.html" %}</td> </tr> {% endwith %} {% endfor %} @@ -60,8 +64,12 @@ <h2>Package Signoffs</h2> <script type="text/javascript"> $(document).ready(function() { $('a.signoff-link').click(signoff_package); - $(".results").tablesorter({widgets: ['zebra'], sortList: [[1,0]], + $(".results").tablesorter({widgets: ['zebra'], sortList: [[0,0]], headers: { 6: { sorter: false } } }); + $('#signoffs_filter input').change(filter_signoffs); + $('#criteria_reset').click(filter_signoffs_reset); + // fire function on page load to ensure the current form selections take effect + filter_signoffs(); }); </script> {% endblock %} -- cgit v1.2.3-54-g00ecf From 74d2a5df5ca7ee4b6497a6e7609491d72cdbb309 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 3 Nov 2011 17:18:13 -0500 Subject: Refactor more package signoff stuff This sets up some shared utility code for use in a later package signoff email report command. Signed-off-by: Dan McGee <dan@archlinux.org> --- packages/models.py | 7 +- packages/utils.py | 134 ++++++++++++++++++++++++++++++++--- packages/views.py | 100 ++------------------------ templates/packages/signoff_cell.html | 4 +- templates/packages/signoffs.html | 12 ++-- 5 files changed, 144 insertions(+), 113 deletions(-) diff --git a/packages/models.py b/packages/models.py index ad082501..3c319fe7 100644 --- a/packages/models.py +++ b/packages/models.py @@ -115,8 +115,11 @@ def full_version(self): return u'%s-%s' % (self.pkgver, self.pkgrel) def __unicode__(self): - return u'%s-%s: %s' % ( - self.pkgbase, self.full_version, self.user) + revoked = u'' + if self.revoked: + revoked = u' (revoked)' + return u'%s-%s: %s%s' % ( + self.pkgbase, self.full_version, self.user, revoked) class PackageGroup(models.Model): ''' diff --git a/packages/utils.py b/packages/utils.py index c8c1f8a6..42cfbe0f 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -1,12 +1,11 @@ -from collections import defaultdict from operator import itemgetter from django.db import connection from django.db.models import Count, Max -from main.models import Package -from main.utils import cache_function -from .models import PackageGroup, PackageRelation, Signoff +from main.models import Package, Repo +from main.utils import cache_function, groupby_preserve_order, PackageStandin +from .models import PackageGroup, PackageRelation, SignoffSpecification, Signoff @cache_function(300) def get_group_info(include_arches=None): @@ -148,8 +147,90 @@ def get_wrong_permissions(): id__in=to_fetch) return relations -def get_current_signoffs(): - '''Returns a mapping of pkgbase -> signoff objects.''' + +DEFAULT_SIGNOFF_SPEC = SignoffSpecification() + +def approved_by_signoffs(signoffs, spec=DEFAULT_SIGNOFF_SPEC): + if signoffs: + good_signoffs = sum(1 for s in signoffs if not s.revoked) + return good_signoffs >= spec.required + return False + +class PackageSignoffGroup(object): + '''Encompasses all packages in testing with the same pkgbase.''' + def __init__(self, packages, user=None): + if len(packages) == 0: + raise Exception + self.packages = packages + self.user = user + self.target_repo = None + self.signoffs = set() + self.specification = DEFAULT_SIGNOFF_SPEC + + first = packages[0] + self.pkgbase = first.pkgbase + self.arch = first.arch + self.repo = first.repo + self.version = '' + self.last_update = first.last_update + self.packager = first.packager + + version = first.full_version + if all(version == pkg.full_version for pkg in packages): + self.version = version + + @property + def 
package(self): + '''Try and return a relevant single package object representing this + group. Start by seeing if there is only one package, then look for the + matching package by name, finally falling back to a standin package + object.''' + if len(self.packages) == 1: + return self.packages[0] + + same_pkgs = [p for p in self.packages if p.pkgname == p.pkgbase] + if same_pkgs: + return same_pkgs[0] + + return PackageStandin(self.packages[0]) + + def find_signoffs(self, all_signoffs): + '''Look through a list of Signoff objects for ones matching this + particular group and store them on the object.''' + for s in all_signoffs: + if s.pkgbase != self.pkgbase: + continue + if self.version and not s.full_version == self.version: + continue + if s.arch_id == self.arch.id and s.repo_id == self.repo.id: + self.signoffs.add(s) + + def approved(self): + return approved_by_signoffs(self.signoffs, self.specification) + + @property + def completed(self): + return sum(1 for s in self.signoffs if not s.revoked) + + @property + def required(self): + return self.specification.required + + def user_signed_off(self, user=None): + '''Did a given user signoff on this package? user can be passed as an + argument, or attached to the group object itself so this can be called + from a template.''' + if user is None: + user = self.user + return user in (s.user for s in self.signoffs if not s.revoked) + + def __unicode__(self): + return u'%s-%s (%s): %d' % ( + self.pkgbase, self.version, self.arch, len(self.signoffs)) + +def get_current_signoffs(repos): + '''Returns a mapping of pkgbase -> signoff objects for the given repos.''' + cursor = connection.cursor() sql = """ SELECT DISTINCT s.id FROM packages_signoff s @@ -162,14 +243,49 @@ def get_current_signoffs(): AND s.repo_id = p.repo_id ) JOIN repos r ON p.repo_id = r.id - WHERE r.testing = %s + WHERE r.id IN ( """ - cursor = connection.cursor() - cursor.execute(sql, [True]) + sql += ", ".join("%s" for r in repos) + sql += ")" + cursor.execute(sql, [r.id for r in repos]) + results = cursor.fetchall() # fetch all of the returned signoffs by ID to_fetch = [row[0] for row in results] signoffs = Signoff.objects.select_related('user').in_bulk(to_fetch) return signoffs.values() +def get_target_repo_map(pkgbases): + package_repos = Package.objects.order_by().values_list( + 'pkgbase', 'repo__name').filter( + repo__testing=False, repo__staging=False, + pkgbase__in=pkgbases).distinct() + return dict(package_repos) + +def get_signoff_groups(repos=None): + if repos is None: + repos = Repo.objects.filter(testing=True) + + test_pkgs = Package.objects.normal().filter(repo__in=repos) + packages = test_pkgs.order_by('pkgname') + + # Collect all pkgbase values in testing repos + q_pkgbase = test_pkgs.values('pkgbase') + pkgtorepo = get_target_repo_map(q_pkgbase) + + # Collect all existing signoffs for these packages + signoffs = get_current_signoffs(repos) + + same_pkgbase_key = lambda x: (x.repo.name, x.arch.name, x.pkgbase) + grouped = groupby_preserve_order(packages, same_pkgbase_key) + signoff_groups = [] + for group in grouped: + signoff_group = PackageSignoffGroup(group) + signoff_group.target_repo = pkgtorepo.get(signoff_group.pkgbase, + "Unknown") + signoff_group.find_signoffs(signoffs) + signoff_groups.append(signoff_group) + + return signoff_groups + # vim: set ts=4 sw=4 et: diff --git a/packages/views.py b/packages/views.py index 035d51cb..e102760b 100644 --- a/packages/views.py +++ b/packages/views.py @@ -23,11 +23,11 @@ from urllib import urlencode from main.models 
import Package, PackageFile, Arch, Repo -from main.utils import make_choice, groupby_preserve_order, PackageStandin +from main.utils import make_choice from mirrors.models import MirrorUrl -from .models import PackageRelation, PackageGroup, SignoffSpecification, Signoff +from .models import PackageRelation, PackageGroup, Signoff from .utils import (get_group_info, get_differences_info, - get_wrong_permissions, get_current_signoffs) + get_wrong_permissions, get_signoff_groups, approved_by_signoffs) class PackageJSONEncoder(DjangoJSONEncoder): pkg_attributes = [ 'pkgname', 'pkgbase', 'repo', 'arch', 'pkgver', @@ -369,100 +369,12 @@ def unflag_all(request, name, repo, arch): pkgs.update(flag_date=None) return redirect(pkg) -DEFAULT_SIGNOFF_SPEC = SignoffSpecification(required=2) - -def approved_by_signoffs(signoffs, spec=DEFAULT_SIGNOFF_SPEC): - if signoffs: - good_signoffs = sum(1 for s in signoffs if not s.revoked) - return good_signoffs >= spec.required - return False - -class PackageSignoffGroup(object): - '''Encompasses all packages in testing with the same pkgbase.''' - def __init__(self, packages, user=None): - if len(packages) == 0: - raise Exception - self.packages = packages - self.user = user - self.target_repo = None - self.signoffs = set() - self.specification = DEFAULT_SIGNOFF_SPEC - - first = packages[0] - self.pkgbase = first.pkgbase - self.arch = first.arch - self.repo = first.repo - self.version = '' - - version = first.full_version - if all(version == pkg.full_version for pkg in packages): - self.version = version - - @property - def package(self): - '''Try and return a relevant single package object representing this - group. Start by seeing if there is only one package, then look for the - matching package by name, finally falling back to a standin package - object.''' - if len(self.packages) == 1: - return self.packages[0] - - same_pkgs = [p for p in self.packages if p.pkgname == p.pkgbase] - if same_pkgs: - return same_pkgs[0] - - return PackageStandin(self.packages[0]) - - def find_signoffs(self, all_signoffs): - '''Look through a list of Signoff objects for ones matching this - particular group and store them on the object.''' - for s in all_signoffs: - if s.pkgbase != self.pkgbase: - continue - if self.version and not s.full_version == self.version: - continue - if s.arch_id == self.arch.id and s.repo_id == self.repo.id: - self.signoffs.add(s) - - def approved(self): - return approved_by_signoffs(self.signoffs, self.specification) - - def user_signed_off(self, user=None): - '''Did a given user signoff on this package? 
user can be passed as an - argument, or attached to the group object itself so this can be called - from a template.''' - if user is None: - user = self.user - return user in (s.user for s in self.signoffs if not s.revoked) - @permission_required('main.change_package') @never_cache def signoffs(request): - test_pkgs = Package.objects.normal().filter(repo__testing=True) - packages = test_pkgs.order_by('pkgname') - - # Collect all pkgbase values in testing repos - q_pkgbase = test_pkgs.values('pkgbase') - package_repos = Package.objects.order_by().values_list( - 'pkgbase', 'repo__name').filter( - repo__testing=False, repo__staging=False, - pkgbase__in=q_pkgbase).distinct() - pkgtorepo = dict(package_repos) - - # Collect all existing signoffs for these packages - signoffs = get_current_signoffs() - - same_pkgbase_key = lambda x: (x.repo.name, x.arch.name, x.pkgbase) - grouped = groupby_preserve_order(packages, same_pkgbase_key) - signoff_groups = [] - for group in grouped: - signoff_group = PackageSignoffGroup(group, user=request.user) - signoff_group.target_repo = pkgtorepo.get(signoff_group.pkgbase, - "Unknown") - signoff_group.find_signoffs(signoffs) - signoff_groups.append(signoff_group) - - signoff_groups.sort(key=attrgetter('pkgbase')) + signoff_groups = sorted(get_signoff_groups(), key=attrgetter('pkgbase')) + for group in signoff_groups: + group.user = request.user context = { 'signoff_groups': signoff_groups, diff --git a/templates/packages/signoff_cell.html b/templates/packages/signoff_cell.html index fce5d551..87216193 100644 --- a/templates/packages/signoff_cell.html +++ b/templates/packages/signoff_cell.html @@ -5,8 +5,8 @@ </ul> {% if group.user_signed_off %} <div><a class="signoff-link" href="{{ group.package.get_absolute_url }}signoff/revoke/" - title="Revoke signoff {{ group.package.pkgname }} for {{ group.package.arch }}">Revoke Signoff</a></div> + title="Revoke signoff {{ group.pkgbase }} for {{ group.arch }}">Revoke Signoff</a></div> {% else %} <div><a class="signoff-link" href="{{ group.package.get_absolute_url }}signoff/" - title="Signoff {{ group.package.pkgname }} for {{ group.package.arch }}">Signoff</a></div> + title="Signoff {{ group.pkgbase }} for {{ group.arch }}">Signoff</a></div> {% endif %} diff --git a/templates/packages/signoffs.html b/templates/packages/signoffs.html index 4a2f6c99..8d57a8c5 100644 --- a/templates/packages/signoffs.html +++ b/templates/packages/signoffs.html @@ -34,6 +34,7 @@ <h3>Filter Displayed Signoffs</h3> <th>Package Base/Version</th> <th>Arch</th> <th>Target Repo</th> + <th>Packager</th> <th># of Packages</th> <th>Last Updated</th> <th>Approved</th> @@ -42,18 +43,17 @@ <h3>Filter Displayed Signoffs</h3> </thead> <tbody id="tbody_signoffs"> {% for group in signoff_groups %} - {% with group.package as pkg %} - <tr class="{% cycle 'odd' 'even' %} {{ pkg.arch.name }}"> - <td>{% pkg_details_link pkg %} {{ pkg.full_version }}</td> - <td>{{ pkg.arch.name }}</td> + <tr class="{% cycle 'odd' 'even' %} {{ group.arch.name }}"> + <td>{% pkg_details_link group.package %} {{ group.version }}</td> + <td>{{ group.arch.name }}</td> <td>{{ group.target_repo }}</td> + <td>{{ group.packager|default:"Unknown" }}</td> <td>{{ group.packages|length }}</td> - <td>{{ pkg.last_update|date }}</td> + <td>{{ group.last_update|date }}</td> <td class="signoff-{{ group.approved|yesno }}"> {{ group.approved|yesno|capfirst }}</td> <td>{% include "packages/signoff_cell.html" %}</td> </tr> - {% endwith %} {% endfor %} </tbody> </table> -- cgit v1.2.3-54-g00ecf From 
49ac7efd683152e4936f8013bb7a001470260034 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 3 Nov 2011 17:18:55 -0500 Subject: Package signoff email report, initial revision Signed-off-by: Dan McGee <dan@archlinux.org> --- packages/management/__init__.py | 0 packages/management/commands/__init__.py | 0 packages/management/commands/signoff_report.py | 110 +++++++++++++++++++++++++ templates/packages/signoff_report.txt | 27 ++++++ 4 files changed, 137 insertions(+) create mode 100644 packages/management/__init__.py create mode 100644 packages/management/commands/__init__.py create mode 100644 packages/management/commands/signoff_report.py create mode 100644 templates/packages/signoff_report.txt diff --git a/packages/management/__init__.py b/packages/management/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/packages/management/commands/__init__.py b/packages/management/commands/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/packages/management/commands/signoff_report.py b/packages/management/commands/signoff_report.py new file mode 100644 index 00000000..17e58f39 --- /dev/null +++ b/packages/management/commands/signoff_report.py @@ -0,0 +1,110 @@ +# -*- coding: utf-8 -*- +""" +signoff_report command + +Send an email summarizing the state of outstanding signoffs for the given +repository. + +Usage: ./manage.py signoff_report <email> <repository> +""" + +from django.core.urlresolvers import reverse +from django.core.management.base import BaseCommand, CommandError +from django.contrib.auth.models import User +from django.contrib.sites.models import Site +from django.db.models import Count +from django.template import loader, Context + +from collections import namedtuple +from datetime import datetime, timedelta +import logging +from operator import attrgetter +import sys + +from main.models import Package, Repo +from packages.models import Signoff +from packages.utils import get_signoff_groups + +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s -> %(levelname)s: %(message)s', + datefmt='%Y-%m-%d %H:%M:%S', + stream=sys.stderr) +logger = logging.getLogger() + +class Command(BaseCommand): + args = "<email> <repository>" + help = "Send a signoff report for the given repository." 
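    # (Illustrative aside, not part of the committed file: BaseCommand already
    # accepts Django's standard --verbosity option, and handle() below maps it
    # onto the stderr logger configured above. Assuming the usual [testing]
    # repo and a placeholder address, a cron entry might run:
    #     ./manage.py signoff_report team@example.com testing --verbosity=2
    # which sets logger.level to logging.DEBUG before generate_report() runs.)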
+ + def handle(self, *args, **options): + v = int(options.get('verbosity', None)) + if v == 0: + logger.level = logging.ERROR + elif v == 1: + logger.level = logging.INFO + elif v == 2: + logger.level = logging.DEBUG + + if len(args) != 2: + raise CommandError("email and repository must be provided") + + return generate_report(args[0], args[1]) + +def generate_report(email, repo_name): + repo = Repo.objects.get(name__iexact=repo_name) + # Collect all existing signoffs for these packages + signoff_groups = sorted(get_signoff_groups([repo]), + key=attrgetter('target_repo', 'arch', 'pkgbase')) + complete = [] + incomplete = [] + new = [] + old = [] + + new_hours = 24 + old_days = 14 + now = datetime.utcnow() + new_cutoff = now - timedelta(hours=new_hours) + old_cutoff = now - timedelta(days=old_days) + + for group in signoff_groups: + if group.approved(): + complete.append(group) + else: + incomplete.append(group) + if group.package.last_update > new_cutoff: + new.append(group) + if group.package.last_update < old_cutoff: + old.append(group) + + old.sort(key=attrgetter('last_update')) + + proto = 'https' + domain = Site.objects.get_current().domain + signoffs_url = '%s://%s%s' % (proto, domain, reverse('package-signoffs')) + + # and the fun bit + Leader = namedtuple('Leader', ['user', 'count']) + leaders = Signoff.objects.filter(created__gt=new_cutoff, + revoked__isnull=True).values_list('user').annotate( + signoff_count=Count('pk')).order_by('-signoff_count')[:5] + users = User.objects.in_bulk([l[0] for l in leaders]) + leaders = (Leader(users[l[0]], l[1]) for l in leaders) + + subject = 'Signoff report for [%s]' % repo.name.lower() + t = loader.get_template('packages/signoff_report.txt') + c = Context({ + 'repo': repo, + 'signoffs_url': signoffs_url, + 'incomplete': incomplete, + 'complete': complete, + 'new': new, + 'new_hours': new_hours, + 'old': old, + 'old_days': old_days, + 'leaders': leaders, + }) + from_addr = 'Arch Website Notification <nobody@archlinux.org>' + #send_mail(subject, t.render(c), from_addr, email) + print t.render(c) + +# vim: set ts=4 sw=4 et: diff --git a/templates/packages/signoff_report.txt b/templates/packages/signoff_report.txt new file mode 100644 index 00000000..84e3fc6b --- /dev/null +++ b/templates/packages/signoff_report.txt @@ -0,0 +1,27 @@ +=== {% autoescape off %}Signoff report for [{{ repo|lower }}] === +{{ signoffs_url }} + +== New packages in [{{ repo|lower}}] in last {{ new_hours }} hours ({{ new|length }} total) == +{% for group in new %} +* {{ group.pkgbase }}-{{ group.version }} ({{ group.arch }}){% endfor %} + +{% regroup incomplete by target_repo as by_repo %}{% for target_repo in by_repo %} +== Incomplete signoffs for [{{ target_repo.grouper|lower }}] ({{ target_repo.list|length }} total) == +{% for group in target_repo.list %} +* {{ group.pkgbase }}-{{ group.version }} ({{ group.arch }}) + {{ group.completed }}/{{ group.required }} signoffs{% endfor %} +{% endfor %} + +== Completed signoffs ({{ complete|length }} total) == +{% for group in complete %} +* {{ group.pkgbase }}-{{ group.version }} ({{ group.arch }}){% endfor %} + + +== All packages in [{{ repo|lower }}] for more than {{ old_days }} days ({{ old|length }} total) == +{% for group in old %} +* {{ group.pkgbase }}-{{ group.version }} ({{ group.arch }}), since {{ group.last_update|date }}{% endfor %} +{% endautoescape %} + +== Top five in signoffs in last {{ new_hours }} hours == +{% for leader in leaders %} +{{ forloop.counter }}. 
{{ leader.user }} - {{ leader.count }} signoffs{% endfor %} -- cgit v1.2.3-54-g00ecf From caa15d61a3882d1076378ae406bb2effcb63ff87 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 3 Nov 2011 17:29:05 -0500 Subject: Minor tweaks to style and sorting of signoffs Signed-off-by: Dan McGee <dan@archlinux.org> --- media/archweb.css | 4 ++++ templates/packages/signoffs.html | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/media/archweb.css b/media/archweb.css index 62dc4fbc..f817e18d 100644 --- a/media/archweb.css +++ b/media/archweb.css @@ -895,6 +895,10 @@ ul.admin-actions { } /* dev: signoff page */ +#dev-signoffs tr:hover { + background: #ffd; +} + #dev-signoffs ul { list-style: none; margin: 0; diff --git a/templates/packages/signoffs.html b/templates/packages/signoffs.html index 8d57a8c5..0bdc6d46 100644 --- a/templates/packages/signoffs.html +++ b/templates/packages/signoffs.html @@ -65,7 +65,7 @@ <h3>Filter Displayed Signoffs</h3> $(document).ready(function() { $('a.signoff-link').click(signoff_package); $(".results").tablesorter({widgets: ['zebra'], sortList: [[0,0]], - headers: { 6: { sorter: false } } }); + headers: { 7: { sorter: false } } }); $('#signoffs_filter input').change(filter_signoffs); $('#criteria_reset').click(filter_signoffs_reset); // fire function on page load to ensure the current form selections take effect -- cgit v1.2.3-54-g00ecf From 9a5410ba4b622b68306de53abfa28b5a49e30107 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 3 Nov 2011 17:33:00 -0500 Subject: Make signoff_report command send email Signed-off-by: Dan McGee <dan@archlinux.org> --- packages/management/commands/signoff_report.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/management/commands/signoff_report.py b/packages/management/commands/signoff_report.py index 17e58f39..02f3d985 100644 --- a/packages/management/commands/signoff_report.py +++ b/packages/management/commands/signoff_report.py @@ -8,6 +8,7 @@ Usage: ./manage.py signoff_report <email> <repository> """ +from django.core.mail import send_mail from django.core.urlresolvers import reverse from django.core.management.base import BaseCommand, CommandError from django.contrib.auth.models import User @@ -104,7 +105,6 @@ def generate_report(email, repo_name): 'leaders': leaders, }) from_addr = 'Arch Website Notification <nobody@archlinux.org>' - #send_mail(subject, t.render(c), from_addr, email) - print t.render(c) + send_mail(subject, t.render(c), from_addr, [email]) # vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From e3e3e498765ab416a2902adf114cd3270e3eb12e Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Wed, 2 Nov 2011 09:47:02 -0500 Subject: Add URL to todolist email Signed-off-by: Dan McGee <dan@archlinux.org> --- main/models.py | 5 +++++ templates/todolists/email_notification.txt | 9 +++++---- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/main/models.py b/main/models.py index d55a9673..972b098c 100644 --- a/main/models.py +++ b/main/models.py @@ -470,6 +470,11 @@ class Meta: def get_absolute_url(self): return '/todo/%i/' % self.id + def get_full_url(self, proto='https'): + '''get a URL suitable for things like email including the domain''' + domain = Site.objects.get_current().domain + return '%s://%s%s' % (proto, domain, self.get_absolute_url()) + class TodolistPkg(models.Model): list = models.ForeignKey(Todolist) pkg = models.ForeignKey(Package) diff --git a/templates/todolists/email_notification.txt 
b/templates/todolists/email_notification.txt index 10b50f64..8b22b465 100644 --- a/templates/todolists/email_notification.txt +++ b/templates/todolists/email_notification.txt @@ -1,10 +1,11 @@ -{% autoescape off %}The todo list {{ todolist.name }} has had the following packages added to it for which you are a maintainer: +{% autoescape off %}The todo list "{{ todolist.name }}" has had the following packages added to it for which you are a maintainer: {% for tpkg in todo_packages %} * {{ tpkg.pkg.repo.name|lower }}/{{ tpkg.pkg.pkgname }} ({{ tpkg.pkg.arch.name }}) - {{ tpkg.pkg.get_full_url }}{% endfor %} Todo list information: -Creator: {{todolist.creator.get_full_name}} -Name: {{todolist.name}} +Name: {{ todolist.name }} +URL: {{ todolist.get_full_url }} +Creator: {{ todolist.creator.get_full_name }} Description: -{{todolist.description|striptags|wordwrap:69}}{% endautoescape %} +{{ todolist.description|striptags|wordwrap:78 }}{% endautoescape %} -- cgit v1.2.3-54-g00ecf From 8187b87143081a2be75032db91287f9deb9d1f89 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 3 Nov 2011 19:10:07 -0500 Subject: Add signoff options form and data entry page This allows the criteria and other information about certain signoffs to be overridden as necessary. Signed-off-by: Dan McGee <dan@archlinux.org> --- packages/models.py | 52 ++++++++++++++++++++++++++------- packages/urls.py | 1 + packages/views.py | 40 ++++++++++++++++++++++++- templates/packages/signoff_cell.html | 15 ++++++++-- templates/packages/signoff_options.html | 18 ++++++++++++ templates/packages/signoffs.html | 3 +- 6 files changed, 113 insertions(+), 16 deletions(-) create mode 100644 templates/packages/signoff_options.html diff --git a/packages/models.py b/packages/models.py index 3c319fe7..a2b53a06 100644 --- a/packages/models.py +++ b/packages/models.py @@ -38,6 +38,22 @@ def __unicode__(self): class Meta: unique_together = (('pkgbase', 'user', 'type'),) + +class SignoffSpecificationManager(models.Manager): + def get_from_package(self, pkg): + '''Utility method to pull all relevant name-version fields from a + package and get a matching specification.''' + return self.get( + pkgbase=pkg.pkgbase, pkgver=pkg.pkgver, pkgrel=pkg.pkgrel, + epoch=pkg.epoch, arch=pkg.arch, repo=pkg.repo) + + def get_or_create_from_package(self, pkg): + '''Utility method to pull all relevant name-version fields from a + package and get or create a matching specification.''' + return self.get_or_create( + pkgbase=pkg.pkgbase, pkgver=pkg.pkgver, pkgrel=pkg.pkgrel, + epoch=pkg.epoch, arch=pkg.arch, repo=pkg.repo) + class SignoffSpecification(models.Model): ''' A specification for the signoff policy for this particular revision of a @@ -53,25 +69,40 @@ class SignoffSpecification(models.Model): repo = models.ForeignKey('main.Repo') user = models.ForeignKey(User) created = models.DateTimeField(editable=False) - required = models.PositiveIntegerField(default=2) - enabled = models.BooleanField(default=True) - known_bad = models.BooleanField(default=False) + required = models.PositiveIntegerField(default=2, + help_text="How many signoffs are required for this package?") + enabled = models.BooleanField(default=True, + help_text="Is this package eligible for signoffs?") + known_bad = models.BooleanField(default=False, + help_text="Is this package known to be broken in some way?") comments = models.TextField(null=True, blank=True) + objects = SignoffSpecificationManager() + + @property + def full_version(self): + if self.epoch > 0: + return 
u'%d:%s-%s' % (self.epoch, self.pkgver, self.pkgrel) + return u'%s-%s' % (self.pkgver, self.pkgrel) + + def __unicode__(self): + return u'%s-%s' % (self.pkgbase, self.full_version) + + class SignoffManager(models.Manager): def get_from_package(self, pkg, user, revoked=False): '''Utility method to pull all relevant name-version fields from a - package and create a matching signoff.''' + package and get a matching signoff.''' not_revoked = not revoked - return Signoff.objects.get( + return self.get( pkgbase=pkg.pkgbase, pkgver=pkg.pkgver, pkgrel=pkg.pkgrel, epoch=pkg.epoch, arch=pkg.arch, repo=pkg.repo, revoked__isnull=not_revoked, user=user) def get_or_create_from_package(self, pkg, user): '''Utility method to pull all relevant name-version fields from a - package and create a matching signoff.''' - return Signoff.objects.get_or_create( + package and get or create a matching signoff.''' + return self.get_or_create( pkgbase=pkg.pkgbase, pkgver=pkg.pkgver, pkgrel=pkg.pkgrel, epoch=pkg.epoch, arch=pkg.arch, repo=pkg.repo, revoked=None, user=user) @@ -196,9 +227,8 @@ def remove_inactive_maintainers(sender, instance, created, **kwargs): post_save.connect(remove_inactive_maintainers, sender=User, dispatch_uid="packages.models") -pre_save.connect(set_created_field, sender=PackageRelation, - dispatch_uid="packages.models") -pre_save.connect(set_created_field, sender=Signoff, - dispatch_uid="packages.models") +for sender in (PackageRelation, SignoffSpecification, Signoff): + pre_save.connect(set_created_field, sender=sender, + dispatch_uid="packages.models") # vim: set ts=4 sw=4 et: diff --git a/packages/urls.py b/packages/urls.py index 576e3279..4d391a3c 100644 --- a/packages/urls.py +++ b/packages/urls.py @@ -11,6 +11,7 @@ (r'^unflag/all/$', 'unflag_all'), (r'^signoff/$', 'signoff_package'), (r'^signoff/revoke/$', 'signoff_package', {'revoke': True}), + (r'^signoff/options/$', 'signoff_options'), (r'^download/$', 'download'), ) diff --git a/packages/views.py b/packages/views.py index e102760b..66bcd3fc 100644 --- a/packages/views.py +++ b/packages/views.py @@ -25,7 +25,7 @@ from main.models import Package, PackageFile, Arch, Repo from main.utils import make_choice from mirrors.models import MirrorUrl -from .models import PackageRelation, PackageGroup, Signoff +from .models import PackageRelation, PackageGroup, SignoffSpecification, Signoff from .utils import (get_group_info, get_differences_info, get_wrong_permissions, get_signoff_groups, approved_by_signoffs) @@ -417,6 +417,44 @@ def signoff_package(request, name, repo, arch, revoke=False): return redirect('package-signoffs') +class SignoffOptionsForm(forms.ModelForm): + class Meta: + model = SignoffSpecification + fields = ('required', 'enabled', 'known_bad', 'comments') + +@permission_required('main.change_package') +@never_cache +def signoff_options(request, name, repo, arch): + packages = get_list_or_404(Package, pkgbase=name, + arch__name=arch, repo__name__iexact=repo, repo__testing=True) + package = packages[0] + + # TODO ensure submitter is maintainer and/or packager + + try: + spec = SignoffSpecification.objects.get_from_package(package) + except SignoffSpecification.DoesNotExist: + # create a fake one, but don't save it just yet + spec = SignoffSpecification(pkgbase=package.pkgbase, + pkgver=package.pkgver, pkgrel=package.pkgrel, + epoch=package.epoch, arch=package.arch, repo=package.repo) + spec.user = request.user + + if request.POST: + form = SignoffOptionsForm(request.POST, instance=spec) + if form.is_valid(): + form.save() + return 
redirect('package-signoffs') + else: + form = SignoffOptionsForm(instance=spec) + + context = { + 'packages': packages, + 'package': package, + 'form': form, + } + return direct_to_template(request, 'packages/signoff_options.html', context) + def flaghelp(request): return direct_to_template(request, 'packages/flaghelp.html') diff --git a/templates/packages/signoff_cell.html b/templates/packages/signoff_cell.html index 87216193..0a630119 100644 --- a/templates/packages/signoff_cell.html +++ b/templates/packages/signoff_cell.html @@ -1,12 +1,23 @@ +{% spaceless %} +{% if group.signoffs %} <ul> {% for signoff in group.signoffs %} <li class="signed-username" title="Signed off by {{ signoff.user }}">{{ signoff.user }}{% if signoff.revoked %} (revoked){% endif %}</li> {% endfor %} </ul> +{% endif %} {% if group.user_signed_off %} -<div><a class="signoff-link" href="{{ group.package.get_absolute_url }}signoff/revoke/" +<div> + <a class="signoff-link" href="{{ group.package.get_absolute_url }}signoff/revoke/" title="Revoke signoff {{ group.pkgbase }} for {{ group.arch }}">Revoke Signoff</a></div> {% else %} -<div><a class="signoff-link" href="{{ group.package.get_absolute_url }}signoff/" +<div> + <a class="signoff-link" href="{{ group.package.get_absolute_url }}signoff/" title="Signoff {{ group.pkgbase }} for {{ group.arch }}">Signoff</a></div> {% endif %} +{% if group.packager == user %} +<div> + <a class="signoff-options" href="{{ group.package.get_absolute_url }}signoff/options/">Packager Options</a> +</div> +{% endif %} +{% endspaceless %} diff --git a/templates/packages/signoff_options.html b/templates/packages/signoff_options.html new file mode 100644 index 00000000..ee9b8b47 --- /dev/null +++ b/templates/packages/signoff_options.html @@ -0,0 +1,18 @@ +{% extends "base.html" %} + +{% block title %}Arch Linux - Package Signoff Options - {{ package.pkgbase }} {{ package.full_version }} ({{ package.arch.name }}){% endblock %} +{% block head %}<meta name="robots" content="noindex"/>{% endblock %} +{% block navbarclass %}anb-packages{% endblock %} + +{% block content %} +<div id="signoff-options" class="box"> + <h2>Package Signoff Options: {{ package.pkgbase }} {{ package.full_version }} ({{ package.arch.name }})</h2> + <form id="signoff-options-form" method="post">{% csrf_token %} + <fieldset> + {{ form.as_p }} + </fieldset> + <p><label></label> <input title="Set Signoff Options" type="submit" value="Set Signoff Options" /></p> + </form> + +</div> +{% endblock %} diff --git a/templates/packages/signoffs.html b/templates/packages/signoffs.html index 0bdc6d46..9bc7fd74 100644 --- a/templates/packages/signoffs.html +++ b/templates/packages/signoffs.html @@ -50,8 +50,7 @@ <h3>Filter Displayed Signoffs</h3> <td>{{ group.packager|default:"Unknown" }}</td> <td>{{ group.packages|length }}</td> <td>{{ group.last_update|date }}</td> - <td class="signoff-{{ group.approved|yesno }}"> - {{ group.approved|yesno|capfirst }}</td> + <td class="signoff-{{ group.approved|yesno }}">{{ group.approved|yesno|capfirst }}</td> <td>{% include "packages/signoff_cell.html" %}</td> </tr> {% endfor %} -- cgit v1.2.3-54-g00ecf From 5f2c3bf98baabf919681525e600639643aa2c119 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 3 Nov 2011 20:39:59 -0500 Subject: Signoffs changes and improvements * Better signoff report with more detail * Show signoff specification in signoffs view * Honor disabled/bad flags and display in approval column * Various other small bugfixes and tweaks Signed-off-by: Dan McGee 
<dan@archlinux.org> --- media/archweb.css | 8 +++++-- media/archweb.js | 30 ++++++++++++++++---------- packages/management/commands/signoff_report.py | 13 ++++++++++- packages/models.py | 28 +++++++++++++++++------- packages/utils.py | 16 ++++++++------ packages/views.py | 15 +++++++++---- templates/packages/signoff_cell.html | 2 ++ templates/packages/signoff_report.txt | 13 +++++++++++ templates/packages/signoffs.html | 19 ++++++++++++++-- 9 files changed, 109 insertions(+), 35 deletions(-) diff --git a/media/archweb.css b/media/archweb.css index f817e18d..303173f2 100644 --- a/media/archweb.css +++ b/media/archweb.css @@ -914,8 +914,12 @@ ul.admin-actions { color: red; } -#dev-signoffs .signed-username { - color: #888; +#dev-signoffs .signoff-bad { + color: darkorange; +} + +#dev-signoffs .signoff-disabled { + color: gray; } /* iso testing feedback form */ diff --git a/media/archweb.js b/media/archweb.js index 43812b33..a9f4e0c9 100644 --- a/media/archweb.js +++ b/media/archweb.js @@ -215,28 +215,33 @@ function signoff_package() { $.getJSON(link.href, function(data) { link = $(link); var signoff = null; + var cell = link.closest('td'); if (data.created) { signoff = $('<li>').addClass('signed-username').text(data.user); - link.closest('td').children('ul').append(signoff); + var list = cell.children('ul'); + if (list.size() == 0) { + list = $('<ul>').prependTo(cell); + } + list.append(signoff); } else if(data.user) { signoff = link.closest('td').find('li').filter(function(index) { return $(this).text() == data.user; }); } - console.log(signoff, data.revoked, data.user); if (signoff && data.revoked) { signoff.text(signoff.text() + ' (revoked)'); } /* update the approved column to reflect reality */ - var approved; - if (data.approved) { - approved = link.closest('tr').children('.signoff-no'); - approved.text('Yes').addClass( - 'signoff-yes').removeClass('signoff-no'); + var approved = link.closest('tr').children('.approval'); + approved.attr('class', ''); + if (data.known_bad) { + approved.text('Bad').addClass('signoff-bad'); + } else if (!data.enabled) { + approved.text('Disabled').addClass('signoff-disabled'); + } else if (data.approved) { + approved.text('Yes').addClass('signoff-yes'); } else { - approved = link.closest('tr').children('.signoff-yes'); - approved.text('No').addClass( - 'signoff-no').removeClass('signoff-yes'); + approved.text('No').addClass('signoff-no'); } link.removeAttr('title'); /* Form our new link. The current will be something like @@ -245,6 +250,10 @@ function signoff_package() { if (data.revoked) { link.text('Signoff'); link.attr('href', base_href + '/signoff/'); + /* should we be hiding the link? */ + if (data.known_bad || !data.enabled) { + link.remove(); + } } else { link.text('Revoke Signoff'); link.attr('href', base_href + '/signoff/revoke/'); @@ -260,7 +269,6 @@ function filter_signoffs() { var all_rows = rows; $('#signoffs_filter .arch_filter').each(function() { if (!$(this).is(':checked')) { - console.log($(this).val()); rows = rows.not('.' 
+ $(this).val()); } }); diff --git a/packages/management/commands/signoff_report.py b/packages/management/commands/signoff_report.py index 02f3d985..3431dada 100644 --- a/packages/management/commands/signoff_report.py +++ b/packages/management/commands/signoff_report.py @@ -56,6 +56,8 @@ def generate_report(email, repo_name): # Collect all existing signoffs for these packages signoff_groups = sorted(get_signoff_groups([repo]), key=attrgetter('target_repo', 'arch', 'pkgbase')) + disabled = [] + bad = [] complete = [] incomplete = [] new = [] @@ -68,10 +70,16 @@ def generate_report(email, repo_name): old_cutoff = now - timedelta(days=old_days) for group in signoff_groups: - if group.approved(): + spec = group.specification + if spec.known_bad: + bad.append(group) + elif not spec.enabled: + disabled.append(group) + elif group.approved(): complete.append(group) else: incomplete.append(group) + if group.package.last_update > new_cutoff: new.append(group) if group.package.last_update < old_cutoff: @@ -96,6 +104,9 @@ def generate_report(email, repo_name): c = Context({ 'repo': repo, 'signoffs_url': signoffs_url, + 'disabled': disabled, + 'bad': bad, + 'all': signoff_groups, 'incomplete': incomplete, 'complete': complete, 'new': new, diff --git a/packages/models.py b/packages/models.py index a2b53a06..b70c21bf 100644 --- a/packages/models.py +++ b/packages/models.py @@ -1,3 +1,5 @@ +from collections import namedtuple + from django.db import models from django.db.models.signals import pre_save, post_save from django.contrib.auth.models import User @@ -42,22 +44,26 @@ class Meta: class SignoffSpecificationManager(models.Manager): def get_from_package(self, pkg): '''Utility method to pull all relevant name-version fields from a - package and get a matching specification.''' + package and get a matching signoff specification.''' return self.get( pkgbase=pkg.pkgbase, pkgver=pkg.pkgver, pkgrel=pkg.pkgrel, epoch=pkg.epoch, arch=pkg.arch, repo=pkg.repo) - def get_or_create_from_package(self, pkg): - '''Utility method to pull all relevant name-version fields from a - package and get or create a matching specification.''' - return self.get_or_create( - pkgbase=pkg.pkgbase, pkgver=pkg.pkgver, pkgrel=pkg.pkgrel, - epoch=pkg.epoch, arch=pkg.arch, repo=pkg.repo) + def get_or_default_from_package(self, pkg): + '''utility method to pull all relevant name-version fields from a + package and get a matching signoff specification, or return the default + base case.''' + try: + return self.get( + pkgbase=pkg.pkgbase, pkgver=pkg.pkgver, pkgrel=pkg.pkgrel, + epoch=pkg.epoch, arch=pkg.arch, repo=pkg.repo) + except SignoffSpecification.DoesNotExist: + return DEFAULT_SIGNOFF_SPEC class SignoffSpecification(models.Model): ''' A specification for the signoff policy for this particular revision of a - pakcage. The default is requiring two signoffs for a given package. These + package. The default is requiring two signoffs for a given package. These are created only if necessary; e.g., if one wanted to override the required=2 attribute, otherwise a sane default object is used. 
''' @@ -89,6 +95,12 @@ def __unicode__(self): return u'%s-%s' % (self.pkgbase, self.full_version) +# fake default signoff spec when we don't have a persisted one in the database +FakeSignoffSpecification = namedtuple('FakeSignoffSpecification', + ('required', 'enabled', 'known_bad', 'comments')) +DEFAULT_SIGNOFF_SPEC = FakeSignoffSpecification(2, True, False, u'') + + class SignoffManager(models.Manager): def get_from_package(self, pkg, user, revoked=False): '''Utility method to pull all relevant name-version fields from a diff --git a/packages/utils.py b/packages/utils.py index 42cfbe0f..60b95e21 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -5,7 +5,8 @@ from main.models import Package, Repo from main.utils import cache_function, groupby_preserve_order, PackageStandin -from .models import PackageGroup, PackageRelation, SignoffSpecification, Signoff +from .models import (PackageGroup, PackageRelation, + SignoffSpecification, Signoff, DEFAULT_SIGNOFF_SPEC) @cache_function(300) def get_group_info(include_arches=None): @@ -148,9 +149,7 @@ def get_wrong_permissions(): return relations -DEFAULT_SIGNOFF_SPEC = SignoffSpecification() - -def approved_by_signoffs(signoffs, spec=DEFAULT_SIGNOFF_SPEC): +def approved_by_signoffs(signoffs, spec): if signoffs: good_signoffs = sum(1 for s in signoffs if not s.revoked) return good_signoffs >= spec.required @@ -158,14 +157,13 @@ def approved_by_signoffs(signoffs, spec=DEFAULT_SIGNOFF_SPEC): class PackageSignoffGroup(object): '''Encompasses all packages in testing with the same pkgbase.''' - def __init__(self, packages, user=None): + def __init__(self, packages): if len(packages) == 0: raise Exception self.packages = packages - self.user = user + self.user = None self.target_repo = None self.signoffs = set() - self.specification = DEFAULT_SIGNOFF_SPEC first = packages[0] self.pkgbase = first.pkgbase @@ -175,6 +173,10 @@ def __init__(self, packages, user=None): self.last_update = first.last_update self.packager = first.packager + self.specification = \ + SignoffSpecification.objects.get_or_default_from_package(first) + self.default_spec = self.specification is DEFAULT_SIGNOFF_SPEC + version = first.full_version if all(version == pkg.full_version for pkg in packages): self.version = version diff --git a/packages/views.py b/packages/views.py index 66bcd3fc..307691e2 100644 --- a/packages/views.py +++ b/packages/views.py @@ -7,8 +7,9 @@ from django.core.mail import send_mail from django.core.serializers.json import DjangoJSONEncoder from django.db.models import Q -from django.http import HttpResponse, Http404 -from django.shortcuts import get_object_or_404, get_list_or_404, redirect +from django.http import HttpResponse, Http404, HttpResponseForbidden +from django.shortcuts import (get_object_or_404, get_list_or_404, + redirect, render) from django.template import loader, Context from django.utils import simplejson from django.views.decorators.cache import never_cache @@ -404,12 +405,16 @@ def signoff_package(request, name, repo, arch, revoke=False): package, request.user) all_signoffs = Signoff.objects.for_package(package) + spec = SignoffSpecification.objects.get_or_default_from_package(package) if request.is_ajax(): data = { 'created': created, 'revoked': bool(signoff.revoked), - 'approved': approved_by_signoffs(all_signoffs), + 'approved': approved_by_signoffs(all_signoffs, spec), + 'required': spec.required, + 'enabled': spec.enabled, + 'known_bad': spec.known_bad, 'user': str(request.user), } return HttpResponse(simplejson.dumps(data, 
ensure_ascii=False), @@ -429,7 +434,9 @@ def signoff_options(request, name, repo, arch): arch__name=arch, repo__name__iexact=repo, repo__testing=True) package = packages[0] - # TODO ensure submitter is maintainer and/or packager + if request.user != package.packager and \ + request.user not in package.maintainers: + return render(request, '403.html', status=403) try: spec = SignoffSpecification.objects.get_from_package(package) diff --git a/templates/packages/signoff_cell.html b/templates/packages/signoff_cell.html index 0a630119..6c705b4e 100644 --- a/templates/packages/signoff_cell.html +++ b/templates/packages/signoff_cell.html @@ -11,10 +11,12 @@ <a class="signoff-link" href="{{ group.package.get_absolute_url }}signoff/revoke/" title="Revoke signoff {{ group.pkgbase }} for {{ group.arch }}">Revoke Signoff</a></div> {% else %} +{% if not group.specification.known_bad and group.specification.enabled %} <div> <a class="signoff-link" href="{{ group.package.get_absolute_url }}signoff/" title="Signoff {{ group.pkgbase }} for {{ group.arch }}">Signoff</a></div> {% endif %} +{% endif %} {% if group.packager == user %} <div> <a class="signoff-options" href="{{ group.package.get_absolute_url }}signoff/options/">Packager Options</a> diff --git a/templates/packages/signoff_report.txt b/templates/packages/signoff_report.txt index 84e3fc6b..81020c8f 100644 --- a/templates/packages/signoff_report.txt +++ b/templates/packages/signoff_report.txt @@ -1,6 +1,19 @@ === {% autoescape off %}Signoff report for [{{ repo|lower }}] === {{ signoffs_url }} +There are currently: +* {{ new|length }} new package{{ new|length|pluralize }} in last {{ new_hours }} hours +* {{ bad|length }} known bad package{{ bad|length|pluralize }} +* {{ disabled|length }} package{{ disabled|length|pluralize }} not accepting signoffs +* {{ complete|length }} fully signed off package{{ complete|length|pluralize }} +* {{ incomplete|length }} package{{ incomplete|length|pluralize }} missing signoffs +* {{ old|length }} package{{ old|length|pluralize }} older than {{ old_days }} days + +(Note: the word 'package' as used here refers to packages as grouped by +pkgbase, architecture, and repository; e.g., one PKGBUILD produces one +package per architecture, even if it is a split package.) 
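{% comment %}Illustrative only, not part of the original template: with
three new groups and one known-bad group in [testing], the summary above
would render lines such as "* 3 new packages in last 24 hours" and
"* 1 known bad package"; each count comes from the length filter, and
pluralize appends the trailing "s" only when a count is not 1.{% endcomment %}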
+ + == New packages in [{{ repo|lower}}] in last {{ new_hours }} hours ({{ new|length }} total) == {% for group in new %} * {{ group.pkgbase }}-{{ group.version }} ({{ group.arch }}){% endfor %} diff --git a/templates/packages/signoffs.html b/templates/packages/signoffs.html index 9bc7fd74..d517e5e3 100644 --- a/templates/packages/signoffs.html +++ b/templates/packages/signoffs.html @@ -39,6 +39,7 @@ <h3>Filter Displayed Signoffs</h3> <th>Last Updated</th> <th>Approved</th> <th>Signoffs</th> + <th>Notes</th> </tr> </thead> <tbody id="tbody_signoffs"> @@ -50,8 +51,22 @@ <h3>Filter Displayed Signoffs</h3> <td>{{ group.packager|default:"Unknown" }}</td> <td>{{ group.packages|length }}</td> <td>{{ group.last_update|date }}</td> - <td class="signoff-{{ group.approved|yesno }}">{{ group.approved|yesno|capfirst }}</td> + {% if group.specification.known_bad %} + <td class="approval signoff-bad">Bad</td> + {% else %} + {% if not group.specification.enabled %} + <td class="approval signoff-disabled">Disabled</td> + {% else %} + <td class="approval signoff-{{ group.approved|yesno }}">{{ group.approved|yesno|capfirst }}</td> + {% endif %} + {% endif %} <td>{% include "packages/signoff_cell.html" %}</td> + <td class="wrap">{% if not group.default_spec %}{% with group.specification as spec %} + {% if spec.required != 2 %}Required signoffs: {{ spec.required }}<br/>{% endif %} + {% if not spec.enabled %}Signoffs are not currently enabled<br/>{% endif %} + {% if spec.known_bad %}Package is known to be bad<br/>{% endif %} + {{ spec.comments|default:""|linebreaks }} + {% endwith %}{% endif %}</td> </tr> {% endfor %} </tbody> @@ -64,7 +79,7 @@ <h3>Filter Displayed Signoffs</h3> $(document).ready(function() { $('a.signoff-link').click(signoff_package); $(".results").tablesorter({widgets: ['zebra'], sortList: [[0,0]], - headers: { 7: { sorter: false } } }); + headers: { 7: { sorter: false }, 8: {sorter: false } } }); $('#signoffs_filter input').change(filter_signoffs); $('#criteria_reset').click(filter_signoffs_reset); // fire function on page load to ensure the current form selections take effect -- cgit v1.2.3-54-g00ecf From 800ea45528e297c38e068775951e666f8191ef45 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 3 Nov 2011 21:20:28 -0500 Subject: Ensure signoffs can only be created if allowed Signed-off-by: Dan McGee <dan@archlinux.org> --- packages/views.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/packages/views.py b/packages/views.py index 307691e2..00dd7f7d 100644 --- a/packages/views.py +++ b/packages/views.py @@ -388,9 +388,10 @@ def signoffs(request): def signoff_package(request, name, repo, arch, revoke=False): packages = get_list_or_404(Package, pkgbase=name, arch__name=arch, repo__name__iexact=repo, repo__testing=True) - package = packages[0] + spec = SignoffSpecification.objects.get_or_default_from_package(package) + if revoke: try: signoff = Signoff.objects.get_from_package( @@ -401,11 +402,13 @@ def signoff_package(request, name, repo, arch, revoke=False): signoff.save() created = False else: + # ensure we should even be accepting signoffs + if spec.known_bad or not spec.enabled: + return render(request, '403.html', status=403) signoff, created = Signoff.objects.get_or_create_from_package( package, request.user) all_signoffs = Signoff.objects.for_package(package) - spec = SignoffSpecification.objects.get_or_default_from_package(package) if request.is_ajax(): data = { -- cgit v1.2.3-54-g00ecf From 5e295a3dbb0b64f229e9419384721b154e013b9e Mon 
Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 3 Nov 2011 21:20:50 -0500 Subject: Allow signoff options to apply to all packages across architectures If you check the new box, you can set the options for both the i686 and the x86_64 packages at the same time. Signed-off-by: Dan McGee <dan@archlinux.org> --- packages/views.py | 32 +++++++++++++++++++++++++++++++- templates/packages/signoff_cell.html | 2 +- 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/packages/views.py b/packages/views.py index 00dd7f7d..aa15d0cf 100644 --- a/packages/views.py +++ b/packages/views.py @@ -6,6 +6,7 @@ from django.conf import settings from django.core.mail import send_mail from django.core.serializers.json import DjangoJSONEncoder +from django.db import transaction from django.db.models import Q from django.http import HttpResponse, Http404, HttpResponseForbidden from django.shortcuts import (get_object_or_404, get_list_or_404, @@ -426,10 +427,36 @@ def signoff_package(request, name, repo, arch, revoke=False): return redirect('package-signoffs') class SignoffOptionsForm(forms.ModelForm): + apply_all = forms.BooleanField(required=False, + help_text="Apply these options to all architectures?") + class Meta: model = SignoffSpecification fields = ('required', 'enabled', 'known_bad', 'comments') +def _signoff_options_all(request, name, repo): + seen_ids = set() + with transaction.commit_on_success(): + # find or create a specification for all architectures, then + # graft the form data onto them + packages = Package.objects.filter(pkgbase=name, + repo__name__iexact=repo, repo__testing=True) + for package in packages: + try: + spec = SignoffSpecification.objects.get_from_package(package) + if spec.pk in seen_ids: + continue + except SignoffSpecification.DoesNotExist: + spec = SignoffSpecification(pkgbase=package.pkgbase, + pkgver=package.pkgver, pkgrel=package.pkgrel, + epoch=package.epoch, arch=package.arch, + repo=package.repo) + spec.user = request.user + form = SignoffOptionsForm(request.POST, instance=spec) + if form.is_valid(): + form.save() + seen_ids.add(form.instance.pk) + @permission_required('main.change_package') @never_cache def signoff_options(request, name, repo, arch): @@ -453,7 +480,10 @@ def signoff_options(request, name, repo, arch): if request.POST: form = SignoffOptionsForm(request.POST, instance=spec) if form.is_valid(): - form.save() + if form.cleaned_data['apply_all']: + _signoff_options_all(request, name, repo) + else: + form.save() return redirect('package-signoffs') else: form = SignoffOptionsForm(instance=spec) diff --git a/templates/packages/signoff_cell.html b/templates/packages/signoff_cell.html index 6c705b4e..4f9f726b 100644 --- a/templates/packages/signoff_cell.html +++ b/templates/packages/signoff_cell.html @@ -19,7 +19,7 @@ {% endif %} {% if group.packager == user %} <div> - <a class="signoff-options" href="{{ group.package.get_absolute_url }}signoff/options/">Packager Options</a> + <a class="signoff-options" href="{{ group.package.get_absolute_url }}signoff/options/">Signoff Options</a> </div> {% endif %} {% endspaceless %} -- cgit v1.2.3-54-g00ecf From 0aa42e2c01df2bf1c9e425994420f5ae10252597 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 3 Nov 2011 21:32:30 -0500 Subject: Allow signoff manipulation if you are a maintainer This is a more expensive and not-yet-optimized way of doing this, but we can fix that later as needed. 
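Roughly, the cost is one maintainers queryset per signoff group shown on
the page; a sketch of the query shape (illustrative, reusing the models
seen earlier in this series, not code from this commit):

    # evaluated once per PackageSignoffGroup when the template checks
    # "user in group.maintainers"
    maintainers = User.objects.filter(
        package_relations__type=PackageRelation.MAINTAINER,
        package_relations__pkgbase=group.pkgbase)

The attach_maintainers() helper introduced in the next commit batches this
same lookup across all packages at once.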
Signed-off-by: Dan McGee <dan@archlinux.org> --- packages/utils.py | 4 ++++ templates/packages/signoff_cell.html | 2 +- templates/todolists/view.html | 2 +- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/packages/utils.py b/packages/utils.py index 60b95e21..1a2c0de0 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -2,6 +2,7 @@ from django.db import connection from django.db.models import Count, Max +from django.contrib.auth.models import User from main.models import Package, Repo from main.utils import cache_function, groupby_preserve_order, PackageStandin @@ -172,6 +173,9 @@ def __init__(self, packages): self.version = '' self.last_update = first.last_update self.packager = first.packager + self.maintainers = User.objects.filter( + package_relations__type=PackageRelation.MAINTAINER, + package_relations__pkgbase=self.pkgbase) self.specification = \ SignoffSpecification.objects.get_or_default_from_package(first) diff --git a/templates/packages/signoff_cell.html b/templates/packages/signoff_cell.html index 4f9f726b..0bf44ca2 100644 --- a/templates/packages/signoff_cell.html +++ b/templates/packages/signoff_cell.html @@ -17,7 +17,7 @@ title="Signoff {{ group.pkgbase }} for {{ group.arch }}">Signoff</a></div> {% endif %} {% endif %} -{% if group.packager == user %} +{% if user == group.packager or user in group.maintainers %} <div> <a class="signoff-options" href="{{ group.package.get_absolute_url }}signoff/options/">Signoff Options</a> </div> diff --git a/templates/todolists/view.html b/templates/todolists/view.html index 8f515c9b..c9ea919a 100644 --- a/templates/todolists/view.html +++ b/templates/todolists/view.html @@ -29,7 +29,7 @@ <h2>Todo List: {{ list.name }}</h2> <th>Name</th> <th>Arch</th> <th>Repo</th> - <th>Maintainer</th> + <th>Maintainers</th> <th>Status</th> </tr> </thead> -- cgit v1.2.3-54-g00ecf From 278d74b1d12568d4c9b6d5533e57e820d038ae64 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 3 Nov 2011 22:05:35 -0500 Subject: Minor signoff query tweaks/optimizations Signed-off-by: Dan McGee <dan@archlinux.org> --- packages/utils.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/utils.py b/packages/utils.py index 1a2c0de0..65769baf 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -248,8 +248,7 @@ def get_current_signoffs(repos): AND s.arch_id = p.arch_id AND s.repo_id = p.repo_id ) - JOIN repos r ON p.repo_id = r.id - WHERE r.id IN ( + WHERE p.repo_id IN ( """ sql += ", ".join("%s" for r in repos) sql += ")" @@ -264,15 +263,16 @@ def get_current_signoffs(repos): def get_target_repo_map(pkgbases): package_repos = Package.objects.order_by().values_list( 'pkgbase', 'repo__name').filter( - repo__testing=False, repo__staging=False, pkgbase__in=pkgbases).distinct() return dict(package_repos) def get_signoff_groups(repos=None): if repos is None: repos = Repo.objects.filter(testing=True) + repo_ids = [r.pk for r in repos] - test_pkgs = Package.objects.normal().filter(repo__in=repos) + test_pkgs = Package.objects.select_related( + 'arch', 'repo', 'packager').filter(repo__in=repo_ids) packages = test_pkgs.order_by('pkgname') # Collect all pkgbase values in testing repos -- cgit v1.2.3-54-g00ecf From 19c2841f20653fd3c59f73fdb16f7f7b1ea15434 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 3 Nov 2011 22:46:36 -0500 Subject: Add new attach_maintainers() utility method This allows us to alleviate the N+1 query problem when we want maintainer data for a queryset of packages. 
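A minimal usage sketch (illustrative; the maintainer data costs two fixed queries no matter how many packages are passed in):

    from packages.utils import attach_maintainers

    packages = Package.objects.normal().filter(repo__testing=True)
    packages = attach_maintainers(packages)
    for package in packages:
        # no query per iteration; the property returns the attached list
        names = [u.username for u in package.maintainers]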
We use it on signoffs here; we should also be able to apply this to the todolist section where this problem has existed for some time. Signed-off-by: Dan McGee <dan@archlinux.org> --- main/models.py | 14 +++++++++++--- packages/utils.py | 34 +++++++++++++++++++++++++++++++--- 2 files changed, 42 insertions(+), 6 deletions(-) diff --git a/main/models.py b/main/models.py index 972b098c..db456c20 100644 --- a/main/models.py +++ b/main/models.py @@ -206,11 +206,19 @@ def get_full_url(self, proto='https'): def is_signed(self): return bool(self.pgp_signature) + _maintainers = None + @property def maintainers(self): - return User.objects.filter( - package_relations__pkgbase=self.pkgbase, - package_relations__type=PackageRelation.MAINTAINER) + if self._maintainers is None: + self._maintainers = User.objects.filter( + package_relations__pkgbase=self.pkgbase, + package_relations__type=PackageRelation.MAINTAINER) + return self._maintainers + + @maintainers.setter + def maintainers(self, maintainers): + self._maintainers = maintainers @cache_function(300) def applicable_arches(self): diff --git a/packages/utils.py b/packages/utils.py index 65769baf..0d756a85 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -1,3 +1,4 @@ +from collections import defaultdict from operator import itemgetter from django.db import connection @@ -127,6 +128,7 @@ def get_differences_info(arch_a, arch_b): differences.sort(key=lambda a: (a.repo.name, a.pkgname)) return differences + def get_wrong_permissions(): sql = """ SELECT DISTINCT id @@ -150,6 +152,32 @@ def get_wrong_permissions(): return relations +def attach_maintainers(packages): + '''Given a queryset or something resembling it of package objects, find all + the maintainers and attach them to the packages to prevent N+1 query + cascading.''' + pkgbases = set(p.pkgbase for p in packages) + rels = PackageRelation.objects.filter(type=PackageRelation.MAINTAINER, + pkgbase__in=pkgbases).values_list('pkgbase', 'user_id').distinct() + + # get all the user objects we will need + user_ids = set(rel[1] for rel in rels) + users = User.objects.in_bulk(user_ids) + + # now build a pkgbase -> [maintainers...] map + maintainers = defaultdict(list) + for rel in rels: + maintainers[rel[0]].append(users[rel[1]]) + + annotated = [] + # and finally, attach the maintainer lists on the original packages + for package in packages: + package.maintainers = maintainers[package.pkgbase] + annotated.append(package) + + return annotated + + def approved_by_signoffs(signoffs, spec): if signoffs: good_signoffs = sum(1 for s in signoffs if not s.revoked) @@ -173,9 +201,7 @@ def __init__(self, packages): self.version = '' self.last_update = first.last_update self.packager = first.packager - self.maintainers = User.objects.filter( - package_relations__type=PackageRelation.MAINTAINER, - package_relations__pkgbase=self.pkgbase) + self.maintainers = first.maintainers self.specification = \ SignoffSpecification.objects.get_or_default_from_package(first) @@ -236,6 +262,7 @@ def __unicode__(self): def get_current_signoffs(repos): '''Returns a mapping of pkgbase -> signoff objects for the given repos.''' + # TODO this isn't current at all- this is every single signoff... 
cursor = connection.cursor() sql = """ SELECT DISTINCT s.id @@ -274,6 +301,7 @@ def get_signoff_groups(repos=None): test_pkgs = Package.objects.select_related( 'arch', 'repo', 'packager').filter(repo__in=repo_ids) packages = test_pkgs.order_by('pkgname') + packages = attach_maintainers(packages) # Collect all pkgbase values in testing repos q_pkgbase = test_pkgs.values('pkgbase') -- cgit v1.2.3-54-g00ecf From 0db2830b8fda4d898a184a31f3375c10f3cc4083 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 3 Nov 2011 23:30:16 -0500 Subject: Make maintainer lookup on todo lists fast This is rather sick to look at. Sorry, Django gives me no other choice. Signed-off-by: Dan McGee <dan@archlinux.org> --- main/models.py | 13 +++++++++---- packages/utils.py | 1 + todolists/views.py | 8 +++++++- 3 files changed, 17 insertions(+), 5 deletions(-) diff --git a/main/models.py b/main/models.py index db456c20..caf36be0 100644 --- a/main/models.py +++ b/main/models.py @@ -460,12 +460,17 @@ class Todolist(models.Model): def __unicode__(self): return self.name + _packages = None + @property def packages(self): - # select_related() does not use LEFT OUTER JOIN for nullable ForeignKey - # fields. That is why we need to explicitly list the ones we want. - return TodolistPkg.objects.select_related( - 'pkg__repo', 'pkg__arch').filter(list=self).order_by('pkg') + if not self._packages: + # select_related() does not use LEFT OUTER JOIN for nullable + # ForeignKey fields. That is why we need to explicitly list the + # ones we want. + self._packages = TodolistPkg.objects.select_related( + 'pkg__repo', 'pkg__arch').filter(list=self).order_by('pkg') + return self._packages @property def package_names(self): diff --git a/packages/utils.py b/packages/utils.py index 0d756a85..4af0f67d 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -156,6 +156,7 @@ def attach_maintainers(packages): '''Given a queryset or something resembling it of package objects, find all the maintainers and attach them to the packages to prevent N+1 query cascading.''' + packages = list(packages) pkgbases = set(p.pkgbase for p in packages) rels = PackageRelation.objects.filter(type=PackageRelation.MAINTAINER, pkgbase__in=pkgbases).values_list('pkgbase', 'user_id').distinct() diff --git a/todolists/views.py b/todolists/views.py index 8ad7be56..585cefd0 100644 --- a/todolists/views.py +++ b/todolists/views.py @@ -12,6 +12,7 @@ from django.utils import simplejson from main.models import Todolist, TodolistPkg, Package +from packages.utils import attach_maintainers from .utils import get_annotated_todolists class TodoListForm(forms.ModelForm): @@ -49,6 +50,9 @@ def flag(request, listid, pkgid): @never_cache def view(request, listid): todolist = get_object_or_404(Todolist, id=listid) + # we don't hold onto the result, but the objects are the same here, + # so accessing maintainers in the template is now cheap + attach_maintainers(tp.pkg for tp in todolist.packages) return direct_to_template(request, 'todolists/view.html', {'list': todolist}) @login_required @@ -163,8 +167,10 @@ def send_todolist_emails(todo_list, new_packages): def public_list(request): todo_lists = Todolist.objects.incomplete() + # total hackjob, but it makes this a lot less query-intensive. 
+ all_pkgs = [tp for tl in todo_lists for tp in tl.packages] + attach_maintainers([tp.pkg for tp in all_pkgs]) return direct_to_template(request, "todolists/public_list.html", {"todo_lists": todo_lists}) - # vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From 94f46acebf03652d7ad2ed504d4ce863d5cbd913 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Fri, 4 Nov 2011 00:01:51 -0500 Subject: Find all potential package signoff specifications upfront This should save a significant amount of time in the case where there are a lot of signoffs to look up; at least one query per signoff row. Signed-off-by: Dan McGee <dan@archlinux.org> --- packages/utils.py | 54 +++++++++++++++++++++++++++++++++++++++--------------- 1 file changed, 39 insertions(+), 15 deletions(-) diff --git a/packages/utils.py b/packages/utils.py index 4af0f67d..5240ae23 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -194,6 +194,8 @@ def __init__(self, packages): self.user = None self.target_repo = None self.signoffs = set() + self.specification = DEFAULT_SIGNOFF_SPEC + self.default_spec = True first = packages[0] self.pkgbase = first.pkgbase @@ -204,10 +206,6 @@ def __init__(self, packages): self.packager = first.packager self.maintainers = first.maintainers - self.specification = \ - SignoffSpecification.objects.get_or_default_from_package(first) - self.default_spec = self.specification is DEFAULT_SIGNOFF_SPEC - version = first.full_version if all(version == pkg.full_version for pkg in packages): self.version = version @@ -238,6 +236,17 @@ def find_signoffs(self, all_signoffs): if s.arch_id == self.arch.id and s.repo_id == self.repo.id: self.signoffs.add(s) + def find_specification(self, specifications): + for spec in specifications: + if spec.pkgbase != self.pkgbase: + continue + if self.version and not spec.full_version == self.version: + continue + if spec.arch_id == self.arch.id and spec.repo_id == self.repo.id: + self.specification = spec + self.default_spec = False + return + def approved(self): return approved_by_signoffs(self.signoffs, self.specification) @@ -261,13 +270,9 @@ def __unicode__(self): return u'%s-%s (%s): %d' % ( self.pkgbase, self.version, self.arch, len(self.signoffs)) -def get_current_signoffs(repos): - '''Returns a mapping of pkgbase -> signoff objects for the given repos.''' - # TODO this isn't current at all- this is every single signoff...
- cursor = connection.cursor() - sql = """ +_SQL_SPEC_OR_SIGNOFF = """ SELECT DISTINCT s.id - FROM packages_signoff s + FROM %s s JOIN packages p ON ( s.pkgbase = p.pkgbase AND s.pkgver = p.pkgver @@ -276,11 +281,16 @@ AND s.arch_id = p.arch_id AND s.repo_id = p.repo_id ) - WHERE p.repo_id IN ( + AND p.repo_id IN (%s) """ - sql += ", ".join("%s" for r in repos) - sql += ")" - cursor.execute(sql, [r.id for r in repos]) + +def get_current_signoffs(repos): + '''Returns a mapping of pkgbase -> signoff objects for the given repos.''' + cursor = connection.cursor() + # query pre-process- fill in table name and placeholders for IN + sql = _SQL_SPEC_OR_SIGNOFF % ('packages_signoff', + ','.join(['%s' for r in repos])) + cursor.execute(sql, [r.pk for r in repos]) results = cursor.fetchall() # fetch all of the returned signoffs by ID @@ -288,6 +298,18 @@ signoffs = Signoff.objects.select_related('user').in_bulk(to_fetch) return signoffs.values() +def get_current_specifications(repos): + '''Returns a mapping of pkgbase -> signoff specification objects for the + given repos.''' + cursor = connection.cursor() + sql = _SQL_SPEC_OR_SIGNOFF % ('packages_signoffspecification', + ','.join(['%s' for r in repos])) + cursor.execute(sql, [r.pk for r in repos]) + + results = cursor.fetchall() + to_fetch = [row[0] for row in results] + return SignoffSpecification.objects.in_bulk(to_fetch).values() + def get_target_repo_map(pkgbases): package_repos = Package.objects.order_by().values_list( 'pkgbase', 'repo__name').filter( @@ -308,8 +330,9 @@ q_pkgbase = test_pkgs.values('pkgbase') pkgtorepo = get_target_repo_map(q_pkgbase) - # Collect all existing signoffs for these packages + # Collect all possible signoffs and specifications for these packages signoffs = get_current_signoffs(repos) + specs = get_current_specifications(repos) same_pkgbase_key = lambda x: (x.repo.name, x.arch.name, x.pkgbase) grouped = groupby_preserve_order(packages, same_pkgbase_key) @@ -319,6 +342,7 @@ signoff_group.target_repo = pkgtorepo.get(signoff_group.pkgbase, "Unknown") signoff_group.find_signoffs(signoffs) + signoff_group.find_specification(specs) signoff_groups.append(signoff_group) return signoff_groups -- cgit v1.2.3-54-g00ecf From 20ebe658921c2ce78bf9a05116de045ee38f0820 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Fri, 4 Nov 2011 10:43:02 -0500 Subject: Fix signoff target repo mapping I clearly should not have removed this code yesterday; without it, packages have their target repo matched to a testing one. Signed-off-by: Dan McGee <dan@archlinux.org> --- packages/utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/utils.py b/packages/utils.py index 5240ae23..ddd822e4 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -313,6 +313,7 @@ def get_current_specifications(repos): def get_target_repo_map(pkgbases): package_repos = Package.objects.order_by().values_list( 'pkgbase', 'repo__name').filter( + repo__testing=False, repo__staging=False, pkgbase__in=pkgbases).distinct() return dict(package_repos) -- cgit v1.2.3-54-g00ecf From 8ba68aed370c2369bebaaca4d4158b6c40223c0f Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Fri, 4 Nov 2011 10:59:45 -0500 Subject: Add filter by target repo on signoffs page And add a count of displayed rows below the filter options.
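View-side, the addition is one line of context data (sketch; the example values are made up):

    # distinct target repos drive the new filter checkboxes, and each
    # table row carries the lowercased repo name as a CSS class to match
    repo_names = sorted(set(g.target_repo for g in signoff_groups))
    # e.g. ['Community', 'Core', 'Extra']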
Signed-off-by: Dan McGee <dan@archlinux.org> --- media/archweb.js | 7 ++++++- packages/views.py | 1 + templates/packages/signoffs.html | 12 +++++++++--- 3 files changed, 16 insertions(+), 4 deletions(-) diff --git a/media/archweb.js b/media/archweb.js index a9f4e0c9..2b8e5d6d 100644 --- a/media/archweb.js +++ b/media/archweb.js @@ -267,22 +267,27 @@ function filter_signoffs() { /* start with all rows, and then remove ones we shouldn't show */ var rows = $('#tbody_signoffs').children(); var all_rows = rows; - $('#signoffs_filter .arch_filter').each(function() { + /* apply arch and repo filters */ + $('#signoffs_filter .arch_filter').add( + '#signoffs_filter .repo_filter').each(function() { if (!$(this).is(':checked')) { rows = rows.not('.' + $(this).val()); } }); + /* and then the slightly more expensive pending check */ if ($('#id_pending').is(':checked')) { rows = rows.has('td.signoff-no'); } /* hide all rows, then show the set we care about */ all_rows.hide(); rows.show(); + $('#filter-count').text(rows.length); /* make sure we update the odd/even styling from sorting */ $('.results').trigger('applyWidgets'); } function filter_signoffs_reset() { $('#signoffs_filter .arch_filter').attr('checked', 'checked'); + $('#signoffs_filter .repo_filter').attr('checked', 'checked'); $('#id_pending').removeAttr('checked'); filter_signoffs(); } diff --git a/packages/views.py b/packages/views.py index aa15d0cf..3c0c2bee 100644 --- a/packages/views.py +++ b/packages/views.py @@ -381,6 +381,7 @@ def signoffs(request): context = { 'signoff_groups': signoff_groups, 'arches': Arch.objects.all(), + 'repo_names': sorted(set(g.target_repo for g in signoff_groups)), } return direct_to_template(request, 'packages/signoffs.html', context) diff --git a/templates/packages/signoffs.html b/templates/packages/signoffs.html index d517e5e3..f4511f75 100644 --- a/templates/packages/signoffs.html +++ b/templates/packages/signoffs.html @@ -9,7 +9,7 @@ <h2>Package Signoffs</h2> - <p>{{ signoff_groups|length }} signoff group{{ signoff_groups|pluralize }} found. + <p>{{ signoff_groups|length }} total signoff group{{ signoff_groups|pluralize }} found. 
A "signoff group" consists of packages grouped by pkgbase, architecture, and repository.</p> <div class="box filter-criteria"> @@ -21,9 +21,15 @@ <h3>Filter Displayed Signoffs</h3> <div><label for="id_arch_{{ arch.name }}" title="Architecture {{ arch.name }}">Arch {{ arch.name }}</label> <input type="checkbox" name="arch_{{ arch.name }}" id="id_arch_{{ arch.name }}" class="arch_filter" value="{{ arch.name }}" checked="checked"/></div> {% endfor %} + {% for repo_name in repo_names %} + <div><label for="id_repo_{{ repo_name|lower }}" title="Target Repository {{ repo_name }}">[{{ repo_name|lower }}]</label> + <input type="checkbox" name="repo_{{ repo_name|lower }}" id="id_repo_{{ repo_name|lower }}" class="repo_filter" value="{{ repo_name|lower }}" checked="checked"/></div> + {% endfor %} <div><label for="id_pending" title="Packages with not enough signoffs">Only Pending Approval</label> <input type="checkbox" name="pending" id="id_pending" value="pending"/></div> - <div ><label> </label><input title="Reset search criteria" type="button" id="criteria_reset" value="Reset"/></div> + <div><label> </label><input title="Reset search criteria" type="button" id="criteria_reset" value="Reset"/></div> + <div class="clear"></div> + <div id="filter-info"><span id="filter-count">{{ signoff_groups|length }}</span> signoff groups displayed.</div> </fieldset> </form> </div> @@ -44,7 +50,7 @@ <h3>Filter Displayed Signoffs</h3> </thead> <tbody id="tbody_signoffs"> {% for group in signoff_groups %} - <tr class="{% cycle 'odd' 'even' %} {{ group.arch.name }}"> + <tr class="{% cycle 'odd' 'even' %} {{ group.arch.name }} {{ group.target_repo|lower }}"> <td>{% pkg_details_link group.package %} {{ group.version }}</td> <td>{{ group.arch.name }}</td> <td>{{ group.target_repo }}</td> -- cgit v1.2.3-54-g00ecf From e565fde00f56c7a01ff55a204a0a56d3ce4bf8b4 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Fri, 4 Nov 2011 11:31:35 -0500 Subject: Signoff email: prune empty content Don't send the email at all if there are no packages even in the repository, and don't print empty sections. Signed-off-by: Dan McGee <dan@archlinux.org> --- packages/management/commands/signoff_report.py | 4 ++++ templates/packages/signoff_report.txt | 13 +++++++------ 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/packages/management/commands/signoff_report.py b/packages/management/commands/signoff_report.py index 3431dada..3357bc1e 100644 --- a/packages/management/commands/signoff_report.py +++ b/packages/management/commands/signoff_report.py @@ -69,6 +69,10 @@ def generate_report(email, repo_name): new_cutoff = now - timedelta(hours=new_hours) old_cutoff = now - timedelta(days=old_days) + if len(signoff_groups) == 0: + # no need to send an email at all + return + for group in signoff_groups: spec = group.specification if spec.known_bad: diff --git a/templates/packages/signoff_report.txt b/templates/packages/signoff_report.txt index 81020c8f..046c2f1e 100644 --- a/templates/packages/signoff_report.txt +++ b/templates/packages/signoff_report.txt @@ -14,27 +14,28 @@ pkgbase, architecture, and repository; e.g., one PKGBUILD produces one package per architecture, even if it is a split package.) 
-== New packages in [{{ repo|lower}}] in last {{ new_hours }} hours ({{ new|length }} total) == +{% if new %}== New packages in [{{ repo|lower}}] in last {{ new_hours }} hours ({{ new|length }} total) == {% for group in new %} * {{ group.pkgbase }}-{{ group.version }} ({{ group.arch }}){% endfor %} -{% regroup incomplete by target_repo as by_repo %}{% for target_repo in by_repo %} +{% endif %}{% regroup incomplete by target_repo as by_repo %}{% for target_repo in by_repo %} == Incomplete signoffs for [{{ target_repo.grouper|lower }}] ({{ target_repo.list|length }} total) == {% for group in target_repo.list %} * {{ group.pkgbase }}-{{ group.version }} ({{ group.arch }}) {{ group.completed }}/{{ group.required }} signoffs{% endfor %} {% endfor %} -== Completed signoffs ({{ complete|length }} total) == +{% if complete %}== Completed signoffs ({{ complete|length }} total) == {% for group in complete %} * {{ group.pkgbase }}-{{ group.version }} ({{ group.arch }}){% endfor %} -== All packages in [{{ repo|lower }}] for more than {{ old_days }} days ({{ old|length }} total) == +{% endif %}{% if old %}== All packages in [{{ repo|lower }}] for more than {{ old_days }} days ({{ old|length }} total) == {% for group in old %} * {{ group.pkgbase }}-{{ group.version }} ({{ group.arch }}), since {{ group.last_update|date }}{% endfor %} -{% endautoescape %} -== Top five in signoffs in last {{ new_hours }} hours == + +{% endif %}== Top five in signoffs in last {{ new_hours }} hours == {% for leader in leaders %} {{ forloop.counter }}. {{ leader.user }} - {{ leader.count }} signoffs{% endfor %} +{% endautoescape %} -- cgit v1.2.3-54-g00ecf From e15654fd7cbd5bf5e9a5c7d59b6b2a50999ee467 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Fri, 4 Nov 2011 11:56:38 -0500 Subject: Fix misnamed JS function call Signed-off-by: Dan McGee <dan@archlinux.org> --- templates/packages/differences.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/templates/packages/differences.html b/templates/packages/differences.html index 0400ea37..6c06ae25 100644 --- a/templates/packages/differences.html +++ b/templates/packages/differences.html @@ -65,7 +65,7 @@ <h3>Filter Differences View</h3> $('.results').tablesorter({widgets: ['zebra'], sortList: [[1,0], [0,0]]}); $('#diff_filter select').change(filter_packages); $('#diff_filter input').change(filter_packages); - $('#criteria_reset').click(filter_differences_reset); + $('#criteria_reset').click(filter_packages_reset); // fire function on page load to ensure the current form selections take effect filter_packages(); }); -- cgit v1.2.3-54-g00ecf From 28f72db7be7bf2f54d734c78422e6179f0ce29f1 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Fri, 4 Nov 2011 12:38:57 -0500 Subject: Rewrite get_target_repo_map() using raw SQL This improves the shitty query plan brought upon us by MySQL by rewriting it to use JOINs only and no dependent subqueries. 
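A sketch of the call-site change (names as in get_signoff_groups(); the lookup key shown is hypothetical):

    # before: pkgbase__in=<queryset> becomes a nested SELECT that MySQL
    # re-evaluates as a dependent subquery
    pkgtorepo = get_target_repo_map(test_pkgs.values('pkgbase'))
    # after: pass the repos themselves; one self-join on packages.pkgbase
    pkgtorepo = get_target_repo_map(repos)
    target = pkgtorepo.get('somepkg', 'Unknown')  # hypothetical pkgbase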
Signed-off-by: Dan McGee <dan@archlinux.org> --- packages/utils.py | 28 ++++++++++++++++++++-------- 1 file changed, 20 insertions(+), 8 deletions(-) diff --git a/packages/utils.py b/packages/utils.py index ddd822e4..b21ac557 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -310,12 +310,25 @@ def get_current_specifications(repos): to_fetch = [row[0] for row in results] return SignoffSpecification.objects.in_bulk(to_fetch).values() -def get_target_repo_map(pkgbases): - package_repos = Package.objects.order_by().values_list( - 'pkgbase', 'repo__name').filter( - repo__testing=False, repo__staging=False, - pkgbase__in=pkgbases).distinct() - return dict(package_repos) +def get_target_repo_map(repos): + sql = """ +SELECT DISTINCT p1.pkgbase, r.name + FROM packages p1 + JOIN repos r ON p1.repo_id = r.id + JOIN packages p2 ON p1.pkgbase = p2.pkgbase + WHERE r.staging = %s + AND r.testing = %s + AND p2.repo_id IN ( + """ + sql += ','.join(['%s' for r in repos]) + sql += ")" + + params = [False, False] + params.extend(r.pk for r in repos) + + cursor = connection.cursor() + cursor.execute(sql, params) + return dict(cursor.fetchall()) def get_signoff_groups(repos=None): if repos is None: @@ -328,8 +341,7 @@ def get_signoff_groups(repos=None): packages = attach_maintainers(packages) # Collect all pkgbase values in testing repos - q_pkgbase = test_pkgs.values('pkgbase') - pkgtorepo = get_target_repo_map(q_pkgbase) + pkgtorepo = get_target_repo_map(repos) # Collect all possible signoffs and specifications for these packages signoffs = get_current_signoffs(repos) -- cgit v1.2.3-54-g00ecf From 886630dd4e65a7be6d1bd3d8a0e70ab24e5affc6 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Tue, 8 Nov 2011 13:55:38 -0600 Subject: Always use same URL for both secure and non-secure CDN requests Use a scheme-relative URL rather than serving different content to users based on HTTP or HTTPS. Should prevent mismatched certificate errors. Signed-off-by: Dan McGee <dan@archlinux.org> --- local_settings.py.example | 5 ++--- main/templatetags/cdn.py | 7 +------ 2 files changed, 3 insertions(+), 9 deletions(-) diff --git a/local_settings.py.example b/local_settings.py.example index beb48f84..bad03921 100644 --- a/local_settings.py.example +++ b/local_settings.py.example @@ -53,8 +53,7 @@ SECRET_KEY = '00000000000000000000000000000000000000000000000' ## CDN settings CDN_ENABLED = False -CDN_PATH = 'http://example.com/path/' -CDN_PATH_SECURE = 'https://example.com/path/' - +# Scheme-relative URL, should work for both http/https +CDN_PATH = '//example.com/path/' # vim: set ts=4 sw=4 et: diff --git a/main/templatetags/cdn.py b/main/templatetags/cdn.py index c25040c0..5cb12fcf 100644 --- a/main/templatetags/cdn.py +++ b/main/templatetags/cdn.py @@ -23,12 +23,7 @@ def render(self, context): oncdn = getattr(settings, 'CDN_ENABLED', True) if not oncdn: return '' - secure = 'secure' in context and context['secure'] # if left undefined, same behavior as if CDN is turned off - paths = { - False: getattr(settings, 'CDN_PATH', ''), - True: getattr(settings, 'CDN_PATH_SECURE', ''), - } - return paths[secure] + return getattr(settings, 'CDN_PATH', '') # vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From d80f4236d01f70380f71a46dd98f1f789d91d31c Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 10 Nov 2011 18:09:19 -0600 Subject: Add package signoffs JSON view This allows access to the same data (and even a bit more) from the signoffs overview page in a machine-friendly way. 
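A consumer sketch (the view sits behind the same permission check as the HTML page, so an authenticated session cookie is assumed; host and cookie value are illustrative, the path matches the urls.py hunk below):

    import json
    import urllib2

    req = urllib2.Request('https://www.archlinux.org/packages/signoffs/json/')
    req.add_header('Cookie', 'sessionid=...')  # session of a logged-in dev
    data = json.load(urllib2.urlopen(req))
    assert data['version'] == 1
    for group in data['signoff_groups']:
        print group['pkgbase'], group['version'], group['approved']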
Signed-off-by: Dan McGee <dan@archlinux.org> --- mirrors/utils.py | 3 ++- packages/urls.py | 1 + packages/views.py | 47 ++++++++++++++++++++++++++++++++++++++++++++++- 3 files changed, 49 insertions(+), 2 deletions(-) diff --git a/mirrors/utils.py b/mirrors/utils.py index 686ec581..8518b3ba 100644 --- a/mirrors/utils.py +++ b/mirrors/utils.py @@ -40,7 +40,8 @@ def get_mirror_statuses(cutoff=default_cutoff): last_sync=Max('logs__last_sync'), last_check=Max('logs__check_time'), duration_avg=Avg('logs__duration'), - duration_stddev=StdDev('logs__duration') + #duration_stddev=StdDev('logs__duration') + duration_stddev=Max('logs__duration') ).order_by('-last_sync', '-duration_avg') # The Django ORM makes it really hard to get actual average delay in the diff --git a/packages/urls.py b/packages/urls.py index 4d391a3c..1f25e3fd 100644 --- a/packages/urls.py +++ b/packages/urls.py @@ -18,6 +18,7 @@ urlpatterns = patterns('packages.views', (r'^flaghelp/$', 'flaghelp'), (r'^signoffs/$', 'signoffs', {}, 'package-signoffs'), + (r'^signoffs/json/$', 'signoffs_json', {}, 'package-signoffs-json'), (r'^update/$', 'update'), (r'^$', 'search', {}, 'packages-search'), diff --git a/packages/views.py b/packages/views.py index 3c0c2bee..cac5d076 100644 --- a/packages/views.py +++ b/packages/views.py @@ -29,7 +29,8 @@ from mirrors.models import MirrorUrl from .models import PackageRelation, PackageGroup, SignoffSpecification, Signoff from .utils import (get_group_info, get_differences_info, - get_wrong_permissions, get_signoff_groups, approved_by_signoffs) + get_wrong_permissions, get_signoff_groups, approved_by_signoffs, + PackageSignoffGroup) class PackageJSONEncoder(DjangoJSONEncoder): pkg_attributes = [ 'pkgname', 'pkgbase', 'repo', 'arch', 'pkgver', @@ -371,6 +372,7 @@ def unflag_all(request, name, repo, arch): pkgs.update(flag_date=None) return redirect(pkg) + @permission_required('main.change_package') @never_cache def signoffs(request): @@ -496,6 +498,49 @@ def signoff_options(request, name, repo, arch): } return direct_to_template(request, 'packages/signoff_options.html', context) +class SignoffJSONEncoder(DjangoJSONEncoder): + '''Base JSONEncoder extended to handle all serialization of all classes + related to signoffs.''' + signoff_group_attrs = ['arch', 'last_update', 'maintainers', 'packager', + 'pkgbase', 'repo', 'signoffs', 'target_repo', 'version'] + signoff_spec_attrs = ['required', 'enabled', 'known_bad', 'comments'] + signoff_attrs = ['user', 'created', 'revoked'] + + def default(self, obj): + if isinstance(obj, PackageSignoffGroup): + data = dict((attr, getattr(obj, attr)) + for attr in self.signoff_group_attrs) + data['package_count'] = len(obj.packages) + data['approved'] = obj.approved() + data.update((attr, getattr(obj.specification, attr)) + for attr in self.signoff_spec_attrs) + return data + elif isinstance(obj, Signoff): + data = dict((attr, getattr(obj, attr)) + for attr in self.signoff_attrs) + return data + elif isinstance(obj, Arch) or isinstance(obj, Repo): + return unicode(obj) + elif isinstance(obj, User): + return obj.username + elif isinstance(obj, set): + return list(obj) + return super(SignoffJSONEncoder, self).default(obj) + +@permission_required('main.change_package') +@never_cache +def signoffs_json(request): + signoff_groups = sorted(get_signoff_groups(), key=attrgetter('pkgbase')) + data = { + 'version': 1, + 'signoff_groups': signoff_groups, + } + to_json = simplejson.dumps(data, ensure_ascii=False, + cls=SignoffJSONEncoder) + response = HttpResponse(to_json, 
mimetype='application/json') + return response + + def flaghelp(request): return direct_to_template(request, 'packages/flaghelp.html') -- cgit v1.2.3-54-g00ecf From 83feb682c2909cbd8c332a9b16aacbc8d696a13a Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 10 Nov 2011 18:12:47 -0600 Subject: Move package views into subdirectory This simply moves views.py to views/__init__.py and adjusts the imports accordingly; future patches will split this into multiple files as this module is getting quite large. Signed-off-by: Dan McGee <dan@archlinux.org> --- packages/views.py | 696 -------------------------------------------- packages/views/__init__.py | 697 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 697 insertions(+), 696 deletions(-) delete mode 100644 packages/views.py create mode 100644 packages/views/__init__.py diff --git a/packages/views.py b/packages/views.py deleted file mode 100644 index cac5d076..00000000 --- a/packages/views.py +++ /dev/null @@ -1,696 +0,0 @@ -from django import forms -from django.contrib import messages -from django.contrib.admin.widgets import AdminDateWidget -from django.contrib.auth.models import User -from django.contrib.auth.decorators import permission_required -from django.conf import settings -from django.core.mail import send_mail -from django.core.serializers.json import DjangoJSONEncoder -from django.db import transaction -from django.db.models import Q -from django.http import HttpResponse, Http404, HttpResponseForbidden -from django.shortcuts import (get_object_or_404, get_list_or_404, - redirect, render) -from django.template import loader, Context -from django.utils import simplejson -from django.views.decorators.cache import never_cache -from django.views.decorators.http import require_POST -from django.views.decorators.vary import vary_on_headers -from django.views.generic import list_detail -from django.views.generic.simple import direct_to_template - -from datetime import datetime -from operator import attrgetter -from string import Template -from urllib import urlencode - -from main.models import Package, PackageFile, Arch, Repo -from main.utils import make_choice -from mirrors.models import MirrorUrl -from .models import PackageRelation, PackageGroup, SignoffSpecification, Signoff -from .utils import (get_group_info, get_differences_info, - get_wrong_permissions, get_signoff_groups, approved_by_signoffs, - PackageSignoffGroup) - -class PackageJSONEncoder(DjangoJSONEncoder): - pkg_attributes = [ 'pkgname', 'pkgbase', 'repo', 'arch', 'pkgver', - 'pkgrel', 'epoch', 'pkgdesc', 'url', 'filename', 'compressed_size', - 'installed_size', 'build_date', 'last_update', 'flag_date' ] - - def default(self, obj): - if hasattr(obj, '__iter__'): - # mainly for queryset serialization - return list(obj) - if isinstance(obj, Package): - data = dict((attr, getattr(obj, attr)) - for attr in self.pkg_attributes) - data['groups'] = obj.groups.all() - return data - if isinstance(obj, PackageFile): - filename = obj.filename or '' - return obj.directory + filename - if isinstance(obj, (Repo, Arch, PackageGroup)): - return obj.name.lower() - return super(PackageJSONEncoder, self).default(obj) - -def opensearch(request): - if request.is_secure(): - domain = "https://%s" % request.META['HTTP_HOST'] - else: - domain = "http://%s" % request.META['HTTP_HOST'] - - return direct_to_template(request, 'packages/opensearch.xml', - {'domain': domain}, - mimetype='application/opensearchdescription+xml') - 
-@permission_required('main.change_package') -@require_POST -def update(request): - ids = request.POST.getlist('pkgid') - count = 0 - - if request.POST.has_key('adopt'): - repos = request.user.userprofile.allowed_repos.all() - pkgs = Package.objects.filter(id__in=ids, repo__in=repos) - disallowed_pkgs = Package.objects.filter(id__in=ids).exclude( - repo__in=repos) - - if disallowed_pkgs: - messages.warning(request, - "You do not have permission to adopt: %s." % ( - ' '.join([p.pkgname for p in disallowed_pkgs]) - )) - - for pkg in pkgs: - if request.user not in pkg.maintainers: - prel = PackageRelation(pkgbase=pkg.pkgbase, - user=request.user, - type=PackageRelation.MAINTAINER) - count += 1 - prel.save() - - messages.info(request, "%d base packages adopted." % count) - - elif request.POST.has_key('disown'): - # allow disowning regardless of allowed repos, helps things like - # [community] -> [extra] moves - for pkg in Package.objects.filter(id__in=ids): - if request.user in pkg.maintainers: - rels = PackageRelation.objects.filter(pkgbase=pkg.pkgbase, - user=request.user, - type=PackageRelation.MAINTAINER) - count += rels.count() - rels.delete() - - messages.info(request, "%d base packages disowned." % count) - - else: - messages.error(request, "Are you trying to adopt or disown?") - return redirect('/packages/') - -def details(request, name='', repo='', arch=''): - if all([name, repo, arch]): - try: - pkg = Package.objects.select_related( - 'arch', 'repo', 'packager').get(pkgname=name, - repo__name__iexact=repo, arch__name=arch) - return direct_to_template(request, 'packages/details.html', - {'pkg': pkg, }) - except Package.DoesNotExist: - arch = get_object_or_404(Arch, name=arch) - arches = [ arch ] - arches.extend(Arch.objects.filter(agnostic=True)) - repo = get_object_or_404(Repo, name__iexact=repo) - pkgs = Package.objects.normal().filter(pkgbase=name, - repo__testing=repo.testing, repo__staging=repo.staging, - arch__in=arches).order_by('pkgname') - if len(pkgs) == 0: - raise Http404 - context = { - 'list_title': 'Split Package Details', - 'name': name, - 'arch': arch, - 'packages': pkgs, - } - return direct_to_template(request, 'packages/packages_list.html', - context) - else: - pkg_data = [ - ('arch', arch.lower()), - ('repo', repo.lower()), - ('q', name), - ] - # only include non-blank values in the query we generate - pkg_data = [(x, y) for x, y in pkg_data if y] - return redirect("/packages/?%s" % urlencode(pkg_data)) - -def groups(request, arch=None): - arches = [] - if arch: - get_object_or_404(Arch, name=arch, agnostic=False) - arches.append(arch) - grps = get_group_info(arches) - context = { - 'groups': grps, - 'arch': arch, - } - return direct_to_template(request, 'packages/groups.html', context) - -def group_details(request, arch, name): - arch = get_object_or_404(Arch, name=arch) - arches = [ arch ] - arches.extend(Arch.objects.filter(agnostic=True)) - pkgs = Package.objects.normal().filter( - groups__name=name, arch__in=arches).order_by('pkgname') - if len(pkgs) == 0: - raise Http404 - context = { - 'list_title': 'Group Details', - 'name': name, - 'arch': arch, - 'packages': pkgs, - } - return direct_to_template(request, 'packages/packages_list.html', context) - -def coerce_limit_value(value): - if not value: - return None - if value == 'all': - # negative value indicates show all results - return -1 - value = int(value) - if value < 0: - raise ValueError - return value - -class LimitTypedChoiceField(forms.TypedChoiceField): - def valid_value(self, value): - try: - 
coerce_limit_value(value) - return True - except (ValueError, TypeError): - return False - -class PackageSearchForm(forms.Form): - repo = forms.MultipleChoiceField(required=False) - arch = forms.MultipleChoiceField(required=False) - name = forms.CharField(required=False) - desc = forms.CharField(required=False) - q = forms.CharField(required=False) - maintainer = forms.ChoiceField(required=False) - packager = forms.ChoiceField(required=False) - last_update = forms.DateField(required=False, widget=AdminDateWidget(), - label='Last Updated After') - flagged = forms.ChoiceField( - choices=[('', 'All')] + make_choice(['Flagged', 'Not Flagged']), - required=False) - signed = forms.ChoiceField( - choices=[('', 'All')] + make_choice(['Signed', 'Unsigned']), - required=False) - limit = LimitTypedChoiceField( - choices=make_choice([50, 100, 250]) + [('all', 'All')], - coerce=coerce_limit_value, - required=False, - initial=50) - - def __init__(self, *args, **kwargs): - super(PackageSearchForm, self).__init__(*args, **kwargs) - self.fields['repo'].choices = make_choice( - [repo.name for repo in Repo.objects.all()]) - self.fields['arch'].choices = make_choice( - [arch.name for arch in Arch.objects.all()]) - self.fields['q'].widget.attrs.update({"size": "30"}) - maints = User.objects.filter(is_active=True).order_by('username') - self.fields['maintainer'].choices = \ - [('', 'All'), ('orphan', 'Orphan')] + \ - [(m.username, m.get_full_name()) for m in maints] - self.fields['packager'].choices = \ - [('', 'All'), ('unknown', 'Unknown')] + \ - [(m.username, m.get_full_name()) for m in maints] - -def search(request, page=None): - limit = 50 - packages = Package.objects.normal() - - if request.GET: - form = PackageSearchForm(data=request.GET) - if form.is_valid(): - if form.cleaned_data['repo']: - packages = packages.filter( - repo__name__in=form.cleaned_data['repo']) - - if form.cleaned_data['arch']: - packages = packages.filter( - arch__name__in=form.cleaned_data['arch']) - - if form.cleaned_data['maintainer'] == 'orphan': - inner_q = PackageRelation.objects.all().values('pkgbase') - packages = packages.exclude(pkgbase__in=inner_q) - elif form.cleaned_data['maintainer']: - inner_q = PackageRelation.objects.filter( - user__username=form.cleaned_data['maintainer']).values('pkgbase') - packages = packages.filter(pkgbase__in=inner_q) - - if form.cleaned_data['packager'] == 'unknown': - packages = packages.filter(packager__isnull=True) - elif form.cleaned_data['packager']: - packages = packages.filter( - packager__username=form.cleaned_data['packager']) - - if form.cleaned_data['flagged'] == 'Flagged': - packages = packages.filter(flag_date__isnull=False) - elif form.cleaned_data['flagged'] == 'Not Flagged': - packages = packages.filter(flag_date__isnull=True) - - if form.cleaned_data['signed'] == 'Signed': - packages = packages.filter(pgp_signature__isnull=False) - elif form.cleaned_data['signed'] == 'Unsigned': - packages = packages.filter(pgp_signature__isnull=True) - - if form.cleaned_data['last_update']: - lu = form.cleaned_data['last_update'] - packages = packages.filter(last_update__gte= - datetime(lu.year, lu.month, lu.day, 0, 0)) - - if form.cleaned_data['name']: - name = form.cleaned_data['name'] - packages = packages.filter(pkgname__icontains=name) - - if form.cleaned_data['desc']: - desc = form.cleaned_data['desc'] - packages = packages.filter(pkgdesc__icontains=desc) - - if form.cleaned_data['q']: - query = form.cleaned_data['q'] - q = Q(pkgname__icontains=query) | Q(pkgdesc__icontains=query) - 
packages = packages.filter(q) - - asked_limit = form.cleaned_data['limit'] - if asked_limit and asked_limit < 0: - limit = None - elif asked_limit: - limit = asked_limit - else: - # Form had errors, don't return any results, just the busted form - packages = Package.objects.none() - else: - form = PackageSearchForm() - - current_query = request.GET.urlencode() - page_dict = { - 'search_form': form, - 'current_query': current_query - } - allowed_sort = ["arch", "repo", "pkgname", "pkgbase", - "compressed_size", "installed_size", - "build_date", "last_update", "flag_date"] - allowed_sort += ["-" + s for s in allowed_sort] - sort = request.GET.get('sort', None) - if sort in allowed_sort: - packages = packages.order_by(sort) - page_dict['sort'] = sort - else: - packages = packages.order_by('pkgname') - - return list_detail.object_list(request, packages, - template_name="packages/search.html", - page=page, - paginate_by=limit, - template_object_name="package", - extra_context=page_dict) - -@vary_on_headers('X-Requested-With') -def files(request, name, repo, arch): - pkg = get_object_or_404(Package, - pkgname=name, repo__name__iexact=repo, arch__name=arch) - fileslist = PackageFile.objects.filter(pkg=pkg).order_by('directory', 'filename') - context = { - 'pkg': pkg, - 'files': fileslist, - } - template = 'packages/files.html' - if request.is_ajax(): - template = 'packages/files-list.html' - return direct_to_template(request, template, context) - -def details_json(request, name, repo, arch): - pkg = get_object_or_404(Package, - pkgname=name, repo__name__iexact=repo, arch__name=arch) - to_json = simplejson.dumps(pkg, ensure_ascii=False, - cls=PackageJSONEncoder) - return HttpResponse(to_json, mimetype='application/json') - -def files_json(request, name, repo, arch): - pkg = get_object_or_404(Package, - pkgname=name, repo__name__iexact=repo, arch__name=arch) - fileslist = PackageFile.objects.filter(pkg=pkg).order_by('directory', 'filename') - data = { - 'pkgname': pkg.pkgname, - 'repo': pkg.repo.name.lower(), - 'arch': pkg.arch.name.lower(), - 'files': fileslist, - } - to_json = simplejson.dumps(data, ensure_ascii=False, - cls=PackageJSONEncoder) - return HttpResponse(to_json, mimetype='application/json') - -@permission_required('main.change_package') -def unflag(request, name, repo, arch): - pkg = get_object_or_404(Package, - pkgname=name, repo__name__iexact=repo, arch__name=arch) - pkg.flag_date = None - pkg.save() - return redirect(pkg) - -@permission_required('main.change_package') -def unflag_all(request, name, repo, arch): - pkg = get_object_or_404(Package, - pkgname=name, repo__name__iexact=repo, arch__name=arch) - # find all packages from (hopefully) the same PKGBUILD - pkgs = Package.objects.filter(pkgbase=pkg.pkgbase, - repo__testing=pkg.repo.testing, repo__staging=pkg.repo.staging) - pkgs.update(flag_date=None) - return redirect(pkg) - - -@permission_required('main.change_package') -@never_cache -def signoffs(request): - signoff_groups = sorted(get_signoff_groups(), key=attrgetter('pkgbase')) - for group in signoff_groups: - group.user = request.user - - context = { - 'signoff_groups': signoff_groups, - 'arches': Arch.objects.all(), - 'repo_names': sorted(set(g.target_repo for g in signoff_groups)), - } - return direct_to_template(request, 'packages/signoffs.html', context) - -@permission_required('main.change_package') -@never_cache -def signoff_package(request, name, repo, arch, revoke=False): - packages = get_list_or_404(Package, pkgbase=name, - arch__name=arch, 
repo__name__iexact=repo, repo__testing=True) - package = packages[0] - - spec = SignoffSpecification.objects.get_or_default_from_package(package) - - if revoke: - try: - signoff = Signoff.objects.get_from_package( - package, request.user, False) - except Signoff.DoesNotExist: - raise Http404 - signoff.revoked = datetime.utcnow() - signoff.save() - created = False - else: - # ensure we should even be accepting signoffs - if spec.known_bad or not spec.enabled: - return render(request, '403.html', status=403) - signoff, created = Signoff.objects.get_or_create_from_package( - package, request.user) - - all_signoffs = Signoff.objects.for_package(package) - - if request.is_ajax(): - data = { - 'created': created, - 'revoked': bool(signoff.revoked), - 'approved': approved_by_signoffs(all_signoffs, spec), - 'required': spec.required, - 'enabled': spec.enabled, - 'known_bad': spec.known_bad, - 'user': str(request.user), - } - return HttpResponse(simplejson.dumps(data, ensure_ascii=False), - mimetype='application/json') - - return redirect('package-signoffs') - -class SignoffOptionsForm(forms.ModelForm): - apply_all = forms.BooleanField(required=False, - help_text="Apply these options to all architectures?") - - class Meta: - model = SignoffSpecification - fields = ('required', 'enabled', 'known_bad', 'comments') - -def _signoff_options_all(request, name, repo): - seen_ids = set() - with transaction.commit_on_success(): - # find or create a specification for all architectures, then - # graft the form data onto them - packages = Package.objects.filter(pkgbase=name, - repo__name__iexact=repo, repo__testing=True) - for package in packages: - try: - spec = SignoffSpecification.objects.get_from_package(package) - if spec.pk in seen_ids: - continue - except SignoffSpecification.DoesNotExist: - spec = SignoffSpecification(pkgbase=package.pkgbase, - pkgver=package.pkgver, pkgrel=package.pkgrel, - epoch=package.epoch, arch=package.arch, - repo=package.repo) - spec.user = request.user - form = SignoffOptionsForm(request.POST, instance=spec) - if form.is_valid(): - form.save() - seen_ids.add(form.instance.pk) - -@permission_required('main.change_package') -@never_cache -def signoff_options(request, name, repo, arch): - packages = get_list_or_404(Package, pkgbase=name, - arch__name=arch, repo__name__iexact=repo, repo__testing=True) - package = packages[0] - - if request.user != package.packager and \ - request.user not in package.maintainers: - return render(request, '403.html', status=403) - - try: - spec = SignoffSpecification.objects.get_from_package(package) - except SignoffSpecification.DoesNotExist: - # create a fake one, but don't save it just yet - spec = SignoffSpecification(pkgbase=package.pkgbase, - pkgver=package.pkgver, pkgrel=package.pkgrel, - epoch=package.epoch, arch=package.arch, repo=package.repo) - spec.user = request.user - - if request.POST: - form = SignoffOptionsForm(request.POST, instance=spec) - if form.is_valid(): - if form.cleaned_data['apply_all']: - _signoff_options_all(request, name, repo) - else: - form.save() - return redirect('package-signoffs') - else: - form = SignoffOptionsForm(instance=spec) - - context = { - 'packages': packages, - 'package': package, - 'form': form, - } - return direct_to_template(request, 'packages/signoff_options.html', context) - -class SignoffJSONEncoder(DjangoJSONEncoder): - '''Base JSONEncoder extended to handle all serialization of all classes - related to signoffs.''' - signoff_group_attrs = ['arch', 'last_update', 'maintainers', 'packager', - 
'pkgbase', 'repo', 'signoffs', 'target_repo', 'version'] - signoff_spec_attrs = ['required', 'enabled', 'known_bad', 'comments'] - signoff_attrs = ['user', 'created', 'revoked'] - - def default(self, obj): - if isinstance(obj, PackageSignoffGroup): - data = dict((attr, getattr(obj, attr)) - for attr in self.signoff_group_attrs) - data['package_count'] = len(obj.packages) - data['approved'] = obj.approved() - data.update((attr, getattr(obj.specification, attr)) - for attr in self.signoff_spec_attrs) - return data - elif isinstance(obj, Signoff): - data = dict((attr, getattr(obj, attr)) - for attr in self.signoff_attrs) - return data - elif isinstance(obj, Arch) or isinstance(obj, Repo): - return unicode(obj) - elif isinstance(obj, User): - return obj.username - elif isinstance(obj, set): - return list(obj) - return super(SignoffJSONEncoder, self).default(obj) - -@permission_required('main.change_package') -@never_cache -def signoffs_json(request): - signoff_groups = sorted(get_signoff_groups(), key=attrgetter('pkgbase')) - data = { - 'version': 1, - 'signoff_groups': signoff_groups, - } - to_json = simplejson.dumps(data, ensure_ascii=False, - cls=SignoffJSONEncoder) - response = HttpResponse(to_json, mimetype='application/json') - return response - - -def flaghelp(request): - return direct_to_template(request, 'packages/flaghelp.html') - -class FlagForm(forms.Form): - email = forms.EmailField(label='* E-mail Address') - usermessage = forms.CharField(label='Message To Dev', - widget=forms.Textarea, required=False) - # The field below is used to filter out bots that blindly fill out all input elements - website = forms.CharField(label='', - widget=forms.TextInput(attrs={'style': 'display:none;'}), - required=False) - -@never_cache -def flag(request, name, repo, arch): - pkg = get_object_or_404(Package, - pkgname=name, repo__name__iexact=repo, arch__name=arch) - if pkg.flag_date is not None: - # already flagged. do nothing. 
- return direct_to_template(request, 'packages/flagged.html', {'pkg': pkg}) - # find all packages from (hopefully) the same PKGBUILD - pkgs = Package.objects.normal().filter( - pkgbase=pkg.pkgbase, flag_date__isnull=True, - repo__testing=pkg.repo.testing, - repo__staging=pkg.repo.staging).order_by( - 'pkgname', 'repo__name', 'arch__name') - - if request.POST: - form = FlagForm(request.POST) - if form.is_valid() and form.cleaned_data['website'] == '': - # save the package list for later use - flagged_pkgs = list(pkgs) - pkgs.update(flag_date=datetime.utcnow()) - - maints = pkg.maintainers - if not maints: - toemail = settings.NOTIFICATIONS - subject = 'Orphan %s package [%s] marked out-of-date' % \ - (pkg.repo.name, pkg.pkgname) - else: - toemail = [] - subject = '%s package [%s] marked out-of-date' % \ - (pkg.repo.name, pkg.pkgname) - for maint in maints: - if maint.get_profile().notify == True: - toemail.append(maint.email) - - if toemail: - # send notification email to the maintainers - t = loader.get_template('packages/outofdate.txt') - c = Context({ - 'email': form.cleaned_data['email'], - 'message': form.cleaned_data['usermessage'], - 'pkg': pkg, - 'packages': flagged_pkgs, - }) - send_mail(subject, - t.render(c), - 'Arch Website Notification <nobody@archlinux.org>', - toemail, - fail_silently=True) - - return redirect('package-flag-confirmed', name=name, repo=repo, - arch=arch) - else: - form = FlagForm() - - context = { - 'package': pkg, - 'packages': pkgs, - 'form': form - } - return direct_to_template(request, 'packages/flag.html', context) - -def flag_confirmed(request, name, repo, arch): - pkg = get_object_or_404(Package, - pkgname=name, repo__name__iexact=repo, arch__name=arch) - pkgs = Package.objects.normal().filter( - pkgbase=pkg.pkgbase, flag_date=pkg.flag_date, - repo__testing=pkg.repo.testing, - repo__staging=pkg.repo.staging).order_by( - 'pkgname', 'repo__name', 'arch__name') - - context = {'package': pkg, 'packages': pkgs} - - return direct_to_template(request, 'packages/flag_confirmed.html', context) - -def download(request, name, repo, arch): - pkg = get_object_or_404(Package, - pkgname=name, repo__name__iexact=repo, arch__name=arch) - mirror_urls = MirrorUrl.objects.filter( - mirror__public=True, mirror__active=True, - protocol__protocol__iexact='HTTP') - # look first for an 'Any' URL, then fall back to any HTTP URL - filtered_urls = mirror_urls.filter(mirror__country='Any')[:1] - if not filtered_urls: - filtered_urls = mirror_urls[:1] - if not filtered_urls: - raise Http404 - arch = pkg.arch.name - if pkg.arch.agnostic: - # grab the first non-any arch to fake the download path - arch = Arch.objects.exclude(agnostic=True)[0].name - values = { - 'host': filtered_urls[0].url, - 'arch': arch, - 'repo': pkg.repo.name.lower(), - 'file': pkg.filename, - } - url = Template('${host}${repo}/os/${arch}/${file}').substitute(values) - return redirect(url) - -def arch_differences(request): - # TODO: we have some hardcoded magic here with respect to the arches. 
- arch_a = Arch.objects.get(name='i686') - arch_b = Arch.objects.get(name='x86_64') - differences = get_differences_info(arch_a, arch_b) - context = { - 'arch_a': arch_a, - 'arch_b': arch_b, - 'differences': differences, - } - return direct_to_template(request, 'packages/differences.html', context) - -@permission_required('main.change_package') -@never_cache -def stale_relations(request): - relations = PackageRelation.objects.select_related('user') - pkgbases = Package.objects.all().values('pkgbase') - - inactive_user = relations.filter(user__is_active=False) - missing_pkgbase = relations.exclude( - pkgbase__in=pkgbases).order_by('pkgbase') - wrong_permissions = get_wrong_permissions() - - context = { - 'inactive_user': inactive_user, - 'missing_pkgbase': missing_pkgbase, - 'wrong_permissions': wrong_permissions, - } - return direct_to_template(request, 'packages/stale_relations.html', context) - -@permission_required('packages.delete_packagerelation') -@require_POST -def stale_relations_update(request): - ids = set(request.POST.getlist('relation_id')) - - if ids: - PackageRelation.objects.filter(id__in=ids).delete() - - messages.info(request, "%d package relations deleted." % len(ids)) - return redirect('/packages/stale_relations/') - -# vim: set ts=4 sw=4 et: diff --git a/packages/views/__init__.py b/packages/views/__init__.py new file mode 100644 index 00000000..7b8e4847 --- /dev/null +++ b/packages/views/__init__.py @@ -0,0 +1,697 @@ +from django import forms +from django.contrib import messages +from django.contrib.admin.widgets import AdminDateWidget +from django.contrib.auth.models import User +from django.contrib.auth.decorators import permission_required +from django.conf import settings +from django.core.mail import send_mail +from django.core.serializers.json import DjangoJSONEncoder +from django.db import transaction +from django.db.models import Q +from django.http import HttpResponse, Http404, HttpResponseForbidden +from django.shortcuts import (get_object_or_404, get_list_or_404, + redirect, render) +from django.template import loader, Context +from django.utils import simplejson +from django.views.decorators.cache import never_cache +from django.views.decorators.http import require_POST +from django.views.decorators.vary import vary_on_headers +from django.views.generic import list_detail +from django.views.generic.simple import direct_to_template + +from datetime import datetime +from operator import attrgetter +from string import Template +from urllib import urlencode + +from main.models import Package, PackageFile, Arch, Repo +from main.utils import make_choice +from mirrors.models import MirrorUrl +from ..models import (PackageRelation, PackageGroup, + SignoffSpecification, Signoff) +from ..utils import (get_group_info, get_differences_info, + get_wrong_permissions, get_signoff_groups, approved_by_signoffs, + PackageSignoffGroup) + +class PackageJSONEncoder(DjangoJSONEncoder): + pkg_attributes = [ 'pkgname', 'pkgbase', 'repo', 'arch', 'pkgver', + 'pkgrel', 'epoch', 'pkgdesc', 'url', 'filename', 'compressed_size', + 'installed_size', 'build_date', 'last_update', 'flag_date' ] + + def default(self, obj): + if hasattr(obj, '__iter__'): + # mainly for queryset serialization + return list(obj) + if isinstance(obj, Package): + data = dict((attr, getattr(obj, attr)) + for attr in self.pkg_attributes) + data['groups'] = obj.groups.all() + return data + if isinstance(obj, PackageFile): + filename = obj.filename or '' + return obj.directory + filename + if isinstance(obj, (Repo, 
Arch, PackageGroup)): + return obj.name.lower() + return super(PackageJSONEncoder, self).default(obj) + +def opensearch(request): + if request.is_secure(): + domain = "https://%s" % request.META['HTTP_HOST'] + else: + domain = "http://%s" % request.META['HTTP_HOST'] + + return direct_to_template(request, 'packages/opensearch.xml', + {'domain': domain}, + mimetype='application/opensearchdescription+xml') + +@permission_required('main.change_package') +@require_POST +def update(request): + ids = request.POST.getlist('pkgid') + count = 0 + + if request.POST.has_key('adopt'): + repos = request.user.userprofile.allowed_repos.all() + pkgs = Package.objects.filter(id__in=ids, repo__in=repos) + disallowed_pkgs = Package.objects.filter(id__in=ids).exclude( + repo__in=repos) + + if disallowed_pkgs: + messages.warning(request, + "You do not have permission to adopt: %s." % ( + ' '.join([p.pkgname for p in disallowed_pkgs]) + )) + + for pkg in pkgs: + if request.user not in pkg.maintainers: + prel = PackageRelation(pkgbase=pkg.pkgbase, + user=request.user, + type=PackageRelation.MAINTAINER) + count += 1 + prel.save() + + messages.info(request, "%d base packages adopted." % count) + + elif request.POST.has_key('disown'): + # allow disowning regardless of allowed repos, helps things like + # [community] -> [extra] moves + for pkg in Package.objects.filter(id__in=ids): + if request.user in pkg.maintainers: + rels = PackageRelation.objects.filter(pkgbase=pkg.pkgbase, + user=request.user, + type=PackageRelation.MAINTAINER) + count += rels.count() + rels.delete() + + messages.info(request, "%d base packages disowned." % count) + + else: + messages.error(request, "Are you trying to adopt or disown?") + return redirect('/packages/') + +def details(request, name='', repo='', arch=''): + if all([name, repo, arch]): + try: + pkg = Package.objects.select_related( + 'arch', 'repo', 'packager').get(pkgname=name, + repo__name__iexact=repo, arch__name=arch) + return direct_to_template(request, 'packages/details.html', + {'pkg': pkg, }) + except Package.DoesNotExist: + arch = get_object_or_404(Arch, name=arch) + arches = [ arch ] + arches.extend(Arch.objects.filter(agnostic=True)) + repo = get_object_or_404(Repo, name__iexact=repo) + pkgs = Package.objects.normal().filter(pkgbase=name, + repo__testing=repo.testing, repo__staging=repo.staging, + arch__in=arches).order_by('pkgname') + if len(pkgs) == 0: + raise Http404 + context = { + 'list_title': 'Split Package Details', + 'name': name, + 'arch': arch, + 'packages': pkgs, + } + return direct_to_template(request, 'packages/packages_list.html', + context) + else: + pkg_data = [ + ('arch', arch.lower()), + ('repo', repo.lower()), + ('q', name), + ] + # only include non-blank values in the query we generate + pkg_data = [(x, y) for x, y in pkg_data if y] + return redirect("/packages/?%s" % urlencode(pkg_data)) + +def groups(request, arch=None): + arches = [] + if arch: + get_object_or_404(Arch, name=arch, agnostic=False) + arches.append(arch) + grps = get_group_info(arches) + context = { + 'groups': grps, + 'arch': arch, + } + return direct_to_template(request, 'packages/groups.html', context) + +def group_details(request, arch, name): + arch = get_object_or_404(Arch, name=arch) + arches = [ arch ] + arches.extend(Arch.objects.filter(agnostic=True)) + pkgs = Package.objects.normal().filter( + groups__name=name, arch__in=arches).order_by('pkgname') + if len(pkgs) == 0: + raise Http404 + context = { + 'list_title': 'Group Details', + 'name': name, + 'arch': arch, + 
'packages': pkgs, + } + return direct_to_template(request, 'packages/packages_list.html', context) + +def coerce_limit_value(value): + if not value: + return None + if value == 'all': + # negative value indicates show all results + return -1 + value = int(value) + if value < 0: + raise ValueError + return value + +class LimitTypedChoiceField(forms.TypedChoiceField): + def valid_value(self, value): + try: + coerce_limit_value(value) + return True + except (ValueError, TypeError): + return False + +class PackageSearchForm(forms.Form): + repo = forms.MultipleChoiceField(required=False) + arch = forms.MultipleChoiceField(required=False) + name = forms.CharField(required=False) + desc = forms.CharField(required=False) + q = forms.CharField(required=False) + maintainer = forms.ChoiceField(required=False) + packager = forms.ChoiceField(required=False) + last_update = forms.DateField(required=False, widget=AdminDateWidget(), + label='Last Updated After') + flagged = forms.ChoiceField( + choices=[('', 'All')] + make_choice(['Flagged', 'Not Flagged']), + required=False) + signed = forms.ChoiceField( + choices=[('', 'All')] + make_choice(['Signed', 'Unsigned']), + required=False) + limit = LimitTypedChoiceField( + choices=make_choice([50, 100, 250]) + [('all', 'All')], + coerce=coerce_limit_value, + required=False, + initial=50) + + def __init__(self, *args, **kwargs): + super(PackageSearchForm, self).__init__(*args, **kwargs) + self.fields['repo'].choices = make_choice( + [repo.name for repo in Repo.objects.all()]) + self.fields['arch'].choices = make_choice( + [arch.name for arch in Arch.objects.all()]) + self.fields['q'].widget.attrs.update({"size": "30"}) + maints = User.objects.filter(is_active=True).order_by('username') + self.fields['maintainer'].choices = \ + [('', 'All'), ('orphan', 'Orphan')] + \ + [(m.username, m.get_full_name()) for m in maints] + self.fields['packager'].choices = \ + [('', 'All'), ('unknown', 'Unknown')] + \ + [(m.username, m.get_full_name()) for m in maints] + +def search(request, page=None): + limit = 50 + packages = Package.objects.normal() + + if request.GET: + form = PackageSearchForm(data=request.GET) + if form.is_valid(): + if form.cleaned_data['repo']: + packages = packages.filter( + repo__name__in=form.cleaned_data['repo']) + + if form.cleaned_data['arch']: + packages = packages.filter( + arch__name__in=form.cleaned_data['arch']) + + if form.cleaned_data['maintainer'] == 'orphan': + inner_q = PackageRelation.objects.all().values('pkgbase') + packages = packages.exclude(pkgbase__in=inner_q) + elif form.cleaned_data['maintainer']: + inner_q = PackageRelation.objects.filter( + user__username=form.cleaned_data['maintainer']).values('pkgbase') + packages = packages.filter(pkgbase__in=inner_q) + + if form.cleaned_data['packager'] == 'unknown': + packages = packages.filter(packager__isnull=True) + elif form.cleaned_data['packager']: + packages = packages.filter( + packager__username=form.cleaned_data['packager']) + + if form.cleaned_data['flagged'] == 'Flagged': + packages = packages.filter(flag_date__isnull=False) + elif form.cleaned_data['flagged'] == 'Not Flagged': + packages = packages.filter(flag_date__isnull=True) + + if form.cleaned_data['signed'] == 'Signed': + packages = packages.filter(pgp_signature__isnull=False) + elif form.cleaned_data['signed'] == 'Unsigned': + packages = packages.filter(pgp_signature__isnull=True) + + if form.cleaned_data['last_update']: + lu = form.cleaned_data['last_update'] + packages = packages.filter(last_update__gte= + 
datetime(lu.year, lu.month, lu.day, 0, 0)) + + if form.cleaned_data['name']: + name = form.cleaned_data['name'] + packages = packages.filter(pkgname__icontains=name) + + if form.cleaned_data['desc']: + desc = form.cleaned_data['desc'] + packages = packages.filter(pkgdesc__icontains=desc) + + if form.cleaned_data['q']: + query = form.cleaned_data['q'] + q = Q(pkgname__icontains=query) | Q(pkgdesc__icontains=query) + packages = packages.filter(q) + + asked_limit = form.cleaned_data['limit'] + if asked_limit and asked_limit < 0: + limit = None + elif asked_limit: + limit = asked_limit + else: + # Form had errors, don't return any results, just the busted form + packages = Package.objects.none() + else: + form = PackageSearchForm() + + current_query = request.GET.urlencode() + page_dict = { + 'search_form': form, + 'current_query': current_query + } + allowed_sort = ["arch", "repo", "pkgname", "pkgbase", + "compressed_size", "installed_size", + "build_date", "last_update", "flag_date"] + allowed_sort += ["-" + s for s in allowed_sort] + sort = request.GET.get('sort', None) + if sort in allowed_sort: + packages = packages.order_by(sort) + page_dict['sort'] = sort + else: + packages = packages.order_by('pkgname') + + return list_detail.object_list(request, packages, + template_name="packages/search.html", + page=page, + paginate_by=limit, + template_object_name="package", + extra_context=page_dict) + +@vary_on_headers('X-Requested-With') +def files(request, name, repo, arch): + pkg = get_object_or_404(Package, + pkgname=name, repo__name__iexact=repo, arch__name=arch) + fileslist = PackageFile.objects.filter(pkg=pkg).order_by('directory', 'filename') + context = { + 'pkg': pkg, + 'files': fileslist, + } + template = 'packages/files.html' + if request.is_ajax(): + template = 'packages/files-list.html' + return direct_to_template(request, template, context) + +def details_json(request, name, repo, arch): + pkg = get_object_or_404(Package, + pkgname=name, repo__name__iexact=repo, arch__name=arch) + to_json = simplejson.dumps(pkg, ensure_ascii=False, + cls=PackageJSONEncoder) + return HttpResponse(to_json, mimetype='application/json') + +def files_json(request, name, repo, arch): + pkg = get_object_or_404(Package, + pkgname=name, repo__name__iexact=repo, arch__name=arch) + fileslist = PackageFile.objects.filter(pkg=pkg).order_by('directory', 'filename') + data = { + 'pkgname': pkg.pkgname, + 'repo': pkg.repo.name.lower(), + 'arch': pkg.arch.name.lower(), + 'files': fileslist, + } + to_json = simplejson.dumps(data, ensure_ascii=False, + cls=PackageJSONEncoder) + return HttpResponse(to_json, mimetype='application/json') + +@permission_required('main.change_package') +def unflag(request, name, repo, arch): + pkg = get_object_or_404(Package, + pkgname=name, repo__name__iexact=repo, arch__name=arch) + pkg.flag_date = None + pkg.save() + return redirect(pkg) + +@permission_required('main.change_package') +def unflag_all(request, name, repo, arch): + pkg = get_object_or_404(Package, + pkgname=name, repo__name__iexact=repo, arch__name=arch) + # find all packages from (hopefully) the same PKGBUILD + pkgs = Package.objects.filter(pkgbase=pkg.pkgbase, + repo__testing=pkg.repo.testing, repo__staging=pkg.repo.staging) + pkgs.update(flag_date=None) + return redirect(pkg) + + +@permission_required('main.change_package') +@never_cache +def signoffs(request): + signoff_groups = sorted(get_signoff_groups(), key=attrgetter('pkgbase')) + for group in signoff_groups: + group.user = request.user + + context = { + 
'signoff_groups': signoff_groups, + 'arches': Arch.objects.all(), + 'repo_names': sorted(set(g.target_repo for g in signoff_groups)), + } + return direct_to_template(request, 'packages/signoffs.html', context) + +@permission_required('main.change_package') +@never_cache +def signoff_package(request, name, repo, arch, revoke=False): + packages = get_list_or_404(Package, pkgbase=name, + arch__name=arch, repo__name__iexact=repo, repo__testing=True) + package = packages[0] + + spec = SignoffSpecification.objects.get_or_default_from_package(package) + + if revoke: + try: + signoff = Signoff.objects.get_from_package( + package, request.user, False) + except Signoff.DoesNotExist: + raise Http404 + signoff.revoked = datetime.utcnow() + signoff.save() + created = False + else: + # ensure we should even be accepting signoffs + if spec.known_bad or not spec.enabled: + return render(request, '403.html', status=403) + signoff, created = Signoff.objects.get_or_create_from_package( + package, request.user) + + all_signoffs = Signoff.objects.for_package(package) + + if request.is_ajax(): + data = { + 'created': created, + 'revoked': bool(signoff.revoked), + 'approved': approved_by_signoffs(all_signoffs, spec), + 'required': spec.required, + 'enabled': spec.enabled, + 'known_bad': spec.known_bad, + 'user': str(request.user), + } + return HttpResponse(simplejson.dumps(data, ensure_ascii=False), + mimetype='application/json') + + return redirect('package-signoffs') + +class SignoffOptionsForm(forms.ModelForm): + apply_all = forms.BooleanField(required=False, + help_text="Apply these options to all architectures?") + + class Meta: + model = SignoffSpecification + fields = ('required', 'enabled', 'known_bad', 'comments') + +def _signoff_options_all(request, name, repo): + seen_ids = set() + with transaction.commit_on_success(): + # find or create a specification for all architectures, then + # graft the form data onto them + packages = Package.objects.filter(pkgbase=name, + repo__name__iexact=repo, repo__testing=True) + for package in packages: + try: + spec = SignoffSpecification.objects.get_from_package(package) + if spec.pk in seen_ids: + continue + except SignoffSpecification.DoesNotExist: + spec = SignoffSpecification(pkgbase=package.pkgbase, + pkgver=package.pkgver, pkgrel=package.pkgrel, + epoch=package.epoch, arch=package.arch, + repo=package.repo) + spec.user = request.user + form = SignoffOptionsForm(request.POST, instance=spec) + if form.is_valid(): + form.save() + seen_ids.add(form.instance.pk) + +@permission_required('main.change_package') +@never_cache +def signoff_options(request, name, repo, arch): + packages = get_list_or_404(Package, pkgbase=name, + arch__name=arch, repo__name__iexact=repo, repo__testing=True) + package = packages[0] + + if request.user != package.packager and \ + request.user not in package.maintainers: + return render(request, '403.html', status=403) + + try: + spec = SignoffSpecification.objects.get_from_package(package) + except SignoffSpecification.DoesNotExist: + # create a fake one, but don't save it just yet + spec = SignoffSpecification(pkgbase=package.pkgbase, + pkgver=package.pkgver, pkgrel=package.pkgrel, + epoch=package.epoch, arch=package.arch, repo=package.repo) + spec.user = request.user + + if request.POST: + form = SignoffOptionsForm(request.POST, instance=spec) + if form.is_valid(): + if form.cleaned_data['apply_all']: + _signoff_options_all(request, name, repo) + else: + form.save() + return redirect('package-signoffs') + else: + form = 
SignoffOptionsForm(instance=spec) + + context = { + 'packages': packages, + 'package': package, + 'form': form, + } + return direct_to_template(request, 'packages/signoff_options.html', context) + +class SignoffJSONEncoder(DjangoJSONEncoder): + '''Base JSONEncoder extended to handle all serialization of all classes + related to signoffs.''' + signoff_group_attrs = ['arch', 'last_update', 'maintainers', 'packager', + 'pkgbase', 'repo', 'signoffs', 'target_repo', 'version'] + signoff_spec_attrs = ['required', 'enabled', 'known_bad', 'comments'] + signoff_attrs = ['user', 'created', 'revoked'] + + def default(self, obj): + if isinstance(obj, PackageSignoffGroup): + data = dict((attr, getattr(obj, attr)) + for attr in self.signoff_group_attrs) + data['package_count'] = len(obj.packages) + data['approved'] = obj.approved() + data.update((attr, getattr(obj.specification, attr)) + for attr in self.signoff_spec_attrs) + return data + elif isinstance(obj, Signoff): + data = dict((attr, getattr(obj, attr)) + for attr in self.signoff_attrs) + return data + elif isinstance(obj, Arch) or isinstance(obj, Repo): + return unicode(obj) + elif isinstance(obj, User): + return obj.username + elif isinstance(obj, set): + return list(obj) + return super(SignoffJSONEncoder, self).default(obj) + +@permission_required('main.change_package') +@never_cache +def signoffs_json(request): + signoff_groups = sorted(get_signoff_groups(), key=attrgetter('pkgbase')) + data = { + 'version': 1, + 'signoff_groups': signoff_groups, + } + to_json = simplejson.dumps(data, ensure_ascii=False, + cls=SignoffJSONEncoder) + response = HttpResponse(to_json, mimetype='application/json') + return response + + +def flaghelp(request): + return direct_to_template(request, 'packages/flaghelp.html') + +class FlagForm(forms.Form): + email = forms.EmailField(label='* E-mail Address') + usermessage = forms.CharField(label='Message To Dev', + widget=forms.Textarea, required=False) + # The field below is used to filter out bots that blindly fill out all input elements + website = forms.CharField(label='', + widget=forms.TextInput(attrs={'style': 'display:none;'}), + required=False) + +@never_cache +def flag(request, name, repo, arch): + pkg = get_object_or_404(Package, + pkgname=name, repo__name__iexact=repo, arch__name=arch) + if pkg.flag_date is not None: + # already flagged. do nothing. 
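# [editor's aside, not part of this patch] The hidden "website" field defined
# on FlagForm above is a spam honeypot: the view below only proceeds when
# form.cleaned_data['website'] == ''. A sketch of an equivalent form-level
# check, assuming nothing beyond stock Django validation:
#     def clean_website(self):
#         if self.cleaned_data['website']:
#             raise forms.ValidationError("hidden field must remain empty")
#         return self.cleaned_data['website']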
+ return direct_to_template(request, 'packages/flagged.html', {'pkg': pkg}) + # find all packages from (hopefully) the same PKGBUILD + pkgs = Package.objects.normal().filter( + pkgbase=pkg.pkgbase, flag_date__isnull=True, + repo__testing=pkg.repo.testing, + repo__staging=pkg.repo.staging).order_by( + 'pkgname', 'repo__name', 'arch__name') + + if request.POST: + form = FlagForm(request.POST) + if form.is_valid() and form.cleaned_data['website'] == '': + # save the package list for later use + flagged_pkgs = list(pkgs) + pkgs.update(flag_date=datetime.utcnow()) + + maints = pkg.maintainers + if not maints: + toemail = settings.NOTIFICATIONS + subject = 'Orphan %s package [%s] marked out-of-date' % \ + (pkg.repo.name, pkg.pkgname) + else: + toemail = [] + subject = '%s package [%s] marked out-of-date' % \ + (pkg.repo.name, pkg.pkgname) + for maint in maints: + if maint.get_profile().notify == True: + toemail.append(maint.email) + + if toemail: + # send notification email to the maintainers + t = loader.get_template('packages/outofdate.txt') + c = Context({ + 'email': form.cleaned_data['email'], + 'message': form.cleaned_data['usermessage'], + 'pkg': pkg, + 'packages': flagged_pkgs, + }) + send_mail(subject, + t.render(c), + 'Arch Website Notification <nobody@archlinux.org>', + toemail, + fail_silently=True) + + return redirect('package-flag-confirmed', name=name, repo=repo, + arch=arch) + else: + form = FlagForm() + + context = { + 'package': pkg, + 'packages': pkgs, + 'form': form + } + return direct_to_template(request, 'packages/flag.html', context) + +def flag_confirmed(request, name, repo, arch): + pkg = get_object_or_404(Package, + pkgname=name, repo__name__iexact=repo, arch__name=arch) + pkgs = Package.objects.normal().filter( + pkgbase=pkg.pkgbase, flag_date=pkg.flag_date, + repo__testing=pkg.repo.testing, + repo__staging=pkg.repo.staging).order_by( + 'pkgname', 'repo__name', 'arch__name') + + context = {'package': pkg, 'packages': pkgs} + + return direct_to_template(request, 'packages/flag_confirmed.html', context) + +def download(request, name, repo, arch): + pkg = get_object_or_404(Package, + pkgname=name, repo__name__iexact=repo, arch__name=arch) + mirror_urls = MirrorUrl.objects.filter( + mirror__public=True, mirror__active=True, + protocol__protocol__iexact='HTTP') + # look first for an 'Any' URL, then fall back to any HTTP URL + filtered_urls = mirror_urls.filter(mirror__country='Any')[:1] + if not filtered_urls: + filtered_urls = mirror_urls[:1] + if not filtered_urls: + raise Http404 + arch = pkg.arch.name + if pkg.arch.agnostic: + # grab the first non-any arch to fake the download path + arch = Arch.objects.exclude(agnostic=True)[0].name + values = { + 'host': filtered_urls[0].url, + 'arch': arch, + 'repo': pkg.repo.name.lower(), + 'file': pkg.filename, + } + url = Template('${host}${repo}/os/${arch}/${file}').substitute(values) + return redirect(url) + +def arch_differences(request): + # TODO: we have some hardcoded magic here with respect to the arches. 
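# [editor's aside, not part of this patch] One way to lift the hardcoding the
# TODO above points at would be to let the query string choose the arches,
# keeping the current pair as defaults (the parameter names are hypothetical):
#     arch_a = get_object_or_404(Arch, name=request.GET.get('arch_a', 'i686'))
#     arch_b = get_object_or_404(Arch, name=request.GET.get('arch_b', 'x86_64'))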
+ arch_a = Arch.objects.get(name='i686') + arch_b = Arch.objects.get(name='x86_64') + differences = get_differences_info(arch_a, arch_b) + context = { + 'arch_a': arch_a, + 'arch_b': arch_b, + 'differences': differences, + } + return direct_to_template(request, 'packages/differences.html', context) + +@permission_required('main.change_package') +@never_cache +def stale_relations(request): + relations = PackageRelation.objects.select_related('user') + pkgbases = Package.objects.all().values('pkgbase') + + inactive_user = relations.filter(user__is_active=False) + missing_pkgbase = relations.exclude( + pkgbase__in=pkgbases).order_by('pkgbase') + wrong_permissions = get_wrong_permissions() + + context = { + 'inactive_user': inactive_user, + 'missing_pkgbase': missing_pkgbase, + 'wrong_permissions': wrong_permissions, + } + return direct_to_template(request, 'packages/stale_relations.html', context) + +@permission_required('packages.delete_packagerelation') +@require_POST +def stale_relations_update(request): + ids = set(request.POST.getlist('relation_id')) + + if ids: + PackageRelation.objects.filter(id__in=ids).delete() + + messages.info(request, "%d package relations deleted." % len(ids)) + return redirect('/packages/stale_relations/') + +# vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From a1ef52f87fb7c2fa976431393769c9c4ec88ba22 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 10 Nov 2011 18:25:25 -0600 Subject: packages/views: move flag-related views to own module One step in splitting the package views. Signed-off-by: Dan McGee <dan@archlinux.org> --- packages/views/__init__.py | 110 ++--------------------------------------- packages/views/flag.py | 121 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 124 insertions(+), 107 deletions(-) create mode 100644 packages/views/flag.py diff --git a/packages/views/__init__.py b/packages/views/__init__.py index 7b8e4847..4782e457 100644 --- a/packages/views/__init__.py +++ b/packages/views/__init__.py @@ -3,15 +3,12 @@ from django.contrib.admin.widgets import AdminDateWidget from django.contrib.auth.models import User from django.contrib.auth.decorators import permission_required -from django.conf import settings -from django.core.mail import send_mail from django.core.serializers.json import DjangoJSONEncoder from django.db import transaction from django.db.models import Q from django.http import HttpResponse, Http404, HttpResponseForbidden from django.shortcuts import (get_object_or_404, get_list_or_404, redirect, render) -from django.template import loader, Context from django.utils import simplejson from django.views.decorators.cache import never_cache from django.views.decorators.http import require_POST @@ -33,6 +30,9 @@ get_wrong_permissions, get_signoff_groups, approved_by_signoffs, PackageSignoffGroup) +# make other views available from this same package +from .flag import flaghelp, flag, flag_confirmed, unflag, unflag_all + class PackageJSONEncoder(DjangoJSONEncoder): pkg_attributes = [ 'pkgname', 'pkgbase', 'repo', 'arch', 'pkgver', 'pkgrel', 'epoch', 'pkgdesc', 'url', 'filename', 'compressed_size', @@ -355,24 +355,6 @@ def files_json(request, name, repo, arch): cls=PackageJSONEncoder) return HttpResponse(to_json, mimetype='application/json') -@permission_required('main.change_package') -def unflag(request, name, repo, arch): - pkg = get_object_or_404(Package, - pkgname=name, repo__name__iexact=repo, arch__name=arch) - pkg.flag_date = None - pkg.save() - return redirect(pkg) - 
-@permission_required('main.change_package') -def unflag_all(request, name, repo, arch): - pkg = get_object_or_404(Package, - pkgname=name, repo__name__iexact=repo, arch__name=arch) - # find all packages from (hopefully) the same PKGBUILD - pkgs = Package.objects.filter(pkgbase=pkg.pkgbase, - repo__testing=pkg.repo.testing, repo__staging=pkg.repo.staging) - pkgs.update(flag_date=None) - return redirect(pkg) - @permission_required('main.change_package') @never_cache @@ -542,92 +524,6 @@ def signoffs_json(request): return response -def flaghelp(request): - return direct_to_template(request, 'packages/flaghelp.html') - -class FlagForm(forms.Form): - email = forms.EmailField(label='* E-mail Address') - usermessage = forms.CharField(label='Message To Dev', - widget=forms.Textarea, required=False) - # The field below is used to filter out bots that blindly fill out all input elements - website = forms.CharField(label='', - widget=forms.TextInput(attrs={'style': 'display:none;'}), - required=False) - -@never_cache -def flag(request, name, repo, arch): - pkg = get_object_or_404(Package, - pkgname=name, repo__name__iexact=repo, arch__name=arch) - if pkg.flag_date is not None: - # already flagged. do nothing. - return direct_to_template(request, 'packages/flagged.html', {'pkg': pkg}) - # find all packages from (hopefully) the same PKGBUILD - pkgs = Package.objects.normal().filter( - pkgbase=pkg.pkgbase, flag_date__isnull=True, - repo__testing=pkg.repo.testing, - repo__staging=pkg.repo.staging).order_by( - 'pkgname', 'repo__name', 'arch__name') - - if request.POST: - form = FlagForm(request.POST) - if form.is_valid() and form.cleaned_data['website'] == '': - # save the package list for later use - flagged_pkgs = list(pkgs) - pkgs.update(flag_date=datetime.utcnow()) - - maints = pkg.maintainers - if not maints: - toemail = settings.NOTIFICATIONS - subject = 'Orphan %s package [%s] marked out-of-date' % \ - (pkg.repo.name, pkg.pkgname) - else: - toemail = [] - subject = '%s package [%s] marked out-of-date' % \ - (pkg.repo.name, pkg.pkgname) - for maint in maints: - if maint.get_profile().notify == True: - toemail.append(maint.email) - - if toemail: - # send notification email to the maintainers - t = loader.get_template('packages/outofdate.txt') - c = Context({ - 'email': form.cleaned_data['email'], - 'message': form.cleaned_data['usermessage'], - 'pkg': pkg, - 'packages': flagged_pkgs, - }) - send_mail(subject, - t.render(c), - 'Arch Website Notification <nobody@archlinux.org>', - toemail, - fail_silently=True) - - return redirect('package-flag-confirmed', name=name, repo=repo, - arch=arch) - else: - form = FlagForm() - - context = { - 'package': pkg, - 'packages': pkgs, - 'form': form - } - return direct_to_template(request, 'packages/flag.html', context) - -def flag_confirmed(request, name, repo, arch): - pkg = get_object_or_404(Package, - pkgname=name, repo__name__iexact=repo, arch__name=arch) - pkgs = Package.objects.normal().filter( - pkgbase=pkg.pkgbase, flag_date=pkg.flag_date, - repo__testing=pkg.repo.testing, - repo__staging=pkg.repo.staging).order_by( - 'pkgname', 'repo__name', 'arch__name') - - context = {'package': pkg, 'packages': pkgs} - - return direct_to_template(request, 'packages/flag_confirmed.html', context) - def download(request, name, repo, arch): pkg = get_object_or_404(Package, pkgname=name, repo__name__iexact=repo, arch__name=arch) diff --git a/packages/views/flag.py b/packages/views/flag.py new file mode 100644 index 00000000..7e9d87c7 --- /dev/null +++ 
b/packages/views/flag.py @@ -0,0 +1,121 @@ +from datetime import datetime + +from django import forms +from django.conf import settings +from django.contrib.auth.decorators import permission_required +from django.core.mail import send_mail +from django.shortcuts import get_object_or_404, redirect +from django.template import loader, Context +from django.views.generic.simple import direct_to_template +from django.views.decorators.cache import never_cache + +from main.models import Package + + +def flaghelp(request): + return direct_to_template(request, 'packages/flaghelp.html') + +class FlagForm(forms.Form): + email = forms.EmailField(label='* E-mail Address') + usermessage = forms.CharField(label='Message To Dev', + widget=forms.Textarea, required=False) + # The field below is used to filter out bots that blindly fill out all + # input elements + website = forms.CharField(label='', + widget=forms.TextInput(attrs={'style': 'display:none;'}), + required=False) + +@never_cache +def flag(request, name, repo, arch): + pkg = get_object_or_404(Package, + pkgname=name, repo__name__iexact=repo, arch__name=arch) + if pkg.flag_date is not None: + # already flagged. do nothing. + return direct_to_template(request, 'packages/flagged.html', + {'pkg': pkg}) + # find all packages from (hopefully) the same PKGBUILD + pkgs = Package.objects.normal().filter( + pkgbase=pkg.pkgbase, flag_date__isnull=True, + repo__testing=pkg.repo.testing, + repo__staging=pkg.repo.staging).order_by( + 'pkgname', 'repo__name', 'arch__name') + + if request.POST: + form = FlagForm(request.POST) + if form.is_valid() and form.cleaned_data['website'] == '': + # save the package list for later use + flagged_pkgs = list(pkgs) + pkgs.update(flag_date=datetime.utcnow()) + + maints = pkg.maintainers + if not maints: + toemail = settings.NOTIFICATIONS + subject = 'Orphan %s package [%s] marked out-of-date' % \ + (pkg.repo.name, pkg.pkgname) + else: + toemail = [] + subject = '%s package [%s] marked out-of-date' % \ + (pkg.repo.name, pkg.pkgname) + for maint in maints: + if maint.get_profile().notify == True: + toemail.append(maint.email) + + if toemail: + # send notification email to the maintainers + tmpl = loader.get_template('packages/outofdate.txt') + ctx = Context({ + 'email': form.cleaned_data['email'], + 'message': form.cleaned_data['usermessage'], + 'pkg': pkg, + 'packages': flagged_pkgs, + }) + send_mail(subject, + tmpl.render(ctx), + 'Arch Website Notification <nobody@archlinux.org>', + toemail, + fail_silently=True) + + return redirect('package-flag-confirmed', name=name, repo=repo, + arch=arch) + else: + form = FlagForm() + + context = { + 'package': pkg, + 'packages': pkgs, + 'form': form + } + return direct_to_template(request, 'packages/flag.html', context) + +def flag_confirmed(request, name, repo, arch): + pkg = get_object_or_404(Package, + pkgname=name, repo__name__iexact=repo, arch__name=arch) + pkgs = Package.objects.normal().filter( + pkgbase=pkg.pkgbase, flag_date=pkg.flag_date, + repo__testing=pkg.repo.testing, + repo__staging=pkg.repo.staging).order_by( + 'pkgname', 'repo__name', 'arch__name') + + context = {'package': pkg, 'packages': pkgs} + + return direct_to_template(request, 'packages/flag_confirmed.html', context) + +@permission_required('main.change_package') +def unflag(request, name, repo, arch): + pkg = get_object_or_404(Package, + pkgname=name, repo__name__iexact=repo, arch__name=arch) + pkg.flag_date = None + pkg.save() + return redirect(pkg) + +@permission_required('main.change_package') +def 
unflag_all(request, name, repo, arch): + pkg = get_object_or_404(Package, + pkgname=name, repo__name__iexact=repo, arch__name=arch) + # find all packages from (hopefully) the same PKGBUILD + pkgs = Package.objects.filter(pkgbase=pkg.pkgbase, + repo__testing=pkg.repo.testing, repo__staging=pkg.repo.staging) + pkgs.update(flag_date=None) + return redirect(pkg) + +# vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From 865fa5c1e34123a066d5366e04dda84f84232ade Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 10 Nov 2011 18:35:29 -0600 Subject: packages/views: move signoff-related views into separate module Signed-off-by: Dan McGee <dan@archlinux.org> --- packages/views/__init__.py | 179 +------------------------------------------ packages/views/signoff.py | 187 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 191 insertions(+), 175 deletions(-) create mode 100644 packages/views/signoff.py diff --git a/packages/views/__init__.py b/packages/views/__init__.py index 4782e457..e02740f2 100644 --- a/packages/views/__init__.py +++ b/packages/views/__init__.py @@ -4,7 +4,6 @@ from django.contrib.auth.models import User from django.contrib.auth.decorators import permission_required from django.core.serializers.json import DjangoJSONEncoder -from django.db import transaction from django.db.models import Q from django.http import HttpResponse, Http404, HttpResponseForbidden from django.shortcuts import (get_object_or_404, get_list_or_404, @@ -17,21 +16,20 @@ from django.views.generic.simple import direct_to_template from datetime import datetime -from operator import attrgetter from string import Template from urllib import urlencode from main.models import Package, PackageFile, Arch, Repo from main.utils import make_choice from mirrors.models import MirrorUrl -from ..models import (PackageRelation, PackageGroup, - SignoffSpecification, Signoff) +from ..models import PackageRelation, PackageGroup from ..utils import (get_group_info, get_differences_info, - get_wrong_permissions, get_signoff_groups, approved_by_signoffs, - PackageSignoffGroup) + get_wrong_permissions) # make other views available from this same package from .flag import flaghelp, flag, flag_confirmed, unflag, unflag_all +from .signoff import signoffs, signoff_package, signoff_options, signoffs_json + class PackageJSONEncoder(DjangoJSONEncoder): pkg_attributes = [ 'pkgname', 'pkgbase', 'repo', 'arch', 'pkgver', @@ -355,175 +353,6 @@ def files_json(request, name, repo, arch): cls=PackageJSONEncoder) return HttpResponse(to_json, mimetype='application/json') - -@permission_required('main.change_package') -@never_cache -def signoffs(request): - signoff_groups = sorted(get_signoff_groups(), key=attrgetter('pkgbase')) - for group in signoff_groups: - group.user = request.user - - context = { - 'signoff_groups': signoff_groups, - 'arches': Arch.objects.all(), - 'repo_names': sorted(set(g.target_repo for g in signoff_groups)), - } - return direct_to_template(request, 'packages/signoffs.html', context) - -@permission_required('main.change_package') -@never_cache -def signoff_package(request, name, repo, arch, revoke=False): - packages = get_list_or_404(Package, pkgbase=name, - arch__name=arch, repo__name__iexact=repo, repo__testing=True) - package = packages[0] - - spec = SignoffSpecification.objects.get_or_default_from_package(package) - - if revoke: - try: - signoff = Signoff.objects.get_from_package( - package, request.user, False) - except Signoff.DoesNotExist: - raise Http404 - signoff.revoked = 
datetime.utcnow() - signoff.save() - created = False - else: - # ensure we should even be accepting signoffs - if spec.known_bad or not spec.enabled: - return render(request, '403.html', status=403) - signoff, created = Signoff.objects.get_or_create_from_package( - package, request.user) - - all_signoffs = Signoff.objects.for_package(package) - - if request.is_ajax(): - data = { - 'created': created, - 'revoked': bool(signoff.revoked), - 'approved': approved_by_signoffs(all_signoffs, spec), - 'required': spec.required, - 'enabled': spec.enabled, - 'known_bad': spec.known_bad, - 'user': str(request.user), - } - return HttpResponse(simplejson.dumps(data, ensure_ascii=False), - mimetype='application/json') - - return redirect('package-signoffs') - -class SignoffOptionsForm(forms.ModelForm): - apply_all = forms.BooleanField(required=False, - help_text="Apply these options to all architectures?") - - class Meta: - model = SignoffSpecification - fields = ('required', 'enabled', 'known_bad', 'comments') - -def _signoff_options_all(request, name, repo): - seen_ids = set() - with transaction.commit_on_success(): - # find or create a specification for all architectures, then - # graft the form data onto them - packages = Package.objects.filter(pkgbase=name, - repo__name__iexact=repo, repo__testing=True) - for package in packages: - try: - spec = SignoffSpecification.objects.get_from_package(package) - if spec.pk in seen_ids: - continue - except SignoffSpecification.DoesNotExist: - spec = SignoffSpecification(pkgbase=package.pkgbase, - pkgver=package.pkgver, pkgrel=package.pkgrel, - epoch=package.epoch, arch=package.arch, - repo=package.repo) - spec.user = request.user - form = SignoffOptionsForm(request.POST, instance=spec) - if form.is_valid(): - form.save() - seen_ids.add(form.instance.pk) - -@permission_required('main.change_package') -@never_cache -def signoff_options(request, name, repo, arch): - packages = get_list_or_404(Package, pkgbase=name, - arch__name=arch, repo__name__iexact=repo, repo__testing=True) - package = packages[0] - - if request.user != package.packager and \ - request.user not in package.maintainers: - return render(request, '403.html', status=403) - - try: - spec = SignoffSpecification.objects.get_from_package(package) - except SignoffSpecification.DoesNotExist: - # create a fake one, but don't save it just yet - spec = SignoffSpecification(pkgbase=package.pkgbase, - pkgver=package.pkgver, pkgrel=package.pkgrel, - epoch=package.epoch, arch=package.arch, repo=package.repo) - spec.user = request.user - - if request.POST: - form = SignoffOptionsForm(request.POST, instance=spec) - if form.is_valid(): - if form.cleaned_data['apply_all']: - _signoff_options_all(request, name, repo) - else: - form.save() - return redirect('package-signoffs') - else: - form = SignoffOptionsForm(instance=spec) - - context = { - 'packages': packages, - 'package': package, - 'form': form, - } - return direct_to_template(request, 'packages/signoff_options.html', context) - -class SignoffJSONEncoder(DjangoJSONEncoder): - '''Base JSONEncoder extended to handle all serialization of all classes - related to signoffs.''' - signoff_group_attrs = ['arch', 'last_update', 'maintainers', 'packager', - 'pkgbase', 'repo', 'signoffs', 'target_repo', 'version'] - signoff_spec_attrs = ['required', 'enabled', 'known_bad', 'comments'] - signoff_attrs = ['user', 'created', 'revoked'] - - def default(self, obj): - if isinstance(obj, PackageSignoffGroup): - data = dict((attr, getattr(obj, attr)) - for attr in 
self.signoff_group_attrs) - data['package_count'] = len(obj.packages) - data['approved'] = obj.approved() - data.update((attr, getattr(obj.specification, attr)) - for attr in self.signoff_spec_attrs) - return data - elif isinstance(obj, Signoff): - data = dict((attr, getattr(obj, attr)) - for attr in self.signoff_attrs) - return data - elif isinstance(obj, Arch) or isinstance(obj, Repo): - return unicode(obj) - elif isinstance(obj, User): - return obj.username - elif isinstance(obj, set): - return list(obj) - return super(SignoffJSONEncoder, self).default(obj) - -@permission_required('main.change_package') -@never_cache -def signoffs_json(request): - signoff_groups = sorted(get_signoff_groups(), key=attrgetter('pkgbase')) - data = { - 'version': 1, - 'signoff_groups': signoff_groups, - } - to_json = simplejson.dumps(data, ensure_ascii=False, - cls=SignoffJSONEncoder) - response = HttpResponse(to_json, mimetype='application/json') - return response - - def download(request, name, repo, arch): pkg = get_object_or_404(Package, pkgname=name, repo__name__iexact=repo, arch__name=arch) diff --git a/packages/views/signoff.py b/packages/views/signoff.py new file mode 100644 index 00000000..a42c1c66 --- /dev/null +++ b/packages/views/signoff.py @@ -0,0 +1,187 @@ +from datetime import datetime +from operator import attrgetter + +from django import forms +from django.contrib.auth.decorators import permission_required +from django.contrib.auth.models import User +from django.core.serializers.json import DjangoJSONEncoder +from django.db import transaction +from django.http import HttpResponse, Http404 +from django.shortcuts import get_list_or_404, redirect, render +from django.utils import simplejson +from django.views.decorators.cache import never_cache +from django.views.generic.simple import direct_to_template + +from main.models import Package, Arch, Repo +from ..models import SignoffSpecification, Signoff +from ..utils import (get_signoff_groups, approved_by_signoffs, + PackageSignoffGroup) + +@permission_required('main.change_package') +@never_cache +def signoffs(request): + signoff_groups = sorted(get_signoff_groups(), key=attrgetter('pkgbase')) + for group in signoff_groups: + group.user = request.user + + context = { + 'signoff_groups': signoff_groups, + 'arches': Arch.objects.all(), + 'repo_names': sorted(set(g.target_repo for g in signoff_groups)), + } + return direct_to_template(request, 'packages/signoffs.html', context) + +@permission_required('main.change_package') +@never_cache +def signoff_package(request, name, repo, arch, revoke=False): + packages = get_list_or_404(Package, pkgbase=name, + arch__name=arch, repo__name__iexact=repo, repo__testing=True) + package = packages[0] + + spec = SignoffSpecification.objects.get_or_default_from_package(package) + + if revoke: + try: + signoff = Signoff.objects.get_from_package( + package, request.user, False) + except Signoff.DoesNotExist: + raise Http404 + signoff.revoked = datetime.utcnow() + signoff.save() + created = False + else: + # ensure we should even be accepting signoffs + if spec.known_bad or not spec.enabled: + return render(request, '403.html', status=403) + signoff, created = Signoff.objects.get_or_create_from_package( + package, request.user) + + all_signoffs = Signoff.objects.for_package(package) + + if request.is_ajax(): + data = { + 'created': created, + 'revoked': bool(signoff.revoked), + 'approved': approved_by_signoffs(all_signoffs, spec), + 'required': spec.required, + 'enabled': spec.enabled, + 'known_bad': 
spec.known_bad, + 'user': str(request.user), + } + return HttpResponse(simplejson.dumps(data, ensure_ascii=False), + mimetype='application/json') + + return redirect('package-signoffs') + +class SignoffOptionsForm(forms.ModelForm): + apply_all = forms.BooleanField(required=False, + help_text="Apply these options to all architectures?") + + class Meta: + model = SignoffSpecification + fields = ('required', 'enabled', 'known_bad', 'comments') + +def _signoff_options_all(request, name, repo): + seen_ids = set() + with transaction.commit_on_success(): + # find or create a specification for all architectures, then + # graft the form data onto them + packages = Package.objects.filter(pkgbase=name, + repo__name__iexact=repo, repo__testing=True) + for package in packages: + try: + spec = SignoffSpecification.objects.get_from_package(package) + if spec.pk in seen_ids: + continue + except SignoffSpecification.DoesNotExist: + spec = SignoffSpecification(pkgbase=package.pkgbase, + pkgver=package.pkgver, pkgrel=package.pkgrel, + epoch=package.epoch, arch=package.arch, + repo=package.repo) + spec.user = request.user + form = SignoffOptionsForm(request.POST, instance=spec) + if form.is_valid(): + form.save() + seen_ids.add(form.instance.pk) + +@permission_required('main.change_package') +@never_cache +def signoff_options(request, name, repo, arch): + packages = get_list_or_404(Package, pkgbase=name, + arch__name=arch, repo__name__iexact=repo, repo__testing=True) + package = packages[0] + + if request.user != package.packager and \ + request.user not in package.maintainers: + return render(request, '403.html', status=403) + + try: + spec = SignoffSpecification.objects.get_from_package(package) + except SignoffSpecification.DoesNotExist: + # create a fake one, but don't save it just yet + spec = SignoffSpecification(pkgbase=package.pkgbase, + pkgver=package.pkgver, pkgrel=package.pkgrel, + epoch=package.epoch, arch=package.arch, repo=package.repo) + spec.user = request.user + + if request.POST: + form = SignoffOptionsForm(request.POST, instance=spec) + if form.is_valid(): + if form.cleaned_data['apply_all']: + _signoff_options_all(request, name, repo) + else: + form.save() + return redirect('package-signoffs') + else: + form = SignoffOptionsForm(instance=spec) + + context = { + 'packages': packages, + 'package': package, + 'form': form, + } + return direct_to_template(request, 'packages/signoff_options.html', context) + +class SignoffJSONEncoder(DjangoJSONEncoder): + '''Base JSONEncoder extended to handle all serialization of all classes + related to signoffs.''' + signoff_group_attrs = ['arch', 'last_update', 'maintainers', 'packager', + 'pkgbase', 'repo', 'signoffs', 'target_repo', 'version'] + signoff_spec_attrs = ['required', 'enabled', 'known_bad', 'comments'] + signoff_attrs = ['user', 'created', 'revoked'] + + def default(self, obj): + if isinstance(obj, PackageSignoffGroup): + data = dict((attr, getattr(obj, attr)) + for attr in self.signoff_group_attrs) + data['package_count'] = len(obj.packages) + data['approved'] = obj.approved() + data.update((attr, getattr(obj.specification, attr)) + for attr in self.signoff_spec_attrs) + return data + elif isinstance(obj, Signoff): + data = dict((attr, getattr(obj, attr)) + for attr in self.signoff_attrs) + return data + elif isinstance(obj, Arch) or isinstance(obj, Repo): + return unicode(obj) + elif isinstance(obj, User): + return obj.username + elif isinstance(obj, set): + return list(obj) + return super(SignoffJSONEncoder, self).default(obj) + 
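# [editor's aside, not part of this patch] default() falls through to
# DjangoJSONEncoder for anything it does not recognize, which covers dates and
# Decimals and raises TypeError for truly unknown types. The encoder is wired
# up via the cls= argument, as signoffs_json() does just below:
#     simplejson.dumps(data, ensure_ascii=False, cls=SignoffJSONEncoder)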
+@permission_required('main.change_package') +@never_cache +def signoffs_json(request): + signoff_groups = sorted(get_signoff_groups(), key=attrgetter('pkgbase')) + data = { + 'version': 1, + 'signoff_groups': signoff_groups, + } + to_json = simplejson.dumps(data, ensure_ascii=False, + cls=SignoffJSONEncoder) + response = HttpResponse(to_json, mimetype='application/json') + return response + +# vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From 6e6392c089688e227339efd58d42f84de92bda11 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 10 Nov 2011 19:09:29 -0600 Subject: packages/views: split out search view Signed-off-by: Dan McGee <dan@archlinux.org> --- packages/views/__init__.py | 160 +------------------------------------------- packages/views/search.py | 161 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 164 insertions(+), 157 deletions(-) create mode 100644 packages/views/search.py diff --git a/packages/views/__init__.py b/packages/views/__init__.py index e02740f2..e3264161 100644 --- a/packages/views/__init__.py +++ b/packages/views/__init__.py @@ -1,26 +1,18 @@ -from django import forms from django.contrib import messages -from django.contrib.admin.widgets import AdminDateWidget -from django.contrib.auth.models import User from django.contrib.auth.decorators import permission_required from django.core.serializers.json import DjangoJSONEncoder -from django.db.models import Q -from django.http import HttpResponse, Http404, HttpResponseForbidden -from django.shortcuts import (get_object_or_404, get_list_or_404, - redirect, render) +from django.http import HttpResponse, Http404 +from django.shortcuts import get_object_or_404, redirect from django.utils import simplejson from django.views.decorators.cache import never_cache from django.views.decorators.http import require_POST from django.views.decorators.vary import vary_on_headers -from django.views.generic import list_detail from django.views.generic.simple import direct_to_template -from datetime import datetime from string import Template from urllib import urlencode from main.models import Package, PackageFile, Arch, Repo -from main.utils import make_choice from mirrors.models import MirrorUrl from ..models import PackageRelation, PackageGroup from ..utils import (get_group_info, get_differences_info, @@ -28,6 +20,7 @@ # make other views available from this same package from .flag import flaghelp, flag, flag_confirmed, unflag, unflag_all +from .search import search from .signoff import signoffs, signoff_package, signoff_options, signoffs_json @@ -171,153 +164,6 @@ def group_details(request, arch, name): } return direct_to_template(request, 'packages/packages_list.html', context) -def coerce_limit_value(value): - if not value: - return None - if value == 'all': - # negative value indicates show all results - return -1 - value = int(value) - if value < 0: - raise ValueError - return value - -class LimitTypedChoiceField(forms.TypedChoiceField): - def valid_value(self, value): - try: - coerce_limit_value(value) - return True - except (ValueError, TypeError): - return False - -class PackageSearchForm(forms.Form): - repo = forms.MultipleChoiceField(required=False) - arch = forms.MultipleChoiceField(required=False) - name = forms.CharField(required=False) - desc = forms.CharField(required=False) - q = forms.CharField(required=False) - maintainer = forms.ChoiceField(required=False) - packager = forms.ChoiceField(required=False) - last_update = forms.DateField(required=False, widget=AdminDateWidget(), 
- label='Last Updated After') - flagged = forms.ChoiceField( - choices=[('', 'All')] + make_choice(['Flagged', 'Not Flagged']), - required=False) - signed = forms.ChoiceField( - choices=[('', 'All')] + make_choice(['Signed', 'Unsigned']), - required=False) - limit = LimitTypedChoiceField( - choices=make_choice([50, 100, 250]) + [('all', 'All')], - coerce=coerce_limit_value, - required=False, - initial=50) - - def __init__(self, *args, **kwargs): - super(PackageSearchForm, self).__init__(*args, **kwargs) - self.fields['repo'].choices = make_choice( - [repo.name for repo in Repo.objects.all()]) - self.fields['arch'].choices = make_choice( - [arch.name for arch in Arch.objects.all()]) - self.fields['q'].widget.attrs.update({"size": "30"}) - maints = User.objects.filter(is_active=True).order_by('username') - self.fields['maintainer'].choices = \ - [('', 'All'), ('orphan', 'Orphan')] + \ - [(m.username, m.get_full_name()) for m in maints] - self.fields['packager'].choices = \ - [('', 'All'), ('unknown', 'Unknown')] + \ - [(m.username, m.get_full_name()) for m in maints] - -def search(request, page=None): - limit = 50 - packages = Package.objects.normal() - - if request.GET: - form = PackageSearchForm(data=request.GET) - if form.is_valid(): - if form.cleaned_data['repo']: - packages = packages.filter( - repo__name__in=form.cleaned_data['repo']) - - if form.cleaned_data['arch']: - packages = packages.filter( - arch__name__in=form.cleaned_data['arch']) - - if form.cleaned_data['maintainer'] == 'orphan': - inner_q = PackageRelation.objects.all().values('pkgbase') - packages = packages.exclude(pkgbase__in=inner_q) - elif form.cleaned_data['maintainer']: - inner_q = PackageRelation.objects.filter( - user__username=form.cleaned_data['maintainer']).values('pkgbase') - packages = packages.filter(pkgbase__in=inner_q) - - if form.cleaned_data['packager'] == 'unknown': - packages = packages.filter(packager__isnull=True) - elif form.cleaned_data['packager']: - packages = packages.filter( - packager__username=form.cleaned_data['packager']) - - if form.cleaned_data['flagged'] == 'Flagged': - packages = packages.filter(flag_date__isnull=False) - elif form.cleaned_data['flagged'] == 'Not Flagged': - packages = packages.filter(flag_date__isnull=True) - - if form.cleaned_data['signed'] == 'Signed': - packages = packages.filter(pgp_signature__isnull=False) - elif form.cleaned_data['signed'] == 'Unsigned': - packages = packages.filter(pgp_signature__isnull=True) - - if form.cleaned_data['last_update']: - lu = form.cleaned_data['last_update'] - packages = packages.filter(last_update__gte= - datetime(lu.year, lu.month, lu.day, 0, 0)) - - if form.cleaned_data['name']: - name = form.cleaned_data['name'] - packages = packages.filter(pkgname__icontains=name) - - if form.cleaned_data['desc']: - desc = form.cleaned_data['desc'] - packages = packages.filter(pkgdesc__icontains=desc) - - if form.cleaned_data['q']: - query = form.cleaned_data['q'] - q = Q(pkgname__icontains=query) | Q(pkgdesc__icontains=query) - packages = packages.filter(q) - - asked_limit = form.cleaned_data['limit'] - if asked_limit and asked_limit < 0: - limit = None - elif asked_limit: - limit = asked_limit - else: - # Form had errors, don't return any results, just the busted form - packages = Package.objects.none() - else: - form = PackageSearchForm() - - current_query = request.GET.urlencode() - page_dict = { - 'search_form': form, - 'current_query': current_query - } - allowed_sort = ["arch", "repo", "pkgname", "pkgbase", - "compressed_size", 
"installed_size", - "build_date", "last_update", "flag_date"] - allowed_sort += ["-" + s for s in allowed_sort] - sort = request.GET.get('sort', None) - if sort in allowed_sort: - packages = packages.order_by(sort) - page_dict['sort'] = sort - else: - packages = packages.order_by('pkgname') - - return list_detail.object_list(request, packages, - template_name="packages/search.html", - page=page, - paginate_by=limit, - template_object_name="package", - extra_context=page_dict) - @vary_on_headers('X-Requested-With') def files(request, name, repo, arch): pkg = get_object_or_404(Package, diff --git a/packages/views/search.py b/packages/views/search.py new file mode 100644 index 00000000..e2d00d62 --- /dev/null +++ b/packages/views/search.py @@ -0,0 +1,161 @@ +from datetime import datetime + +from django import forms +from django.contrib.admin.widgets import AdminDateWidget +from django.contrib.auth.models import User +from django.db.models import Q +from django.views.generic import list_detail + +from main.models import Package, Arch, Repo +from main.utils import make_choice +from ..models import PackageRelation + + +def coerce_limit_value(value): + if not value: + return None + if value == 'all': + # negative value indicates show all results + return -1 + value = int(value) + if value < 0: + raise ValueError + return value + +class LimitTypedChoiceField(forms.TypedChoiceField): + def valid_value(self, value): + try: + coerce_limit_value(value) + return True + except (ValueError, TypeError): + return False + +class PackageSearchForm(forms.Form): + repo = forms.MultipleChoiceField(required=False) + arch = forms.MultipleChoiceField(required=False) + name = forms.CharField(required=False) + desc = forms.CharField(required=False) + q = forms.CharField(required=False) + maintainer = forms.ChoiceField(required=False) + packager = forms.ChoiceField(required=False) + last_update = forms.DateField(required=False, widget=AdminDateWidget(), + label='Last Updated After') + flagged = forms.ChoiceField( + choices=[('', 'All')] + make_choice(['Flagged', 'Not Flagged']), + required=False) + signed = forms.ChoiceField( + choices=[('', 'All')] + make_choice(['Signed', 'Unsigned']), + required=False) + limit = LimitTypedChoiceField( + choices=make_choice([50, 100, 250]) + [('all', 'All')], + coerce=coerce_limit_value, + required=False, + initial=50) + + def __init__(self, *args, **kwargs): + super(PackageSearchForm, self).__init__(*args, **kwargs) + self.fields['repo'].choices = make_choice( + [repo.name for repo in Repo.objects.all()]) + self.fields['arch'].choices = make_choice( + [arch.name for arch in Arch.objects.all()]) + self.fields['q'].widget.attrs.update({"size": "30"}) + maints = User.objects.filter(is_active=True).order_by('username') + self.fields['maintainer'].choices = \ + [('', 'All'), ('orphan', 'Orphan')] + \ + [(m.username, m.get_full_name()) for m in maints] + self.fields['packager'].choices = \ + [('', 'All'), ('unknown', 'Unknown')] + \ + [(m.username, m.get_full_name()) for m in maints] + +def search(request, page=None): + limit = 50 + packages = Package.objects.normal() + + if request.GET: + form = PackageSearchForm(data=request.GET) + if form.is_valid(): + if form.cleaned_data['repo']: + packages = packages.filter( + repo__name__in=form.cleaned_data['repo']) + + if form.cleaned_data['arch']: + packages = packages.filter( + arch__name__in=form.cleaned_data['arch']) + + if form.cleaned_data['maintainer'] == 'orphan': + inner_q = PackageRelation.objects.all().values('pkgbase') + packages = 
packages.exclude(pkgbase__in=inner_q) + elif form.cleaned_data['maintainer']: + inner_q = PackageRelation.objects.filter( + user__username=form.cleaned_data['maintainer']).values('pkgbase') + packages = packages.filter(pkgbase__in=inner_q) + + if form.cleaned_data['packager'] == 'unknown': + packages = packages.filter(packager__isnull=True) + elif form.cleaned_data['packager']: + packages = packages.filter( + packager__username=form.cleaned_data['packager']) + + if form.cleaned_data['flagged'] == 'Flagged': + packages = packages.filter(flag_date__isnull=False) + elif form.cleaned_data['flagged'] == 'Not Flagged': + packages = packages.filter(flag_date__isnull=True) + + if form.cleaned_data['signed'] == 'Signed': + packages = packages.filter(pgp_signature__isnull=False) + elif form.cleaned_data['signed'] == 'Unsigned': + packages = packages.filter(pgp_signature__isnull=True) + + if form.cleaned_data['last_update']: + lu = form.cleaned_data['last_update'] + packages = packages.filter(last_update__gte= + datetime(lu.year, lu.month, lu.day, 0, 0)) + + if form.cleaned_data['name']: + name = form.cleaned_data['name'] + packages = packages.filter(pkgname__icontains=name) + + if form.cleaned_data['desc']: + desc = form.cleaned_data['desc'] + packages = packages.filter(pkgdesc__icontains=desc) + + if form.cleaned_data['q']: + query = form.cleaned_data['q'] + q = Q(pkgname__icontains=query) | Q(pkgdesc__icontains=query) + packages = packages.filter(q) + + asked_limit = form.cleaned_data['limit'] + if asked_limit and asked_limit < 0: + limit = None + elif asked_limit: + limit = asked_limit + else: + # Form had errors, don't return any results, just the busted form + packages = Package.objects.none() + else: + form = PackageSearchForm() + + current_query = request.GET.urlencode() + page_dict = { + 'search_form': form, + 'current_query': current_query + } + allowed_sort = ["arch", "repo", "pkgname", "pkgbase", + "compressed_size", "installed_size", + "build_date", "last_update", "flag_date"] + allowed_sort += ["-" + s for s in allowed_sort] + sort = request.GET.get('sort', None) + if sort in allowed_sort: + packages = packages.order_by(sort) + page_dict['sort'] = sort + else: + packages = packages.order_by('pkgname') + + return list_detail.object_list(request, packages, + template_name="packages/search.html", + page=page, + paginate_by=limit, + template_object_name="package", + extra_context=page_dict) + +# vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From 504a8cabfc84b4ecd4fa72ddee288412dfdb7cc3 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 10 Nov 2011 19:21:44 -0600 Subject: packages/view/search: refactor out the form parsing code This is a block of very repetitive code that lends itself well to being a separate method. It would still be nice to find a way to clean this up but that can come later. 
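[Editor's aside, not part of the original message: one conceivable shape for
that later cleanup is to drive the simple single-field filters from a lookup
table instead of a run of if-blocks. A sketch with hypothetical names, assuming
a valid bound form and a package queryset:

    SIMPLE_FILTERS = (
        ('repo', 'repo__name__in'),
        ('arch', 'arch__name__in'),
        ('name', 'pkgname__icontains'),
        ('desc', 'pkgdesc__icontains'),
    )

    def apply_simple_filters(form, packages):
        for field, lookup in SIMPLE_FILTERS:
            value = form.cleaned_data[field]
            if value:
                packages = packages.filter(**{lookup: value})
        return packages

The sentinel-valued fields (orphan maintainer, unknown packager, the
flagged/signed choices) would still need their own branches.]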
Signed-off-by: Dan McGee <dan@archlinux.org> --- packages/views/search.py | 108 +++++++++++++++++++++++++---------------------- 1 file changed, 57 insertions(+), 51 deletions(-) diff --git a/packages/views/search.py b/packages/views/search.py index e2d00d62..57481614 100644 --- a/packages/views/search.py +++ b/packages/views/search.py @@ -36,6 +36,7 @@ class PackageSearchForm(forms.Form): name = forms.CharField(required=False) desc = forms.CharField(required=False) q = forms.CharField(required=False) + sort = forms.CharField(required=False) maintainer = forms.ChoiceField(required=False) packager = forms.ChoiceField(required=False) last_update = forms.DateField(required=False, widget=AdminDateWidget(), @@ -67,68 +68,74 @@ def __init__(self, *args, **kwargs): [('', 'All'), ('unknown', 'Unknown')] + \ [(m.username, m.get_full_name()) for m in maints] +def parse_form(form, packages): + if form.cleaned_data['repo']: + packages = packages.filter( + repo__name__in=form.cleaned_data['repo']) + + if form.cleaned_data['arch']: + packages = packages.filter( + arch__name__in=form.cleaned_data['arch']) + + if form.cleaned_data['maintainer'] == 'orphan': + inner_q = PackageRelation.objects.all().values('pkgbase') + packages = packages.exclude(pkgbase__in=inner_q) + elif form.cleaned_data['maintainer']: + inner_q = PackageRelation.objects.filter( + user__username=form.cleaned_data['maintainer']).values('pkgbase') + packages = packages.filter(pkgbase__in=inner_q) + + if form.cleaned_data['packager'] == 'unknown': + packages = packages.filter(packager__isnull=True) + elif form.cleaned_data['packager']: + packages = packages.filter( + packager__username=form.cleaned_data['packager']) + + if form.cleaned_data['flagged'] == 'Flagged': + packages = packages.filter(flag_date__isnull=False) + elif form.cleaned_data['flagged'] == 'Not Flagged': + packages = packages.filter(flag_date__isnull=True) + + if form.cleaned_data['signed'] == 'Signed': + packages = packages.filter(pgp_signature__isnull=False) + elif form.cleaned_data['signed'] == 'Unsigned': + packages = packages.filter(pgp_signature__isnull=True) + + if form.cleaned_data['last_update']: + lu = form.cleaned_data['last_update'] + packages = packages.filter(last_update__gte= + datetime(lu.year, lu.month, lu.day, 0, 0)) + + if form.cleaned_data['name']: + name = form.cleaned_data['name'] + packages = packages.filter(pkgname__icontains=name) + + if form.cleaned_data['desc']: + desc = form.cleaned_data['desc'] + packages = packages.filter(pkgdesc__icontains=desc) + + if form.cleaned_data['q']: + query = form.cleaned_data['q'] + q = Q(pkgname__icontains=query) | Q(pkgdesc__icontains=query) + packages = packages.filter(q) + + return packages + def search(request, page=None): limit = 50 + sort = None packages = Package.objects.normal() if request.GET: form = PackageSearchForm(data=request.GET) if form.is_valid(): - if form.cleaned_data['repo']: - packages = packages.filter( - repo__name__in=form.cleaned_data['repo']) - - if form.cleaned_data['arch']: - packages = packages.filter( - arch__name__in=form.cleaned_data['arch']) - - if form.cleaned_data['maintainer'] == 'orphan': - inner_q = PackageRelation.objects.all().values('pkgbase') - packages = packages.exclude(pkgbase__in=inner_q) - elif form.cleaned_data['maintainer']: - inner_q = PackageRelation.objects.filter( - user__username=form.cleaned_data['maintainer']).values('pkgbase') - packages = packages.filter(pkgbase__in=inner_q) - - if form.cleaned_data['packager'] == 'unknown': - packages = 
packages.filter(packager__isnull=True) - elif form.cleaned_data['packager']: - packages = packages.filter( - packager__username=form.cleaned_data['packager']) - - if form.cleaned_data['flagged'] == 'Flagged': - packages = packages.filter(flag_date__isnull=False) - elif form.cleaned_data['flagged'] == 'Not Flagged': - packages = packages.filter(flag_date__isnull=True) - - if form.cleaned_data['signed'] == 'Signed': - packages = packages.filter(pgp_signature__isnull=False) - elif form.cleaned_data['signed'] == 'Unsigned': - packages = packages.filter(pgp_signature__isnull=True) - - if form.cleaned_data['last_update']: - lu = form.cleaned_data['last_update'] - packages = packages.filter(last_update__gte= - datetime(lu.year, lu.month, lu.day, 0, 0)) - - if form.cleaned_data['name']: - name = form.cleaned_data['name'] - packages = packages.filter(pkgname__icontains=name) - - if form.cleaned_data['desc']: - desc = form.cleaned_data['desc'] - packages = packages.filter(pkgdesc__icontains=desc) - - if form.cleaned_data['q']: - query = form.cleaned_data['q'] - q = Q(pkgname__icontains=query) | Q(pkgdesc__icontains=query) - packages = packages.filter(q) - + packages = parse_form(form, packages) asked_limit = form.cleaned_data['limit'] if asked_limit and asked_limit < 0: limit = None elif asked_limit: limit = asked_limit + sort = form.cleaned_data['sort'] else: # Form had errors, don't return any results, just the busted form packages = Package.objects.none() @@ -144,7 +151,6 @@ def search(request, page=None): "compressed_size", "installed_size", "build_date", "last_update", "flag_date"] allowed_sort += ["-" + s for s in allowed_sort] - sort = request.GET.get('sort', None) if sort in allowed_sort: packages = packages.order_by(sort) page_dict['sort'] = sort -- cgit v1.2.3-54-g00ecf From 626dd1156dd2b161d90ceb6bf9a9120d982c29f9 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Fri, 11 Nov 2011 10:12:53 -0600 Subject: Add pkgnames array to JSON package signoffs view This makes it easier to match up exact packages with their signoff entry. 
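For consumers of the JSON signoffs feed, a minimal sketch of how the new array might be used; the feed URL and the top-level 'signoff_groups' key here are assumptions for illustration, not something this patch confirms:

    # Hypothetical consumer of the signoffs JSON view (Python 2, matching the
    # codebase). The URL and the 'signoff_groups' key are assumed.
    import json
    import urllib2

    raw = urllib2.urlopen('http://localhost:8000/packages/signoffs/json/').read()
    data = json.loads(raw)
    by_pkgname = {}
    for group in data['signoff_groups']:
        for name in group['pkgnames']:  # the array added by this patch
            by_pkgname[name] = group
    # An exact package name now maps directly to its signoff entry.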
Signed-off-by: Dan McGee <dan@archlinux.org> --- packages/views/signoff.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/views/signoff.py b/packages/views/signoff.py index a42c1c66..26b6e710 100644 --- a/packages/views/signoff.py +++ b/packages/views/signoff.py @@ -154,6 +154,7 @@ def default(self, obj): if isinstance(obj, PackageSignoffGroup): data = dict((attr, getattr(obj, attr)) for attr in self.signoff_group_attrs) + data['pkgnames'] = [p.pkgname for p in obj.packages] data['package_count'] = len(obj.packages) data['approved'] = obj.approved() data.update((attr, getattr(obj.specification, attr)) -- cgit v1.2.3-54-g00ecf From 21d5f818a60ab2626f941f8ff53e263e802494d5 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Fri, 11 Nov 2011 10:42:51 -0600 Subject: Touch up signoff page styles Signed-off-by: Dan McGee <dan@archlinux.org> --- media/archweb.css | 10 +++++----- templates/packages/signoff_cell.html | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/media/archweb.css b/media/archweb.css index 303173f2..c5477422 100644 --- a/media/archweb.css +++ b/media/archweb.css @@ -899,26 +899,26 @@ ul.admin-actions { background: #ffd; } -#dev-signoffs ul { +ul.signoff-list { list-style: none; margin: 0; padding: 0; } -#dev-signoffs .signoff-yes { +.signoff-yes { color: green; font-weight: bold; } -#dev-signoffs .signoff-no { +.signoff-no { color: red; } -#dev-signoffs .signoff-bad { +.signoff-bad { color: darkorange; } -#dev-signoffs .signoff-disabled { +.signoff-disabled { color: gray; } diff --git a/templates/packages/signoff_cell.html b/templates/packages/signoff_cell.html index 0bf44ca2..01a5d58d 100644 --- a/templates/packages/signoff_cell.html +++ b/templates/packages/signoff_cell.html @@ -1,6 +1,6 @@ {% spaceless %} {% if group.signoffs %} -<ul> +<ul class="signoff-list"> {% for signoff in group.signoffs %} <li class="signed-username" title="Signed off by {{ signoff.user }}">{{ signoff.user }}{% if signoff.revoked %} (revoked){% endif %}</li> {% endfor %} -- cgit v1.2.3-54-g00ecf From 022692b3f33de8c45741d3cb27fa95f9f6facdea Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Fri, 11 Nov 2011 10:43:18 -0600 Subject: Show relevant signoffs on dashboard Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/views.py | 5 +++++ packages/utils.py | 7 ++++++- templates/devel/index.html | 50 ++++++++++++++++++++++++++++++++++++++++++++-- 3 files changed, 59 insertions(+), 3 deletions(-) diff --git a/devel/views.py b/devel/views.py index 694ed6dc..0002df04 100644 --- a/devel/views.py +++ b/devel/views.py @@ -18,6 +18,7 @@ from main.models import Arch, Repo from main.models import UserProfile from packages.models import PackageRelation +from packages.utils import get_signoff_groups from todolists.utils import get_annotated_todolists from .utils import get_annotated_maintainers @@ -48,6 +49,9 @@ def index(request): todolists = get_annotated_todolists() todolists = [todolist for todolist in todolists if todolist.incomplete_count > 0] + signoffs = sorted(get_signoff_groups(user=request.user), + key=operator.attrgetter('pkgbase')) + maintainers = get_annotated_maintainers() maintained = PackageRelation.objects.filter( @@ -70,6 +74,7 @@ def index(request): 'orphan': orphan, 'flagged' : flagged, 'todopkgs' : todopkgs, + 'signoffs': signoffs } return direct_to_template(request, 'devel/index.html', page_dict) diff --git a/packages/utils.py b/packages/utils.py index b21ac557..0df0e382 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ 
-330,7 +330,7 @@ def get_target_repo_map(repos): cursor.execute(sql, params) return dict(cursor.fetchall()) -def get_signoff_groups(repos=None): +def get_signoff_groups(repos=None, user=None): if repos is None: repos = Repo.objects.filter(testing=True) repo_ids = [r.pk for r in repos] @@ -340,6 +340,11 @@ def get_signoff_groups(repos=None): packages = test_pkgs.order_by('pkgname') packages = attach_maintainers(packages) + # Filter by user if asked to do so + if user is not None: + packages = [p for p in packages if user == p.packager + or user in p.maintainers] + # Collect all pkgbase values in testing repos pkgtorepo = get_target_repo_map(repos) diff --git a/templates/devel/index.html b/templates/devel/index.html index d3f7ec3b..06cf10ab 100644 --- a/templates/devel/index.html +++ b/templates/devel/index.html @@ -15,8 +15,8 @@ <h3>My Flagged Packages</h3> <thead> <tr> <th>Name</th> - <th>Repo</th> <th>Version</th> + <th>Repo</th> <th>Arch</th> <th>Flagged</th> <th>Last Updated</th> @@ -26,8 +26,8 @@ <h3>My Flagged Packages</h3> {% for pkg in flagged %} <tr class="{% cycle 'odd' 'even' %}"> <td>{% pkg_details_link pkg %}</td> - <td>{{ pkg.repo.name }}</td> <td>{{ pkg.full_version }}</td> + <td>{{ pkg.repo.name }}</td> <td>{{ pkg.arch.name }}</td> <td>{{ pkg.flag_date|date }}</td> <td>{{ pkg.last_update|date }}</td> @@ -96,6 +96,47 @@ <h3>Package Todo Lists</h3> </tbody> </table> + <h3>Signoff Status</h3> + + <table id="dash-signoffs" class="results"> + <thead> + <tr> + <th>Name</th> + <th>Version</th> + <th>Arch</th> + <th>Target Repo</th> + <th>Last Updated</th> + <th>Approved</th> + <th>Signoffs</th> + </tr> + </thead> + <tbody> + {% for group in signoffs %} + <tr class="{% cycle 'odd' 'even' %}"> + <td>{% pkg_details_link group.package %}</td> + <td>{{ group.version }}</td> + <td>{{ group.arch.name }}</td> + <td>{{ group.target_repo }}</td> + <td>{{ group.last_update|date }}</td> + {% if group.specification.known_bad %} + <td class="approval signoff-bad">Bad</td> + {% else %} + {% if not group.specification.enabled %} + <td class="approval signoff-disabled">Disabled</td> + {% else %} + <td class="approval signoff-{{ group.approved|yesno }}">{{ group.approved|yesno|capfirst }}</td> + {% endif %} + {% endif %} + <td><ul class="signoff-list"> + {% for signoff in group.signoffs %} + <li class="signed-username" title="Signed off by {{ signoff.user }}">{{ signoff.user }}{% if signoff.revoked %} (revoked){% endif %}</li> + {% endfor %} + </ul></td> + </tr> + {% endfor %} + </tbody> + </table> + <h3>Developer Reports</h3> <ul> <li><a href="reports/big/">Big</a>: @@ -255,6 +296,11 @@ <h2>Stats by Developer</h2> {widgets: ['zebra'], sortList: [[0,0], [1,0]]}); $("#dash-todo:not(:has(tbody tr.empty))").tablesorter( {widgets: ['zebra'], sortList: [[1,1]]}); + $("#dash-signoffs:not(:has(tbody tr.empty))").tablesorter({ + widgets: ['zebra'], + sortList: [[0,0]], + headers: { 6: {sorter: false } } + }); $(".dash-stats").tablesorter({ widgets: ['zebra'], sortList: [[0,0]], -- cgit v1.2.3-54-g00ecf From a883b0af23143364ab0724fda2ecdef9aba8191f Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Fri, 11 Nov 2011 11:57:04 -0600 Subject: Add a split packages sitemap With very low priority, but this should at least give a few more cross-linking pages to any crawlers using sitemaps. 
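A "split package" here means a pkgbase that builds several differently named packages, none of which is named after the base itself, so no regular package page exists for the base. A rough sketch restating the detection query from get_split_packages_info() in the diff below:

    # Sketch of the split-package detection: exclude self-named packages,
    # then exclude any pkgbase that some other package is named after.
    from django.db.models import F
    from main.models import Package

    pkgnames = Package.objects.values('pkgname')
    split_bases = (Package.objects
            .exclude(pkgname=F('pkgbase'))
            .exclude(pkgbase__in=pkgnames)
            .values_list('pkgbase', flat=True)
            .distinct())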
Signed-off-by: Dan McGee <dan@archlinux.org> --- packages/utils.py | 18 ++++++++++++++++-- sitemaps.py | 17 ++++++++++++++++- urls.py | 1 + 3 files changed, 33 insertions(+), 3 deletions(-) diff --git a/packages/utils.py b/packages/utils.py index 0df0e382..f8e1f2a1 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -2,10 +2,10 @@ from operator import itemgetter from django.db import connection -from django.db.models import Count, Max +from django.db.models import Count, Max, F from django.contrib.auth.models import User -from main.models import Package, Repo +from main.models import Package, Arch, Repo from main.utils import cache_function, groupby_preserve_order, PackageStandin from .models import (PackageGroup, PackageRelation, SignoffSpecification, Signoff, DEFAULT_SIGNOFF_SPEC) @@ -49,6 +49,20 @@ def get_group_info(include_arches=None): groups.extend(val.itervalues()) return sorted(groups, key=itemgetter('name', 'arch')) +def get_split_packages_info(): + '''Return info on split packages that do not have an actual package name + matching the split pkgbase.''' + pkgnames = Package.objects.values('pkgname') + split_pkgs = Package.objects.exclude(pkgname=F('pkgbase')).exclude( + pkgbase__in=pkgnames).values('pkgbase', 'repo', 'arch').annotate( + last_update=Max('last_update')) + all_arches = Arch.objects.in_bulk(set(s['arch'] for s in split_pkgs)) + all_repos = Repo.objects.in_bulk(set(s['repo'] for s in split_pkgs)) + for split in split_pkgs: + split['arch'] = all_arches[split['arch']] + split['repo'] = all_repos[split['repo']] + return split_pkgs + class Difference(object): def __init__(self, pkgname, repo, pkg_a, pkg_b): self.pkgname = pkgname diff --git a/sitemaps.py b/sitemaps.py index 8ac5bc4f..7718002d 100644 --- a/sitemaps.py +++ b/sitemaps.py @@ -3,7 +3,7 @@ from main.models import Package from news.models import News -from packages.utils import get_group_info +from packages.utils import get_group_info, get_split_packages_info class PackagesSitemap(Sitemap): changefreq = "weekly" @@ -41,6 +41,21 @@ def location(self, obj): return '/groups/%s/%s/' % (obj['arch'], obj['name']) +class SplitPackagesSitemap(Sitemap): + changefreq = "weekly" + priority = "0.3" + + def items(self): + return get_split_packages_info() + + def lastmod(self, obj): + return obj['last_update'] + + def location(self, obj): + return '/packages/%s/%s/%s/' % ( + obj['repo'].name.lower(), obj['arch'], obj['pkgbase']) + + class NewsSitemap(Sitemap): changefreq = "never" priority = "0.8" diff --git a/urls.py b/urls.py index adbc8870..1d06f0f2 100644 --- a/urls.py +++ b/urls.py @@ -18,6 +18,7 @@ 'packages': sitemaps.PackagesSitemap, 'package-files': sitemaps.PackageFilesSitemap, 'package-groups': sitemaps.PackageGroupsSitemap, + 'split-packages': sitemaps.SplitPackagesSitemap, } admin.autodiscover() -- cgit v1.2.3-54-g00ecf From b7439cacf08288d8533910e08d093e9276167ed2 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Sun, 13 Nov 2011 12:39:50 -0600 Subject: Fix styling on new signoff list JS creation Signed-off-by: Dan McGee <dan@archlinux.org> --- media/archweb.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/media/archweb.js b/media/archweb.js index 2b8e5d6d..4f098c7d 100644 --- a/media/archweb.js +++ b/media/archweb.js @@ -218,9 +218,9 @@ function signoff_package() { var cell = link.closest('td'); if (data.created) { signoff = $('<li>').addClass('signed-username').text(data.user); - var list = cell.children('ul'); + var list = cell.children('ul.signoff-list'); if 
(list.size() == 0) { - list = $('<ul>').prependTo(cell); + list = $('<ul class="signoff-list">').prependTo(cell); } list.append(signoff); } else if(data.user) { -- cgit v1.2.3-54-g00ecf From 12408702eaf89ea338670ba808da9ef49e35c562 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Mon, 14 Nov 2011 12:19:17 -0600 Subject: Allow population of signoff specs with SVN commit messages This pulls them from the latest SVN commit on trunk. We don't have a failproof method of getting the exact right commit, but this should be close if it is run on a regular basis via cron (aka hourly). Note that running locally, I needed the development version of South to get the migration included here to apply because of information_schema changes in the current version of MySQL. Signed-off-by: Dan McGee <dan@archlinux.org> --- packages/management/commands/populate_signoffs.py | 89 +++++++++++ packages/management/commands/signoff_report.py | 2 +- ...11_auto__chg_field_signoffspecification_user.py | 165 +++++++++++++++++++++ packages/models.py | 2 +- packages/views/signoff.py | 5 + settings.py | 4 + templates/packages/signoffs.html | 2 +- 7 files changed, 266 insertions(+), 3 deletions(-) create mode 100644 packages/management/commands/populate_signoffs.py create mode 100644 packages/migrations/0011_auto__chg_field_signoffspecification_user.py diff --git a/packages/management/commands/populate_signoffs.py b/packages/management/commands/populate_signoffs.py new file mode 100644 index 00000000..5b5acbaf --- /dev/null +++ b/packages/management/commands/populate_signoffs.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- +""" +populate_signoffs command + +Pull the latest commit message from SVN for a given package that is +signoff-eligible and does not have an existing comment attached. 
+ +Usage: ./manage.py populate_signoffs +""" + +from datetime import datetime +import logging +import subprocess +import sys +from xml.etree.ElementTree import XML + +from django.conf import settings +from django.contrib.auth.models import User +from django.core.management.base import NoArgsCommand + +from ...models import SignoffSpecification +from ...utils import get_signoff_groups + +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s -> %(levelname)s: %(message)s', + datefmt='%Y-%m-%d %H:%M:%S', + stream=sys.stderr) +logger = logging.getLogger() + +class Command(NoArgsCommand): + help = "Pull the latest commit message from SVN for a given package that is signoff-eligible and does not have an existing comment attached" + + def handle_noargs(self, **options): + v = int(options.get('verbosity', None)) + if v == 0: + logger.level = logging.ERROR + elif v == 1: + logger.level = logging.INFO + elif v == 2: + logger.level = logging.DEBUG + + return add_signoff_comments() + +def svn_log(pkgbase, repo): + path = '%s%s/%s/trunk/' % (settings.SVN_BASE_URL, repo.svn_root, pkgbase) + cmd = ['svn', 'log', '--limit=1', '--xml', path] + log_data = subprocess.check_output(cmd) + # the XML format is very very simple, especially with only one revision + xml = XML(log_data) + revision = int(xml.find('logentry').get('revision')) + date = datetime.strptime(xml.findtext('logentry/date'), + '%Y-%m-%dT%H:%M:%S.%fZ') + return { + 'revision': revision, + 'date': date, + 'author': xml.findtext('logentry/author'), + 'message': xml.findtext('logentry/msg'), + } + +def create_specification(package, log): + trimmed_message = log['message'].strip() + spec = SignoffSpecification(pkgbase=package.pkgbase, + pkgver=package.pkgver, pkgrel=package.pkgrel, + epoch=package.epoch, arch=package.arch, repo=package.repo, + comments=trimmed_message) + try: + spec.user = User.objects.get(username=log['author']) + except User.DoesNotExist: + pass + + return spec + +def add_signoff_comments(): + logger.info("getting all signoff groups") + groups = get_signoff_groups() + logger.info("%d signoff groups found", len(groups)) + + for group in groups: + if not group.default_spec: + continue + + logger.debug("getting SVN log for %s (%s)", group.pkgbase, group.repo) + log = svn_log(group.pkgbase, group.repo) + logger.info("creating spec with SVN message for %s", group.pkgbase) + spec = create_specification(group.packages[0], log) + spec.save() + +# vim: set ts=4 sw=4 et: diff --git a/packages/management/commands/signoff_report.py b/packages/management/commands/signoff_report.py index 3357bc1e..3b67f518 100644 --- a/packages/management/commands/signoff_report.py +++ b/packages/management/commands/signoff_report.py @@ -22,7 +22,7 @@ from operator import attrgetter import sys -from main.models import Package, Repo +from main.models import Repo from packages.models import Signoff from packages.utils import get_signoff_groups diff --git a/packages/migrations/0011_auto__chg_field_signoffspecification_user.py b/packages/migrations/0011_auto__chg_field_signoffspecification_user.py new file mode 100644 index 00000000..f6e3cdd9 --- /dev/null +++ b/packages/migrations/0011_auto__chg_field_signoffspecification_user.py @@ -0,0 +1,165 @@ +# encoding: utf-8 +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +class Migration(SchemaMigration): + + def forwards(self, orm): + db.alter_column('packages_signoffspecification', 'user_id', 
self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True)) + + def backwards(self, orm): + db.alter_column('packages_signoffspecification', 'user_id', self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['auth.User'])) + + models = { + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'main.arch': { + 'Meta': {'ordering': "['name']", 'object_name': 'Arch', 'db_table': "'arches'"}, + 'agnostic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) + }, + 'main.package': { + 'Meta': {'ordering': "('pkgname',)", 'object_name': 'Package', 'db_table': "'packages'"}, + 'arch': 
('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Arch']"}), + 'build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'compressed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), + 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'files_last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'flag_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'installed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'last_update': ('django.db.models.fields.DateTimeField', [], {}), + 'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}), + 'packager_str': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pgp_signature': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgdesc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), + 'pkgname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'repo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Repo']"}), + 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}) + }, + 'main.repo': { + 'Meta': {'ordering': "['name']", 'object_name': 'Repo', 'db_table': "'repos'"}, + 'bugs_category': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), + 'bugs_project': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'staging': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'svn_root': ('django.db.models.fields.CharField', [], {'max_length': '64'}), + 'testing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) + }, + 'packages.conflict': { + 'Meta': {'ordering': "['name']", 'object_name': 'Conflict'}, + 'comparison': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'conflicts'", 'to': "orm['main.Package']"}), + 'version': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}) + }, + 'packages.license': { + 'Meta': {'ordering': "['name']", 'object_name': 'License'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'licenses'", 'to': "orm['main.Package']"}) + }, + 'packages.packagegroup': { + 'Meta': {'object_name': 'PackageGroup'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 
'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'groups'", 'to': "orm['main.Package']"}) + }, + 'packages.packagerelation': { + 'Meta': {'unique_together': "(('pkgbase', 'user', 'type'),)", 'object_name': 'PackageRelation'}, + 'created': ('django.db.models.fields.DateTimeField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'type': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_relations'", 'to': "orm['auth.User']"}) + }, + 'packages.provision': { + 'Meta': {'ordering': "['name']", 'object_name': 'Provision'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'provides'", 'to': "orm['main.Package']"}), + 'version': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}) + }, + 'packages.replacement': { + 'Meta': {'ordering': "['name']", 'object_name': 'Replacement'}, + 'comparison': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'replaces'", 'to': "orm['main.Package']"}), + 'version': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}) + }, + 'packages.signoff': { + 'Meta': {'object_name': 'Signoff'}, + 'arch': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Arch']"}), + 'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'created': ('django.db.models.fields.DateTimeField', [], {}), + 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'repo': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Repo']"}), + 'revoked': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_signoffs'", 'to': "orm['auth.User']"}) + }, + 'packages.signoffspecification': { + 'Meta': {'object_name': 'SignoffSpecification'}, + 'arch': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Arch']"}), + 'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'created': ('django.db.models.fields.DateTimeField', [], {}), + 'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'known_bad': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 
'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'repo': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Repo']"}), + 'required': ('django.db.models.fields.PositiveIntegerField', [], {'default': '2'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}) + } + } + + complete_apps = ['packages'] diff --git a/packages/models.py b/packages/models.py index b70c21bf..0d02ab31 100644 --- a/packages/models.py +++ b/packages/models.py @@ -73,7 +73,7 @@ class SignoffSpecification(models.Model): epoch = models.PositiveIntegerField(default=0) arch = models.ForeignKey('main.Arch') repo = models.ForeignKey('main.Repo') - user = models.ForeignKey(User) + user = models.ForeignKey(User, null=True) created = models.DateTimeField(editable=False) required = models.PositiveIntegerField(default=2, help_text="How many signoffs are required for this package?") diff --git a/packages/views/signoff.py b/packages/views/signoff.py index 26b6e710..e57b4d9a 100644 --- a/packages/views/signoff.py +++ b/packages/views/signoff.py @@ -98,7 +98,10 @@ def _signoff_options_all(request, name, repo): pkgver=package.pkgver, pkgrel=package.pkgrel, epoch=package.epoch, arch=package.arch, repo=package.repo) + + if spec.user is None: spec.user = request.user + form = SignoffOptionsForm(request.POST, instance=spec) if form.is_valid(): form.save() @@ -122,6 +125,8 @@ def signoff_options(request, name, repo, arch): spec = SignoffSpecification(pkgbase=package.pkgbase, pkgver=package.pkgver, pkgrel=package.pkgrel, epoch=package.epoch, arch=package.arch, repo=package.repo) + + if spec.user is None: spec.user = request.user if request.POST: diff --git a/settings.py b/settings.py index 51f9fcf6..80e024af 100644 --- a/settings.py +++ b/settings.py @@ -134,4 +134,8 @@ # URL to fetch a current list of available ISOs ISO_LIST_URL = 'http://releng.archlinux.org/isos/' +# URL for SVN access for fetching commit messages (note absence of packages or +# community bit on the end, repo.svn_root is appended) +SVN_BASE_URL = 'svn+ssh://svn.archlinux.org/srv/svn-' + # vim: set ts=4 sw=4 et: diff --git a/templates/packages/signoffs.html b/templates/packages/signoffs.html index f4511f75..bd84289c 100644 --- a/templates/packages/signoffs.html +++ b/templates/packages/signoffs.html @@ -71,7 +71,7 @@ <h3>Filter Displayed Signoffs</h3> {% if spec.required != 2 %}Required signoffs: {{ spec.required }}<br/>{% endif %} {% if not spec.enabled %}Signoffs are not currently enabled<br/>{% endif %} {% if spec.known_bad %}Package is known to be bad<br/>{% endif %} - {{ spec.comments|default:""|linebreaks }} + {{ spec.comments|default:""|linebreaksbr }} {% endwith %}{% endif %}</td> </tr> {% endfor %} -- cgit v1.2.3-54-g00ecf From 5b63c29fe1c37ce9d946adedeaf13f5ad94d144a Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Tue, 15 Nov 2011 14:03:36 -0600 Subject: Show full names on developer user list pages The old display format doesn't really make sense. Also fix the invalid HTML generated by the PGP tag link; we need to escape using &amp; inside the generated URLs.
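To illustrate the escaping issue: a bare '&' inside an HTML attribute value is invalid markup and must appear as '&amp;'. The patch hand-escapes the format string; an equivalent programmatic sketch using cgi.escape() from the Python 2 standard library (the key server hostname is a placeholder):

    from cgi import escape  # Python 2 stdlib HTML escaping

    url = 'http://pgp.example.org:11371/pks/lookup?op=vindex&search=0xDEADBEEF'
    # quote=True also escapes double quotes, making the value attribute-safe;
    # '&' becomes '&amp;', so the generated anchor now validates.
    link = '<a href="%s">0xDEADBEEF</a>' % escape(url, quote=True)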
Signed-off-by: Dan McGee <dan@archlinux.org> --- main/templatetags/pgp.py | 2 +- media/archweb.css | 4 ++++ templates/public/developer_list.html | 2 +- templates/public/userlist.html | 2 -- 4 files changed, 6 insertions(+), 4 deletions(-) diff --git a/main/templatetags/pgp.py b/main/templatetags/pgp.py index f875c11e..67f5e08d 100644 --- a/main/templatetags/pgp.py +++ b/main/templatetags/pgp.py @@ -21,7 +21,7 @@ def pgp_key_link(key_id): pgp_server = getattr(settings, 'PGP_SERVER', None) if not pgp_server: return format_key(key_id) - url = 'http://%s/pks/lookup?op=vindex&fingerprint=on&exact=on&search=0x%s' % \ + url = 'http://%s/pks/lookup?op=vindex&amp;fingerprint=on&amp;exact=on&amp;search=0x%s' % \ (pgp_server, key_id) values = (url, format_key(key_id), key_id[-8:]) return '<a href="%s" title="PGP key search for %s">0x%s</a>' % values diff --git a/media/archweb.css b/media/archweb.css index c5477422..f4bb92fa 100644 --- a/media/archweb.css +++ b/media/archweb.css @@ -802,6 +802,10 @@ div#arch-bio-toc { text-align: center; } + div#arch-bio-toc a { + white-space: nowrap; + } + table.arch-bio-entry td.pic { vertical-align: top; padding-right: 15px; diff --git a/templates/public/developer_list.html b/templates/public/developer_list.html index 2abbbfe4..0ac444e5 100644 --- a/templates/public/developer_list.html +++ b/templates/public/developer_list.html @@ -4,7 +4,7 @@ <p> {% for dev in dev_list %} <a href="#{{ dev.username }}" title="Jump to profile for {{ dev.get_full_name }}"> - {{ dev.first_name }}{{ dev.last_name.0|capfirst}}</a>&nbsp;&nbsp;&nbsp;&nbsp; + {{ dev.first_name }} {{ dev.last_name }}</a>&nbsp;&nbsp;&nbsp;&nbsp; {% endfor %} </p> </div> diff --git a/templates/public/userlist.html b/templates/public/userlist.html index c51215c3..0077f611 100644 --- a/templates/public/userlist.html +++ b/templates/public/userlist.html @@ -6,7 +6,6 @@ {% block content %} {% cache 600 dev-tu-profiles user_type %} <div id="dev-tu-profiles" class="box"> - <h2>Arch Linux {{user_type}}</h2> <p>{{description}}</p> @@ -14,7 +13,6 @@ <h2>Arch Linux {{user_type}}</h2> {% with users as dev_list %} {% include 'public/developer_list.html' %} {% endwith %} - </div> {% endcache %} {% endblock %} -- cgit v1.2.3-54-g00ecf From 84679f90a772e591dae2dea28194730d5453e6cf Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Tue, 15 Nov 2011 14:37:46 -0600 Subject: Move certain settings above local settings import This ensures one can override them in local_settings.py if necessary.
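The ordering matters because 'from local_settings import *' simply rebinds module-level names: any default defined above the wildcard import can be replaced, while anything defined after it cannot. A minimal sketch of the pattern (the override value is hypothetical):

    # settings.py -- defaults first, wildcard import last
    PGP_SERVER = 'pgp.mit.edu:11371'   # overridable default
    from local_settings import *       # may rebind PGP_SERVER

    # local_settings.py (per deployment; this override is hypothetical)
    # PGP_SERVER = 'keyserver.example.com:11371'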
Signed-off-by: Dan McGee <dan@archlinux.org> --- settings.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/settings.py b/settings.py index 80e024af..77f84ba2 100644 --- a/settings.py +++ b/settings.py @@ -112,8 +112,16 @@ 'visualize', ) +## Server used for linking to PGP keysearch results PGP_SERVER = 'pgp.mit.edu:11371' +# URL to fetch a current list of available ISOs +ISO_LIST_URL = 'http://releng.archlinux.org/isos/' + +# URL for SVN access for fetching commit messages (note absence of packages or +# community bit on the end, repo.svn_root is appended) +SVN_BASE_URL = 'svn+ssh://svn.archlinux.org/srv/svn-' + ## Import local settings from local_settings import * @@ -131,11 +139,4 @@ INSTALLED_APPS = list(INSTALLED_APPS) + [ 'debug_toolbar' ] -# URL to fetch a current list of available ISOs -ISO_LIST_URL = 'http://releng.archlinux.org/isos/' - -# URL for SVN access for fetching commit messages (note absence of packages or -# community bit on the end, repo.svn_root is appended) -SVN_BASE_URL = 'svn+ssh://svn.archlinux.org/srv/svn-' - # vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From 0344f8ad564644c50203985255fab1d053aed463 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Tue, 15 Nov 2011 15:04:33 -0600 Subject: Add ability to cache users by username on the UserFinder This is very useful in the signoff message population script where we are very likely to encounter the same users over and over. Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/utils.py | 16 ++++++++++++++++ packages/management/commands/populate_signoffs.py | 13 ++++++------- 2 files changed, 22 insertions(+), 7 deletions(-) diff --git a/devel/utils.py b/devel/utils.py index d7a154a8..62b12cd5 100644 --- a/devel/utils.py +++ b/devel/utils.py @@ -47,6 +47,7 @@ def get_annotated_maintainers(): class UserFinder(object): def __init__(self): self.cache = {} + self.username_cache = {} @staticmethod def user_email(name, email): @@ -111,7 +112,22 @@ def find(self, userstring): self.cache[userstring] = user return user + def find_by_username(self, username): + if not username: + return None + if username in self.username_cache: + return self.username_cache[username] + + try: + user = User.objects.get(username=username) + except User.DoesNotExist: + user = None + + self.username_cache[username] = user + return user + def clear_cache(self): self.cache = {} + self.username_cache = {} # vim: set ts=4 sw=4 et: diff --git a/packages/management/commands/populate_signoffs.py b/packages/management/commands/populate_signoffs.py index 5b5acbaf..ce5ec734 100644 --- a/packages/management/commands/populate_signoffs.py +++ b/packages/management/commands/populate_signoffs.py @@ -20,6 +20,7 @@ from ...models import SignoffSpecification from ...utils import get_signoff_groups +from devel.utils import UserFinder logging.basicConfig( level=logging.INFO, @@ -58,17 +59,13 @@ def svn_log(pkgbase, repo): 'message': xml.findtext('logentry/msg'), } -def create_specification(package, log): +def create_specification(package, log, finder): trimmed_message = log['message'].strip() spec = SignoffSpecification(pkgbase=package.pkgbase, pkgver=package.pkgver, pkgrel=package.pkgrel, epoch=package.epoch, arch=package.arch, repo=package.repo, comments=trimmed_message) - try: - spec.user = User.objects.get(username=log['author']) - except User.DoesNotExist: - pass - + spec.user = finder.find_by_username(log['author']) return spec def add_signoff_comments(): @@ -76,6 +73,8 @@ def add_signoff_comments(): 
groups = get_signoff_groups() logger.info("%d signoff groups found", len(groups)) + finder = UserFinder() + for group in groups: if not group.default_spec: continue @@ -83,7 +82,7 @@ logger.debug("getting SVN log for %s (%s)", group.pkgbase, group.repo) log = svn_log(group.pkgbase, group.repo) logger.info("creating spec with SVN message for %s", group.pkgbase) - spec = create_specification(group.packages[0], log) + spec = create_specification(group.packages[0], log, finder) spec.save() # vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From 64f5799ef7fb76ed0e8759359b4ee8127e8903f5 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Tue, 15 Nov 2011 15:15:08 -0600 Subject: Fix up empty table display on dev dashboard Fix the colspan for the existing tables, and add a notice for the new signoffs table which did not have one. Thanks-to: Andrea Scarpino <andrea@archlinux.org> Signed-off-by: Dan McGee <dan@archlinux.org> --- templates/devel/index.html | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/templates/devel/index.html b/templates/devel/index.html index 06cf10ab..0f0ded38 100644 --- a/templates/devel/index.html +++ b/templates/devel/index.html @@ -33,7 +33,7 @@ <h3>My Flagged Packages</h3> <td>{{ pkg.last_update|date }}</td> </tr> {% empty %} - <tr class="empty"><td colspan="4"><em>No flagged packages to display</em></td></tr> + <tr class="empty"><td colspan="6"><em>No flagged packages to display</em></td></tr> {% endfor %} </tbody> </table> @@ -91,7 +91,7 @@ <h3>Package Todo Lists</h3> <td>{{ todo.incomplete_count }}</td> </tr> {% empty %} - <tr class="empty"><td colspan="3"><em>No package todo lists to display</em></td></tr> + <tr class="empty"><td colspan="6"><em>No package todo lists to display</em></td></tr> {% endfor %} </tbody> </table> @@ -133,6 +133,8 @@ <h3>Signoff Status</h3> {% endfor %} </ul></td> </tr> + {% empty %} + <tr class="empty"><td colspan="7"><em>No packages you maintain or have packaged need signoffs</em></td></tr> {% endfor %} </tbody> </table> -- cgit v1.2.3-54-g00ecf From 9550236a87fc65827e994bea108350a43d3f161f Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Tue, 15 Nov 2011 21:26:57 -0600 Subject: Improve primary arch validation Ensure we can accept either an Arch object or an architecture name when passed to read_repo() by moving the validation there and being a bit more careful about typechecking and object lookup. Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/management/commands/reporead.py | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py index a8e3219e..b4966834 100644 --- a/devel/management/commands/reporead.py +++ b/devel/management/commands/reporead.py @@ -51,8 +51,6 @@ class Command(BaseCommand): def handle(self, arch=None, filename=None, **options): if not arch: raise CommandError('Architecture is required.') - if not validate_arch(arch): - raise CommandError('Specified architecture %s is not currently known.' % arch) if not filename: raise CommandError('Package database file is required.') filename = os.path.normpath(filename) @@ -464,29 +462,38 @@ def parse_repo(repopath): logger.info("Finished repo parsing, %d total packages", len(pkgs)) return (reponame, pkgs.values()) -def validate_arch(archname): +def locate_arch(arch): "Check if arch is valid."
- return Arch.objects.filter(name__iexact=archname).exists() + if isinstance(arch, Arch): + return arch + try: + return Arch.objects.get(name__iexact=arch) + except Arch.DoesNotExist: + raise CommandError( + 'Specified architecture %s is not currently known.' % arch) + def read_repo(primary_arch, repo_file, options): """ Parses repo.db.tar.gz file and returns exit status. """ + # always returns an Arch object, regardless of what is passed in + primary_arch = locate_arch(primary_arch) + repo, packages = parse_repo(repo_file) # group packages by arch -- to handle noarch stuff packages_arches = {} for arch in Arch.objects.filter(agnostic=True): packages_arches[arch.name] = [] - packages_arches[primary_arch] = [] + packages_arches[primary_arch.name] = [] for package in packages: if package.arch in packages_arches: packages_arches[package.arch].append(package) else: # we don't include mis-arched packages - logger.warning("Package %s arch = %s", - package.name, package.arch) + logger.warning("Package %s arch = %s", package.name, package.arch) del packages logger.info('Starting database updates.') -- cgit v1.2.3-54-g00ecf From 2a2df0074e39a797a0a4b5f7db7cfc9097301328 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Tue, 15 Nov 2011 21:28:42 -0600 Subject: Add new reporead_inotify management command This is the new on-the-fly updates hotness. Rather than continue to schedule reporead to run once an hour in cron or however else you ran it, this command can be run once and left running, and will automagically pick up on any database file changes and run an import. It operates on the files databases only; this will keep both the packages and files always in sync and remove the delay in updating, especially helpful for new testing packages. Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/management/commands/reporead_inotify.py | 188 ++++++++++++++++++++++ 1 file changed, 188 insertions(+) create mode 100755 devel/management/commands/reporead_inotify.py diff --git a/devel/management/commands/reporead_inotify.py b/devel/management/commands/reporead_inotify.py new file mode 100755 index 00000000..135c0367 --- /dev/null +++ b/devel/management/commands/reporead_inotify.py @@ -0,0 +1,188 @@ +# -*- coding: utf-8 -*- +""" +reporead_inotify command + +Watches repo.files.tar.gz files for updates and parses them after a short delay +in order to catch all updates in a single bulk update. + +Usage: ./manage.py reporead_inotify [path_template] + +Where 'path_template' is an optional path template for finding the +repo.files.tar.gz files. The form is '/srv/ftp/%(repo)s/os/%(arch)s/', which is +also the default template if none is specified. While 'repo' is not required to +be present in the path_template, note that 'arch' is, so that reporead can function +correctly. +""" + +import logging +import os.path +import pyinotify +import sys +import threading +import time + +from django.core.management.base import BaseCommand, CommandError + +from main.models import Arch, Repo +from .reporead import read_repo + +logging.basicConfig( + level=logging.WARNING, + format='%(asctime)s -> %(levelname)s: %(message)s', + datefmt='%Y-%m-%d %H:%M:%S', + stream=sys.stderr) +logger = logging.getLogger() + +class Command(BaseCommand): + help = "Watch database files and run an update when necessary."
+ args = "[path_template]" + + def handle(self, path_template=None, **options): + v = int(options.get('verbosity', 0)) + if v == 0: + logger.level = logging.ERROR + elif v == 1: + logger.level = logging.INFO + elif v == 2: + logger.level = logging.DEBUG + + if not path_template: + path_template = '/srv/ftp/%(repo)s/os/%(arch)s/' + self.path_template = path_template + + notifier = self.setup_notifier() + logger.info('Entering notifier loop') + notifier.loop() + + def setup_notifier(self): + '''Set up and configure the inotify machinery and logic. + This takes the provided or default path_template and builds a list of + directories we need to watch for database updates. It then validates + and passes these on to the various pyinotify pieces as necessary and + finally builds and returns a notifier object.''' + arches = Arch.objects.filter(agnostic=False) + repos = Repo.objects.all() + arch_path_map = dict((arch, None) for arch in arches) + all_paths = set() + total_paths = 0 + for arch in arches: + combos = ({ 'repo': repo.name.lower(), 'arch': arch.name } + for repo in repos) + # take a python format string and generate all unique combinations + # of directories from it; using set() ensures we filter it down + paths = set(self.path_template % values for values in combos) + total_paths += len(paths) + all_paths |= paths + arch_path_map[arch] = paths + + logger.info('Watching %d total paths', total_paths) + logger.debug(all_paths) + + # sanity check- basically ensure every path we created from the + # template mapped to only one architecture + if total_paths != len(all_paths): + raise CommandError('path template did not uniquely ' + 'determine architecture for each file') + + # A proper atomic replacement of the database as done by rsync is type + # IN_MOVED_TO. repo-add/remove will finish with a IN_CLOSE_WRITE. + mask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_MOVED_TO + + manager = pyinotify.WatchManager() + for name in all_paths: + manager.add_watch(name, mask) + + handler = EventHandler(arch_paths=arch_path_map) + return pyinotify.Notifier(manager, handler) + + +class Database(object): + '''A object representing a pacman database on the filesystem. 
It stores + various bits of metadata and state representing the file path, when we last + updated, how long our delay is before performing the update, whether we are + updating now, etc.''' + def __init__(self, arch, path, delay=60.0): + self.arch = arch + self.path = path + self.delay = delay + self.mtime = None + self.last_import = None + self.update_thread = None + self.updating = False + self.run_again = False + self.lock = threading.Lock() + + def _start_update_countdown(self): + self.update_thread = threading.Timer(self.delay, self.update) + logger.info('Starting %.1f second countdown to update %s', + self.delay, self.path) + self.update_thread.start() + + def queue_for_update(self, mtime): + logger.debug('Queueing database %s...', self.path) + with self.lock: + self.mtime = mtime + if self.updating: + # store the fact that we will need to run it again + self.run_again = True + return + if self.update_thread: + self.update_thread.cancel() + self._start_update_countdown() + + def update(self): + logger.debug('Updating database %s...', self.path) + with self.lock: + self.last_import = time.time() + self.updating = True + + try: + # invoke reporead's primary method + read_repo(self.arch, self.path, {}) + finally: + logger.debug('Done updating database %s.', self.path) + with self.lock: + self.update_thread = None + self.updating = False + if self.run_again: + self.run_again = False + self._start_update_countdown() + + +class EventHandler(pyinotify.ProcessEvent): + '''Our main event handler which listens for database change events. Because + we are watching the whole directory, we filter down and only look at those + events dealing with files databases.''' + + def my_init(self, **kwargs): + self.databases = {} + self.arch_lookup = {} + + # we really want a single path to arch mapping, so massage the data + arch_paths = kwargs['arch_paths'] + for arch, paths in arch_paths.items(): + self.arch_lookup.update((path.rstrip('/'), arch) for path in paths) + + def process_default(self, event): + '''Primary event processing function which kicks off reporead timer + threads if a files database was updated.''' + if not event.name: + return + # screen to only the files we care about + if event.name.endswith('.files.tar.gz'): + path = event.pathname + stat = os.stat(path) + database = self.databases.get(path, None) + if database is None: + arch = self.arch_lookup.get(event.path, None) + if arch is None: + logger.warning( + 'Could not determine arch for %s, skipping update', + path) + return + database = Database(arch, path) + self.databases[path] = database + database.queue_for_update(stat.st_mtime) + + +# vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From d3b36e1ce992a8b70f4fe8fe9e8df74e835fb865 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Tue, 15 Nov 2011 22:03:01 -0600 Subject: reporead_inotify: cancel threads that haven't started yet on shutdown Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/management/commands/reporead_inotify.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/devel/management/commands/reporead_inotify.py b/devel/management/commands/reporead_inotify.py index 135c0367..4c865ce1 100755 --- a/devel/management/commands/reporead_inotify.py +++ b/devel/management/commands/reporead_inotify.py @@ -54,6 +54,11 @@ def handle(self, path_template=None, **options): logger.info('Entering notifier loop') notifier.loop() + logger.info('Cancelling remaining threads...') + for thread in threading.enumerate(): + if hasattr(thread, 'cancel'): + thread.cancel() 
+ def setup_notifier(self): '''Set up and configure the inotify machinery and logic. This takes the provided or default path_template and builds a list of -- cgit v1.2.3-54-g00ecf From 79dde0389d5f185fc71e2ac37e1bc9fc2c9b3af7 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Tue, 15 Nov 2011 22:05:34 -0600 Subject: Update requirements.txt Signed-off-by: Dan McGee <dan@archlinux.org> --- requirements.txt | 2 +- requirements_prod.txt | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 27fda229..fd58616e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ Django==1.3.1 Markdown==2.0.3 South==0.7.3 -pytz>=2011c +pytz>=2011n diff --git a/requirements_prod.txt b/requirements_prod.txt index 9749a072..47d37ce2 100644 --- a/requirements_prod.txt +++ b/requirements_prod.txt @@ -2,5 +2,6 @@ Django==1.3.1 Markdown==2.0.3 MySQL-python==1.2.3 South==0.7.3 +pyinotify==0.9.2 python-memcached==1.47 -pytz>=2011c +pytz>=2011n -- cgit v1.2.3-54-g00ecf From c00e7e84045613ee2aa80f66b9972db971ab3f26 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Tue, 15 Nov 2011 23:43:42 -0600 Subject: reporead: clean up some debug logging Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/management/commands/reporead.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py index b4966834..45229524 100644 --- a/devel/management/commands/reporead.py +++ b/devel/management/commands/reporead.py @@ -36,6 +36,8 @@ format='%(asctime)s -> %(levelname)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S', stream=sys.stderr) +TRACE = 5 +logging.addLevelName(TRACE, 'TRACE') logger = logging.getLogger() class Command(BaseCommand): @@ -368,7 +370,7 @@ def db_update(archname, reponame, pkgs, options): # packages in both database and in syncdb (update in database) pkg_in_both = syncset & dbset for p in [x for x in pkgs if x.name in pkg_in_both]: - logger.debug("Looking for package updates") + logger.debug("Checking package %s", p.name) dbp = dbdict[p.name] timestamp = None # for a force, we don't want to update the timestamp. 
@@ -406,7 +408,7 @@ def parse_info(iofile): continue elif line.startswith('%') and line.endswith('%'): blockname = line[1:-1].lower() - logger.debug("Parsing package block %s", blockname) + logger.log(TRACE, "Parsing package block %s", blockname) store[blockname] = [] elif blockname: store[blockname].append(line) @@ -456,7 +458,7 @@ def parse_repo(repopath): tarinfo.name) data_file.close() - logger.debug("Done parsing file %s", fname) + logger.debug("Done parsing file %s/%s", pkgid, fname) repodb.close() logger.info("Finished repo parsing, %d total packages", len(pkgs)) -- cgit v1.2.3-54-g00ecf From 404c4b400b2bd2a14e0363e33d66505c51903fe7 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Wed, 16 Nov 2011 12:48:36 -0600 Subject: reporead: a few small tweaks Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/management/commands/reporead.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py index 45229524..ad76db4d 100644 --- a/devel/management/commands/reporead.py +++ b/devel/management/commands/reporead.py @@ -450,13 +450,14 @@ def parse_repo(repopath): continue data_file = repodb.extractfile(tarinfo) data_file = io.TextIOWrapper(io.BytesIO(data_file.read()), - encoding='utf=8') + encoding='UTF-8') try: pkgs[pkgid].populate(parse_info(data_file)) except UnicodeDecodeError: logger.warn("Could not correctly decode %s, skipping file", tarinfo.name) data_file.close() + del data_file logger.debug("Done parsing file %s/%s", pkgid, fname) @@ -498,10 +499,10 @@ def read_repo(primary_arch, repo_file, options): logger.warning("Package %s arch = %s", package.name, package.arch) del packages - logger.info('Starting database updates.') + logger.info('Starting database updates for %s.', repo_file) for arch in sorted(packages_arches.keys()): db_update(arch, repo, packages_arches[arch], options) - logger.info('Finished database updates.') + logger.info('Finished database updates for %s.', repo_file) return 0 # vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From b1e406d73844d5a30344ca8ac855fe850c52bc2f Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Wed, 16 Nov 2011 12:49:17 -0600 Subject: reporead_inotify: spin up read_repo() in separate thread This prevents memory usage from ballooning to absolutely huge values, such as when multiple threads kick off at the same time. The bulk of our memory allocation obviously comes in these threads and not the main threads, so being able to isolate them in processes helps a lot. Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/management/commands/reporead_inotify.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/devel/management/commands/reporead_inotify.py b/devel/management/commands/reporead_inotify.py index 4c865ce1..ffd49b8f 100755 --- a/devel/management/commands/reporead_inotify.py +++ b/devel/management/commands/reporead_inotify.py @@ -15,6 +15,7 @@ """ import logging +import multiprocessing import os.path import pyinotify import sys @@ -133,6 +134,7 @@ def queue_for_update(self, mtime): return if self.update_thread: self.update_thread.cancel() + self.update_thread = None self._start_update_countdown() def update(self): @@ -142,8 +144,13 @@ def update(self): self.updating = True try: - # invoke reporead's primary method - read_repo(self.arch, self.path, {}) + # invoke reporead's primary method. 
we do this in a separate + # process for memory conservation purposes; these processes grow + # rather large so it is best to free up the memory ASAP. + process = multiprocessing.Process(target=read_repo, + args=[self.arch, self.path, {}]) + process.start() + process.join() finally: logger.debug('Done updating database %s.', self.path) with self.lock: -- cgit v1.2.3-54-g00ecf From aa20c798ca8af365b2549591700e932a74d068b8 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Wed, 16 Nov 2011 13:02:35 -0600 Subject: reporead_inotify: close connection once we are done with it This prevents an otherwise idle connection from sitting around and being totally useless. Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/management/commands/reporead_inotify.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/devel/management/commands/reporead_inotify.py b/devel/management/commands/reporead_inotify.py index ffd49b8f..acb53a54 100755 --- a/devel/management/commands/reporead_inotify.py +++ b/devel/management/commands/reporead_inotify.py @@ -23,6 +23,7 @@ import time from django.core.management.base import BaseCommand, CommandError +from django.db import connection from main.models import Arch, Repo from .reporead import read_repo @@ -90,6 +91,11 @@ def setup_notifier(self): raise CommandError('path template did not uniquely ' 'determine architecture for each file') + # this thread is done using the database; all future access is done in + # the spawned read_repo() processes, so close the otherwise completely + # idle connection. + connection.close() + # A proper atomic replacement of the database as done by rsync is type # IN_MOVED_TO. repo-add/remove will finish with a IN_CLOSE_WRITE. mask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_MOVED_TO -- cgit v1.2.3-54-g00ecf From 9d2fdbe5bc6a0d9ab2907b377056851fc5eb56c3 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 17 Nov 2011 10:17:55 -0600 Subject: reporead_inotify: nice the spawned subprocesses This prevents the reporead job from taking over time from more important processes; this is not a rush task. Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/management/commands/reporead_inotify.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/devel/management/commands/reporead_inotify.py b/devel/management/commands/reporead_inotify.py index acb53a54..c74762eb 100755 --- a/devel/management/commands/reporead_inotify.py +++ b/devel/management/commands/reporead_inotify.py @@ -16,7 +16,7 @@ import logging import multiprocessing -import os.path +import os import pyinotify import sys import threading @@ -113,10 +113,11 @@ class Database(object): various bits of metadata and state representing the file path, when we last updated, how long our delay is before performing the update, whether we are updating now, etc.''' - def __init__(self, arch, path, delay=60.0): + def __init__(self, arch, path, delay=60.0, nice=3): self.arch = arch self.path = path self.delay = delay + self.nice = nice self.mtime = None self.last_import = None self.update_thread = None @@ -153,8 +154,12 @@ def update(self): # invoke reporead's primary method. we do this in a separate # process for memory conservation purposes; these processes grow # rather large so it is best to free up the memory ASAP. 
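The hunk below replaces the bare Process(target=read_repo) call with a small wrapper so the child can renice itself before doing any work; the parent watcher's priority is untouched. The fork-and-wait pattern in isolation, as a hedged sketch (heavy_import is a hypothetical stand-in for read_repo):

    import multiprocessing
    import os

    def heavy_import(path, nice_level=3):
        if nice_level != 0:
            os.nice(nice_level)  # deprioritize only this child process
        # stand-in for read_repo(): allocate heavily, then exit so the
        # operating system reclaims the memory all at once
        return sum(1 for _ in open(path))

    def run_isolated(path):
        process = multiprocessing.Process(target=heavy_import, args=(path,))
        process.start()
        process.join()  # block like update() does; memory is freed on child exit

    if __name__ == '__main__':
        run_isolated('/etc/passwd')
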
- process = multiprocessing.Process(target=read_repo, - args=[self.arch, self.path, {}]) + def run(): + if self.nice != 0: + os.nice(self.nice) + read_repo(self.arch, self.path, {}) + + process = multiprocessing.Process(target=run) process.start() process.join() finally: -- cgit v1.2.3-54-g00ecf From a9819e3d715ce3e5c20c9665db9a6100f06ab562 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 17 Nov 2011 12:34:12 -0600 Subject: Ensure reporead is protected against simultaneous runs This adds a bunch of transaction magic and SELECT FOR UPDATE stuff to reporead to cope with the now-concurrent runs of reporead we get when invoked from our inotify-based updater. The collision occurs with 'any' architecture packages as both repo databases contain the new version, and the updates occur at exactly the same time. Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/management/commands/reporead.py | 206 +++++++++++++------------ main/migrations/0055_unique_package_in_repo.py | 155 +++++++++++++++++++ main/models.py | 3 +- 3 files changed, 263 insertions(+), 101 deletions(-) create mode 100644 main/migrations/0055_unique_package_in_repo.py diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py index ad76db4d..b6bd8457 100644 --- a/devel/management/commands/reporead.py +++ b/devel/management/commands/reporead.py @@ -13,10 +13,6 @@ ./manage.py reporead i686 /tmp/core.db.tar.gz """ -from django.core.management.base import BaseCommand, CommandError -from django.contrib.auth.models import User -from django.db import transaction - from collections import defaultdict import io import os @@ -27,6 +23,11 @@ from datetime import datetime from optparse import make_option +from django.core.management.base import BaseCommand, CommandError +from django.contrib.auth.models import User +from django.db import connections, router, transaction +from django.db.utils import IntegrityError + from devel.utils import UserFinder from main.models import Arch, Package, PackageDepend, PackageFile, Repo from packages.models import Conflict, Provision, Replacement @@ -189,8 +190,6 @@ def create_multivalued(dbpkg, repopkg, db_attr, repo_attr): finder = UserFinder() def populate_pkg(dbpkg, repopkg, force=False, timestamp=None): - db_score = 1 - if repopkg.base: dbpkg.pkgbase = repopkg.base else: @@ -214,7 +213,7 @@ def populate_pkg(dbpkg, repopkg, force=False, timestamp=None): dbpkg.last_update = timestamp dbpkg.save() - db_score += populate_files(dbpkg, repopkg, force=force) + populate_files(dbpkg, repopkg, force=force) dbpkg.packagedepend_set.all().delete() for y in repopkg.depends: @@ -235,28 +234,23 @@ def populate_pkg(dbpkg, repopkg, force=False, timestamp=None): create_multivalued(dbpkg, repopkg, 'groups', 'groups') create_multivalued(dbpkg, repopkg, 'licenses', 'license') - related_score = (len(repopkg.depends) + len(repopkg.optdepends) - + len(repopkg.conflicts) + len(repopkg.provides) - + len(repopkg.replaces) + len(repopkg.groups) - + len(repopkg.license)) - if related_score: - db_score += (related_score / 20) + 1 - return db_score +pkg_same_version = lambda pkg, dbpkg: pkg.ver == dbpkg.pkgver \ + and pkg.rel == dbpkg.pkgrel and pkg.epoch == dbpkg.epoch def populate_files(dbpkg, repopkg, force=False): if not force: - if dbpkg.pkgver != repopkg.ver or dbpkg.pkgrel != repopkg.rel \ - or dbpkg.epoch != repopkg.epoch: + if not pkg_same_version(repopkg, dbpkg): logger.info("DB version (%s) didn't match repo version " "(%s) for package %s, skipping file list addition", 
dbpkg.full_version, repopkg.full_version, dbpkg.pkgname) - return 0 + return if not dbpkg.files_last_update or not dbpkg.last_update: pass elif dbpkg.files_last_update > dbpkg.last_update: - return 0 + return + # only delete files if we are reading a DB that contains them if repopkg.has_files: dbpkg.packagefile_set.all().delete() @@ -275,30 +269,19 @@ def populate_files(dbpkg, repopkg, force=False): pkgfile.save(force_insert=True) dbpkg.files_last_update = datetime.utcnow() dbpkg.save() - return (len(repopkg.files) / 50) + 1 - return 0 - -class Batcher(object): - def __init__(self, threshold, start=0): - self.threshold = threshold - self.meter = start - def batch_commit(self, score): - """ - Track updates to the database and perform a commit if the batch - becomes sufficiently large. "Large" is defined by waiting for the - sum of scores to exceed the arbitrary threshold value; once it is - hit a commit is issued. - """ - self.meter += score - if self.meter > self.threshold: - logger.debug("Committing transaction, batch threshold hit") - transaction.commit() - self.meter = 0 +def select_pkg_for_update(dbpkg): + database = router.db_for_write(Package, instance=dbpkg) + connection = connections[database] + if 'sqlite' in connection.settings_dict['ENGINE'].lower(): + return dbpkg + new_pkg = Package.objects.raw( + 'SELECT * FROM packages WHERE id = %s FOR UPDATE', + [dbpkg.id]) + return list(new_pkg)[0] -@transaction.commit_on_success def db_update(archname, reponame, pkgs, options): """ Parses a list and updates the Arch dev database accordingly. @@ -310,88 +293,111 @@ def db_update(archname, reponame, pkgs, options): logger.info('Updating Arch: %s', archname) force = options.get('force', False) filesonly = options.get('filesonly', False) - repository = Repo.objects.get(name__iexact=reponame) - architecture = Arch.objects.get(name__iexact=archname) - # no-arg order_by() removes even the default ordering; we don't need it - dbpkgs = Package.objects.filter( - arch=architecture, repo=repository).order_by() - # This makes our inner loop where we find packages by name *way* more - # efficient by not having to go to the database for each package to - # SELECT them by name. - dbdict = dict([(pkg.pkgname, pkg) for pkg in dbpkgs]) - - logger.debug("Creating sets") - dbset = set(dbdict.keys()) - syncset = set([pkg.name for pkg in pkgs]) - logger.info("%d packages in current web DB", len(dbset)) - logger.info("%d packages in new updating db", len(syncset)) - in_sync_not_db = syncset - dbset - logger.info("%d packages in sync not db", len(in_sync_not_db)) - - # Try to catch those random package deletions that make Eric so unhappy. - if len(dbset): - dbpercent = 100.0 * len(syncset) / len(dbset) - else: - dbpercent = 0.0 - logger.info("DB package ratio: %.1f%%", dbpercent) - - # Fewer than 20 packages makes the percentage check unreliable, but it also - # means we expect the repo to fluctuate a lot. 
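The rewritten db_update() below pairs two defenses against concurrent imports: a double insert trips the new ('pkgname', 'repo', 'arch') unique constraint and surfaces as a caught IntegrityError, while select_pkg_for_update() re-reads a row under SELECT ... FOR UPDATE so the slower updater blocks and then sees the winner's committed version. A condensed sketch of those two code paths (Package, populate_pkg, pkg_same_version and select_pkg_for_update are the names defined in this patch; this outlines the idea rather than reproducing the module):

    from datetime import datetime
    from django.db import transaction
    from django.db.utils import IntegrityError

    def add_package(repopkg, architecture, repository):
        dbpkg = Package(pkgname=repopkg.name, arch=architecture, repo=repository)
        try:
            with transaction.commit_on_success():
                populate_pkg(dbpkg, repopkg, timestamp=datetime.utcnow())
        except IntegrityError:
            pass  # a concurrent reporead run inserted it first; not fatal

    def update_package(repopkg, dbpkg):
        with transaction.commit_on_success():
            dbpkg = select_pkg_for_update(dbpkg)  # blocks while a peer holds the row
            if pkg_same_version(repopkg, dbpkg):
                return  # the peer already imported this exact version
            populate_pkg(dbpkg, repopkg, timestamp=datetime.utcnow())
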
- msg = "Package database has %.1f%% the number of packages in the " \ - "web database" % dbpercent - if len(dbset) == 0 and len(syncset) == 0: - pass - elif not filesonly and \ - len(dbset) > 20 and dbpercent < 50.0 and \ - not repository.testing and not repository.staging: - logger.error(msg) - raise Exception(msg) - elif dbpercent < 75.0: - logger.warning(msg) - - batcher = Batcher(100) + + with transaction.commit_manually(): + repository = Repo.objects.get(name__iexact=reponame) + architecture = Arch.objects.get(name__iexact=archname) + # no-arg order_by() removes even the default ordering; we don't need it + dbpkgs = Package.objects.filter( + arch=architecture, repo=repository).order_by() + # This makes our inner loop where we find packages by name *way* more + # efficient by not having to go to the database for each package to + # SELECT them by name. + dbdict = dict((dbpkg.pkgname, dbpkg) for dbpkg in dbpkgs) + + logger.debug("Creating sets") + dbset = set(dbdict.keys()) + syncset = set([pkg.name for pkg in pkgs]) + logger.info("%d packages in current web DB", len(dbset)) + logger.info("%d packages in new updating db", len(syncset)) + in_sync_not_db = syncset - dbset + logger.info("%d packages in sync not db", len(in_sync_not_db)) + + # Try to catch those random package deletions that make Eric so unhappy. + if len(dbset): + dbpercent = 100.0 * len(syncset) / len(dbset) + else: + dbpercent = 0.0 + logger.info("DB package ratio: %.1f%%", dbpercent) + + # Fewer than 20 packages makes the percentage check unreliable, but it also + # means we expect the repo to fluctuate a lot. + msg = "Package database has %.1f%% the number of packages in the " \ + "web database" % dbpercent + if len(dbset) == 0 and len(syncset) == 0: + pass + elif not filesonly and \ + len(dbset) > 20 and dbpercent < 50.0 and \ + not repository.testing and not repository.staging: + logger.error(msg) + raise Exception(msg) + elif dbpercent < 75.0: + logger.warning(msg) + + # If isolation level is repeatable-read, we need to ensure each package + # update starts a new transaction and re-queries the database as necessary + # to guard against simultaneous updates + transaction.commit() if not filesonly: # packages in syncdb and not in database (add to database) - for p in [x for x in pkgs if x.name in in_sync_not_db]: - logger.info("Adding package %s", p.name) - pkg = Package(pkgname=p.name, arch=architecture, repo=repository) - score = populate_pkg(pkg, p, timestamp=datetime.utcnow()) - batcher.batch_commit(score) + for pkg in (pkg for pkg in pkgs if pkg.name in in_sync_not_db): + logger.info("Adding package %s", pkg.name) + dbpkg = Package(pkgname=pkg.name, arch=architecture, repo=repository) + try: + with transaction.commit_on_success(): + populate_pkg(dbpkg, pkg, timestamp=datetime.utcnow()) + except IntegrityError: + logger.warning("Could not add package %s; " + "not fatal if another thread beat us to it.", + pkg.name, exc_info=True) # packages in database and not in syncdb (remove from database) - in_db_not_sync = dbset - syncset - for p in in_db_not_sync: - logger.info("Removing package %s", p) - dbp = dbdict[p] - dbp.delete() - batcher.batch_commit(1) + for pkgname in (dbset - syncset): + logger.info("Removing package %s", pkgname) + dbpkg = dbdict[pkgname] + with transaction.commit_on_success(): + # no race condition here as long as simultaneous threads both + # issue deletes; second delete will be a no-op + dbpkg.delete() # packages in both database and in syncdb (update in database) pkg_in_both = syncset & 
dbset - for p in [x for x in pkgs if x.name in pkg_in_both]: - logger.debug("Checking package %s", p.name) - dbp = dbdict[p.name] + for pkg in (x for x in pkgs if x.name in pkg_in_both): + logger.debug("Checking package %s", pkg.name) + dbpkg = dbdict[pkg.name] timestamp = None # for a force, we don't want to update the timestamp. # for a non-force, we don't want to do anything at all. if filesonly: pass - elif p.ver == dbp.pkgver and p.rel == dbp.pkgrel \ - and p.epoch == dbp.epoch: + elif pkg_same_version(pkg, dbpkg): if not force: continue else: timestamp = datetime.utcnow() + # The odd select_for_update song and dance here are to ensure + # simultaneous updates don't happen on a package, causing + # files/depends/all related items to be double-imported. if filesonly: - logger.debug("Checking files for package %s", p.name) - score = populate_files(dbp, p, force=force) + with transaction.commit_on_success(): + # TODO Django 1.4 select_for_update() will work once released + dbpkg = select_pkg_for_update(dbpkg) + if pkg_same_version(pkg, dbpkg): + logger.debug("Package %s was already updated", pkg.name) + continue + logger.debug("Checking files for package %s", pkg.name) + populate_files(dbpkg, pkg, force=force) else: - logger.info("Updating package %s", p.name) - score = populate_pkg(dbp, p, force=force, timestamp=timestamp) - - batcher.batch_commit(score) + with transaction.commit_on_success(): + # TODO Django 1.4 select_for_update() will work once released + dbpkg = select_pkg_for_update(dbpkg) + if pkg_same_version(pkg, dbpkg): + logger.debug("Package %s was already updated", pkg.name) + continue + logger.info("Updating package %s", pkg.name) + populate_pkg(dbpkg, pkg, force=force, timestamp=timestamp) logger.info('Finished updating Arch: %s', archname) diff --git a/main/migrations/0055_unique_package_in_repo.py b/main/migrations/0055_unique_package_in_repo.py new file mode 100644 index 00000000..63951a08 --- /dev/null +++ b/main/migrations/0055_unique_package_in_repo.py @@ -0,0 +1,155 @@ +# encoding: utf-8 +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +class Migration(SchemaMigration): + + def forwards(self, orm): + db.delete_index('packages', ['pkgname']) + db.create_unique('packages', ['pkgname', 'repo_id', 'arch_id']) + + def backwards(self, orm): + db.delete_unique('packages', ['pkgname', 'repo_id', 'arch_id']) + db.create_index('packages', ['pkgname']) + + + models = { + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'main.arch': { + 'Meta': {'ordering': "['name']", 'object_name': 'Arch', 'db_table': "'arches'"}, + 'agnostic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) + }, + 'main.donor': { + 'Meta': {'ordering': "('name',)", 'object_name': 'Donor', 'db_table': "'donors'"}, + 'created': ('django.db.models.fields.DateTimeField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'}) + }, + 'main.package': { + 'Meta': {'ordering': "('pkgname',)", 'unique_together': "(('pkgname', 'repo', 'arch'),)", 'object_name': 'Package', 'db_table': "'packages'"}, + 'arch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Arch']"}), + 'build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'compressed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), + 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'files_last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'flag_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'installed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'last_update': ('django.db.models.fields.DateTimeField', [], {}), + 'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 
'null': 'True'}), + 'packager_str': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pgp_signature': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgdesc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), + 'pkgname': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'repo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Repo']"}), + 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}) + }, + 'main.packagedepend': { + 'Meta': {'object_name': 'PackageDepend', 'db_table': "'package_depends'"}, + 'depname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'depvcmp': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}), + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'optional': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.packagefile': { + 'Meta': {'object_name': 'PackageFile', 'db_table': "'package_files'"}, + 'directory': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_directory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.repo': { + 'Meta': {'ordering': "['name']", 'object_name': 'Repo', 'db_table': "'repos'"}, + 'bugs_category': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), + 'bugs_project': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'staging': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'svn_root': ('django.db.models.fields.CharField', [], {'max_length': '64'}), + 'testing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) + }, + 'main.todolist': { + 'Meta': {'object_name': 'Todolist', 'db_table': "'todolists'"}, + 'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), + 'date_added': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}), + 'description': ('django.db.models.fields.TextField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'main.todolistpkg': { + 'Meta': {'unique_together': "(('list', 'pkg'),)", 'object_name': 'TodolistPkg', 'db_table': "'todolist_pkgs'"}, + 'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'list': 
('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Todolist']"}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.userprofile': { + 'Meta': {'object_name': 'UserProfile', 'db_table': "'user_profiles'"}, + 'alias': ('django.db.models.fields.CharField', [], {'max_length': '50'}), + 'allowed_repos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Repo']", 'symmetrical': 'False', 'blank': 'True'}), + 'favorite_distros': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'interests': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'languages': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'location': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'notify': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'occupation': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'other_contact': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), + 'pgp_key': ('main.models.PGPKeyField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'picture': ('django.db.models.fields.files.FileField', [], {'default': "'devs/silhouette.png'", 'max_length': '100'}), + 'public_email': ('django.db.models.fields.CharField', [], {'max_length': '50'}), + 'roles': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'time_zone': ('django.db.models.fields.CharField', [], {'default': "'UTC'", 'max_length': '100'}), + 'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'userprofile'", 'unique': 'True', 'to': "orm['auth.User']"}), + 'website': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), + 'yob': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}) + } + } + + complete_apps = ['main'] diff --git a/main/models.py b/main/models.py index caf36be0..cad4f9fc 100644 --- a/main/models.py +++ b/main/models.py @@ -159,7 +159,7 @@ class Package(models.Model): on_delete=models.PROTECT) arch = models.ForeignKey(Arch, related_name="packages", on_delete=models.PROTECT) - pkgname = models.CharField(max_length=255, db_index=True) + pkgname = models.CharField(max_length=255) pkgbase = models.CharField(max_length=255, db_index=True) pkgver = models.CharField(max_length=255) pkgrel = models.CharField(max_length=255) @@ -184,6 +184,7 @@ class Meta: db_table = 'packages' ordering = ('pkgname',) get_latest_by = 'last_update' + unique_together = (('pkgname', 'repo', 'arch'),) def __unicode__(self): return self.pkgname -- cgit v1.2.3-54-g00ecf From 3a0e55a1de6fb42aaa0deece68cccba8ebb83cc8 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 17 Nov 2011 12:50:14 -0600 Subject: Change package description to a text field No need to have length restrictions on this. Although long descriptions are frowned upon, we shouldn't truncate them if someone really wants one. 
Signed-off-by: Dan McGee <dan@archlinux.org> --- .../0056_auto__chg_field_package_pkgdesc.py | 153 +++++++++++++++++++++ main/models.py | 2 +- 2 files changed, 154 insertions(+), 1 deletion(-) create mode 100644 main/migrations/0056_auto__chg_field_package_pkgdesc.py diff --git a/main/migrations/0056_auto__chg_field_package_pkgdesc.py b/main/migrations/0056_auto__chg_field_package_pkgdesc.py new file mode 100644 index 00000000..21dd43af --- /dev/null +++ b/main/migrations/0056_auto__chg_field_package_pkgdesc.py @@ -0,0 +1,153 @@ +# encoding: utf-8 +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +class Migration(SchemaMigration): + + def forwards(self, orm): + db.alter_column('packages', 'pkgdesc', self.gf('django.db.models.fields.TextField')(null=True)) + + def backwards(self, orm): + db.alter_column('packages', 'pkgdesc', self.gf('django.db.models.fields.CharField')(max_length=255, null=True)) + + + models = { + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], 
{'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'main.arch': { + 'Meta': {'ordering': "['name']", 'object_name': 'Arch', 'db_table': "'arches'"}, + 'agnostic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) + }, + 'main.donor': { + 'Meta': {'ordering': "('name',)", 'object_name': 'Donor', 'db_table': "'donors'"}, + 'created': ('django.db.models.fields.DateTimeField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'}) + }, + 'main.package': { + 'Meta': {'ordering': "('pkgname',)", 'unique_together': "(('pkgname', 'repo', 'arch'),)", 'object_name': 'Package', 'db_table': "'packages'"}, + 'arch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Arch']"}), + 'build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'compressed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), + 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'files_last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'flag_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'installed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'last_update': ('django.db.models.fields.DateTimeField', [], {}), + 'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}), + 'packager_str': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pgp_signature': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgdesc': ('django.db.models.fields.TextField', [], {'null': 'True'}), + 'pkgname': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'repo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Repo']"}), + 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}) + }, + 'main.packagedepend': { + 'Meta': {'object_name': 'PackageDepend', 'db_table': "'package_depends'"}, + 'depname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'depvcmp': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}), + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'optional': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.packagefile': { + 'Meta': 
{'object_name': 'PackageFile', 'db_table': "'package_files'"}, + 'directory': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_directory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.repo': { + 'Meta': {'ordering': "['name']", 'object_name': 'Repo', 'db_table': "'repos'"}, + 'bugs_category': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), + 'bugs_project': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'staging': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'svn_root': ('django.db.models.fields.CharField', [], {'max_length': '64'}), + 'testing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) + }, + 'main.todolist': { + 'Meta': {'object_name': 'Todolist', 'db_table': "'todolists'"}, + 'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), + 'date_added': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}), + 'description': ('django.db.models.fields.TextField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'main.todolistpkg': { + 'Meta': {'unique_together': "(('list', 'pkg'),)", 'object_name': 'TodolistPkg', 'db_table': "'todolist_pkgs'"}, + 'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'list': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Todolist']"}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.userprofile': { + 'Meta': {'object_name': 'UserProfile', 'db_table': "'user_profiles'"}, + 'alias': ('django.db.models.fields.CharField', [], {'max_length': '50'}), + 'allowed_repos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Repo']", 'symmetrical': 'False', 'blank': 'True'}), + 'favorite_distros': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'interests': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'languages': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'location': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'notify': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'occupation': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'other_contact': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), + 'pgp_key': ('main.models.PGPKeyField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'picture': ('django.db.models.fields.files.FileField', [], {'default': "'devs/silhouette.png'", 'max_length': 
'100'}), + 'public_email': ('django.db.models.fields.CharField', [], {'max_length': '50'}), + 'roles': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'time_zone': ('django.db.models.fields.CharField', [], {'default': "'UTC'", 'max_length': '100'}), + 'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'userprofile'", 'unique': 'True', 'to': "orm['auth.User']"}), + 'website': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), + 'yob': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}) + } + } + + complete_apps = ['main'] diff --git a/main/models.py b/main/models.py index cad4f9fc..b37468f9 100644 --- a/main/models.py +++ b/main/models.py @@ -164,7 +164,7 @@ class Package(models.Model): pkgver = models.CharField(max_length=255) pkgrel = models.CharField(max_length=255) epoch = models.PositiveIntegerField(default=0) - pkgdesc = models.CharField(max_length=255, null=True) + pkgdesc = models.TextField(null=True) url = models.CharField(max_length=255, null=True) filename = models.CharField(max_length=255) compressed_size = PositiveBigIntegerField() -- cgit v1.2.3-54-g00ecf From f43a33ed8696d7bcb987d4878c6411c5d16846d6 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 17 Nov 2011 13:32:42 -0600 Subject: Display package URLs unquoted if possible MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Example: kbd-ru-keymaps. Before: http://wiki.archlinux.org/index.php/%D0%98%D0%BD%D1%82%D0%B5%D1%80%D0%BD%D0%B0%D1%86%D0%B8%D0%BE%D0%BD%D0%B0%D0%BB%D0%B8%D0%B7%D0%B0%D1%86%D0%B8%D1%8F After: http://wiki.archlinux.org/index.php/Интернационализация Signed-off-by: Dan McGee <dan@archlinux.org> --- packages/templatetags/package_extras.py | 13 ++++++++++++- templates/packages/details.html | 2 +- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/packages/templatetags/package_extras.py b/packages/templatetags/package_extras.py index 01bf7510..67c7fbbc 100644 --- a/packages/templatetags/package_extras.py +++ b/packages/templatetags/package_extras.py @@ -1,4 +1,4 @@ -from urllib import urlencode, quote as urlquote +from urllib import urlencode, quote as urlquote, unquote try: from urlparse import parse_qs except ImportError: @@ -13,6 +13,17 @@ def link_encode(url, query, doseq=False): data = urlencode(query, doseq).replace('&', '&amp;') return "%s?%s" % (url, data) +@register.filter +def url_unquote(original_url): + try: + url = original_url + if isinstance(url, unicode): + url = url.encode('ascii') + url = unquote(url).decode('utf-8') + return url + except UnicodeError: + return original_url + class BuildQueryStringNode(template.Node): def __init__(self, sortfield): self.sortfield = sortfield diff --git a/templates/packages/details.html b/templates/packages/details.html index 2998592f..a9908012 100644 --- a/templates/packages/details.html +++ b/templates/packages/details.html @@ -108,7 +108,7 @@ <h4>Versions Elsewhere</h4> </tr><tr> <th>Upstream URL:</th> <td>{% if pkg.url %}<a href="{{ pkg.url }}" - title="Visit the website for {{ pkg.pkgname }}">{{ pkg.url }}</a>{% endif %}</td> + title="Visit the website for {{ pkg.pkgname }}">{{ pkg.url|url_unquote }}</a>{% endif %}</td> </tr><tr> <th>License(s):</th> <td>{{ pkg.licenses.all|join:", " }}</td> -- cgit v1.2.3-54-g00ecf From 2cb4f97bb235217d6e56deded1444f5e84f08b71 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Thu, 17
Nov 2011 13:36:27 -0600 Subject: reporead: don't trim pkgdesc length Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/management/commands/reporead.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py index b6bd8457..cf101d97 100644 --- a/devel/management/commands/reporead.py +++ b/devel/management/commands/reporead.py @@ -73,7 +73,7 @@ def handle(self, arch=None, filename=None, **options): class Pkg(object): """An interim 'container' object for holding Arch package data.""" - bare = ( 'name', 'base', 'arch', 'desc', 'filename', + bare = ( 'name', 'base', 'arch', 'filename', 'md5sum', 'sha256sum', 'url', 'packager' ) number = ( 'csize', 'isize' ) collections = ( 'depends', 'optdepends', 'conflicts', @@ -101,8 +101,8 @@ def populate(self, values): setattr(self, k, v[0][:254]) elif k in self.number: setattr(self, k, long(v[0])) - elif k == 'pgpsig': - # do NOT prune this value at all + elif k in ('desc', 'pgpsig'): + # do NOT prune these values at all setattr(self, k, v[0]) elif k == 'version': match = self.version_re.match(v[0]) -- cgit v1.2.3-54-g00ecf From 85657db05d7f65604340699cfcb9967c9e81a0ef Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Mon, 21 Nov 2011 10:08:23 -0600 Subject: Better support for non-latin full names Add a 'latin_name' field to the user profile so we can better support those developers with names in non-Latin scripts, and yet still show a Latin name as necessary on the developer profile page. This field only shows up if populated. Also, use consistent sorting everywhere- rather than using username, always use first_name and last_name fields. Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/views.py | 2 +- .../0057_auto__add_field_userprofile_latin_name.py | 153 +++++++++++++++++++++ main/models.py | 2 + packages/views/search.py | 3 +- public/views.py | 6 +- templates/devel/clock.html | 2 +- templates/public/developer_list.html | 2 +- 7 files changed, 164 insertions(+), 6 deletions(-) create mode 100644 main/migrations/0057_auto__add_field_userprofile_latin_name.py diff --git a/devel/views.py b/devel/views.py index 0002df04..08b19cd7 100644 --- a/devel/views.py +++ b/devel/views.py @@ -83,7 +83,7 @@ def index(request): @never_cache def clock(request): devs = User.objects.filter(is_active=True).order_by( - 'username').select_related('userprofile') + 'first_name', 'last_name').select_related('userprofile') now = datetime.now() utc_now = datetime.utcnow().replace(tzinfo=pytz.utc) diff --git a/main/migrations/0057_auto__add_field_userprofile_latin_name.py b/main/migrations/0057_auto__add_field_userprofile_latin_name.py new file mode 100644 index 00000000..ffde1885 --- /dev/null +++ b/main/migrations/0057_auto__add_field_userprofile_latin_name.py @@ -0,0 +1,153 @@ +# encoding: utf-8 +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +class Migration(SchemaMigration): + + def forwards(self, orm): + db.add_column('user_profiles', 'latin_name', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True), keep_default=False) + + def backwards(self, orm): + db.delete_column('user_profiles', 'latin_name') + + models = { + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': 
('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'main.arch': { + 'Meta': {'ordering': "['name']", 'object_name': 'Arch', 'db_table': "'arches'"}, + 'agnostic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) + }, + 'main.donor': { + 'Meta': {'ordering': "('name',)", 'object_name': 'Donor', 'db_table': "'donors'"}, + 'created': ('django.db.models.fields.DateTimeField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'}) + }, + 'main.package': { + 'Meta': {'ordering': "('pkgname',)", 'unique_together': "(('pkgname', 'repo', 'arch'),)", 'object_name': 'Package', 'db_table': "'packages'"}, + 'arch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': 
"'packages'", 'to': "orm['main.Arch']"}), + 'build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'compressed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), + 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'files_last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'flag_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'installed_size': ('main.models.PositiveBigIntegerField', [], {}), + 'last_update': ('django.db.models.fields.DateTimeField', [], {}), + 'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}), + 'packager_str': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pgp_signature': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'pkgdesc': ('django.db.models.fields.TextField', [], {'null': 'True'}), + 'pkgname': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'repo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Repo']"}), + 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}) + }, + 'main.packagedepend': { + 'Meta': {'object_name': 'PackageDepend', 'db_table': "'package_depends'"}, + 'depname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), + 'depvcmp': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}), + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'optional': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.packagefile': { + 'Meta': {'object_name': 'PackageFile', 'db_table': "'package_files'"}, + 'directory': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_directory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.repo': { + 'Meta': {'ordering': "['name']", 'object_name': 'Repo', 'db_table': "'repos'"}, + 'bugs_category': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), + 'bugs_project': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), + 'staging': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'svn_root': ('django.db.models.fields.CharField', [], {'max_length': '64'}), + 'testing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) + }, + 
'main.todolist': { + 'Meta': {'object_name': 'Todolist', 'db_table': "'todolists'"}, + 'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), + 'date_added': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}), + 'description': ('django.db.models.fields.TextField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'main.todolistpkg': { + 'Meta': {'unique_together': "(('list', 'pkg'),)", 'object_name': 'TodolistPkg', 'db_table': "'todolist_pkgs'"}, + 'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'list': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Todolist']"}), + 'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"}) + }, + 'main.userprofile': { + 'Meta': {'object_name': 'UserProfile', 'db_table': "'user_profiles'"}, + 'alias': ('django.db.models.fields.CharField', [], {'max_length': '50'}), + 'allowed_repos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Repo']", 'symmetrical': 'False', 'blank': 'True'}), + 'favorite_distros': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'interests': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'languages': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'latin_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'location': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'notify': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'occupation': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), + 'other_contact': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), + 'pgp_key': ('main.models.PGPKeyField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'picture': ('django.db.models.fields.files.FileField', [], {'default': "'devs/silhouette.png'", 'max_length': '100'}), + 'public_email': ('django.db.models.fields.CharField', [], {'max_length': '50'}), + 'roles': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'time_zone': ('django.db.models.fields.CharField', [], {'default': "'UTC'", 'max_length': '100'}), + 'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'userprofile'", 'unique': 'True', 'to': "orm['auth.User']"}), + 'website': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), + 'yob': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}) + } + } + + complete_apps = ['main'] diff --git a/main/models.py b/main/models.py index b37468f9..d7780b91 100644 --- a/main/models.py +++ b/main/models.py @@ -77,6 +77,8 @@ class UserProfile(models.Model): help_text="Ideally 125px by 125px") user = models.OneToOneField(User, related_name='userprofile') allowed_repos = models.ManyToManyField('Repo', blank=True) + latin_name = models.CharField(max_length=255, null=True, blank=True, + 
help_text="Latin-form name; used only for non-Latin full names") class Meta: db_table = 'user_profiles' diff --git a/packages/views/search.py b/packages/views/search.py index 57481614..65fcddb3 100644 --- a/packages/views/search.py +++ b/packages/views/search.py @@ -60,7 +60,8 @@ def __init__(self, *args, **kwargs): self.fields['arch'].choices = make_choice( [arch.name for arch in Arch.objects.all()]) self.fields['q'].widget.attrs.update({"size": "30"}) - maints = User.objects.filter(is_active=True).order_by('username') + maints = User.objects.filter(is_active=True).order_by( + 'first_name', 'last_name') self.fields['maintainer'].choices = \ [('', 'All'), ('orphan', 'Orphan')] + \ [(m.username, m.get_full_name()) for m in maints] diff --git a/public/views.py b/public/views.py index 14dd6353..c28fd303 100644 --- a/public/views.py +++ b/public/views.py @@ -34,13 +34,15 @@ def index(request): } def userlist(request, user_type='devs'): - users = User.objects.order_by('username').select_related('userprofile') + users = User.objects.order_by( + 'first_name', 'last_name').select_related('userprofile') if user_type == 'devs': users = users.filter(is_active=True, groups__name="Developers") elif user_type == 'tus': users = users.filter(is_active=True, groups__name="Trusted Users") elif user_type == 'fellows': - users = users.filter(is_active=False, groups__name__in=["Developers", "Trusted Users"]) + users = users.filter(is_active=False, + groups__name__in=["Developers", "Trusted Users"]) else: raise Http404 diff --git a/templates/devel/clock.html b/templates/devel/clock.html index 0f0e20c5..d2eb0a8d 100644 --- a/templates/devel/clock.html +++ b/templates/devel/clock.html @@ -45,7 +45,7 @@ <h2>Developer World Clocks</h2> <script type="text/javascript"> $(document).ready(function() { $("#clocks-table:has(tbody tr)").tablesorter( - {widgets: ['zebra'], sortList: [[1,0]]}); + {widgets: ['zebra'], sortList: [[0,0]]}); }); </script> {% endblock %} diff --git a/templates/public/developer_list.html b/templates/public/developer_list.html index 0ac444e5..5aa4c6b2 100644 --- a/templates/public/developer_list.html +++ b/templates/public/developer_list.html @@ -21,7 +21,7 @@ <table class="bio bio-{{ dev.username }}" cellspacing="0"> <tr> <th>Name:</th> - <td>{{ dev.get_full_name }}</td> + <td>{{ dev.get_full_name }}{% if prof.latin_name %} ({{ prof.latin_name}}){% endif %}</td> </tr><tr> <th>Alias:</th> <td>{{ prof.alias }}</td> -- cgit v1.2.3-54-g00ecf From 0be32f0a388267e54d9ab88e64391f355dafdfb5 Mon Sep 17 00:00:00 2001 From: Luke Shumaker <LukeShu@sbcglobal.net> Date: Sat, 26 Nov 2011 13:25:12 -0500 Subject: A bunch of whitespace or .example/whatever changes to reduce stupid differences between us and upstream archweb --- TODO | 4 + local_settings.py.example | 59 +++ media/archnavbar/archnavbar.css | 9 +- media/archweb.css | 972 ++++++++++++++++++++++++++++++++------- public/views.py | 6 +- templates/packages/flaghelp.html | 3 +- templates/packages/search.html | 11 +- templates/releng/add.html | 22 +- 8 files changed, 885 insertions(+), 201 deletions(-) create mode 100644 TODO create mode 100644 local_settings.py.example diff --git a/TODO b/TODO new file mode 100644 index 00000000..608d8470 --- /dev/null +++ b/TODO @@ -0,0 +1,4 @@ +TODO: + - refactor stats by templates in dashboard, maybe a templatetag + + diff --git a/local_settings.py.example b/local_settings.py.example new file mode 100644 index 00000000..bad03921 --- /dev/null +++ b/local_settings.py.example @@ -0,0 +1,59 @@ +### Django settings for 
archlinux project. + +## Debug settings +DEBUG = False +TEMPLATE_DEBUG = True +DEBUG_TOOLBAR = True + +## For django debug toolbar +INTERNAL_IPS = ('127.0.0.1',) + +## Notification admins +ADMINS = ( + # ('Joe Admin', 'joeadmin@example.com'), +) + +## MySQL Database settings +DATABASES = { + 'default': { + 'ENGINE' : 'django.db.backends.mysql', + 'NAME' : 'archlinux', + 'USER' : 'archlinux', + 'PASSWORD': 'archlinux', + 'HOST' : '', + 'PORT' : '', + 'OPTIONS' : {'init_command': 'SET storage_engine=InnoDB'}, + }, +} + +## Define cache settings +CACHES = { + 'default': { + 'BACKEND' : 'django.core.cache.backends.dummy.DummyCache', + #'BACKEND' : 'django.core.cache.backends.memcached.MemcachedCache', + #'LOCATION': '127.0.0.1:11211', + } +} +CACHE_MIDDLEWARE_KEY_PREFIX = 'arch' +CACHE_MIDDLEWARE_SECONDS = 300 + +## Use secure session cookies? Make this true if you want all +## logged-in actions to take place over HTTPS only. If developing +## locally, you will want to use False. +SESSION_COOKIE_SECURE = False + +## location for saving dev pictures +MEDIA_ROOT = '/srv/example.com/img/' + +## web url for serving image files +MEDIA_URL = 'http://example.com/img/' + +## Make this unique, and don't share it with anybody. +SECRET_KEY = '00000000000000000000000000000000000000000000000' + +## CDN settings +CDN_ENABLED = False +# Scheme-relative URL, should work for both http/https +CDN_PATH = '//example.com/path/' + +# vim: set ts=4 sw=4 et: diff --git a/media/archnavbar/archnavbar.css b/media/archnavbar/archnavbar.css index 6b82cb92..f83b8544 100644 --- a/media/archnavbar/archnavbar.css +++ b/media/archnavbar/archnavbar.css @@ -7,11 +7,9 @@ */ /* container for the entire bar */ -#archnavbar { height: 40px !important; padding: 10px 15px !important; -background: #000 !important; border-bottom: 5px #787DAB solid !important; } +#archnavbar { height: 40px !important; padding: 10px 15px !important; background: #000 !important; border-bottom: 5px #787DAB solid !important; } -#archnavbarlogo { float: left !important; margin: 0 !important; padding: 0 -!important; height: 50px !important; width: 397px !important; } +#archnavbarlogo { float: left !important; margin: 0 !important; padding: 0 !important; height: 50px !important; width: 397px !important; } /* and use a proper PNG for all other modern browsers */ html > body #archnavbarlogo { background: url('parabolabw.png') no-repeat !important; } @@ -20,8 +18,7 @@ html > body #archnavbarlogo { background: url('parabolabw.png') no-repeat !impor #archnavbarlogo h1 { margin: 0 !important; padding: 0 !important; text-indent: -9999px !important; } /* make the link the same size as the logo */ -#archnavbarlogo a { display: block !important; height: 50px !important; width: -397px !important; } +#archnavbarlogo a { display: block !important; height: 50px !important; width: 397px !important; } /* display the list inline, float it to the right and style it */ #archnavbar ul { display: inline !important; float: right !important; list-style: none !important; margin: 0 !important; padding: 0 !important; } diff --git a/media/archweb.css b/media/archweb.css index 85fdb610..0cb1c6ac 100644 --- a/media/archweb.css +++ b/media/archweb.css @@ -13,262 +13,886 @@ @import url('archnavbar/archnavbar.css'); /* simple reset */ -* { margin: 0; padding: 0; line-height: 1.4; } +* { + margin: 0; + padding: 0; + line-height: 1.4; +} /* general styling */ -body { min-width: 650px; background: #f6f9fc; color: #222; font: normal 100% sans-serif; text-align: center; } -p { margin: .33em 0 1em; 
} -ol, ul { margin-bottom: 1em; padding-left: 2em; } -ul { list-style: square; } -code { font: 1.2em monospace; background: #ffd; padding: 0.15em 0.25em; } -pre { font: 1.2em monospace; border: 1px solid #bdb; background: #dfd; padding: 0.5em; margin: 0.25em 2em; } -pre code { display: block; background: none; } -blockquote { margin: 1.5em 2em; } -input { vertical-align: middle; } -select[multiple] { padding-top: 1px; padding-bottom: 1px; } -select[multiple] option { padding-left: 0.3em; padding-right: 0.5em; } -input[type=submit] { padding-left: 0.6em; padding-right: 0.6em; } -.clear { clear: both; } -hr { border: none; border-top: 1px solid #888; } -img { border: 0; } +body { + min-width: 650px; + background: #f6f9fc; + color: #222; + font: normal 100% sans-serif; + text-align: center; +} + +p { + margin: .33em 0 1em; +} + +ol, +ul { + margin-bottom: 1em; + padding-left: 2em; +} + + ul { + list-style: square; + } + +code { + font: 1.2em monospace; + background: #ffd; + padding: 0.15em 0.25em; +} + +pre { + font: 1.2em monospace; + border: 1px solid #bdb; + background: #dfd; + padding: 0.5em; + margin: 0.25em 2em; +} + + pre code { + display: block; + background: none; + } + +blockquote { + margin: 1.5em 2em; +} + +input { + vertical-align: middle; +} + +select[multiple] { + padding-top: 1px; + padding-bottom: 1px; +} + + select[multiple] option { + padding-left: 0.3em; + padding-right: 0.5em; + } + +input[type=submit] { + padding-left: 0.6em; + padding-right: 0.6em; +} + +.clear { + clear: both; +} + +hr { + border: none; + border-top: 1px solid #888; +} + +img { + border: 0; +} /* scale fonts down to a sane default (16 * .812 = 13px) */ -#content { font-size: 0.812em; } +#content { + font-size: 0.812em; +} /* link style */ -a { text-decoration: none; } -a:link, th a:visited { color: #07b; } -a:visited { color: #666; } -a:hover { text-decoration: underline; color: #666; } -a:active { color: #e90; } +a { + text-decoration: none; +} + + a:link, + th a:visited { + color: #07b; + } + + a:visited { + color: #666; + } + + a:hover { + text-decoration: underline; + color: #666; + } + + a:active { + color: #e90; + } /* headings */ -h2 { font-size: 1.5em; margin-bottom: 0.5em; border-bottom: 1px solid #888; } -h3 { font-size: 1.25em; margin-top: 1em; } -h4 { font-size: 1.15em; margin-top: 1em; } -h5 { font-size: 1em; margin-top: 1em; } +h2 { + font-size: 1.5em; + margin-bottom: 0.5em; + border-bottom: 1px solid #888; +} + +h3 { + font-size: 1.25em; + margin-top: 1em; +} + +h4 { + font-size: 1.15em; + margin-top: 1em; +} + +h5 { + font-size: 1em; + margin-top: 1em; +} /* general layout */ -div#content { width: 95%; margin: 0 auto; text-align: left; } -div#content-left-wrapper { float: left; width: 100%; } /* req to keep content above sidebar in source code */ -div#content-left { margin: 0 340px 0 0; } -div#content-right { float: left; width: 300px; margin-left: -300px; } -div.box { margin-bottom: 1.5em; padding: 0.65em; background: #ecf2f5; border: 1px solid #bcd; } -div#footer { clear: both; margin: 2em 0 1em; } -div#footer p { margin: 0; text-align: center; font-size: 0.85em; } +div#content { + width: 95%; + margin: 0 auto; + text-align: left; +} + +div#content-left-wrapper { + float: left; + width: 100%; /* req to keep content above sidebar in source code */ +} + +div#content-left { + margin: 0 340px 0 0; +} + +div#content-right { + float: left; + width: 300px; + margin-left: -300px; +} + +div.box { + margin-bottom: 1.5em; + padding: 0.65em; + background: #ecf2f5; + border: 1px solid #bcd; +} + 
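Every hunk in this archweb.css rewrite applies the same mechanical change: a one-line rule such as "p { margin: .33em 0 1em; }" becomes the one-declaration-per-line form used by upstream archweb, so later merges against upstream produce smaller diffs. A transformation this regular could be scripted rather than hand-edited; a rough Python 2 sketch of such a helper (hypothetical tooling, not anything this commit ships, handling only simple un-nested rules):

    import re

    def expand_rule(match):
        selector = match.group(1).strip()
        decls = [d.strip() for d in match.group(2).split(';') if d.strip()]
        return '\n'.join([selector + ' {']
                         + ['    %s;' % d for d in decls]
                         + ['}'])

    def expand_css(text):
        # One-line rules only; comments, @media blocks and nested
        # braces would need a real CSS parser.
        return re.sub(r'([^{}\n]+)\{([^{}]*)\}', expand_rule, text)

    print expand_css('p { margin: .33em 0 1em; }')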
+div#footer { + clear: both; + margin: 2em 0 1em; +} + + div#footer p { + margin: 0; + text-align: center; + font-size: 0.85em; + } /* alignment */ -div.center, table.center, img.center { width: auto; margin-left: auto; margin-right: auto; } -p.center, td.center, th.center { text-align: center; } +div.center, +table.center, +img.center { + width: auto; + margin-left: auto; + margin-right: auto; +} + +p.center, +td.center, +th.center { + text-align: center; +} /* table generics */ -table { width: 100%; border-collapse: collapse; } -table .wrap { white-space: normal; } -th, td { white-space: nowrap; text-align: left; } -th { vertical-align: middle; font-weight: bold; } -td { vertical-align: top; } +table { + width: 100%; + border-collapse: collapse; +} + + table .wrap { + white-space: normal; + } + +th, +td { + white-space: nowrap; + text-align: left; +} + + th { + vertical-align: middle; + font-weight: bold; + } + + td { + vertical-align: top; + } /* table pretty styles */ -table.pretty1 { width: auto; margin-top: 0.25em; margin-bottom: 0.5em; border-collapse: collapse; border: 1px solid #bcd; } -table.pretty1 th { padding: 0.35em; background: #e4eeff; border: 1px solid #bcd; } -table.pretty1 td { padding: 0.35em; border: 1px dotted #bcd; } -table.pretty2 { width: auto; margin-top: 0.25em; margin-bottom: 0.5em; border-collapse: collapse; border: 1px solid #bbb; } -table.pretty2 th { padding: 0.35em; background: #eee; border: 1px solid #bbb; } -table.pretty2 td { padding: 0.35em; border: 1px dotted #bbb; } +table.pretty1 { + width: auto; + margin-top: 0.25em; + margin-bottom: 0.5em; + border-collapse: collapse; + border: 1px solid #bcd; +} + + table.pretty1 th { + padding: 0.35em; + background: #e4eeff; + border: 1px solid #bcd; + } + + table.pretty1 td { + padding: 0.35em; + border: 1px dotted #bcd; + } + +table.pretty2 { + width: auto; + margin-top: 0.25em; + margin-bottom: 0.5em; + border-collapse: collapse; + border: 1px solid #bbb; +} + + table.pretty2 th { + padding: 0.35em; + background: #eee; + border: 1px solid #bbb; + } + + table.pretty2 td { + padding: 0.35em; + border: 1px dotted #bbb; + } /* forms and input styling */ -form p { margin: 0.5em 0; } -fieldset { border: 0; } -label { width: 12em; vertical-align: top; display: inline-block; font-weight: bold; } -input[type=text], input[type=password], textarea { padding: 0.10em; } -form.general-form label, form.general-form .form-help { width: 10em; vertical-align: top; display: inline-block; } -form.general-form input[type=text], form.general-form textarea { width: 45%; } +form p { + margin: 0.5em 0; +} + +fieldset { + border: 0; +} + +label { + width: 12em; + vertical-align: top; + display: inline-block; + font-weight: bold; +} + +input[type=text], +input[type=password], +textarea { + padding: 0.10em; +} + +form.general-form label, +form.general-form .form-help { + width: 10em; + vertical-align: top; + display: inline-block; +} + +form.general-form input[type=text], +form.general-form textarea { + width: 45%; +} /* archdev navbar */ -div#archdev-navbar { margin: 1.5em 0; } -div#archdev-navbar ul { list-style: none; margin: -0.5em 0; padding: 0; } -div#archdev-navbar li { display: inline; margin: 0; padding: 0; font-size: 0.9em; } -div#archdev-navbar li a { padding: 0 0.5em; color: #07b; } +div#archdev-navbar { + margin: 1.5em 0; +} + + div#archdev-navbar ul { + list-style: none; + margin: -0.5em 0; + padding: 0; + } + + div#archdev-navbar li { + display: inline; + margin: 0; + padding: 0; + font-size: 0.9em; + } + + 
div#archdev-navbar li a { + padding: 0 0.5em; + color: #07b; + } /* error/info messages (x pkg is already flagged out-of-date, etc) */ -#sys-message { width: 35em; text-align: center; margin: 1em auto; padding: 0.5em; background: #fff; border: 1px solid #f00; } -#sys-message p { margin: 0; } - -ul.errorlist { color: red; } - -/* +#sys-message { + width: 35em; + text-align: center; + margin: 1em auto; + padding: 0.5em; + background: #fff; + border: 1px solid #f00; +} + + #sys-message p { + margin: 0; + } + +ul.errorlist { + color: red; +} + +/** * PAGE SPECIFIC STYLES */ /* home: introduction */ -#intro p.readmore { margin: -0.5em 0 0 0; font-size: .9em; text-align: right; } +#intro p.readmore { + margin: -0.5em 0 0 0; + font-size: .9em; + text-align: right; +} /* home: news */ -#news { margin-top: 1.5em; } -#news h3 { border-bottom: 1px solid #888; } -#news div { margin-bottom: 1em; } -#news div p { margin-bottom: 0.5em; } -#news .more { font-weight: normal; } -#news .rss-icon { float: right; margin: -1.6em 0.4em 0 0; } -#news h4 { font-size: 1em; margin-top: 1.5em; border-bottom: 1px dotted #bbb; } -#news .timestamp { float: right; font-size: 0.85em; margin: -1.8em 0.5em 0 0; } +#news { + margin-top: 1.5em; +} + + #news h3 { + border-bottom: 1px solid #888; + } + + #news div { + margin-bottom: 1em; + } + + #news div p { + margin-bottom: 0.5em; + } + + #news .more { + font-weight: normal; + } + + #news .rss-icon { + float: right; + margin: -1.6em 0.4em 0 0; + } + + #news h4 { + font-size: 1em; + margin-top: 1.5em; + border-bottom: 1px dotted #bbb; + } + + #news .timestamp { + float: right; + font-size: 0.85em; + margin: -1.8em 0.5em 0 0; + } /* home: pkgsearch box */ -#pkgsearch { padding: 1em 0.75em; background: #3ad; color: #fff; border: 1px solid #08b; } -#pkgsearch label { width: auto; padding: 0.1em 0; } -#pkgsearch input { width: 10em; float: right; font-size: 1em; color: #000; background: #fff; border: 1px solid #09c; } +#pkgsearch { + padding: 1em 0.75em; + background: #3ad; + color: #fff; + border: 1px solid #08b; +} + + #pkgsearch label { + width: auto; + padding: 0.1em 0; + } + + #pkgsearch input { + width: 10em; + float: right; + font-size: 1em; + color: #000; + background: #fff; + border: 1px solid #09c; + } /* home: recent pkg updates */ -#pkg-updates h3 { margin: 0 0 0.3em; } -#pkg-updates .more { font-weight: normal; } -#pkg-updates .rss-icon { float: right; margin: -2em 0 0 0; } -#pkg-updates table { margin: 0; } -#pkg-updates td.pkg-name { white-space: normal; } -#pkg-updates td.pkg-arch { text-align: right; } -#pkg-updates span.testing, #pkg-updates span.community-testing, span.multilib-testing { font-style: italic; } -#pkg-updates span.staging, #pkg-updates span.community-staging, span.multilib-staging { font-style: italic; color: #ff8040; } +#pkg-updates h3 { + margin: 0 0 0.3em; +} + + #pkg-updates .more { + font-weight: normal; + } + + #pkg-updates .rss-icon { + float: right; + margin: -2em 0 0 0; + } + + #pkg-updates table { + margin: 0; + } + + #pkg-updates td.pkg-name { + white-space: normal; + } + + #pkg-updates td.pkg-arch { + text-align: right; + } + + #pkg-updates span.testing, + #pkg-updates span.community-testing, + span.multilib-testing { + font-style: italic; + } + + #pkg-updates span.staging, + #pkg-updates span.community-staging, + span.multilib-staging { + font-style: italic; + color: #ff8040; + } /* home: sidebar navigation */ -div#nav-sidebar ul { list-style: none; margin: 0.5em 0 0.5em 1em; padding: 0; } +div#nav-sidebar ul { + list-style: none; + 
margin: 0.5em 0 0.5em 1em; + padding: 0; +} /* home: sponsor banners */ -div#arch-sponsors img { padding: 0.3em 0; } +div#arch-sponsors img { + padding: 0.3em 0; +} /* home: sidebar components (navlist, sponsors, pkgsearch, etc) */ -div.widget { margin-bottom: 1.5em; } +div.widget { + margin-bottom: 1.5em; +} /* feeds page */ -#rss-feeds .rss { padding-right: 20px; background: url(rss.png) top right no-repeat; } +#rss-feeds .rss { + padding-right: 20px; + background: url(rss.png) top right no-repeat; +} /* artwork: logo images */ -#artwork img.inverted { background: #333; padding: 0; } -#artwork div.imagelist img { display: inline; margin: 0.75em; } +#artwork img.inverted { + background: #333; + padding: 0; +} + +#artwork div.imagelist img { + display: inline; + margin: 0.75em; +} /* news: article list */ -.news-nav { float: right; margin-top: -2.2em; } -.news-nav .prev, .news-nav .next { margin-left: 1em; margin-right: 1em; } +.news-nav { + float: right; + margin-top: -2.2em; +} + + .news-nav .prev, + .news-nav .next { + margin-left: 1em; + margin-right: 1em; + } /* news: article pages */ -div.news-article .article-info { margin: 0; color: #999; } +div.news-article .article-info { + margin: 0; + color: #999; +} /* news: add/edit article */ -form#newsform { width: 60em; } -form#newsform input[type=text], form#newsform textarea { width: 75%; } +form#newsform { + width: 60em; +} + + form#newsform input[type=text], + form#newsform textarea { + width: 75%; + } /* donate: donor list */ -div#donor-list ul { width: 100%; } -/* max 4 columns, but possibly fewer if screen size doesn't allow for more */ -div#donor-list li { float: left; width: 25%; min-width: 20em; } +div#donor-list ul { + width: 100%; +} + /* max 4 columns, but possibly fewer if screen size doesn't allow for more */ + div#donor-list li { + float: left; + width: 25%; + min-width: 20em; + } /* download page */ -#arch-downloads h3 { border-bottom: 1px dotted #aaa; } -table#download-torrents .cpu-arch { text-align: center; } -table#download-mirrors { width: auto; margin-bottom: 1em; } -table#download-mirrors td.mirror-country { padding-top: 1em; } -table#download-mirrors td.mirror-server { padding-right: 1em; } -table#download-mirrors a { display: block; float: right; width: 4em; } +#arch-downloads h3 { + border-bottom: 1px dotted #aaa; +} + +table#download-torrents .cpu-arch { + text-align: center; +} + +table#download-mirrors { + width: auto; + margin-bottom: 1em; +} + + table#download-mirrors td.mirror-country { + padding-top: 1em; + } + + table#download-mirrors td.mirror-server { + padding-right: 1em; + } + + table#download-mirrors a { + display: block; + float: right; + width: 4em; + } /* pkglists/devlists */ -table.results { font-size: 0.846em; border-top: 1px dotted #999; border-bottom: 1px dotted #999; } -table.results th { padding: 0.5em 1em 0.25em 0.25em; border-bottom: 1px solid #999; white-space: nowrap; background-color:#fff; } -table.results td { padding: .3em 1em .3em 3px; } -table.results tr.odd { background: #fff; } -table.results tr.even { background: #e4eeff; } -/* additional styles for JS sorting */ -table.results th.header { padding-right: 20px; background-image: url(nosort.gif); background-repeat: no-repeat; background-position: center right; cursor: pointer; } -table.results th.headerSortDown { background-color: #e4eeff; background-image: url(desc.gif); } -table.results th.headerSortUp { background-color: #e4eeff; background-image: url(asc.gif); } -table.results .flagged { color: red; } +table.results { + 
font-size: 0.846em; + border-top: 1px dotted #999; + border-bottom: 1px dotted #999; +} + + table.results th { + padding: 0.5em 1em 0.25em 0.25em; + border-bottom: 1px solid #999; + white-space: nowrap; + background-color:#fff; + } + + table.results td { + padding: .3em 1em .3em 3px; + } + + table.results tr.odd { + background: #fff; + } + + table.results tr.even { + background: #e4eeff; + } + + /* additional styles for JS sorting */ + table.results th.header { + padding-right: 20px; + background-image: url(nosort.gif); + background-repeat: no-repeat; + background-position: center right; + cursor: pointer; + } + + table.results th.headerSortDown { + background-color: #e4eeff; + background-image: url(desc.gif); + } + + table.results th.headerSortUp { + background-color: #e4eeff; + background-image: url(asc.gif); + } + + table.results .flagged { + color: red; + } /* pkglist: layout */ -div#pkglist-about { margin-top: 1.5em; } +div#pkglist-about { + margin-top: 1.5em; +} /* pkglist: results navigation */ -#pkglist-stats-top, #pkglist-stats-bottom { font-size: 0.85em; } -#pkglist-results .pkglist-nav { float: right; margin-top: -2.2em; } -.pkglist-nav .prev { margin-right: 1em; } -.pkglist-nav .next { margin-right: 1em; } +#pkglist-stats-top, +#pkglist-stats-bottom { + font-size: 0.85em; +} + +#pkglist-results .pkglist-nav { + float: right; + margin-top: -2.2em; +} + +.pkglist-nav .prev { + margin-right: 1em; +} + +.pkglist-nav .next { + margin-right: 1em; +} /* search fields and other filter selections */ -.filter-criteria h3 { font-size: 1em; margin-top:0; } -.filter-criteria div { float: left; margin-right: 1.65em; font-size: 0.85em; } -.filter-criteria legend { display: none; } -.filter-criteria label { width: auto; display: block; font-weight: normal; } +.filter-criteria h3 { + font-size: 1em; + margin-top:0; +} + +.filter-criteria div { + float: left; + margin-right: 1.65em; + font-size: 0.85em; +} + +.filter-criteria legend { + display: none; +} + +.filter-criteria label { + width: auto; + display: block; + font-weight: normal; +} /* pkgdetails: details links that float on the right */ -#pkgdetails #detailslinks { float: right; } -#pkgdetails #detailslinks h4 { margin-top: 0; margin-bottom: 0.25em; } -#pkgdetails #detailslinks ul { list-style: none; padding: 0; margin-bottom: 0; font-size: 0.846em; } -#pkgdetails #detailslinks > div { padding: 0.5em; margin-bottom: 1em; background: #eee; border: 1px solid #bbb; } -#pkgdetails #actionlist .flagged { color: red; font-size: 0.9em; font-style: italic; } +#pkgdetails #detailslinks { + float: right; +} + + #pkgdetails #detailslinks h4 { + margin-top: 0; + margin-bottom: 0.25em; + } + + #pkgdetails #detailslinks ul { + list-style: none; + padding: 0; + margin-bottom: 0; + font-size: 0.846em; + } + + #pkgdetails #detailslinks > div { + padding: 0.5em; + margin-bottom: 1em; + background: #eee; + border: 1px solid #bbb; + } + +#pkgdetails #actionlist .flagged { + color: red; + font-size: 0.9em; + font-style: italic; +} /* pkgdetails: pkg info */ -#pkgdetails #pkginfo { width: auto; } -#pkgdetails #pkginfo td { padding: 0.25em 0 0.25em 1.5em; } +#pkgdetails #pkginfo { + width: auto; +} + + #pkgdetails #pkginfo td { + padding: 0.25em 0 0.25em 1.5em; + } /* pkgdetails: flag package */ -form#flag-pkg-form label { width: 10em; } -form#flag-pkg-form textarea, form#flag-pkg-form input[type=text] { width: 45%; } +form#flag-pkg-form label { + width: 10em; +} + +form#flag-pkg-form textarea, +form#flag-pkg-form input[type=text] { + width: 45%; +} /* 
pkgdetails: deps, required by and file lists */ -#pkgdetails #metadata h3 { background: #555; color: #fff; font-size: 1em; margin-bottom: 0.5em; padding: 0.2em 0.35em; } -#pkgdetails #metadata ul { list-style: none; margin: 0; padding: 0; } -#pkgdetails #metadata li { padding-left: 0.5em; } -#pkgdetails #metadata p { padding-left: 0.5em; } -#pkgdetails #metadata .message { font-style: italic; } -#pkgdetails #metadata br { clear: both; } -#pkgdetails #pkgdeps { float: left; width: 48%; margin-right: 2%; } -#pkgdetails #metadata .virtual-dep { font-style: italic; } -#pkgdetails #metadata .testing-dep { font-style: italic; } -#pkgdetails #metadata .opt-dep { font-style: italic; } -#pkgdetails #metadata .dep-desc { font-style: italic; } -#pkgdetails #pkgreqs { float: left; width: 50%; } -#pkgdetails #pkgfiles { clear: left; padding-top: 1em; } +#pkgdetails #metadata h3 { + background: #555; + color: #fff; + font-size: 1em; + margin-bottom: 0.5em; + padding: 0.2em 0.35em; +} + +#pkgdetails #metadata ul { + list-style: none; + margin: 0; + padding: 0; +} + +#pkgdetails #metadata li { + padding-left: 0.5em; +} + +#pkgdetails #metadata p { + padding-left: 0.5em; +} + +#pkgdetails #metadata .message { + font-style: italic; +} + +#pkgdetails #metadata br { + clear: both; +} + +#pkgdetails #pkgdeps { + float: left; + width: 48%; + margin-right: 2%; +} + +#pkgdetails #metadata .virtual-dep, +#pkgdetails #metadata .testing-dep, +#pkgdetails #metadata .opt-dep, +#pkgdetails #metadata .dep-desc { + font-style: italic; +} +#pkgdetails #pkgreqs { + float: left; + width: 50%; +} + +#pkgdetails #pkgfiles { + clear: left; + padding-top: 1em; +} /* dev/TU biographies */ -div#arch-bio-toc { width: 75%; margin: 0 auto; text-align: center; } -table.arch-bio-entry td.pic { vertical-align: top; padding-right: 15px; padding-top: 10px; } -table.arch-bio-entry td.pic img { padding: 4px; border: 1px solid #ccc; } -table.arch-bio-entry table.bio { margin-bottom: 2em; } -table.arch-bio-entry table.bio th { text-align: left; padding-right: 0.5em; vertical-align: top; white-space: nowrap; } -table.arch-bio-entry table.bio td { width: 100%; padding-bottom: 0.25em; } +div#arch-bio-toc { + width: 75%; + margin: 0 auto; + text-align: center; +} + +table.arch-bio-entry td.pic { + vertical-align: top; + padding-right: 15px; + padding-top: 10px; +} + + table.arch-bio-entry td.pic img { + padding: 4px; + border: 1px solid #ccc; + } + +table.arch-bio-entry table.bio { + margin-bottom: 2em; +} + + table.arch-bio-entry table.bio th { + text-align: left; + padding-right: 0.5em; + vertical-align: top; + white-space: nowrap; + } + + table.arch-bio-entry table.bio td { + width: 100%; + padding-bottom: 0.25em; + } /* dev: login/out */ p.login-error {} -table#dev-login { width: auto; } +table#dev-login { + width: auto; +} /* dev dashboard: flagged packages */ -form#dash-pkg-notify { text-align: right; padding: 1em 0 0; margin-top: 1em; font-size: 0.85em; border-top: 1px dotted #aaa; } -form#dash-pkg-notify label { width: auto; font-weight: normal; } -form#dash-pkg-notify input { vertical-align: middle; margin: 0 0.25em; } -form#dash-pkg-notify input[type=submit] { margin-top: -0.25em; } -form#dash-pkg-notify p { margin: 0; } - -table.dash-stats .key { width: 50%; } +form#dash-pkg-notify { + text-align: right; + padding: 1em 0 0; + margin-top: 1em; + font-size: 0.85em; + border-top: 1px dotted #aaa; +} + + form#dash-pkg-notify label { + width: auto; + font-weight: normal; + } + + form#dash-pkg-notify input { + vertical-align: middle; + 
margin: 0 0.25em; + } + + form#dash-pkg-notify input[type=submit] { + margin-top: -0.25em; + } + + form#dash-pkg-notify p { + margin: 0; + } + +table.dash-stats .key { + width: 50%; +} /* dev dashboard: admin actions (add news items, todo list, etc) */ -ul.admin-actions { float: right; list-style: none; margin-top: -2.5em; } -ul.admin-actions li { display: inline; padding-left: 1.5em; } +ul.admin-actions { + float: right; + list-style: none; + margin-top: -2.5em; +} + + ul.admin-actions li { + display: inline; + padding-left: 1.5em; + } /* todo lists (public and private) */ -.todo-table .complete { color: green; } -.todo-table .incomplete { color: red; } -.todo-info { margin: 0; color: #999; } -.todo-list h4 { margin-top: 0; margin-bottom: 0.4em; } +.todo-table .complete { + color: green; +} + +.todo-table .incomplete { + color: red; +} +.todo-info { + margin: 0; color: #999; +} + +.todo-list h4 { + margin-top: 0; + margin-bottom: 0.4em; +} /* dev: signoff page */ -#dev-signoffs ul { list-style: none; margin: 0; padding: 0; } -#dev-signoffs .signoff-yes { color: green; font-weight: bold; } -#dev-signoffs .signoff-no { color: red; } -#dev-signoffs .signed-username { color: #888; margin-left: 0.5em; } +#dev-signoffs ul { + list-style: none; + margin: 0; + padding: 0; +} + +#dev-signoffs .signoff-yes { + color: green; + font-weight: bold; +} + +#dev-signoffs .signoff-no { + color: red; +} + +#dev-signoffs .signed-username { + color: #888; + margin-left: 0.5em; +} /* iso testing feedback form */ -#releng-feedback label { width: auto; display: inline; font-weight: normal; } -#releng-feedback ul { padding-left: 1em; } -#releng-feedback li { list-style: none; } -#releng-feedback ul+.helptext { position: relative; top: -0.9em; } +#releng-feedback label { + width: auto; + display: inline; + font-weight: normal; +} + +#releng-feedback ul { + padding-left: 1em; +} + +#releng-feedback li { + list-style: none; +} + +#releng-feedback ul+.helptext { + position: relative; + top: -0.9em; +} /* highlight current website in the navbar */ -#archnavbar.anb-home ul li#anb-home a { color: white !important; } -#archnavbar.anb-packages ul li#anb-packages a { color: white !important; } -#archnavbar.anb-download ul li#anb-download a { color: white !important; } +#archnavbar.anb-home ul li#anb-home a, +#archnavbar.anb-packages ul li#anb-packages a, +#archnavbar.anb-download ul li#anb-download a { + color: white !important; +} diff --git a/public/views.py b/public/views.py index 1aae2846..bb9cd454 100644 --- a/public/views.py +++ b/public/views.py @@ -31,11 +31,13 @@ def index(request): } def userlist(request, user_type='hackers'): - users = User.objects.order_by('username').select_related('userprofile') + users = User.objects.order_by( + 'username').select_related('userprofile') if user_type == 'hackers': users = users.filter(is_active=True, groups__name="Hackers") elif user_type == 'fellows': - users = users.filter(is_active=False, groups__name__in=["Hackers"]) + users = users.filter(is_active=False, + groups__name__in=["Hackers"]) else: raise Http404 diff --git a/templates/packages/flaghelp.html b/templates/packages/flaghelp.html index 4a9d1cdf..e33ba0f5 100644 --- a/templates/packages/flaghelp.html +++ b/templates/packages/flaghelp.html @@ -25,8 +25,7 @@ <h3>Flagging Packages</h3> <p>The message box portion of the flag utility is optional, and meant for short messages only. 
    If you need more than 200 characters for your message, then
    file a bug report, email the maintainer directly, or send
-   an email to the <a target="_blank"
-   href="http://list.parabolagnulinux.org/listinfo.cgi/dev-parabolagnulinux.org"
+   an email to the <a target="_blank" href="http://list.parabolagnulinux.org/listinfo.cgi/dev-parabolagnulinux.org"
    title="Visit the parabola dev mailing list">parabola
    mailing list</a> with your additional text.</p>
diff --git a/templates/packages/search.html b/templates/packages/search.html
index 2a314853..4a61298e 100644
--- a/templates/packages/search.html
+++ b/templates/packages/search.html
@@ -156,16 +156,15 @@ <h3>Package Search</h3>
 </div><!-- #pkglist-results -->
 {% else %}
 <div class="box">
-    <p>We couldn't find any packages matching your query. Try searching again
-    using different criteria.</p>
+    <p>We couldn't find any packages matching your query. Try searching again
+    using different criteria.</p>
 </div>
 {% endif %}
 <div id="pkglist-about" class="box">
-    <p>You are browsing the Parabola package database. From here you can
-    find detailed information about packages located in the official
-    supported repositories. If you need the sourceball from where a
-    package is built, you can look at our <a
+    <p>You are browsing the Parabola package database. From here you can find
+    detailed information about packages located in the official supported repositories.
+    If you need the sourceball from where a package is built, you can look at our <a
    href='http://repo.parabolagnulinux.org/sources/packages'
    title='Sourceballed packages'>sources repo</a>.</p>
 </div>
diff --git a/templates/releng/add.html b/templates/releng/add.html
index 428812c9..402ceac6 100644
--- a/templates/releng/add.html
+++ b/templates/releng/add.html
@@ -6,17 +6,17 @@
 <div class="box">
     <h2>Parabola Releng Testbuild Feedback Entry</h2>
-    <p>This page allows you to submit feedback after testing an Parabola
-    installation using a release engineering testbuild. Mark all the
-    options you used during the installation; at the end you can specify
-    whether everything went OK. Be sure to only denote a successful
-    install after having checked the installation properly. Some options
-    require you to check several things (such as config files), this will
-    be mentioned alongside the option.</p> <p>There is also an overview of
-    all feedback on the <a href="{% url releng-test-overview %}">results
-    page</a>. Once we have builds that are properly tested (enough
-    successful feedback for all important features of the ISO or a
-    slightly earlier ISO), we can release new official media.</p>
+    <p>This page allows you to submit feedback after testing a Parabola installation
+    using a release engineering testbuild. Mark all the options you used during the
+    installation; at the end you can specify whether everything went OK. Be
+    sure to only denote a successful install after having checked the
+    installation properly. Some options require you to check several things (such as
+    config files); this will be mentioned alongside the option.</p>
+    <p>There is also an overview of all feedback on the
+    <a href="{% url releng-test-overview %}">results page</a>.
Once we have + builds that are properly tested (enough successful feedback for all + important features of the ISO or a slightly earlier ISO), we can release new + official media.</p> <div id="releng-feedback"> <form action="" method="post">{% csrf_token %} {{ form.as_p }} -- cgit v1.2.3-54-g00ecf From 6602e4bb9c6d1046fa0f0d30797d7d712ad3cdd7 Mon Sep 17 00:00:00 2001 From: Luke Shumaker <LukeShu@sbcglobal.net> Date: Tue, 29 Nov 2011 19:15:11 -0500 Subject: Untrack bin/ and lib/ they aren't in archweb --- bin/activate | 76 - bin/activate.csh | 32 - bin/activate.fish | 79 - bin/activate_this.py | 32 - bin/easy_install | 10 - bin/easy_install-2.7 | 10 - bin/pip | 10 - bin/pip-2.7 | 10 - bin/python | Bin 3120 -> 0 bytes bin/python2 | Bin 3120 -> 0 bytes lib/python2.7/UserDict.py | 1 - lib/python2.7/_abcoll.py | 1 - lib/python2.7/_weakrefset.py | 1 - lib/python2.7/abc.py | 1 - lib/python2.7/codecs.py | 1 - lib/python2.7/config | 1 - lib/python2.7/copy_reg.py | 1 - lib/python2.7/distutils/__init__.py | 91 - lib/python2.7/distutils/distutils.cfg | 6 - lib/python2.7/encodings | 1 - lib/python2.7/fnmatch.py | 1 - lib/python2.7/genericpath.py | 1 - lib/python2.7/lib-dynload | 1 - lib/python2.7/linecache.py | 1 - lib/python2.7/locale.py | 1 - lib/python2.7/ntpath.py | 1 - lib/python2.7/orig-prefix.txt | 1 - lib/python2.7/os.py | 1 - lib/python2.7/posixpath.py | 1 - lib/python2.7/re.py | 1 - .../distribute-0.6.14-py2.7.egg/EGG-INFO/PKG-INFO | 531 ---- .../EGG-INFO/SOURCES.txt | 84 - .../EGG-INFO/dependency_links.txt | 1 - .../EGG-INFO/entry_points.txt | 61 - .../EGG-INFO/top_level.txt | 4 - .../distribute-0.6.14-py2.7.egg/EGG-INFO/zip-safe | 1 - .../distribute-0.6.14-py2.7.egg/easy_install.py | 5 - .../distribute-0.6.14-py2.7.egg/pkg_resources.py | 2693 -------------------- .../setuptools/__init__.py | 104 - .../setuptools/archive_util.py | 208 -- .../distribute-0.6.14-py2.7.egg/setuptools/cli.exe | Bin 7168 -> 0 bytes .../setuptools/command/__init__.py | 22 - .../setuptools/command/alias.py | 82 - .../setuptools/command/bdist_egg.py | 540 ---- .../setuptools/command/bdist_rpm.py | 82 - .../setuptools/command/bdist_wininst.py | 41 - .../setuptools/command/build_ext.py | 294 --- .../setuptools/command/build_py.py | 268 -- .../setuptools/command/develop.py | 141 - .../setuptools/command/easy_install.py | 1865 -------------- .../setuptools/command/egg_info.py | 457 ---- .../setuptools/command/install.py | 124 - .../setuptools/command/install_egg_info.py | 123 - .../setuptools/command/install_lib.py | 82 - .../setuptools/command/install_scripts.py | 53 - .../setuptools/command/register.py | 10 - .../setuptools/command/rotate.py | 82 - .../setuptools/command/saveopts.py | 25 - .../setuptools/command/sdist.py | 252 -- .../setuptools/command/setopt.py | 164 -- .../setuptools/command/test.py | 180 -- .../setuptools/command/upload.py | 183 -- .../setuptools/command/upload_docs.py | 178 -- .../setuptools/depends.py | 246 -- .../distribute-0.6.14-py2.7.egg/setuptools/dist.py | 816 ------ .../setuptools/extension.py | 36 - .../distribute-0.6.14-py2.7.egg/setuptools/gui.exe | Bin 7168 -> 0 bytes .../setuptools/package_index.py | 830 ------ .../setuptools/sandbox.py | 282 -- .../setuptools/tests/__init__.py | 370 --- .../setuptools/tests/doctest.py | 2679 ------------------- .../setuptools/tests/server.py | 48 - .../setuptools/tests/test_build_ext.py | 20 - .../setuptools/tests/test_develop.py | 82 - .../setuptools/tests/test_easy_install.py | 243 -- .../setuptools/tests/test_packageindex.py | 112 - 
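The stat listing for this commit (it continues below) shows an entire Python 2.7 virtualenv being dropped from version control: the bin/ activate scripts and easy_install/pip wrappers, symlinks into the system stdlib, and the bundled distribute and pip eggs, roughly 23,700 deleted lines in all. The usual recipe for untracking files while keeping them in the working tree is "git rm -r --cached" plus an ignore entry; sketched here with Python's subprocess module (the equivalent plain git commands do the same job, and note the stat list shows no .gitignore change, so the ignore step below is an assumption about good practice rather than part of this commit):

    import subprocess

    # Stop tracking the virtualenv without deleting it from disk,
    # then ignore it so a blanket "git add ." cannot re-add it.
    subprocess.check_call(['git', 'rm', '-r', '--cached', 'bin', 'lib'])
    with open('.gitignore', 'a') as ignore:
        ignore.write('/bin/\n/lib/\n')
    subprocess.check_call(['git', 'add', '.gitignore'])
    subprocess.check_call(['git', 'commit', '-m', 'Untrack bin/ and lib/'])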
.../setuptools/tests/test_resources.py | 565 ---- .../setuptools/tests/test_sandbox.py | 66 - .../setuptools/tests/test_upload_docs.py | 65 - .../distribute-0.6.14-py2.7.egg/site.py | 82 - lib/python2.7/site-packages/easy-install.pth | 4 - .../pip-0.8.1-py2.7.egg/EGG-INFO/PKG-INFO | 348 --- .../pip-0.8.1-py2.7.egg/EGG-INFO/SOURCES.txt | 57 - .../EGG-INFO/dependency_links.txt | 1 - .../pip-0.8.1-py2.7.egg/EGG-INFO/entry_points.txt | 4 - .../pip-0.8.1-py2.7.egg/EGG-INFO/not-zip-safe | 1 - .../pip-0.8.1-py2.7.egg/EGG-INFO/top_level.txt | 1 - .../pip-0.8.1-py2.7.egg/pip/__init__.py | 261 -- .../pip-0.8.1-py2.7.egg/pip/_pkgutil.py | 589 ----- .../pip-0.8.1-py2.7.egg/pip/backwardcompat.py | 55 - .../pip-0.8.1-py2.7.egg/pip/basecommand.py | 203 -- .../pip-0.8.1-py2.7.egg/pip/baseparser.py | 231 -- .../pip-0.8.1-py2.7.egg/pip/commands/__init__.py | 1 - .../pip-0.8.1-py2.7.egg/pip/commands/bundle.py | 33 - .../pip-0.8.1-py2.7.egg/pip/commands/completion.py | 60 - .../pip-0.8.1-py2.7.egg/pip/commands/freeze.py | 109 - .../pip-0.8.1-py2.7.egg/pip/commands/help.py | 32 - .../pip-0.8.1-py2.7.egg/pip/commands/install.py | 247 -- .../pip-0.8.1-py2.7.egg/pip/commands/search.py | 116 - .../pip-0.8.1-py2.7.egg/pip/commands/uninstall.py | 42 - .../pip-0.8.1-py2.7.egg/pip/commands/unzip.py | 9 - .../pip-0.8.1-py2.7.egg/pip/commands/zip.py | 346 --- .../pip-0.8.1-py2.7.egg/pip/download.py | 470 ---- .../pip-0.8.1-py2.7.egg/pip/exceptions.py | 17 - .../site-packages/pip-0.8.1-py2.7.egg/pip/index.py | 686 ----- .../pip-0.8.1-py2.7.egg/pip/locations.py | 45 - .../site-packages/pip-0.8.1-py2.7.egg/pip/log.py | 181 -- .../site-packages/pip-0.8.1-py2.7.egg/pip/req.py | 1432 ----------- .../pip-0.8.1-py2.7.egg/pip/runner.py | 18 - .../site-packages/pip-0.8.1-py2.7.egg/pip/util.py | 479 ---- .../pip-0.8.1-py2.7.egg/pip/vcs/__init__.py | 238 -- .../pip-0.8.1-py2.7.egg/pip/vcs/bazaar.py | 138 - .../pip-0.8.1-py2.7.egg/pip/vcs/git.py | 204 -- .../pip-0.8.1-py2.7.egg/pip/vcs/mercurial.py | 162 -- .../pip-0.8.1-py2.7.egg/pip/vcs/subversion.py | 260 -- .../site-packages/pip-0.8.1-py2.7.egg/pip/venv.py | 53 - .../site-packages/setuptools-0.6c11-py2.7.egg-info | 9 - lib/python2.7/site-packages/setuptools.pth | 1 - lib/python2.7/site.py | 713 ------ lib/python2.7/sre.py | 1 - lib/python2.7/sre_compile.py | 1 - lib/python2.7/sre_constants.py | 1 - lib/python2.7/sre_parse.py | 1 - lib/python2.7/stat.py | 1 - lib/python2.7/types.py | 1 - lib/python2.7/warnings.py | 1 - 126 files changed, 23719 deletions(-) delete mode 100644 bin/activate delete mode 100644 bin/activate.csh delete mode 100644 bin/activate.fish delete mode 100644 bin/activate_this.py delete mode 100755 bin/easy_install delete mode 100755 bin/easy_install-2.7 delete mode 100755 bin/pip delete mode 100755 bin/pip-2.7 delete mode 100755 bin/python delete mode 100755 bin/python2 delete mode 120000 lib/python2.7/UserDict.py delete mode 120000 lib/python2.7/_abcoll.py delete mode 120000 lib/python2.7/_weakrefset.py delete mode 120000 lib/python2.7/abc.py delete mode 120000 lib/python2.7/codecs.py delete mode 120000 lib/python2.7/config delete mode 120000 lib/python2.7/copy_reg.py delete mode 100644 lib/python2.7/distutils/__init__.py delete mode 100644 lib/python2.7/distutils/distutils.cfg delete mode 120000 lib/python2.7/encodings delete mode 120000 lib/python2.7/fnmatch.py delete mode 120000 lib/python2.7/genericpath.py delete mode 120000 lib/python2.7/lib-dynload delete mode 120000 lib/python2.7/linecache.py delete mode 120000 lib/python2.7/locale.py delete mode 
120000 lib/python2.7/ntpath.py delete mode 100644 lib/python2.7/orig-prefix.txt delete mode 120000 lib/python2.7/os.py delete mode 120000 lib/python2.7/posixpath.py delete mode 120000 lib/python2.7/re.py delete mode 100644 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/PKG-INFO delete mode 100644 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/SOURCES.txt delete mode 100644 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/dependency_links.txt delete mode 100644 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/entry_points.txt delete mode 100644 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/top_level.txt delete mode 100644 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/zip-safe delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/easy_install.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/pkg_resources.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/__init__.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/archive_util.py delete mode 100644 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/cli.exe delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/__init__.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/alias.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_egg.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_rpm.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_wininst.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_ext.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_py.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/develop.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/easy_install.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/egg_info.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_egg_info.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_lib.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_scripts.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/register.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/rotate.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/saveopts.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/sdist.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/setopt.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/test.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload.py delete mode 100755 
lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload_docs.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/depends.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/dist.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/extension.py delete mode 100644 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/gui.exe delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/package_index.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/sandbox.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/__init__.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/doctest.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/server.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_build_ext.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_develop.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_easy_install.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_packageindex.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_resources.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_sandbox.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_upload_docs.py delete mode 100755 lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/site.py delete mode 100644 lib/python2.7/site-packages/easy-install.pth delete mode 100644 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/PKG-INFO delete mode 100644 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/SOURCES.txt delete mode 100644 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/dependency_links.txt delete mode 100644 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/entry_points.txt delete mode 100644 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/not-zip-safe delete mode 100644 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/top_level.txt delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/__init__.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/_pkgutil.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/backwardcompat.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/basecommand.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/baseparser.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/__init__.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/bundle.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/completion.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/freeze.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/help.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/install.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/search.py delete mode 100755 
lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/uninstall.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/unzip.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/zip.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/download.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/exceptions.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/index.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/locations.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/log.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/req.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/runner.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/util.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/__init__.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/bazaar.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/git.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/mercurial.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/subversion.py delete mode 100755 lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/venv.py delete mode 100644 lib/python2.7/site-packages/setuptools-0.6c11-py2.7.egg-info delete mode 100644 lib/python2.7/site-packages/setuptools.pth delete mode 100644 lib/python2.7/site.py delete mode 120000 lib/python2.7/sre.py delete mode 120000 lib/python2.7/sre_compile.py delete mode 120000 lib/python2.7/sre_constants.py delete mode 120000 lib/python2.7/sre_parse.py delete mode 120000 lib/python2.7/stat.py delete mode 120000 lib/python2.7/types.py delete mode 120000 lib/python2.7/warnings.py diff --git a/bin/activate b/bin/activate deleted file mode 100644 index 796cc838..00000000 --- a/bin/activate +++ /dev/null @@ -1,76 +0,0 @@ -# This file must be used with "source bin/activate" *from bash* -# you cannot run it directly - -deactivate () { - # reset old environment variables - if [ -n "$_OLD_VIRTUAL_PATH" ] ; then - PATH="$_OLD_VIRTUAL_PATH" - export PATH - unset _OLD_VIRTUAL_PATH - fi - if [ -n "$_OLD_VIRTUAL_PYTHONHOME" ] ; then - PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME" - export PYTHONHOME - unset _OLD_VIRTUAL_PYTHONHOME - fi - - # This should detect bash and zsh, which have a hash command that must - # be called to get it to forget past commands. Without forgetting - # past commands the $PATH changes we made may not be respected - if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then - hash -r - fi - - if [ -n "$_OLD_VIRTUAL_PS1" ] ; then - PS1="$_OLD_VIRTUAL_PS1" - export PS1 - unset _OLD_VIRTUAL_PS1 - fi - - unset VIRTUAL_ENV - if [ ! "$1" = "nondestructive" ] ; then - # Self destruct! 
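The bin/activate being deleted here (the removal continues below) is the stock virtualenv script, and its core mechanism is a save-and-restore pattern: activation stashes the caller's PATH, PYTHONHOME and PS1 in _OLD_VIRTUAL_* variables and prepends $VIRTUAL_ENV/bin to PATH; deactivate simply puts the saved values back. The same idea expressed as a Python context manager, purely as an illustration of the pattern and not code from this repository:

    import os
    from contextlib import contextmanager

    @contextmanager
    def prepended_path(bindir):
        # "activate": remember the old value, prepend ours;
        # "deactivate": restore the original on the way out.
        old_path = os.environ.get('PATH', '')
        os.environ['PATH'] = bindir + os.pathsep + old_path
        try:
            yield
        finally:
            os.environ['PATH'] = old_path

    # '/srv/http/web' is the VIRTUAL_ENV path hard-coded in the deleted
    # script; subprocesses spawned inside the block resolve python and
    # pip from the virtualenv's bin directory first.
    with prepended_path('/srv/http/web/bin'):
        pass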
- unset -f deactivate - fi -} - -# unset irrelavent variables -deactivate nondestructive - -VIRTUAL_ENV="/srv/http/web" -export VIRTUAL_ENV - -_OLD_VIRTUAL_PATH="$PATH" -PATH="$VIRTUAL_ENV/bin:$PATH" -export PATH - -# unset PYTHONHOME if set -# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) -# could use `if (set -u; : $PYTHONHOME) ;` in bash -if [ -n "$PYTHONHOME" ] ; then - _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME" - unset PYTHONHOME -fi - -if [ -z "$VIRTUAL_ENV_DISABLE_PROMPT" ] ; then - _OLD_VIRTUAL_PS1="$PS1" - if [ "x" != x ] ; then - PS1="$PS1" - else - if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then - # special case for Aspen magic directories - # see http://www.zetadev.com/software/aspen/ - PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1" - else - PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1" - fi - fi - export PS1 -fi - -# This should detect bash and zsh, which have a hash command that must -# be called to get it to forget past commands. Without forgetting -# past commands the $PATH changes we made may not be respected -if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then - hash -r -fi diff --git a/bin/activate.csh b/bin/activate.csh deleted file mode 100644 index 0774525d..00000000 --- a/bin/activate.csh +++ /dev/null @@ -1,32 +0,0 @@ -# This file must be used with "source bin/activate.csh" *from csh*. -# You cannot run it directly. -# Created by Davide Di Blasi <davidedb@gmail.com>. - -alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate' - -# Unset irrelavent variables. -deactivate nondestructive - -setenv VIRTUAL_ENV "/srv/http/web" - -set _OLD_VIRTUAL_PATH="$PATH" -setenv PATH "$VIRTUAL_ENV/bin:$PATH" - -set _OLD_VIRTUAL_PROMPT="$prompt" - -if ("" != "") then - set env_name = "" -else - if (`basename "$VIRTUAL_ENV"` == "__") then - # special case for Aspen magic directories - # see http://www.zetadev.com/software/aspen/ - set env_name = `basename \`dirname "$VIRTUAL_ENV"\`` - else - set env_name = `basename "$VIRTUAL_ENV"` - endif -endif -set prompt = "[$env_name] $prompt" -unset env_name - -rehash - diff --git a/bin/activate.fish b/bin/activate.fish deleted file mode 100644 index 39c644a7..00000000 --- a/bin/activate.fish +++ /dev/null @@ -1,79 +0,0 @@ -# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org) -# you cannot run it directly - -function deactivate -d "Exit virtualenv and return to normal shell environment" - # reset old environment variables - if test -n "$_OLD_VIRTUAL_PATH" - set -gx PATH $_OLD_VIRTUAL_PATH - set -e _OLD_VIRTUAL_PATH - end - if test -n "$_OLD_VIRTUAL_PYTHONHOME" - set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME - set -e _OLD_VIRTUAL_PYTHONHOME - end - - if test -n "$_OLD_FISH_PROMPT_OVERRIDE" - functions -e fish_prompt - set -e _OLD_FISH_PROMPT_OVERRIDE - end - - set -e VIRTUAL_ENV - if test "$argv[1]" != "nondestructive" - # Self destruct! 
- functions -e deactivate - end -end - -# unset irrelavent variables -deactivate nondestructive - -set -gx VIRTUAL_ENV "/srv/http/web" - -set -gx _OLD_VIRTUAL_PATH $PATH -set -gx PATH "$VIRTUAL_ENV/bin" $PATH - -# unset PYTHONHOME if set -if set -q PYTHONHOME - set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME - set -e PYTHONHOME -end - -if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" - # fish shell uses a function, instead of env vars, - # to produce the prompt. Overriding the existing function is easy. - # However, adding to the current prompt, instead of clobbering it, - # is a little more work. - set -l oldpromptfile (tempfile) - if test $status - # save the current fish_prompt function... - echo "function _old_fish_prompt" >> $oldpromptfile - echo -n \# >> $oldpromptfile - functions fish_prompt >> $oldpromptfile - # we've made the "_old_fish_prompt" file, source it. - . $oldpromptfile - rm -f $oldpromptfile - - if test -n "" - # We've been given us a prompt override. - # - # FIXME: Unsure how to handle this *safely*. We could just eval() - # whatever is given, but the risk is a bit much. - echo "activate.fish: Alternative prompt prefix is not supported under fish-shell." 1>&2 - echo "activate.fish: Alter the fish_prompt in this file as needed." 1>&2 - end - - # with the original prompt function renamed, we can override with our own. - function fish_prompt - set -l _checkbase (basename "$VIRTUAL_ENV") - if test $_checkbase = "__" - # special case for Aspen magic directories - # see http://www.zetadev.com/software/aspen/ - printf "%s[%s]%s %s" (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal) (_old_fish_prompt) - else - printf "%s(%s)%s%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal) (_old_fish_prompt) - end - end - set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" - end -end - diff --git a/bin/activate_this.py b/bin/activate_this.py deleted file mode 100644 index aff6927d..00000000 --- a/bin/activate_this.py +++ /dev/null @@ -1,32 +0,0 @@ -"""By using execfile(this_file, dict(__file__=this_file)) you will -activate this virtualenv environment. 
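As this docstring says (it continues below), activate_this.py exists for the case where an already-running interpreter, a mod_wsgi process being the classic example, needs the virtualenv on sys.path: the caller execfile()s the script, which adds the venv's site-packages via site.addsitedir and moves the new entries to the front of sys.path. A minimal hypothetical consumer, assuming the venv path used throughout these scripts and a top-level settings module (neither deployment file is part of this patch):

    import os

    # Python 2 idiom, exactly as the docstring describes.
    activate_this = '/srv/http/web/bin/activate_this.py'
    execfile(activate_this, dict(__file__=activate_this))

    # With the virtualenv's packages importable, hand off to Django.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')
    import django.core.handlers.wsgi
    application = django.core.handlers.wsgi.WSGIHandler()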
- -This can be used when you must use an existing Python interpreter, not -the virtualenv bin/python -""" - -try: - __file__ -except NameError: - raise AssertionError( - "You must run this like execfile('path/to/activate_this.py', dict(__file__='path/to/activate_this.py'))") -import sys -import os - -base = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -if sys.platform == 'win32': - site_packages = os.path.join(base, 'Lib', 'site-packages') -else: - site_packages = os.path.join(base, 'lib', 'python%s' % sys.version[:3], 'site-packages') -prev_sys_path = list(sys.path) -import site -site.addsitedir(site_packages) -sys.real_prefix = sys.prefix -sys.prefix = base -# Move the added items to the front of the path: -new_sys_path = [] -for item in list(sys.path): - if item not in prev_sys_path: - new_sys_path.append(item) - sys.path.remove(item) -sys.path[:0] = new_sys_path diff --git a/bin/easy_install b/bin/easy_install deleted file mode 100755 index eeb31a5c..00000000 --- a/bin/easy_install +++ /dev/null @@ -1,10 +0,0 @@ -#!/srv/http/web/bin/python2 -# EASY-INSTALL-ENTRY-SCRIPT: 'distribute==0.6.14','console_scripts','easy_install' -__requires__ = 'distribute==0.6.14' -import sys -from pkg_resources import load_entry_point - -if __name__ == '__main__': - sys.exit( - load_entry_point('distribute==0.6.14', 'console_scripts', 'easy_install')() - ) diff --git a/bin/easy_install-2.7 b/bin/easy_install-2.7 deleted file mode 100755 index 09c3bf26..00000000 --- a/bin/easy_install-2.7 +++ /dev/null @@ -1,10 +0,0 @@ -#!/srv/http/web/bin/python2 -# EASY-INSTALL-ENTRY-SCRIPT: 'distribute==0.6.14','console_scripts','easy_install-2.7' -__requires__ = 'distribute==0.6.14' -import sys -from pkg_resources import load_entry_point - -if __name__ == '__main__': - sys.exit( - load_entry_point('distribute==0.6.14', 'console_scripts', 'easy_install-2.7')() - ) diff --git a/bin/pip b/bin/pip deleted file mode 100755 index 2d851f27..00000000 --- a/bin/pip +++ /dev/null @@ -1,10 +0,0 @@ -#!/srv/http/web/bin/python2 -# EASY-INSTALL-ENTRY-SCRIPT: 'pip==0.8.1','console_scripts','pip' -__requires__ = 'pip==0.8.1' -import sys -from pkg_resources import load_entry_point - -if __name__ == '__main__': - sys.exit( - load_entry_point('pip==0.8.1', 'console_scripts', 'pip')() - ) diff --git a/bin/pip-2.7 b/bin/pip-2.7 deleted file mode 100755 index 23e5741e..00000000 --- a/bin/pip-2.7 +++ /dev/null @@ -1,10 +0,0 @@ -#!/srv/http/web/bin/python2 -# EASY-INSTALL-ENTRY-SCRIPT: 'pip==0.8.1','console_scripts','pip-2.7' -__requires__ = 'pip==0.8.1' -import sys -from pkg_resources import load_entry_point - -if __name__ == '__main__': - sys.exit( - load_entry_point('pip==0.8.1', 'console_scripts', 'pip-2.7')() - ) diff --git a/bin/python b/bin/python deleted file mode 100755 index 888b52ee..00000000 Binary files a/bin/python and /dev/null differ diff --git a/bin/python2 b/bin/python2 deleted file mode 100755 index 888b52ee..00000000 Binary files a/bin/python2 and /dev/null differ diff --git a/lib/python2.7/UserDict.py b/lib/python2.7/UserDict.py deleted file mode 120000 index 1dcde33c..00000000 --- a/lib/python2.7/UserDict.py +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/UserDict.py \ No newline at end of file diff --git a/lib/python2.7/_abcoll.py b/lib/python2.7/_abcoll.py deleted file mode 120000 index e39c38d2..00000000 --- a/lib/python2.7/_abcoll.py +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/_abcoll.py \ No newline at end of file diff --git a/lib/python2.7/_weakrefset.py b/lib/python2.7/_weakrefset.py deleted file 
mode 120000 index a3c1cd4f..00000000 --- a/lib/python2.7/_weakrefset.py +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/_weakrefset.py \ No newline at end of file diff --git a/lib/python2.7/abc.py b/lib/python2.7/abc.py deleted file mode 120000 index cb3e5d16..00000000 --- a/lib/python2.7/abc.py +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/abc.py \ No newline at end of file diff --git a/lib/python2.7/codecs.py b/lib/python2.7/codecs.py deleted file mode 120000 index 50169dc7..00000000 --- a/lib/python2.7/codecs.py +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/codecs.py \ No newline at end of file diff --git a/lib/python2.7/config b/lib/python2.7/config deleted file mode 120000 index 154af7a5..00000000 --- a/lib/python2.7/config +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/config \ No newline at end of file diff --git a/lib/python2.7/copy_reg.py b/lib/python2.7/copy_reg.py deleted file mode 120000 index 5dc0af34..00000000 --- a/lib/python2.7/copy_reg.py +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/copy_reg.py \ No newline at end of file diff --git a/lib/python2.7/distutils/__init__.py b/lib/python2.7/distutils/__init__.py deleted file mode 100644 index 7ebb41c0..00000000 --- a/lib/python2.7/distutils/__init__.py +++ /dev/null @@ -1,91 +0,0 @@ -import os -import sys -import warnings -import opcode # opcode is not a virtualenv module, so we can use it to find the stdlib - # Important! To work on pypy, this must be a module that resides in the - # lib-python/modified-x.y.z directory - -dirname = os.path.dirname - -distutils_path = os.path.join(os.path.dirname(opcode.__file__), 'distutils') -if os.path.normpath(distutils_path) == os.path.dirname(os.path.normpath(__file__)): - warnings.warn( - "The virtualenv distutils package at %s appears to be in the same location as the system distutils?") -else: - __path__.insert(0, distutils_path) - exec open(os.path.join(distutils_path, '__init__.py')).read() - -import dist -import sysconfig - - -## patch build_ext (distutils doesn't know how to get the libs directory -## path on windows - it hardcodes the paths around the patched sys.prefix) - -if sys.platform == 'win32': - from distutils.command.build_ext import build_ext as old_build_ext - class build_ext(old_build_ext): - def finalize_options (self): - if self.library_dirs is None: - self.library_dirs = [] - elif isinstance(self.library_dirs, basestring): - self.library_dirs = self.library_dirs.split(os.pathsep) - - self.library_dirs.insert(0, os.path.join(sys.real_prefix, "Libs")) - old_build_ext.finalize_options(self) - - from distutils.command import build_ext as build_ext_module - build_ext_module.build_ext = build_ext - -## distutils.dist patches: - -old_find_config_files = dist.Distribution.find_config_files -def find_config_files(self): - found = old_find_config_files(self) - system_distutils = os.path.join(distutils_path, 'distutils.cfg') - #if os.path.exists(system_distutils): - # found.insert(0, system_distutils) - # What to call the per-user config file - if os.name == 'posix': - user_filename = ".pydistutils.cfg" - else: - user_filename = "pydistutils.cfg" - user_filename = os.path.join(sys.prefix, user_filename) - if os.path.isfile(user_filename): - for item in list(found): - if item.endswith('pydistutils.cfg'): - found.remove(item) - found.append(user_filename) - return found -dist.Distribution.find_config_files = find_config_files - -## distutils.sysconfig patches: - -old_get_python_inc = sysconfig.get_python_inc -def sysconfig_get_python_inc(plat_specific=0, prefix=None): - 
if prefix is None: - prefix = sys.real_prefix - return old_get_python_inc(plat_specific, prefix) -sysconfig_get_python_inc.__doc__ = old_get_python_inc.__doc__ -sysconfig.get_python_inc = sysconfig_get_python_inc - -old_get_python_lib = sysconfig.get_python_lib -def sysconfig_get_python_lib(plat_specific=0, standard_lib=0, prefix=None): - if standard_lib and prefix is None: - prefix = sys.real_prefix - return old_get_python_lib(plat_specific, standard_lib, prefix) -sysconfig_get_python_lib.__doc__ = old_get_python_lib.__doc__ -sysconfig.get_python_lib = sysconfig_get_python_lib - -old_get_config_vars = sysconfig.get_config_vars -def sysconfig_get_config_vars(*args): - real_vars = old_get_config_vars(*args) - if sys.platform == 'win32': - lib_dir = os.path.join(sys.real_prefix, "libs") - if isinstance(real_vars, dict) and 'LIBDIR' not in real_vars: - real_vars['LIBDIR'] = lib_dir # asked for all - elif isinstance(real_vars, list) and 'LIBDIR' in args: - real_vars = real_vars + [lib_dir] # asked for list - return real_vars -sysconfig_get_config_vars.__doc__ = old_get_config_vars.__doc__ -sysconfig.get_config_vars = sysconfig_get_config_vars diff --git a/lib/python2.7/distutils/distutils.cfg b/lib/python2.7/distutils/distutils.cfg deleted file mode 100644 index 1af230ec..00000000 --- a/lib/python2.7/distutils/distutils.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# This is a config file local to this virtualenv installation -# You may include options that will be used by all distutils commands, -# and by easy_install. For instance: -# -# [easy_install] -# find_links = http://mylocalsite diff --git a/lib/python2.7/encodings b/lib/python2.7/encodings deleted file mode 120000 index 1250ad86..00000000 --- a/lib/python2.7/encodings +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/encodings \ No newline at end of file diff --git a/lib/python2.7/fnmatch.py b/lib/python2.7/fnmatch.py deleted file mode 120000 index ec3e10cf..00000000 --- a/lib/python2.7/fnmatch.py +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/fnmatch.py \ No newline at end of file diff --git a/lib/python2.7/genericpath.py b/lib/python2.7/genericpath.py deleted file mode 120000 index cb8897ce..00000000 --- a/lib/python2.7/genericpath.py +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/genericpath.py \ No newline at end of file diff --git a/lib/python2.7/lib-dynload b/lib/python2.7/lib-dynload deleted file mode 120000 index c706a1eb..00000000 --- a/lib/python2.7/lib-dynload +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/lib-dynload \ No newline at end of file diff --git a/lib/python2.7/linecache.py b/lib/python2.7/linecache.py deleted file mode 120000 index 943c4297..00000000 --- a/lib/python2.7/linecache.py +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/linecache.py \ No newline at end of file diff --git a/lib/python2.7/locale.py b/lib/python2.7/locale.py deleted file mode 120000 index 92c243c6..00000000 --- a/lib/python2.7/locale.py +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/locale.py \ No newline at end of file diff --git a/lib/python2.7/ntpath.py b/lib/python2.7/ntpath.py deleted file mode 120000 index 5659ae14..00000000 --- a/lib/python2.7/ntpath.py +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/ntpath.py \ No newline at end of file diff --git a/lib/python2.7/orig-prefix.txt b/lib/python2.7/orig-prefix.txt deleted file mode 100644 index e25db584..00000000 --- a/lib/python2.7/orig-prefix.txt +++ /dev/null @@ -1 +0,0 @@ -/usr \ No newline at end of file diff --git a/lib/python2.7/os.py b/lib/python2.7/os.py deleted file mode 120000 
index 950fc8d2..00000000 --- a/lib/python2.7/os.py +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/os.py \ No newline at end of file diff --git a/lib/python2.7/posixpath.py b/lib/python2.7/posixpath.py deleted file mode 120000 index 30cb8ca5..00000000 --- a/lib/python2.7/posixpath.py +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/posixpath.py \ No newline at end of file diff --git a/lib/python2.7/re.py b/lib/python2.7/re.py deleted file mode 120000 index 56a07316..00000000 --- a/lib/python2.7/re.py +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/re.py \ No newline at end of file diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/PKG-INFO b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/PKG-INFO deleted file mode 100644 index 629e916b..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/PKG-INFO +++ /dev/null @@ -1,531 +0,0 @@ -Metadata-Version: 1.0 -Name: distribute -Version: 0.6.14 -Summary: Easily download, build, install, upgrade, and uninstall Python packages -Home-page: http://packages.python.org/distribute -Author: The fellowship of the packaging -Author-email: distutils-sig@python.org -License: PSF or ZPL -Description: =============================== - Installing and Using Distribute - =============================== - - .. contents:: **Table of Contents** - - ----------- - Disclaimers - ----------- - - About the fork - ============== - - `Distribute` is a fork of the `Setuptools` project. - - Distribute is intended to replace Setuptools as the standard method - for working with Python module distributions. - - The fork has two goals: - - - Providing a backward compatible version to replace Setuptools - and make all distributions that depend on Setuptools work as - before, but with fewer bugs and behavioral issues. - - This work is done in the 0.6.x series. - - Starting with version 0.6.2, Distribute supports Python 3. - Installing and using distribute for Python 3 code works exactly - the same as for Python 2 code, but Distribute also helps you to support - Python 2 and Python 3 from the same source code by letting you run 2to3 - on the code as a part of the build process, by setting the keyword parameter - ``use_2to3`` to True. See http://packages.python.org/distribute for more - information. - - - Refactoring the code, and releasing it in several distributions. - This work is being done in the 0.7.x series but not yet released. - - The roadmap is still evolving, and the page that is up-to-date is - located at: `http://packages.python.org/distribute/roadmap`. - - If you install `Distribute` and want to switch back for any reason to - `Setuptools`, see the `Uninstallation instructions`_ section. - - More documentation - ================== - - You can get more information in the Sphinx-based documentation, located - at http://packages.python.org/distribute. This documentation includes the old - Setuptools documentation that is slowly being replaced, and brand new content. - - About the installation process - ============================== - - The `Distribute` installer modifies your installation by de-activating an - existing installation of `Setuptools` in a bootstrap process. This process - has been tested in various installation schemes and contexts but in case of a - bug during this process your Python installation might be left in a broken - state.
Since all modified files and directories are copied before the - installation starts, you will be able to get back to a normal state by reading - the instructions in the `Uninstallation instructions`_ section. - - In any case, it is recommended to save your `site-packages` directory before - you start the installation of `Distribute`. - - ------------------------- - Installation Instructions - ------------------------- - - Distribute is only released as a source distribution. - - It can be installed using pip, from the source tarball, - or by using the ``distribute_setup.py`` script provided online. - - ``distribute_setup.py`` is the simplest and preferred way on all systems. - - distribute_setup.py - =================== - - Download - `distribute_setup.py <http://python-distribute.org/distribute_setup.py>`_ - and execute it, using the Python interpreter of your choice. - - If your shell has the ``curl`` program you can do:: - - $ curl -O http://python-distribute.org/distribute_setup.py - $ python distribute_setup.py - - Note that this file is also provided in the source release. - - pip - === - - Run pip:: - - $ pip install distribute - - Source installation - =================== - - Download the source tarball, uncompress it, then run the install command:: - - $ curl -O http://pypi.python.org/packages/source/d/distribute/distribute-0.6.14.tar.gz - $ tar -xzvf distribute-0.6.14.tar.gz - $ cd distribute-0.6.14 - $ python setup.py install - - --------------------------- - Uninstallation Instructions - --------------------------- - - Like other distutils-based distributions, Distribute doesn't provide an - uninstaller yet. It's all done manually! We are all waiting for PEP 376 - support in Python. - - Distribute is installed in three steps: - - 1. it moves any existing installation of Setuptools out of the way - 2. it installs a `fake` setuptools installation - 3. it installs distribute - - Distribute can be removed like this: - - - remove the ``distribute*.egg`` file located in your site-packages directory - - remove the ``setuptools.pth`` file located in your site-packages directory - - remove the easy_install script located in your ``sys.prefix/bin`` directory - - remove the ``setuptools*.egg`` directory located in your site-packages directory, - if any. - - If you want to get back to setuptools: - - - reinstall setuptools using its instructions. - - Lastly: - - - remove the *.OLD.* directory located in your site-packages directory if any, - **once you have checked everything was working correctly again**. - - ------------------------- - Quick help for developers - ------------------------- - - To create an egg which is compatible with Distribute, use the same - practice as with Setuptools, e.g.:: - - from setuptools import setup - - setup(... - ) - - To use `pkg_resources` to access data files in the egg, you should - require the Setuptools distribution explicitly:: - - from setuptools import setup - - setup(...
- install_requires=['distribute'] - ) - - ----------- - Install FAQ - ----------- - - - **Why is Distribute wrapping my Setuptools installation?** - - Since Distribute is a fork, and since it provides the same package - and modules, it renames the existing Setuptools egg and inserts a - new one which merely wraps the Distribute code. This way, full - backwards compatibility is kept for packages which rely on the - Setuptools modules. - - At the same time, packages can meet their dependency on Setuptools - without actually installing it (which would disable Distribute). - - - **How does Distribute interact with virtualenv?** - - Every time you create a virtualenv, it will install setuptools by default. - You either need to re-install Distribute in it right after or pass the - ``--distribute`` option when creating it. - - Once installed, your virtualenv will use Distribute transparently. - - However, if you have Setuptools installed in your system-wide Python, - and if the virtualenv you are in was generated without the `--no-site-packages` - option, the Distribute installation will stop. - - In this case, you need to build a virtualenv with the `--no-site-packages` - option or to install `Distribute` globally. - - - **How does Distribute interact with zc.buildout?** - - You can use Distribute in your zc.buildout, with the --distribute option, - starting with zc.buildout 1.4.2:: - - $ python bootstrap.py --distribute - - For previous zc.buildout versions, *the only thing* you need to do - is use the bootstrap at `http://python-distribute.org/bootstrap.py`. Run - that bootstrap and ``bin/buildout`` (and all other buildout-generated - scripts) will transparently use distribute instead of setuptools. You do - not need a specific buildout release. - - A shared eggs directory is no problem (since 0.6.6): the setuptools egg is - left in place unmodified. So other buildouts that do not yet use the new - bootstrap continue to work just fine. And there is no need to list - ``distribute`` somewhere in your eggs: using the bootstrap is enough. - - The source code for the bootstrap script is located at - `http://bitbucket.org/tarek/buildout-distribute`. - - - - ----------------------------- - Feedback and getting involved - ----------------------------- - - - Mailing list: http://mail.python.org/mailman/listinfo/distutils-sig - - Issue tracker: http://bitbucket.org/tarek/distribute/issues/ - - Code Repository: http://bitbucket.org/tarek/distribute - - ======= - CHANGES - ======= - - ------ - 0.6.14 - ------ - - * Issue 170: Fixed unittest failure. Thanks to Toshio. - * Issue 171: Fixed race condition in unittests causing deadlocks in test suite. - * Issue 143: Fixed a lookup issue with easy_install. - Thanks to David and Zooko. - * Issue 174: Fixed the edit mode when it's used with setuptools itself - - ------ - 0.6.13 - ------ - - * Issue 160: 2.7 gives ValueError("Invalid IPv6 URL") - * Issue 150: Fixed using ~/.local even in a --no-site-packages virtualenv - * Issue 163: scan index links before external links, and don't use the md5 when - comparing two distributions - - ------ - 0.6.12 - ------ - - * Issue 149: Fixed various failures on 2.3/2.4 - - ------ - 0.6.11 - ------ - - * Found another case of SandboxViolation - fixed - * Issue 15 and 48: Introduced a socket timeout of 15 seconds on url openings - * Added indexsidebar.html into MANIFEST.in - * Issue 108: Fixed TypeError with Python3.1 - * Issue 121: Fixed --help install command trying to actually install.
- * Issue 112: Added an os.makedirs so that Tarek's solution will work. - * Issue 133: Added --no-find-links to easy_install - * Added easy_install --user - * Issue 100: Fixed develop --user not taking '.' in PYTHONPATH into account - * Issue 134: removed spurious UserWarnings. Patch by VanLindberg - * Issue 138: cant_write_to_target error when setup_requires is used. - * Issue 147: respect the sys.dont_write_bytecode flag - - ------ - 0.6.10 - ------ - - * Reverted change made for the DistributionNotFound exception because - zc.buildout uses the exception message to get the name of the - distribution. - - ----- - 0.6.9 - ----- - - * Issue 90: unknown setuptools version can be added in the working set - * Issue 87: setup.py doesn't try to convert distribute_setup.py anymore - Initial Patch by arfrever. - * Issue 89: added a side bar with a download link to the doc. - * Issue 86: fixed missing sentence in pkg_resources doc. - * Added a nicer error message when a DistributionNotFound is raised. - * Issue 80: test_develop now works with Python 3.1 - * Issue 93: upload_docs now works if there is an empty sub-directory. - * Issue 70: exec bit on non-exec files - * Issue 99: now the standalone easy_install command doesn't use a - "setup.cfg" if any exists in the working directory. It will use it - only if triggered by ``install_requires`` from a setup.py call - (install, develop, etc). - * Issue 101: Allowing ``os.devnull`` in Sandbox - * Issue 92: Fixed the "no eggs" found error with MacPort - (platform.mac_ver() fails) - * Issue 103: test_get_script_header_jython_workaround not run - anymore under py3 with C or POSIX locale. Contributed by Arfrever. - * Issue 104: removed the assertion when the installation fails, - with a nicer message for the end user. - * Issue 100: making sure there's no SandboxViolation when - the setup script patches setuptools. - - ----- - 0.6.8 - ----- - - * Added "check_packages" in dist. (added in Setuptools 0.6c11) - * Fixed the DONT_PATCH_SETUPTOOLS state. - - ----- - 0.6.7 - ----- - - * Issue 58: Added --user support to the develop command - * Issue 11: Generated scripts now wrap their call to the script entry point - in the standard "if __name__ == '__main__'" - * Added the 'DONT_PATCH_SETUPTOOLS' environment variable, so virtualenv - can drive an installation that doesn't patch a global setuptools. - * Reviewed unladen-swallow specific change from - http://code.google.com/p/unladen-swallow/source/detail?spec=svn875&r=719 - and determined that it no longer applies. Distribute should work fine with - Unladen Swallow 2009Q3. - * Issue 21: Allow PackageIndex.open_url to gracefully handle all cases of a - httplib.HTTPException instead of just InvalidURL and BadStatusLine. - * Removed virtual-python.py from this distribution and updated documentation - to point to the actively maintained virtualenv instead. - * Issue 64: use_setuptools no longer rebuilds the distribute egg every - time it is run - * use_setuptools now properly respects the requested version - * use_setuptools will no longer try to import a distribute egg for the - wrong Python version - * Issue 74: no_fake should be True by default. - * Issue 72: avoid a bootstrapping issue with easy_install -U - - ----- - 0.6.6 - ----- - - * Unified the bootstrap file so it works on both py2.x and py3k without 2to3 - (patch by Holger Krekel) - - ----- - 0.6.5 - ----- - - * Issue 65: cli.exe and gui.exe are now generated at build time, - depending on the platform in use.
- - * Issue 67: Fixed doc typo (PEP 381/382) - - * Distribute no longer shadows setuptools if we require a 0.7-series - setuptools. And an error is raised when installing a 0.7 setuptools with - distribute. - - * When run from within buildout, no attempt is made to modify an existing - setuptools egg, whether in a shared egg directory or a system setuptools. - - * Fixed a hole in sandboxing allowing builtin file to write outside of - the sandbox. - - ----- - 0.6.4 - ----- - - * Added the generation of `distribute_setup_3k.py` during the release. - This closes http://bitbucket.org/tarek/distribute/issue/52. - - * Added an upload_docs command to easily upload project documentation to - PyPI's http://packages.python.org. - This closes http://bitbucket.org/tarek/distribute/issue/56. - - * Fixed a bootstrap bug on the use_setuptools() API. - - ----- - 0.6.3 - ----- - - setuptools - ========== - - * Fixed a bunch of calls to file() that caused crashes on Python 3. - - bootstrapping - ============= - - * Fixed a bug in sorting that caused bootstrap to fail on Python 3. - - ----- - 0.6.2 - ----- - - setuptools - ========== - - * Added Python 3 support; see docs/python3.txt. - This closes http://bugs.python.org/setuptools/issue39. - - * Added option to run 2to3 automatically when installing on Python 3. - This closes http://bitbucket.org/tarek/distribute/issue/31. - - * Fixed invalid usage of requirement.parse, that broke develop -d. - This closes http://bugs.python.org/setuptools/issue44. - - * Fixed script launcher for 64-bit Windows. - This closes http://bugs.python.org/setuptools/issue2. - - * KeyError when compiling extensions. - This closes http://bugs.python.org/setuptools/issue41. - - bootstrapping - ============= - - * Fixed bootstrap not working on Windows. - This closes http://bitbucket.org/tarek/distribute/issue/49. - - * Fixed 2.6 dependencies. - This closes http://bitbucket.org/tarek/distribute/issue/50. - - * Make sure setuptools is patched when running through easy_install - This closes http://bugs.python.org/setuptools/issue40. - - ----- - 0.6.1 - ----- - - setuptools - ========== - - * package_index.urlopen now catches BadStatusLine and malformed url errors. - This closes http://bitbucket.org/tarek/distribute/issue/16 and - http://bitbucket.org/tarek/distribute/issue/18. - - * zip_ok is now False by default. This closes - http://bugs.python.org/setuptools/issue33. - - * Fixed invalid URL error catching. http://bugs.python.org/setuptools/issue20. - - * Fixed invalid bootstrapping with easy_install installation - http://bitbucket.org/tarek/distribute/issue/40. - Thanks to Florian Schulze for the help. - - * Removed buildout/bootstrap.py. A new repository will create a specific - bootstrap.py script. - - - bootstrapping - ============= - - * The bootstrap process leaves setuptools alone if it is detected in the system - and --root or --prefix is provided, but is not in the same location. - This closes http://bitbucket.org/tarek/distribute/issue/10. - - --- - 0.6 - --- - - setuptools - ========== - - * Packages required at build time were not fully present at install time. - This closes http://bitbucket.org/tarek/distribute/issue/12. - - * Protected against failures in tarfile extraction. This closes - http://bitbucket.org/tarek/distribute/issue/10. - - * Made Jython api_tests.txt doctest compatible. This closes - http://bitbucket.org/tarek/distribute/issue/7. - - * sandbox.py replaced builtin type file with builtin function open. This - closes http://bitbucket.org/tarek/distribute/issue/6.
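The use_setuptools() entries above refer to the bootstrap helper shipped as distribute_setup.py (listed in the SOURCES.txt below). A minimal sketch of the conventional pattern, assuming distribute_setup.py sits next to setup.py:

    # setup.py: bootstrap Distribute before importing setuptools
    from distribute_setup import use_setuptools
    use_setuptools()

    from setuptools import setup

    setup(name='example', version='0.1')  # placeholder metadata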
- - * Immediately close all file handles. This closes - http://bitbucket.org/tarek/distribute/issue/3. - - * Added compatibility with Subversion 1.6. This references - http://bitbucket.org/tarek/distribute/issue/1. - - pkg_resources - ============= - - * Avoid a call to /usr/bin/sw_vers on OSX and use the official platform API - instead. Based on a patch from ronaldoussoren. This closes - http://bitbucket.org/tarek/distribute/issue/5. - - * Fixed a SandboxViolation for mkdir that could occur in certain cases. - This closes http://bitbucket.org/tarek/distribute/issue/13. - - * Allow find_on_path on systems with tight permissions to fail gracefully. - This closes http://bitbucket.org/tarek/distribute/issue/9. - - * Corrected inconsistency between documentation and code of add_entry. - This closes http://bitbucket.org/tarek/distribute/issue/8. - - * Immediately close all file handles. This closes - http://bitbucket.org/tarek/distribute/issue/3. - - easy_install - ============ - - * Immediately close all file handles. This closes - http://bitbucket.org/tarek/distribute/issue/3. - - -Keywords: CPAN PyPI distutils eggs package management -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Python Software Foundation License -Classifier: License :: OSI Approved :: Zope Public License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: System :: Archiving :: Packaging -Classifier: Topic :: System :: Systems Administration -Classifier: Topic :: Utilities diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/SOURCES.txt b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/SOURCES.txt deleted file mode 100644 index fc0a26bd..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/SOURCES.txt +++ /dev/null @@ -1,84 +0,0 @@ -CHANGES.txt -CONTRIBUTORS.txt -DEVGUIDE.txt -MANIFEST.in -README.txt -distribute_setup.py -easy_install.py -launcher.c -pkg_resources.py -setup.cfg -setup.py -site.py -distribute.egg-info/PKG-INFO -distribute.egg-info/SOURCES.txt -distribute.egg-info/dependency_links.txt -distribute.egg-info/entry_points.txt -distribute.egg-info/top_level.txt -distribute.egg-info/zip-safe -docs/Makefile -docs/conf.py -docs/easy_install.txt -docs/index.txt -docs/pkg_resources.txt -docs/python3.txt -docs/roadmap.txt -docs/setuptools.txt -docs/using.txt -docs/_templates/indexsidebar.html -docs/_theme/nature/theme.conf -docs/_theme/nature/static/nature.css_t -docs/_theme/nature/static/pygments.css -setuptools/__init__.py -setuptools/archive_util.py -setuptools/cli.exe -setuptools/depends.py -setuptools/dist.py -setuptools/extension.py -setuptools/gui.exe -setuptools/package_index.py -setuptools/sandbox.py -setuptools/command/__init__.py -setuptools/command/alias.py -setuptools/command/bdist_egg.py -setuptools/command/bdist_rpm.py -setuptools/command/bdist_wininst.py -setuptools/command/build_ext.py -setuptools/command/build_py.py -setuptools/command/develop.py -setuptools/command/easy_install.py -setuptools/command/egg_info.py -setuptools/command/install.py -setuptools/command/install_egg_info.py -setuptools/command/install_lib.py -setuptools/command/install_scripts.py -setuptools/command/register.py -setuptools/command/rotate.py
-setuptools/command/saveopts.py -setuptools/command/sdist.py -setuptools/command/setopt.py -setuptools/command/test.py -setuptools/command/upload.py -setuptools/command/upload_docs.py -setuptools/tests/__init__.py -setuptools/tests/doctest.py -setuptools/tests/server.py -setuptools/tests/test_build_ext.py -setuptools/tests/test_develop.py -setuptools/tests/test_easy_install.py -setuptools/tests/test_packageindex.py -setuptools/tests/test_resources.py -setuptools/tests/test_sandbox.py -setuptools/tests/test_upload_docs.py -setuptools/tests/win_script_wrapper.txt -setuptools/tests/indexes/test_links_priority/external.html -setuptools/tests/indexes/test_links_priority/simple/foobar/index.html -tests/api_tests.txt -tests/install_test.py -tests/manual_test.py -tests/test_distribute_setup.py -tests/shlib_test/hello.c -tests/shlib_test/hello.pyx -tests/shlib_test/hellolib.c -tests/shlib_test/setup.py -tests/shlib_test/test_hello.py \ No newline at end of file diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/dependency_links.txt b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/dependency_links.txt deleted file mode 100644 index 8b137891..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/entry_points.txt b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/entry_points.txt deleted file mode 100644 index 9fd41758..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/entry_points.txt +++ /dev/null @@ -1,61 +0,0 @@ -[distutils.commands] -bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm -rotate = setuptools.command.rotate:rotate -develop = setuptools.command.develop:develop -setopt = setuptools.command.setopt:setopt -build_py = setuptools.command.build_py:build_py -saveopts = setuptools.command.saveopts:saveopts -egg_info = setuptools.command.egg_info:egg_info -register = setuptools.command.register:register -upload_docs = setuptools.command.upload_docs:upload_docs -install_egg_info = setuptools.command.install_egg_info:install_egg_info -alias = setuptools.command.alias:alias -easy_install = setuptools.command.easy_install:easy_install -install_scripts = setuptools.command.install_scripts:install_scripts -bdist_wininst = setuptools.command.bdist_wininst:bdist_wininst -bdist_egg = setuptools.command.bdist_egg:bdist_egg -install = setuptools.command.install:install -test = setuptools.command.test:test -install_lib = setuptools.command.install_lib:install_lib -build_ext = setuptools.command.build_ext:build_ext -sdist = setuptools.command.sdist:sdist - -[egg_info.writers] -dependency_links.txt = setuptools.command.egg_info:overwrite_arg -requires.txt = setuptools.command.egg_info:write_requirements -PKG-INFO = setuptools.command.egg_info:write_pkg_info -eager_resources.txt = setuptools.command.egg_info:overwrite_arg -top_level.txt = setuptools.command.egg_info:write_toplevel_names -namespace_packages.txt = setuptools.command.egg_info:overwrite_arg -entry_points.txt = setuptools.command.egg_info:write_entries -depends.txt = setuptools.command.egg_info:warn_depends_obsolete - -[console_scripts] -easy_install = setuptools.command.easy_install:main -easy_install-2.7 = setuptools.command.easy_install:main - -[setuptools.file_finders] -svn_cvs = setuptools.command.sdist:_default_revctrl - -[distutils.setup_keywords] -dependency_links = 
setuptools.dist:assert_string_list -entry_points = setuptools.dist:check_entry_points -extras_require = setuptools.dist:check_extras -package_data = setuptools.dist:check_package_data -install_requires = setuptools.dist:check_requirements -use_2to3 = setuptools.dist:assert_bool -use_2to3_fixers = setuptools.dist:assert_string_list -include_package_data = setuptools.dist:assert_bool -exclude_package_data = setuptools.dist:check_package_data -namespace_packages = setuptools.dist:check_nsp -test_suite = setuptools.dist:check_test_suite -eager_resources = setuptools.dist:assert_string_list -zip_safe = setuptools.dist:assert_bool -test_loader = setuptools.dist:check_importable -packages = setuptools.dist:check_packages -convert_2to3_doctests = setuptools.dist:assert_string_list -tests_require = setuptools.dist:check_requirements - -[setuptools.installation] -eggsecutable = setuptools.command.easy_install:bootstrap - diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/top_level.txt b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/top_level.txt deleted file mode 100644 index ef77c7c1..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/top_level.txt +++ /dev/null @@ -1,4 +0,0 @@ -easy_install -pkg_resources -setuptools -site diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/zip-safe b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/zip-safe deleted file mode 100644 index 8b137891..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/easy_install.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/easy_install.py deleted file mode 100755 index d87e9840..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/easy_install.py +++ /dev/null @@ -1,5 +0,0 @@ -"""Run the EasyInstall command""" - -if __name__ == '__main__': - from setuptools.command.easy_install import main - main() diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/pkg_resources.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/pkg_resources.py deleted file mode 100755 index 30dbc188..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/pkg_resources.py +++ /dev/null @@ -1,2693 +0,0 @@ -"""Package resource API --------------------- - -A resource is a logical file contained within a package, or a logical -subdirectory thereof. The package resource API expects resource names -to have their path parts separated with ``/``, *not* whatever the local -path separator is. Do not use os.path operations to manipulate resource -names being passed into the API. - -The package resource API is designed to work with normal filesystem packages, -.egg files, and unpacked .egg files. It can also work in a limited way with -.zip files and with custom PEP 302 loaders that support the ``get_data()`` -method. 
-""" - -import sys, os, zipimport, time, re, imp, types -from urlparse import urlparse, urlunparse - -try: - frozenset -except NameError: - from sets import ImmutableSet as frozenset - -# capture these to bypass sandboxing -from os import utime -try: - from os import mkdir, rename, unlink - WRITE_SUPPORT = True -except ImportError: - # no write support, probably under GAE - WRITE_SUPPORT = False - -from os import open as os_open -from os.path import isdir, split - -# This marker is used to simplify the process that checks is the -# setuptools package was installed by the Setuptools project -# or by the Distribute project, in case Setuptools creates -# a distribution with the same version. -# -# The bootstrapping script for instance, will check if this -# attribute is present to decide wether to reinstall the package -_distribute = True - -def _bypass_ensure_directory(name, mode=0777): - # Sandbox-bypassing version of ensure_directory() - if not WRITE_SUPPORT: - raise IOError('"os.mkdir" not supported on this platform.') - dirname, filename = split(name) - if dirname and filename and not isdir(dirname): - _bypass_ensure_directory(dirname) - mkdir(dirname, mode) - - - - - - - - -def get_supported_platform(): - """Return this platform's maximum compatible version. - - distutils.util.get_platform() normally reports the minimum version - of Mac OS X that would be required to *use* extensions produced by - distutils. But what we want when checking compatibility is to know the - version of Mac OS X that we are *running*. To allow usage of packages that - explicitly require a newer version of Mac OS X, we must also know the - current version of the OS. - - If this condition occurs for any other platform with a version in its - platform strings, this function should be extended accordingly. 
- """ - plat = get_build_platform(); m = macosVersionString.match(plat) - if m is not None and sys.platform == "darwin": - try: - plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) - except ValueError: - pass # not Mac OS X - return plat - - - - - - - - - - - - - - - - - - - - - -__all__ = [ - # Basic resource access and distribution/entry point discovery - 'require', 'run_script', 'get_provider', 'get_distribution', - 'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points', - 'resource_string', 'resource_stream', 'resource_filename', - 'resource_listdir', 'resource_exists', 'resource_isdir', - - # Environmental control - 'declare_namespace', 'working_set', 'add_activation_listener', - 'find_distributions', 'set_extraction_path', 'cleanup_resources', - 'get_default_cache', - - # Primary implementation classes - 'Environment', 'WorkingSet', 'ResourceManager', - 'Distribution', 'Requirement', 'EntryPoint', - - # Exceptions - 'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra', - 'ExtractionError', - - # Parsing functions and string utilities - 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', - 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', - 'safe_extra', 'to_filename', - - # filesystem utilities - 'ensure_directory', 'normalize_path', - - # Distribution "precedence" constants - 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', - - # "Provider" interfaces, implementations, and registration/lookup APIs - 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', - 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', - 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', - 'register_finder', 'register_namespace_handler', 'register_loader_type', - 'fixup_namespace_packages', 'get_importer', - - # Deprecated/backward compatibility only - 'run_main', 'AvailableDistributions', -] -class ResolutionError(Exception): - """Abstract base for dependency resolution errors""" - def __repr__(self): - return self.__class__.__name__+repr(self.args) - -class VersionConflict(ResolutionError): - """An already-installed version conflicts with the requested version""" - -class DistributionNotFound(ResolutionError): - """A requested distribution was not found""" - -class UnknownExtra(ResolutionError): - """Distribution doesn't have an "extra feature" of the given name""" -_provider_factories = {} - -PY_MAJOR = sys.version[:3] -EGG_DIST = 3 -BINARY_DIST = 2 -SOURCE_DIST = 1 -CHECKOUT_DIST = 0 -DEVELOP_DIST = -1 - -def register_loader_type(loader_type, provider_factory): - """Register `provider_factory` to make providers for `loader_type` - - `loader_type` is the type or class of a PEP 302 ``module.__loader__``, - and `provider_factory` is a function that, passed a *module* object, - returns an ``IResourceProvider`` for that module. 
- """ - _provider_factories[loader_type] = provider_factory - -def get_provider(moduleOrReq): - """Return an IResourceProvider for the named module or requirement""" - if isinstance(moduleOrReq,Requirement): - return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] - try: - module = sys.modules[moduleOrReq] - except KeyError: - __import__(moduleOrReq) - module = sys.modules[moduleOrReq] - loader = getattr(module, '__loader__', None) - return _find_adapter(_provider_factories, loader)(module) - -def _macosx_vers(_cache=[]): - if not _cache: - import platform - version = platform.mac_ver()[0] - # fallback for MacPorts - if version == '': - import plistlib - plist = '/System/Library/CoreServices/SystemVersion.plist' - if os.path.exists(plist): - if hasattr(plistlib, 'readPlist'): - plist_content = plistlib.readPlist(plist) - if 'ProductVersion' in plist_content: - version = plist_content['ProductVersion'] - - _cache.append(version.split('.')) - return _cache[0] - -def _macosx_arch(machine): - return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine) - -def get_build_platform(): - """Return this platform's string for platform-specific distributions - - XXX Currently this is the same as ``distutils.util.get_platform()``, but it - needs some hacks for Linux and Mac OS X. - """ - try: - from distutils.util import get_platform - except ImportError: - from sysconfig import get_platform - - plat = get_platform() - if sys.platform == "darwin" and not plat.startswith('macosx-'): - try: - version = _macosx_vers() - machine = os.uname()[4].replace(" ", "_") - return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]), - _macosx_arch(machine)) - except ValueError: - # if someone is running a non-Mac darwin system, this will fall - # through to the default implementation - pass - return plat - -macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") -darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") -get_platform = get_build_platform # XXX backward compat - -def compatible_platforms(provided,required): - """Can code for the `provided` platform run on the `required` platform? - - Returns true if either platform is ``None``, or the platforms are equal. - - XXX Needs compatibility checks for Linux and other unixy OSes. - """ - if provided is None or required is None or provided==required: - return True # easy case - - # Mac OS X special cases - reqMac = macosVersionString.match(required) - if reqMac: - provMac = macosVersionString.match(provided) - - # is this a Mac package? - if not provMac: - # this is backwards compatibility for packages built before - # setuptools 0.6. All packages built after this point will - # use the new macosx designation. - provDarwin = darwinVersionString.match(provided) - if provDarwin: - dversion = int(provDarwin.group(1)) - macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) - if dversion == 7 and macosversion >= "10.3" or \ - dversion == 8 and macosversion >= "10.4": - - #import warnings - #warnings.warn("Mac eggs should be rebuilt to " - # "use the macosx designation instead of darwin.", - # category=DeprecationWarning) - return True - return False # egg isn't macosx or legacy darwin - - # are they the same major version and machine type? - if provMac.group(1) != reqMac.group(1) or \ - provMac.group(3) != reqMac.group(3): - return False - - - - # is the required OS major update >= the provided one? 
- if int(provMac.group(2)) > int(reqMac.group(2)): - return False - - return True - - # XXX Linux and other platforms' special cases should go here - return False - - -def run_script(dist_spec, script_name): - """Locate distribution `dist_spec` and run its `script_name` script""" - ns = sys._getframe(1).f_globals - name = ns['__name__'] - ns.clear() - ns['__name__'] = name - require(dist_spec)[0].run_script(script_name, ns) - -run_main = run_script # backward compatibility - -def get_distribution(dist): - """Return a current distribution object for a Requirement or string""" - if isinstance(dist,basestring): dist = Requirement.parse(dist) - if isinstance(dist,Requirement): dist = get_provider(dist) - if not isinstance(dist,Distribution): - raise TypeError("Expected string, Requirement, or Distribution", dist) - return dist - -def load_entry_point(dist, group, name): - """Return `name` entry point of `group` for `dist` or raise ImportError""" - return get_distribution(dist).load_entry_point(group, name) - -def get_entry_map(dist, group=None): - """Return the entry point map for `group`, or the full entry map""" - return get_distribution(dist).get_entry_map(group) - -def get_entry_info(dist, group, name): - """Return the EntryPoint object for `group`+`name`, or ``None``""" - return get_distribution(dist).get_entry_info(group, name) - - -class IMetadataProvider: - - def has_metadata(name): - """Does the package's distribution contain the named metadata?""" - - def get_metadata(name): - """The named metadata resource as a string""" - - def get_metadata_lines(name): - """Yield named metadata resource as list of non-blank non-comment lines - - Leading and trailing whitespace is stripped from each line, and lines - with ``#`` as the first non-blank character are omitted.""" - - def metadata_isdir(name): - """Is the named metadata a directory? (like ``os.path.isdir()``)""" - - def metadata_listdir(name): - """List of metadata names in the directory (like ``os.listdir()``)""" - - def run_script(script_name, namespace): - """Execute the named script in the supplied namespace dictionary""" - - - - - - - - - - -class IResourceProvider(IMetadataProvider): - """An object that provides access to package resources""" - - def get_resource_filename(manager, resource_name): - """Return a true filesystem path for `resource_name` - - `manager` must be an ``IResourceManager``""" - - def get_resource_stream(manager, resource_name): - """Return a readable file-like object for `resource_name` - - `manager` must be an ``IResourceManager``""" - - def get_resource_string(manager, resource_name): - """Return a string containing the contents of `resource_name` - - `manager` must be an ``IResourceManager``""" - - def has_resource(resource_name): - """Does the package contain the named resource?""" - - def resource_isdir(resource_name): - """Is the named resource a directory? 
(like ``os.path.isdir()``)""" - - def resource_listdir(resource_name): - """List of resource names in the directory (like ``os.listdir()``)""" - - - - - - - - - - - - - - - -class WorkingSet(object): - """A collection of active distributions on sys.path (or a similar list)""" - - def __init__(self, entries=None): - """Create working set from list of path entries (default=sys.path)""" - self.entries = [] - self.entry_keys = {} - self.by_key = {} - self.callbacks = [] - - if entries is None: - entries = sys.path - - for entry in entries: - self.add_entry(entry) - - - def add_entry(self, entry): - """Add a path item to ``.entries``, finding any distributions on it - - ``find_distributions(entry,True)`` is used to find distributions - corresponding to the path entry, and they are added. `entry` is - always appended to ``.entries``, even if it is already present. - (This is because ``sys.path`` can contain the same value more than - once, and the ``.entries`` of the ``sys.path`` WorkingSet should always - equal ``sys.path``.) - """ - self.entry_keys.setdefault(entry, []) - self.entries.append(entry) - for dist in find_distributions(entry, True): - self.add(dist, entry, False) - - - def __contains__(self,dist): - """True if `dist` is the active distribution for its project""" - return self.by_key.get(dist.key) == dist - - - - - - def find(self, req): - """Find a distribution matching requirement `req` - - If there is an active distribution for the requested project, this - returns it as long as it meets the version requirement specified by - `req`. But, if there is an active distribution for the project and it - does *not* meet the `req` requirement, ``VersionConflict`` is raised. - If there is no active distribution for the requested project, ``None`` - is returned. - """ - dist = self.by_key.get(req.key) - if dist is not None and dist not in req: - raise VersionConflict(dist,req) # XXX add more info - else: - return dist - - def iter_entry_points(self, group, name=None): - """Yield entry point objects from `group` matching `name` - - If `name` is None, yields all entry points in `group` from all - distributions in the working set, otherwise only ones matching - both `group` and `name` are yielded (in distribution order). - """ - for dist in self: - entries = dist.get_entry_map(group) - if name is None: - for ep in entries.values(): - yield ep - elif name in entries: - yield entries[name] - - def run_script(self, requires, script_name): - """Locate distribution for `requires` and run `script_name` script""" - ns = sys._getframe(1).f_globals - name = ns['__name__'] - ns.clear() - ns['__name__'] = name - self.require(requires)[0].run_script(script_name, ns) - - - - def __iter__(self): - """Yield distributions for non-duplicate projects in the working set - - The yield order is the order in which the items' path entries were - added to the working set. - """ - seen = {} - for item in self.entries: - for key in self.entry_keys[item]: - if key not in seen: - seen[key]=1 - yield self.by_key[key] - - def add(self, dist, entry=None, insert=True): - """Add `dist` to working set, associated with `entry` - - If `entry` is unspecified, it defaults to the ``.location`` of `dist`. - On exit from this routine, `entry` is added to the end of the working - set's ``.entries`` (if it wasn't already present). - - `dist` is only added to the working set if it's for a project that - doesn't already have a distribution in the set. 
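The entry-point API documented above is what the deleted bin/ wrappers relied on. A short consumer-side sketch, reusing the distribution and script names from the bin/easy_install wrapper removed earlier in this diff (Python 2):

    import pkg_resources

    # Enumerate every console script advertised by installed distributions.
    for ep in pkg_resources.iter_entry_points('console_scripts'):
        print ep  # e.g. "easy_install = setuptools.command.easy_install:main"

    # Resolve a single entry point to its callable, as bin/easy_install did.
    main = pkg_resources.load_entry_point(
        'distribute==0.6.14', 'console_scripts', 'easy_install')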
If it's added, any - callbacks registered with the ``subscribe()`` method will be called. - """ - if insert: - dist.insert_on(self.entries, entry) - - if entry is None: - entry = dist.location - keys = self.entry_keys.setdefault(entry,[]) - keys2 = self.entry_keys.setdefault(dist.location,[]) - if dist.key in self.by_key: - return # ignore hidden distros - - self.by_key[dist.key] = dist - if dist.key not in keys: - keys.append(dist.key) - if dist.key not in keys2: - keys2.append(dist.key) - self._added_new(dist) - - def resolve(self, requirements, env=None, installer=None, replacement=True): - """List all distributions needed to (recursively) meet `requirements` - - `requirements` must be a sequence of ``Requirement`` objects. `env`, - if supplied, should be an ``Environment`` instance. If - not supplied, it defaults to all distributions available within any - entry or distribution in the working set. `installer`, if supplied, - will be invoked with each requirement that cannot be met by an - already-installed distribution; it should return a ``Distribution`` or - ``None``. - """ - - requirements = list(requirements)[::-1] # set up the stack - processed = {} # set of processed requirements - best = {} # key -> dist - to_activate = [] - - while requirements: - req = requirements.pop(0) # process dependencies breadth-first - if _override_setuptools(req) and replacement: - req = Requirement.parse('distribute') - - if req in processed: - # Ignore cyclic or redundant dependencies - continue - dist = best.get(req.key) - if dist is None: - # Find the best distribution and add it to the map - dist = self.by_key.get(req.key) - if dist is None: - if env is None: - env = Environment(self.entries) - dist = best[req.key] = env.best_match(req, self, installer) - if dist is None: - #msg = ("The '%s' distribution was not found on this " - # "system, and is required by this application.") - #raise DistributionNotFound(msg % req) - - # unfortunately, zc.buildout uses a str(err) - # to get the name of the distribution here.. - raise DistributionNotFound(req) - to_activate.append(dist) - if dist not in req: - # Oops, the "best" so far conflicts with a dependency - raise VersionConflict(dist,req) # XXX put more info here - requirements.extend(dist.requires(req.extras)[::-1]) - processed[req] = True - - return to_activate # return list of distros to activate - - def find_plugins(self, - plugin_env, full_env=None, installer=None, fallback=True - ): - """Find all activatable distributions in `plugin_env` - - Example usage:: - - distributions, errors = working_set.find_plugins( - Environment(plugin_dirlist) - ) - map(working_set.add, distributions) # add plugins+libs to sys.path - print 'Could not load', errors # display errors - - The `plugin_env` should be an ``Environment`` instance that contains - only distributions that are in the project's "plugin directory" or - directories. The `full_env`, if supplied, should be an ``Environment`` - contains all currently-available distributions. If `full_env` is not - supplied, one is created automatically from the ``WorkingSet`` this - method is called on, which will typically mean that every directory on - ``sys.path`` will be scanned for distributions. - - `installer` is a standard installer callback as used by the - ``resolve()`` method. The `fallback` flag indicates whether we should - attempt to resolve older versions of a plugin if the newest version - cannot be resolved. 
- - This method returns a 2-tuple: (`distributions`, `error_info`), where - `distributions` is a list of the distributions found in `plugin_env` - that were loadable, along with any other distributions that are needed - to resolve their dependencies. `error_info` is a dictionary mapping - unloadable plugin distributions to an exception instance describing the - error that occurred. Usually this will be a ``DistributionNotFound`` or - ``VersionConflict`` instance. - """ - - plugin_projects = list(plugin_env) - plugin_projects.sort() # scan project names in alphabetic order - - error_info = {} - distributions = {} - - if full_env is None: - env = Environment(self.entries) - env += plugin_env - else: - env = full_env + plugin_env - - shadow_set = self.__class__([]) - map(shadow_set.add, self) # put all our entries in shadow_set - - for project_name in plugin_projects: - - for dist in plugin_env[project_name]: - - req = [dist.as_requirement()] - - try: - resolvees = shadow_set.resolve(req, env, installer) - - except ResolutionError,v: - error_info[dist] = v # save error info - if fallback: - continue # try the next older version of project - else: - break # give up on this project, keep going - - else: - map(shadow_set.add, resolvees) - distributions.update(dict.fromkeys(resolvees)) - - # success, no need to try any more versions of this project - break - - distributions = list(distributions) - distributions.sort() - - return distributions, error_info - - - - - - def require(self, *requirements): - """Ensure that distributions matching `requirements` are activated - - `requirements` must be a string or a (possibly-nested) sequence - thereof, specifying the distributions and versions required. The - return value is a sequence of the distributions that needed to be - activated to fulfill the requirements; all relevant distributions are - included, even if they were already activated in this working set. - """ - - needed = self.resolve(parse_requirements(requirements)) - - for dist in needed: - self.add(dist) - - return needed - - - def subscribe(self, callback): - """Invoke `callback` for all distributions (including existing ones)""" - if callback in self.callbacks: - return - self.callbacks.append(callback) - for dist in self: - callback(dist) - - - def _added_new(self, dist): - for callback in self.callbacks: - callback(dist) - - - - - - - - - - - -class Environment(object): - """Searchable snapshot of distributions on a search path""" - - def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR): - """Snapshot distributions available on a search path - - Any distributions found on `search_path` are added to the environment. - `search_path` should be a sequence of ``sys.path`` items. If not - supplied, ``sys.path`` is used. - - `platform` is an optional string specifying the name of the platform - that platform-specific distributions must be compatible with. If - unspecified, it defaults to the current platform. `python` is an - optional string naming the desired version of Python (e.g. ``'2.4'``); - it defaults to the current version. - - You may explicitly set `platform` (and/or `python`) to ``None`` if you - wish to map *all* distributions, not just those compatible with the - running platform or Python version. - """ - self._distmap = {} - self._cache = {} - self.platform = platform - self.python = python - self.scan(search_path) - - def can_add(self, dist): - """Is distribution `dist` acceptable for this environment? 
- - The distribution must match the platform and python version - requirements specified when this environment was created, or False - is returned. - """ - return (self.python is None or dist.py_version is None - or dist.py_version==self.python) \ - and compatible_platforms(dist.platform,self.platform) - - def remove(self, dist): - """Remove `dist` from the environment""" - self._distmap[dist.key].remove(dist) - - def scan(self, search_path=None): - """Scan `search_path` for distributions usable in this environment - - Any distributions found are added to the environment. - `search_path` should be a sequence of ``sys.path`` items. If not - supplied, ``sys.path`` is used. Only distributions conforming to - the platform/python version defined at initialization are added. - """ - if search_path is None: - search_path = sys.path - - for item in search_path: - for dist in find_distributions(item): - self.add(dist) - - def __getitem__(self,project_name): - """Return a newest-to-oldest list of distributions for `project_name` - """ - try: - return self._cache[project_name] - except KeyError: - project_name = project_name.lower() - if project_name not in self._distmap: - return [] - - if project_name not in self._cache: - dists = self._cache[project_name] = self._distmap[project_name] - _sort_dists(dists) - - return self._cache[project_name] - - def add(self,dist): - """Add `dist` if we ``can_add()`` it and it isn't already added""" - if self.can_add(dist) and dist.has_version(): - dists = self._distmap.setdefault(dist.key,[]) - if dist not in dists: - dists.append(dist) - if dist.key in self._cache: - _sort_dists(self._cache[dist.key]) - - - def best_match(self, req, working_set, installer=None): - """Find distribution best matching `req` and usable on `working_set` - - This calls the ``find(req)`` method of the `working_set` to see if a - suitable distribution is already active. (This may raise - ``VersionConflict`` if an unsuitable version of the project is already - active in the specified `working_set`.) If a suitable distribution - isn't active, this method returns the newest distribution in the - environment that meets the ``Requirement`` in `req`. If no suitable - distribution is found, and `installer` is supplied, then the result of - calling the environment's ``obtain(req, installer)`` method will be - returned. - """ - dist = working_set.find(req) - if dist is not None: - return dist - for dist in self[req.key]: - if dist in req: - return dist - return self.obtain(req, installer) # try and download/install - - def obtain(self, requirement, installer=None): - """Obtain a distribution matching `requirement` (e.g. via download) - - Obtain a distro that matches requirement (e.g. via download). In the - base ``Environment`` class, this routine just returns - ``installer(requirement)``, unless `installer` is None, in which case - None is returned instead. 
This method is a hook that allows subclasses - to attempt other ways of obtaining a distribution before falling back - to the `installer` argument.""" - if installer is not None: - return installer(requirement) - - def __iter__(self): - """Yield the unique project names of the available distributions""" - for key in self._distmap.keys(): - if self[key]: yield key - - - - - def __iadd__(self, other): - """In-place addition of a distribution or environment""" - if isinstance(other,Distribution): - self.add(other) - elif isinstance(other,Environment): - for project in other: - for dist in other[project]: - self.add(dist) - else: - raise TypeError("Can't add %r to environment" % (other,)) - return self - - def __add__(self, other): - """Add an environment or distribution to an environment""" - new = self.__class__([], platform=None, python=None) - for env in self, other: - new += env - return new - - -AvailableDistributions = Environment # XXX backward compatibility - - -class ExtractionError(RuntimeError): - """An error occurred extracting a resource - - The following attributes are available from instances of this exception: - - manager - The resource manager that raised this exception - - cache_path - The base directory for resource extraction - - original_error - The exception instance that caused extraction to fail - """ - - - - -class ResourceManager: - """Manage resource extraction and packages""" - extraction_path = None - - def __init__(self): - self.cached_files = {} - - def resource_exists(self, package_or_requirement, resource_name): - """Does the named resource exist?""" - return get_provider(package_or_requirement).has_resource(resource_name) - - def resource_isdir(self, package_or_requirement, resource_name): - """Is the named resource an existing directory?""" - return get_provider(package_or_requirement).resource_isdir( - resource_name - ) - - def resource_filename(self, package_or_requirement, resource_name): - """Return a true filesystem path for specified resource""" - return get_provider(package_or_requirement).get_resource_filename( - self, resource_name - ) - - def resource_stream(self, package_or_requirement, resource_name): - """Return a readable file-like object for specified resource""" - return get_provider(package_or_requirement).get_resource_stream( - self, resource_name - ) - - def resource_string(self, package_or_requirement, resource_name): - """Return specified resource as a string""" - return get_provider(package_or_requirement).get_resource_string( - self, resource_name - ) - - def resource_listdir(self, package_or_requirement, resource_name): - """List the contents of the named resource directory""" - return get_provider(package_or_requirement).resource_listdir( - resource_name - ) - - def extraction_error(self): - """Give an error message for problems extracting file(s)""" - - old_exc = sys.exc_info()[1] - cache_path = self.extraction_path or get_default_cache() - - err = ExtractionError("""Can't extract file(s) to egg cache - -The following error occurred while trying to extract file(s) to the Python egg -cache: - - %s - -The Python egg cache directory is currently set to: - - %s - -Perhaps your account does not have write access to this directory? You can -change the cache directory by setting the PYTHON_EGG_CACHE environment -variable to point to an accessible directory. 
-""" % (old_exc, cache_path) - ) - err.manager = self - err.cache_path = cache_path - err.original_error = old_exc - raise err - - - - - - - - - - - - - - - - def get_cache_path(self, archive_name, names=()): - """Return absolute location in cache for `archive_name` and `names` - - The parent directory of the resulting path will be created if it does - not already exist. `archive_name` should be the base filename of the - enclosing egg (which may not be the name of the enclosing zipfile!), - including its ".egg" extension. `names`, if provided, should be a - sequence of path name parts "under" the egg's extraction location. - - This method should only be called by resource providers that need to - obtain an extraction location, and only for names they intend to - extract, as it tracks the generated names for possible cleanup later. - """ - extract_path = self.extraction_path or get_default_cache() - target_path = os.path.join(extract_path, archive_name+'-tmp', *names) - try: - _bypass_ensure_directory(target_path) - except: - self.extraction_error() - - self.cached_files[target_path] = 1 - return target_path - - - - - - - - - - - - - - - - - - - - def postprocess(self, tempname, filename): - """Perform any platform-specific postprocessing of `tempname` - - This is where Mac header rewrites should be done; other platforms don't - have anything special they should do. - - Resource providers should call this method ONLY after successfully - extracting a compressed resource. They must NOT call it on resources - that are already in the filesystem. - - `tempname` is the current (temporary) name of the file, and `filename` - is the name it will be renamed to by the caller after this routine - returns. - """ - - if os.name == 'posix': - # Make the resource executable - mode = ((os.stat(tempname).st_mode) | 0555) & 07777 - os.chmod(tempname, mode) - - - - - - - - - - - - - - - - - - - - - - - def set_extraction_path(self, path): - """Set the base path where resources will be extracted to, if needed. - - If you do not call this routine before any extractions take place, the - path defaults to the return value of ``get_default_cache()``. (Which - is based on the ``PYTHON_EGG_CACHE`` environment variable, with various - platform-specific fallbacks. See that routine's documentation for more - details.) - - Resources are extracted to subdirectories of this path based upon - information given by the ``IResourceProvider``. You may set this to a - temporary directory, but then you must call ``cleanup_resources()`` to - delete the extracted files when done. There is no guarantee that - ``cleanup_resources()`` will be able to remove all extracted files. - - (Note: you may not change the extraction path for a given resource - manager once resources have been extracted, unless you first call - ``cleanup_resources()``.) - """ - if self.cached_files: - raise ValueError( - "Can't change extraction path, files already extracted" - ) - - self.extraction_path = path - - def cleanup_resources(self, force=False): - """ - Delete all extracted resource files and directories, returning a list - of the file and directory names that could not be successfully removed. - This function does not have any concurrency protection, so it should - generally only be called when the extraction path is a temporary - directory exclusive to a single process. 
This method is not
-        automatically called; you must call it explicitly or register it as an
-        ``atexit`` function if you wish to ensure cleanup of a temporary
-        directory used for extractions.
-        """
-        # XXX
-
-
-
-def get_default_cache():
-    """Determine the default cache location
-
-    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
-    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
-    "Application Data" directory.  On all other systems, it's "~/.python-eggs".
-    """
-    try:
-        return os.environ['PYTHON_EGG_CACHE']
-    except KeyError:
-        pass
-
-    if os.name!='nt':
-        return os.path.expanduser('~/.python-eggs')
-
-    app_data = 'Application Data'   # XXX this may be locale-specific!
-    app_homes = [
-        (('APPDATA',), None),       # best option, should be locale-safe
-        (('USERPROFILE',), app_data),
-        (('HOMEDRIVE','HOMEPATH'), app_data),
-        (('HOMEPATH',), app_data),
-        (('HOME',), None),
-        (('WINDIR',), app_data),    # 95/98/ME
-    ]
-
-    for keys, subdir in app_homes:
-        dirname = ''
-        for key in keys:
-            if key in os.environ:
-                dirname = os.path.join(dirname, os.environ[key])
-            else:
-                break
-        else:
-            if subdir:
-                dirname = os.path.join(dirname,subdir)
-            return os.path.join(dirname, 'Python-Eggs')
-    else:
-        raise RuntimeError(
-            "Please set the PYTHON_EGG_CACHE environment variable"
-        )
-
-def safe_name(name):
-    """Convert an arbitrary string to a standard distribution name
-
-    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
-    """
-    return re.sub('[^A-Za-z0-9.]+', '-', name)
-
-
-def safe_version(version):
-    """Convert an arbitrary string to a standard version string
-
-    Spaces become dots, and all other non-alphanumeric characters become
-    dashes, with runs of multiple dashes condensed to a single dash.
-    """
-    version = version.replace(' ','.')
-    return re.sub('[^A-Za-z0-9.]+', '-', version)
-
-
-def safe_extra(extra):
-    """Convert an arbitrary string to a standard 'extra' name
-
-    Any runs of non-alphanumeric characters are replaced with a single '_',
-    and the result is always lowercased.
-    """
-    return re.sub('[^A-Za-z0-9.]+', '_', extra).lower()
-
-
-def to_filename(name):
-    """Convert a project or version name to its filename-escaped form
-
-    Any '-' characters are currently replaced with '_'.
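Since these four helpers are pure string transforms, their behavior is
easiest to pin down with examples; the expected values follow directly from
the regular expressions above::

    from pkg_resources import safe_name, safe_version, safe_extra, to_filename

    safe_name('The $$$ Tree')     # -> 'The-Tree'
    safe_version('1.0 beta 2')    # -> '1.0.beta.2'
    safe_extra('C++ support')     # -> 'c_support'
    to_filename('my-project')     # -> 'my_project'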
- """ - return name.replace('-','_') - - - - - - - - -class NullProvider: - """Try to implement resources and metadata for arbitrary PEP 302 loaders""" - - egg_name = None - egg_info = None - loader = None - - def __init__(self, module): - self.loader = getattr(module, '__loader__', None) - self.module_path = os.path.dirname(getattr(module, '__file__', '')) - - def get_resource_filename(self, manager, resource_name): - return self._fn(self.module_path, resource_name) - - def get_resource_stream(self, manager, resource_name): - return StringIO(self.get_resource_string(manager, resource_name)) - - def get_resource_string(self, manager, resource_name): - return self._get(self._fn(self.module_path, resource_name)) - - def has_resource(self, resource_name): - return self._has(self._fn(self.module_path, resource_name)) - - def has_metadata(self, name): - return self.egg_info and self._has(self._fn(self.egg_info,name)) - - if sys.version_info <= (3,): - def get_metadata(self, name): - if not self.egg_info: - return "" - return self._get(self._fn(self.egg_info,name)) - else: - def get_metadata(self, name): - if not self.egg_info: - return "" - return self._get(self._fn(self.egg_info,name)).decode("utf-8") - - def get_metadata_lines(self, name): - return yield_lines(self.get_metadata(name)) - - def resource_isdir(self,resource_name): - return self._isdir(self._fn(self.module_path, resource_name)) - - def metadata_isdir(self,name): - return self.egg_info and self._isdir(self._fn(self.egg_info,name)) - - - def resource_listdir(self,resource_name): - return self._listdir(self._fn(self.module_path,resource_name)) - - def metadata_listdir(self,name): - if self.egg_info: - return self._listdir(self._fn(self.egg_info,name)) - return [] - - def run_script(self,script_name,namespace): - script = 'scripts/'+script_name - if not self.has_metadata(script): - raise ResolutionError("No script named %r" % script_name) - script_text = self.get_metadata(script).replace('\r\n','\n') - script_text = script_text.replace('\r','\n') - script_filename = self._fn(self.egg_info,script) - namespace['__file__'] = script_filename - if os.path.exists(script_filename): - execfile(script_filename, namespace, namespace) - else: - from linecache import cache - cache[script_filename] = ( - len(script_text), 0, script_text.split('\n'), script_filename - ) - script_code = compile(script_text,script_filename,'exec') - exec script_code in namespace, namespace - - def _has(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _isdir(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _listdir(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _fn(self, base, resource_name): - if resource_name: - return os.path.join(base, *resource_name.split('/')) - return base - - def _get(self, path): - if hasattr(self.loader, 'get_data'): - return self.loader.get_data(path) - raise NotImplementedError( - "Can't perform this operation for loaders without 'get_data()'" - ) - -register_loader_type(object, NullProvider) - - -class EggProvider(NullProvider): - """Provider based on a virtual filesystem""" - - def __init__(self,module): - NullProvider.__init__(self,module) - self._setup_prefix() - - def _setup_prefix(self): - # we assume here that our metadata may be nested inside a "basket" - # of multiple eggs; that's why we use module_path instead of .archive - path 
= self.module_path - old = None - while path!=old: - if path.lower().endswith('.egg'): - self.egg_name = os.path.basename(path) - self.egg_info = os.path.join(path, 'EGG-INFO') - self.egg_root = path - break - old = path - path, base = os.path.split(path) - - - - - - -class DefaultProvider(EggProvider): - """Provides access to package resources in the filesystem""" - - def _has(self, path): - return os.path.exists(path) - - def _isdir(self,path): - return os.path.isdir(path) - - def _listdir(self,path): - return os.listdir(path) - - def get_resource_stream(self, manager, resource_name): - return open(self._fn(self.module_path, resource_name), 'rb') - - def _get(self, path): - stream = open(path, 'rb') - try: - return stream.read() - finally: - stream.close() - -register_loader_type(type(None), DefaultProvider) - - -class EmptyProvider(NullProvider): - """Provider that returns nothing for all requests""" - - _isdir = _has = lambda self,path: False - _get = lambda self,path: '' - _listdir = lambda self,path: [] - module_path = None - - def __init__(self): - pass - -empty_provider = EmptyProvider() - - - - -class ZipProvider(EggProvider): - """Resource support for zips and eggs""" - - eagers = None - - def __init__(self, module): - EggProvider.__init__(self,module) - self.zipinfo = zipimport._zip_directory_cache[self.loader.archive] - self.zip_pre = self.loader.archive+os.sep - - def _zipinfo_name(self, fspath): - # Convert a virtual filename (full path to file) into a zipfile subpath - # usable with the zipimport directory cache for our target archive - if fspath.startswith(self.zip_pre): - return fspath[len(self.zip_pre):] - raise AssertionError( - "%s is not a subpath of %s" % (fspath,self.zip_pre) - ) - - def _parts(self,zip_path): - # Convert a zipfile subpath into an egg-relative path part list - fspath = self.zip_pre+zip_path # pseudo-fs path - if fspath.startswith(self.egg_root+os.sep): - return fspath[len(self.egg_root)+1:].split(os.sep) - raise AssertionError( - "%s is not a subpath of %s" % (fspath,self.egg_root) - ) - - def get_resource_filename(self, manager, resource_name): - if not self.egg_name: - raise NotImplementedError( - "resource_filename() only supported for .egg, not .zip" - ) - # no need to lock for extraction, since we use temp names - zip_path = self._resource_to_zip(resource_name) - eagers = self._get_eager_resources() - if '/'.join(self._parts(zip_path)) in eagers: - for name in eagers: - self._extract_resource(manager, self._eager_to_zip(name)) - return self._extract_resource(manager, zip_path) - - def _extract_resource(self, manager, zip_path): - - if zip_path in self._index(): - for name in self._index()[zip_path]: - last = self._extract_resource( - manager, os.path.join(zip_path, name) - ) - return os.path.dirname(last) # return the extracted directory name - - zip_stat = self.zipinfo[zip_path] - t,d,size = zip_stat[5], zip_stat[6], zip_stat[3] - date_time = ( - (d>>9)+1980, (d>>5)&0xF, d&0x1F, # ymd - (t&0xFFFF)>>11, (t>>5)&0x3F, (t&0x1F) * 2, 0, 0, -1 # hms, etc. 
- ) - timestamp = time.mktime(date_time) - - try: - if not WRITE_SUPPORT: - raise IOError('"os.rename" and "os.unlink" are not supported ' - 'on this platform') - - real_path = manager.get_cache_path( - self.egg_name, self._parts(zip_path) - ) - - if os.path.isfile(real_path): - stat = os.stat(real_path) - if stat.st_size==size and stat.st_mtime==timestamp: - # size and stamp match, don't bother extracting - return real_path - - outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path)) - os.write(outf, self.loader.get_data(zip_path)) - os.close(outf) - utime(tmpnam, (timestamp,timestamp)) - manager.postprocess(tmpnam, real_path) - - try: - rename(tmpnam, real_path) - - except os.error: - if os.path.isfile(real_path): - stat = os.stat(real_path) - - if stat.st_size==size and stat.st_mtime==timestamp: - # size and stamp match, somebody did it just ahead of - # us, so we're done - return real_path - elif os.name=='nt': # Windows, del old file and retry - unlink(real_path) - rename(tmpnam, real_path) - return real_path - raise - - except os.error: - manager.extraction_error() # report a user-friendly error - - return real_path - - def _get_eager_resources(self): - if self.eagers is None: - eagers = [] - for name in ('native_libs.txt', 'eager_resources.txt'): - if self.has_metadata(name): - eagers.extend(self.get_metadata_lines(name)) - self.eagers = eagers - return self.eagers - - def _index(self): - try: - return self._dirindex - except AttributeError: - ind = {} - for path in self.zipinfo: - parts = path.split(os.sep) - while parts: - parent = os.sep.join(parts[:-1]) - if parent in ind: - ind[parent].append(parts[-1]) - break - else: - ind[parent] = [parts.pop()] - self._dirindex = ind - return ind - - def _has(self, fspath): - zip_path = self._zipinfo_name(fspath) - return zip_path in self.zipinfo or zip_path in self._index() - - def _isdir(self,fspath): - return self._zipinfo_name(fspath) in self._index() - - def _listdir(self,fspath): - return list(self._index().get(self._zipinfo_name(fspath), ())) - - def _eager_to_zip(self,resource_name): - return self._zipinfo_name(self._fn(self.egg_root,resource_name)) - - def _resource_to_zip(self,resource_name): - return self._zipinfo_name(self._fn(self.module_path,resource_name)) - -register_loader_type(zipimport.zipimporter, ZipProvider) - - - - - - - - - - - - - - - - - - - - - - - - -class FileMetadata(EmptyProvider): - """Metadata handler for standalone PKG-INFO files - - Usage:: - - metadata = FileMetadata("/path/to/PKG-INFO") - - This provider rejects all data and metadata requests except for PKG-INFO, - which is treated as existing, and will be the contents of the file at - the provided location. 
- """ - - def __init__(self,path): - self.path = path - - def has_metadata(self,name): - return name=='PKG-INFO' - - def get_metadata(self,name): - if name=='PKG-INFO': - f = open(self.path,'rU') - metadata = f.read() - f.close() - return metadata - raise KeyError("No metadata except PKG-INFO is available") - - def get_metadata_lines(self,name): - return yield_lines(self.get_metadata(name)) - - - - - - - - - - - - - - - - -class PathMetadata(DefaultProvider): - """Metadata provider for egg directories - - Usage:: - - # Development eggs: - - egg_info = "/path/to/PackageName.egg-info" - base_dir = os.path.dirname(egg_info) - metadata = PathMetadata(base_dir, egg_info) - dist_name = os.path.splitext(os.path.basename(egg_info))[0] - dist = Distribution(basedir,project_name=dist_name,metadata=metadata) - - # Unpacked egg directories: - - egg_path = "/path/to/PackageName-ver-pyver-etc.egg" - metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO')) - dist = Distribution.from_filename(egg_path, metadata=metadata) - """ - - def __init__(self, path, egg_info): - self.module_path = path - self.egg_info = egg_info - - -class EggMetadata(ZipProvider): - """Metadata provider for .egg files""" - - def __init__(self, importer): - """Create a metadata provider from a zipimporter""" - - self.zipinfo = zipimport._zip_directory_cache[importer.archive] - self.zip_pre = importer.archive+os.sep - self.loader = importer - if importer.prefix: - self.module_path = os.path.join(importer.archive, importer.prefix) - else: - self.module_path = importer.archive - self._setup_prefix() - - -class ImpWrapper: - """PEP 302 Importer that wraps Python's "normal" import algorithm""" - - def __init__(self, path=None): - self.path = path - - def find_module(self, fullname, path=None): - subname = fullname.split(".")[-1] - if subname != fullname and self.path is None: - return None - if self.path is None: - path = None - else: - path = [self.path] - try: - file, filename, etc = imp.find_module(subname, path) - except ImportError: - return None - return ImpLoader(file, filename, etc) - - -class ImpLoader: - """PEP 302 Loader that wraps Python's "normal" import algorithm""" - - def __init__(self, file, filename, etc): - self.file = file - self.filename = filename - self.etc = etc - - def load_module(self, fullname): - try: - mod = imp.load_module(fullname, self.file, self.filename, self.etc) - finally: - if self.file: self.file.close() - # Note: we don't set __loader__ because we want the module to look - # normal; i.e. this is just a wrapper for standard import machinery - return mod - - - - -def get_importer(path_item): - """Retrieve a PEP 302 "importer" for the given path item - - If there is no importer, this returns a wrapper around the builtin import - machinery. The returned importer is only cached if it was created by a - path hook. 
- """ - try: - importer = sys.path_importer_cache[path_item] - except KeyError: - for hook in sys.path_hooks: - try: - importer = hook(path_item) - except ImportError: - pass - else: - break - else: - importer = None - - sys.path_importer_cache.setdefault(path_item,importer) - if importer is None: - try: - importer = ImpWrapper(path_item) - except ImportError: - pass - return importer - -try: - from pkgutil import get_importer, ImpImporter -except ImportError: - pass # Python 2.3 or 2.4, use our own implementation -else: - ImpWrapper = ImpImporter # Python 2.5, use pkgutil's implementation - del ImpLoader, ImpImporter - - - - - - -_distribution_finders = {} - -def register_finder(importer_type, distribution_finder): - """Register `distribution_finder` to find distributions in sys.path items - - `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item - handler), and `distribution_finder` is a callable that, passed a path - item and the importer instance, yields ``Distribution`` instances found on - that path item. See ``pkg_resources.find_on_path`` for an example.""" - _distribution_finders[importer_type] = distribution_finder - - -def find_distributions(path_item, only=False): - """Yield distributions accessible via `path_item`""" - importer = get_importer(path_item) - finder = _find_adapter(_distribution_finders, importer) - return finder(importer, path_item, only) - -def find_in_zip(importer, path_item, only=False): - metadata = EggMetadata(importer) - if metadata.has_metadata('PKG-INFO'): - yield Distribution.from_filename(path_item, metadata=metadata) - if only: - return # don't yield nested distros - for subitem in metadata.resource_listdir('/'): - if subitem.endswith('.egg'): - subpath = os.path.join(path_item, subitem) - for dist in find_in_zip(zipimport.zipimporter(subpath), subpath): - yield dist - -register_finder(zipimport.zipimporter, find_in_zip) - -def StringIO(*args, **kw): - """Thunk to load the real StringIO on demand""" - global StringIO - try: - from cStringIO import StringIO - except ImportError: - from StringIO import StringIO - return StringIO(*args,**kw) - -def find_nothing(importer, path_item, only=False): - return () -register_finder(object,find_nothing) - -def find_on_path(importer, path_item, only=False): - """Yield distributions accessible on a sys.path directory""" - path_item = _normalize_cached(path_item) - - if os.path.isdir(path_item) and os.access(path_item, os.R_OK): - if path_item.lower().endswith('.egg'): - # unpacked egg - yield Distribution.from_filename( - path_item, metadata=PathMetadata( - path_item, os.path.join(path_item,'EGG-INFO') - ) - ) - else: - # scan for .egg and .egg-info in directory - for entry in os.listdir(path_item): - lower = entry.lower() - if lower.endswith('.egg-info'): - fullpath = os.path.join(path_item, entry) - if os.path.isdir(fullpath): - # egg-info directory, allow getting metadata - metadata = PathMetadata(path_item, fullpath) - else: - metadata = FileMetadata(fullpath) - yield Distribution.from_location( - path_item,entry,metadata,precedence=DEVELOP_DIST - ) - elif not only and lower.endswith('.egg'): - for dist in find_distributions(os.path.join(path_item, entry)): - yield dist - elif not only and lower.endswith('.egg-link'): - for line in open(os.path.join(path_item, entry)): - if not line.strip(): continue - for item in find_distributions(os.path.join(path_item,line.rstrip())): - yield item - break -register_finder(ImpWrapper,find_on_path) - -_namespace_handlers = {} -_namespace_packages = {} - 
-def register_namespace_handler(importer_type, namespace_handler): - """Register `namespace_handler` to declare namespace packages - - `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item - handler), and `namespace_handler` is a callable like this:: - - def namespace_handler(importer,path_entry,moduleName,module): - # return a path_entry to use for child packages - - Namespace handlers are only called if the importer object has already - agreed that it can handle the relevant path item, and they should only - return a subpath if the module __path__ does not already contain an - equivalent subpath. For an example namespace handler, see - ``pkg_resources.file_ns_handler``. - """ - _namespace_handlers[importer_type] = namespace_handler - -def _handle_ns(packageName, path_item): - """Ensure that named package includes a subpath of path_item (if needed)""" - importer = get_importer(path_item) - if importer is None: - return None - loader = importer.find_module(packageName) - if loader is None: - return None - module = sys.modules.get(packageName) - if module is None: - module = sys.modules[packageName] = types.ModuleType(packageName) - module.__path__ = []; _set_parent_ns(packageName) - elif not hasattr(module,'__path__'): - raise TypeError("Not a package:", packageName) - handler = _find_adapter(_namespace_handlers, importer) - subpath = handler(importer,path_item,packageName,module) - if subpath is not None: - path = module.__path__; path.append(subpath) - loader.load_module(packageName); module.__path__ = path - return subpath - -def declare_namespace(packageName): - """Declare that package 'packageName' is a namespace package""" - - imp.acquire_lock() - try: - if packageName in _namespace_packages: - return - - path, parent = sys.path, None - if '.' 
in packageName: - parent = '.'.join(packageName.split('.')[:-1]) - declare_namespace(parent) - __import__(parent) - try: - path = sys.modules[parent].__path__ - except AttributeError: - raise TypeError("Not a package:", parent) - - # Track what packages are namespaces, so when new path items are added, - # they can be updated - _namespace_packages.setdefault(parent,[]).append(packageName) - _namespace_packages.setdefault(packageName,[]) - - for path_item in path: - # Ensure all the parent's path items are reflected in the child, - # if they apply - _handle_ns(packageName, path_item) - - finally: - imp.release_lock() - -def fixup_namespace_packages(path_item, parent=None): - """Ensure that previously-declared namespace packages include path_item""" - imp.acquire_lock() - try: - for package in _namespace_packages.get(parent,()): - subpath = _handle_ns(package, path_item) - if subpath: fixup_namespace_packages(subpath,package) - finally: - imp.release_lock() - -def file_ns_handler(importer, path_item, packageName, module): - """Compute an ns-package subpath for a filesystem or zipfile importer""" - - subpath = os.path.join(path_item, packageName.split('.')[-1]) - normalized = _normalize_cached(subpath) - for item in module.__path__: - if _normalize_cached(item)==normalized: - break - else: - # Only return the path if it's not already there - return subpath - -register_namespace_handler(ImpWrapper,file_ns_handler) -register_namespace_handler(zipimport.zipimporter,file_ns_handler) - - -def null_ns_handler(importer, path_item, packageName, module): - return None - -register_namespace_handler(object,null_ns_handler) - - -def normalize_path(filename): - """Normalize a file/dir name for comparison purposes""" - return os.path.normcase(os.path.realpath(filename)) - -def _normalize_cached(filename,_cache={}): - try: - return _cache[filename] - except KeyError: - _cache[filename] = result = normalize_path(filename) - return result - -def _set_parent_ns(packageName): - parts = packageName.split('.') - name = parts.pop() - if parts: - parent = '.'.join(parts) - setattr(sys.modules[parent], name, sys.modules[packageName]) - - -def yield_lines(strs): - """Yield non-empty/non-comment lines of a ``basestring`` or sequence""" - if isinstance(strs,basestring): - for s in strs.splitlines(): - s = s.strip() - if s and not s.startswith('#'): # skip blank lines/comments - yield s - else: - for ss in strs: - for s in yield_lines(ss): - yield s - -LINE_END = re.compile(r"\s*(#.*)?$").match # whitespace and comment -CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match # line continuation -DISTRO = re.compile(r"\s*((\w|[-.])+)").match # Distribution or extra -VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match # ver. info -COMMA = re.compile(r"\s*,").match # comma between items -OBRACKET = re.compile(r"\s*\[").match -CBRACKET = re.compile(r"\s*\]").match -MODULE = re.compile(r"\w+(\.\w+)*$").match -EGG_NAME = re.compile( - r"(?P<name>[^-]+)" - r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? 
)?", - re.VERBOSE | re.IGNORECASE -).match - -component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE) -replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get - -def _parse_version_parts(s): - for part in component_re.split(s): - part = replace(part,part) - if not part or part=='.': - continue - if part[:1] in '0123456789': - yield part.zfill(8) # pad for numeric comparison - else: - yield '*'+part - - yield '*final' # ensure that alpha/beta/candidate are before final - -def parse_version(s): - """Convert a version string to a chronologically-sortable key - - This is a rough cross between distutils' StrictVersion and LooseVersion; - if you give it versions that would work with StrictVersion, then it behaves - the same; otherwise it acts like a slightly-smarter LooseVersion. It is - *possible* to create pathological version coding schemes that will fool - this parser, but they should be very rare in practice. - - The returned value will be a tuple of strings. Numeric portions of the - version are padded to 8 digits so they will compare numerically, but - without relying on how numbers compare relative to strings. Dots are - dropped, but dashes are retained. Trailing zeros between alpha segments - or dashes are suppressed, so that e.g. "2.4.0" is considered the same as - "2.4". Alphanumeric parts are lower-cased. - - The algorithm assumes that strings like "-" and any alpha string that - alphabetically follows "final" represents a "patch level". So, "2.4-1" - is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is - considered newer than "2.4-1", which in turn is newer than "2.4". - - Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that - come before "final" alphabetically) are assumed to be pre-release versions, - so that the version "2.4" is considered newer than "2.4a1". - - Finally, to handle miscellaneous cases, the strings "pre", "preview", and - "rc" are treated as if they were "c", i.e. as though they were release - candidates, and therefore are not as new as a version string that does not - contain them, and "dev" is replaced with an '@' so that it sorts lower than - than any other pre-release tag. 
- """ - parts = [] - for part in _parse_version_parts(s.lower()): - if part.startswith('*'): - if part<'*final': # remove '-' before a prerelease tag - while parts and parts[-1]=='*final-': parts.pop() - # remove trailing zeros from each series of numeric parts - while parts and parts[-1]=='00000000': - parts.pop() - parts.append(part) - return tuple(parts) - -class EntryPoint(object): - """Object representing an advertised importable object""" - - def __init__(self, name, module_name, attrs=(), extras=(), dist=None): - if not MODULE(module_name): - raise ValueError("Invalid module name", module_name) - self.name = name - self.module_name = module_name - self.attrs = tuple(attrs) - self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras - self.dist = dist - - def __str__(self): - s = "%s = %s" % (self.name, self.module_name) - if self.attrs: - s += ':' + '.'.join(self.attrs) - if self.extras: - s += ' [%s]' % ','.join(self.extras) - return s - - def __repr__(self): - return "EntryPoint.parse(%r)" % str(self) - - def load(self, require=True, env=None, installer=None): - if require: self.require(env, installer) - entry = __import__(self.module_name, globals(),globals(), ['__name__']) - for attr in self.attrs: - try: - entry = getattr(entry,attr) - except AttributeError: - raise ImportError("%r has no %r attribute" % (entry,attr)) - return entry - - def require(self, env=None, installer=None): - if self.extras and not self.dist: - raise UnknownExtra("Can't require() without a distribution", self) - map(working_set.add, - working_set.resolve(self.dist.requires(self.extras),env,installer)) - - - - #@classmethod - def parse(cls, src, dist=None): - """Parse a single entry point from string `src` - - Entry point syntax follows the form:: - - name = some.module:some.attr [extra1,extra2] - - The entry name and module name are required, but the ``:attrs`` and - ``[extras]`` parts are optional - """ - try: - attrs = extras = () - name,value = src.split('=',1) - if '[' in value: - value,extras = value.split('[',1) - req = Requirement.parse("x["+extras) - if req.specs: raise ValueError - extras = req.extras - if ':' in value: - value,attrs = value.split(':',1) - if not MODULE(attrs.rstrip()): - raise ValueError - attrs = attrs.rstrip().split('.') - except ValueError: - raise ValueError( - "EntryPoint must be in 'name=module:attrs [extras]' format", - src - ) - else: - return cls(name.strip(), value.strip(), attrs, extras, dist) - - parse = classmethod(parse) - - - - - - - - - #@classmethod - def parse_group(cls, group, lines, dist=None): - """Parse an entry point group""" - if not MODULE(group): - raise ValueError("Invalid group name", group) - this = {} - for line in yield_lines(lines): - ep = cls.parse(line, dist) - if ep.name in this: - raise ValueError("Duplicate entry point", group, ep.name) - this[ep.name]=ep - return this - - parse_group = classmethod(parse_group) - - #@classmethod - def parse_map(cls, data, dist=None): - """Parse a map of entry point groups""" - if isinstance(data,dict): - data = data.items() - else: - data = split_sections(data) - maps = {} - for group, lines in data: - if group is None: - if not lines: - continue - raise ValueError("Entry points must be listed in groups") - group = group.strip() - if group in maps: - raise ValueError("Duplicate group name", group) - maps[group] = cls.parse_group(group, lines, dist) - return maps - - parse_map = classmethod(parse_map) - - -def _remove_md5_fragment(location): - if not location: - return '' - parsed = 
urlparse(location) - if parsed[-1].startswith('md5='): - return urlunparse(parsed[:-1] + ('',)) - return location - - -class Distribution(object): - """Wrap an actual or potential sys.path entry w/metadata""" - def __init__(self, - location=None, metadata=None, project_name=None, version=None, - py_version=PY_MAJOR, platform=None, precedence = EGG_DIST - ): - self.project_name = safe_name(project_name or 'Unknown') - if version is not None: - self._version = safe_version(version) - self.py_version = py_version - self.platform = platform - self.location = location - self.precedence = precedence - self._provider = metadata or empty_provider - - #@classmethod - def from_location(cls,location,basename,metadata=None,**kw): - project_name, version, py_version, platform = [None]*4 - basename, ext = os.path.splitext(basename) - if ext.lower() in (".egg",".egg-info"): - match = EGG_NAME(basename) - if match: - project_name, version, py_version, platform = match.group( - 'name','ver','pyver','plat' - ) - return cls( - location, metadata, project_name=project_name, version=version, - py_version=py_version, platform=platform, **kw - ) - from_location = classmethod(from_location) - - - hashcmp = property( - lambda self: ( - getattr(self,'parsed_version',()), - self.precedence, - self.key, - _remove_md5_fragment(self.location), - self.py_version, - self.platform - ) - ) - def __hash__(self): return hash(self.hashcmp) - def __lt__(self, other): - return self.hashcmp < other.hashcmp - def __le__(self, other): - return self.hashcmp <= other.hashcmp - def __gt__(self, other): - return self.hashcmp > other.hashcmp - def __ge__(self, other): - return self.hashcmp >= other.hashcmp - def __eq__(self, other): - if not isinstance(other, self.__class__): - # It's not a Distribution, so they are not equal - return False - return self.hashcmp == other.hashcmp - def __ne__(self, other): - return not self == other - - # These properties have to be lazy so that we don't have to load any - # metadata until/unless it's actually needed. 
(i.e., some distributions - # may not know their name or version without loading PKG-INFO) - - #@property - def key(self): - try: - return self._key - except AttributeError: - self._key = key = self.project_name.lower() - return key - key = property(key) - - #@property - def parsed_version(self): - try: - return self._parsed_version - except AttributeError: - self._parsed_version = pv = parse_version(self.version) - return pv - - parsed_version = property(parsed_version) - - #@property - def version(self): - try: - return self._version - except AttributeError: - for line in self._get_metadata('PKG-INFO'): - if line.lower().startswith('version:'): - self._version = safe_version(line.split(':',1)[1].strip()) - return self._version - else: - raise ValueError( - "Missing 'Version:' header and/or PKG-INFO file", self - ) - version = property(version) - - - - - #@property - def _dep_map(self): - try: - return self.__dep_map - except AttributeError: - dm = self.__dep_map = {None: []} - for name in 'requires.txt', 'depends.txt': - for extra,reqs in split_sections(self._get_metadata(name)): - if extra: extra = safe_extra(extra) - dm.setdefault(extra,[]).extend(parse_requirements(reqs)) - return dm - _dep_map = property(_dep_map) - - def requires(self,extras=()): - """List of Requirements needed for this distro if `extras` are used""" - dm = self._dep_map - deps = [] - deps.extend(dm.get(None,())) - for ext in extras: - try: - deps.extend(dm[safe_extra(ext)]) - except KeyError: - raise UnknownExtra( - "%s has no such extra feature %r" % (self, ext) - ) - return deps - - def _get_metadata(self,name): - if self.has_metadata(name): - for line in self.get_metadata_lines(name): - yield line - - def activate(self,path=None): - """Ensure distribution is importable on `path` (default=sys.path)""" - if path is None: path = sys.path - self.insert_on(path) - if path is sys.path: - fixup_namespace_packages(self.location) - map(declare_namespace, self._get_metadata('namespace_packages.txt')) - - - def egg_name(self): - """Return what this distribution's standard .egg filename should be""" - filename = "%s-%s-py%s" % ( - to_filename(self.project_name), to_filename(self.version), - self.py_version or PY_MAJOR - ) - - if self.platform: - filename += '-'+self.platform - return filename - - def __repr__(self): - if self.location: - return "%s (%s)" % (self,self.location) - else: - return str(self) - - def __str__(self): - try: version = getattr(self,'version',None) - except ValueError: version = None - version = version or "[unknown version]" - return "%s %s" % (self.project_name,version) - - def __getattr__(self,attr): - """Delegate all unrecognized public attributes to .metadata provider""" - if attr.startswith('_'): - raise AttributeError,attr - return getattr(self._provider, attr) - - #@classmethod - def from_filename(cls,filename,metadata=None, **kw): - return cls.from_location( - _normalize_cached(filename), os.path.basename(filename), metadata, - **kw - ) - from_filename = classmethod(from_filename) - - def as_requirement(self): - """Return a ``Requirement`` that matches this distribution exactly""" - return Requirement.parse('%s==%s' % (self.project_name, self.version)) - - def load_entry_point(self, group, name): - """Return the `name` entry point of `group` or raise ImportError""" - ep = self.get_entry_info(group,name) - if ep is None: - raise ImportError("Entry point %r not found" % ((group,name),)) - return ep.load() - - def get_entry_map(self, group=None): - """Return the entry point map for `group`, or 
the full entry map""" - try: - ep_map = self._ep_map - except AttributeError: - ep_map = self._ep_map = EntryPoint.parse_map( - self._get_metadata('entry_points.txt'), self - ) - if group is not None: - return ep_map.get(group,{}) - return ep_map - - def get_entry_info(self, group, name): - """Return the EntryPoint object for `group`+`name`, or ``None``""" - return self.get_entry_map(group).get(name) - - - - - - - - - - - - - - - - - - - - def insert_on(self, path, loc = None): - """Insert self.location in path before its nearest parent directory""" - - loc = loc or self.location - - if self.project_name == 'setuptools': - try: - version = self.version - except ValueError: - version = '' - if '0.7' in version: - raise ValueError( - "A 0.7-series setuptools cannot be installed " - "with distribute. Found one at %s" % str(self.location)) - - if not loc: - return - - if path is sys.path: - self.check_version_conflict() - - nloc = _normalize_cached(loc) - bdir = os.path.dirname(nloc) - npath= map(_normalize_cached, path) - - bp = None - for p, item in enumerate(npath): - if item==nloc: - break - elif item==bdir and self.precedence==EGG_DIST: - # if it's an .egg, give it precedence over its directory - path.insert(p, loc) - npath.insert(p, nloc) - break - else: - path.append(loc) - return - - # p is the spot where we found or inserted loc; now remove duplicates - while 1: - try: - np = npath.index(nloc, p+1) - except ValueError: - break - else: - del npath[np], path[np] - p = np # ha! - - return - - - - def check_version_conflict(self): - if self.key=='distribute': - return # ignore the inevitable setuptools self-conflicts :( - - nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) - loc = normalize_path(self.location) - for modname in self._get_metadata('top_level.txt'): - if (modname not in sys.modules or modname in nsp - or modname in _namespace_packages - ): - continue - if modname in ('pkg_resources', 'setuptools', 'site'): - continue - fn = getattr(sys.modules[modname], '__file__', None) - if fn and (normalize_path(fn).startswith(loc) or - fn.startswith(self.location)): - continue - issue_warning( - "Module %s was already imported from %s, but %s is being added" - " to sys.path" % (modname, fn, self.location), - ) - - def has_version(self): - try: - self.version - except ValueError: - issue_warning("Unbuilt egg for "+repr(self)) - return False - return True - - def clone(self,**kw): - """Copy this distribution, substituting in any changed keyword args""" - for attr in ( - 'project_name', 'version', 'py_version', 'platform', 'location', - 'precedence' - ): - kw.setdefault(attr, getattr(self,attr,None)) - kw.setdefault('metadata', self._provider) - return self.__class__(**kw) - - - - - #@property - def extras(self): - return [dep for dep in self._dep_map if dep] - extras = property(extras) - - -def issue_warning(*args,**kw): - level = 1 - g = globals() - try: - # find the first stack frame that is *not* code in - # the pkg_resources module, to use for the warning - while sys._getframe(level).f_globals is g: - level += 1 - except ValueError: - pass - from warnings import warn - warn(stacklevel = level+1, *args, **kw) - - - - - - - - - - - - - - - - - - - - - - - -def parse_requirements(strs): - """Yield ``Requirement`` objects for each specification in `strs` - - `strs` must be an instance of ``basestring``, or a (possibly-nested) - iterable thereof. 
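Concretely, with a throwaway project name (the attribute values follow from
the grammar above, and ``Requirement.__contains__`` accepts plain version
strings)::

    from pkg_resources import parse_requirements

    req = list(parse_requirements('FooProject[bar]>=1.2,!=1.4'))[0]
    req.key        # 'fooproject'
    req.extras     # ('bar',)
    req.specs      # [('>=', '1.2'), ('!=', '1.4')]
    '1.3' in req   # True
    '1.4' in req   # False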
- """ - # create a steppable iterator, so we can handle \-continuations - lines = iter(yield_lines(strs)) - - def scan_list(ITEM,TERMINATOR,line,p,groups,item_name): - - items = [] - - while not TERMINATOR(line,p): - if CONTINUE(line,p): - try: - line = lines.next(); p = 0 - except StopIteration: - raise ValueError( - "\\ must not appear on the last nonblank line" - ) - - match = ITEM(line,p) - if not match: - raise ValueError("Expected "+item_name+" in",line,"at",line[p:]) - - items.append(match.group(*groups)) - p = match.end() - - match = COMMA(line,p) - if match: - p = match.end() # skip the comma - elif not TERMINATOR(line,p): - raise ValueError( - "Expected ',' or end-of-list in",line,"at",line[p:] - ) - - match = TERMINATOR(line,p) - if match: p = match.end() # skip the terminator, if any - return line, p, items - - for line in lines: - match = DISTRO(line) - if not match: - raise ValueError("Missing distribution spec", line) - project_name = match.group(1) - p = match.end() - extras = [] - - match = OBRACKET(line,p) - if match: - p = match.end() - line, p, extras = scan_list( - DISTRO, CBRACKET, line, p, (1,), "'extra' name" - ) - - line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec") - specs = [(op,safe_version(val)) for op,val in specs] - yield Requirement(project_name, specs, extras) - - -def _sort_dists(dists): - tmp = [(dist.hashcmp,dist) for dist in dists] - tmp.sort() - dists[::-1] = [d for hc,d in tmp] - - - - - - - - - - - - - - - - - -class Requirement: - def __init__(self, project_name, specs, extras): - """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" - self.unsafe_name, project_name = project_name, safe_name(project_name) - self.project_name, self.key = project_name, project_name.lower() - index = [(parse_version(v),state_machine[op],op,v) for op,v in specs] - index.sort() - self.specs = [(op,ver) for parsed,trans,op,ver in index] - self.index, self.extras = index, tuple(map(safe_extra,extras)) - self.hashCmp = ( - self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]), - frozenset(self.extras) - ) - self.__hash = hash(self.hashCmp) - - def __str__(self): - specs = ','.join([''.join(s) for s in self.specs]) - extras = ','.join(self.extras) - if extras: extras = '[%s]' % extras - return '%s%s%s' % (self.project_name, extras, specs) - - def __eq__(self,other): - return isinstance(other,Requirement) and self.hashCmp==other.hashCmp - - def __contains__(self,item): - if isinstance(item,Distribution): - if item.key <> self.key: return False - if self.index: item = item.parsed_version # only get if we need it - elif isinstance(item,basestring): - item = parse_version(item) - last = None - compare = lambda a, b: (a > b) - (a < b) # -1, 0, 1 - for parsed,trans,op,ver in self.index: - action = trans[compare(item,parsed)] # Indexing: 0, 1, -1 - if action=='F': return False - elif action=='T': return True - elif action=='+': last = True - elif action=='-' or last is None: last = False - if last is None: last = True # no rules encountered - return last - - - def __hash__(self): - return self.__hash - - def __repr__(self): return "Requirement.parse(%r)" % str(self) - - #@staticmethod - def parse(s, replacement=True): - reqs = list(parse_requirements(s)) - if reqs: - if len(reqs) == 1: - founded_req = reqs[0] - # if asked for setuptools distribution - # and if distribute is installed, we want to give - # distribute instead - if _override_setuptools(founded_req) and replacement: - distribute = list(parse_requirements('distribute')) - if 
len(distribute) == 1: - return distribute[0] - return founded_req - else: - return founded_req - - raise ValueError("Expected only one requirement", s) - raise ValueError("No requirements found", s) - - parse = staticmethod(parse) - -state_machine = { - # =>< - '<' : '--T', - '<=': 'T-T', - '>' : 'F+F', - '>=': 'T+F', - '==': 'T..', - '!=': 'F++', -} - - -def _override_setuptools(req): - """Return True when distribute wants to override a setuptools dependency. - - We want to override when the requirement is setuptools and the version is - a variant of 0.6. - - """ - if req.project_name == 'setuptools': - if not len(req.specs): - # Just setuptools: ok - return True - for comparator, version in req.specs: - if comparator in ['==', '>=', '>']: - if '0.7' in version: - # We want some setuptools not from the 0.6 series. - return False - return True - return False - - -def _get_mro(cls): - """Get an mro for a type or classic class""" - if not isinstance(cls,type): - class cls(cls,object): pass - return cls.__mro__[1:] - return cls.__mro__ - -def _find_adapter(registry, ob): - """Return an adapter factory for `ob` from `registry`""" - for t in _get_mro(getattr(ob, '__class__', type(ob))): - if t in registry: - return registry[t] - - -def ensure_directory(path): - """Ensure that the parent directory of `path` exists""" - dirname = os.path.dirname(path) - if not os.path.isdir(dirname): - os.makedirs(dirname) - -def split_sections(s): - """Split a string or iterable thereof into (section,content) pairs - - Each ``section`` is a stripped version of the section header ("[section]") - and each ``content`` is a list of stripped lines excluding blank lines and - comment-only lines. If there are any such lines before the first section - header, they're returned in a first ``section`` of ``None``. - """ - section = None - content = [] - for line in yield_lines(s): - if line.startswith("["): - if line.endswith("]"): - if section or content: - yield section, content - section = line[1:-1].strip() - content = [] - else: - raise ValueError("Invalid section heading", line) - else: - content.append(line) - - # wrap up last segment - yield section, content - -def _mkstemp(*args,**kw): - from tempfile import mkstemp - old_open = os.open - try: - os.open = os_open # temporarily bypass sandboxing - return mkstemp(*args,**kw) - finally: - os.open = old_open # and then put it back - - -# Set up global resource manager -_manager = ResourceManager() -def _initialize(g): - for name in dir(_manager): - if not name.startswith('_'): - g[name] = getattr(_manager, name) -_initialize(globals()) - -# Prepare the master working set and make the ``require()`` API available -working_set = WorkingSet() -try: - # Does the main program list any requirements? 
-    from __main__ import __requires__
-except ImportError:
-    pass # No: just use the default working set based on sys.path
-else:
-    # Yes: ensure the requirements are met, by prefixing sys.path if necessary
-    try:
-        working_set.require(__requires__)
-    except VersionConflict: # try it without defaults already on sys.path
-        working_set = WorkingSet([]) # by starting with an empty path
-        for dist in working_set.resolve(
-            parse_requirements(__requires__), Environment()
-        ):
-            working_set.add(dist)
-        for entry in sys.path: # add any missing entries from sys.path
-            if entry not in working_set.entries:
-                working_set.add_entry(entry)
-        sys.path[:] = working_set.entries # then copy back to sys.path
-
-require = working_set.require
-iter_entry_points = working_set.iter_entry_points
-add_activation_listener = working_set.subscribe
-run_script = working_set.run_script
-run_main = run_script # backward compatibility
-# Activate all distributions already on sys.path, and ensure that
-# all distributions added to the working set in the future (e.g. by
-# calling ``require()``) will get activated as well.
-add_activation_listener(lambda dist: dist.activate())
-working_set.entries=[]; map(working_set.add_entry,sys.path) # match order
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/__init__.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/__init__.py
deleted file mode 100755
index 9de373f9..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/__init__.py
+++ /dev/null
@@ -1,104 +0,0 @@
-"""Extensions to the 'distutils' for large or complex distributions"""
-from setuptools.extension import Extension, Library
-from setuptools.dist import Distribution, Feature, _get_unpatched
-import distutils.core, setuptools.command
-from setuptools.depends import Require
-from distutils.core import Command as _Command
-from distutils.util import convert_path
-import os
-import sys
-
-__version__ = '0.6'
-__all__ = [
-    'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require',
-    'find_packages'
-]
-
-# This marker is used to simplify the process that checks whether the
-# setuptools package was installed by the Setuptools project
-# or by the Distribute project, in case Setuptools creates
-# a distribution with the same version.
-#
-# The distribute_setup script for instance, will check if this
-# attribute is present to decide whether to reinstall the package
-# or not.
-_distribute = True
-
-bootstrap_install_from = None
-
-# If we run 2to3 on .py files, should we also convert docstrings?
-# Default: yes; assume that we can detect doctests reliably
-run_2to3_on_doctests = True
-# Standard package names for fixer packages
-lib2to3_fixer_packages = ['lib2to3.fixes']
-
-def find_packages(where='.', exclude=()):
-    """Return a list of all Python packages found within directory 'where'
-
-    'where' should be supplied as a "cross-platform" (i.e. URL-style) path; it
-    will be converted to the appropriate local path syntax. 'exclude' is a
-    sequence of package names to exclude; '*' can be used as a wildcard in the
-    names, such that 'foo.*' will exclude all subpackages of 'foo' (but not
-    'foo' itself).
-    """
-    out = []
-    stack=[(convert_path(where), '')]
-    while stack:
-        where,prefix = stack.pop(0)
-        for name in os.listdir(where):
-            fn = os.path.join(where,name)
-            if ('.'
not in name and os.path.isdir(fn) and - os.path.isfile(os.path.join(fn,'__init__.py')) - ): - out.append(prefix+name); stack.append((fn,prefix+name+'.')) - for pat in list(exclude)+['ez_setup', 'distribute_setup']: - from fnmatch import fnmatchcase - out = [item for item in out if not fnmatchcase(item,pat)] - return out - -setup = distutils.core.setup - -_Command = _get_unpatched(_Command) - -class Command(_Command): - __doc__ = _Command.__doc__ - - command_consumes_arguments = False - - def __init__(self, dist, **kw): - # Add support for keyword arguments - _Command.__init__(self,dist) - for k,v in kw.items(): - setattr(self,k,v) - - def reinitialize_command(self, command, reinit_subcommands=0, **kw): - cmd = _Command.reinitialize_command(self, command, reinit_subcommands) - for k,v in kw.items(): - setattr(cmd,k,v) # update command with keywords - return cmd - -import distutils.core -distutils.core.Command = Command # we can't patch distutils.cmd, alas - -def findall(dir = os.curdir): - """Find all files under 'dir' and return the list of full filenames - (relative to 'dir'). - """ - all_files = [] - for base, dirs, files in os.walk(dir): - if base==os.curdir or base.startswith(os.curdir+os.sep): - base = base[2:] - if base: - files = [os.path.join(base, f) for f in files] - all_files.extend(filter(os.path.isfile, files)) - return all_files - -import distutils.filelist -distutils.filelist.findall = findall # fix findall bug in distutils. - -# sys.dont_write_bytecode was introduced in Python 2.6. -if ((hasattr(sys, "dont_write_bytecode") and sys.dont_write_bytecode) or - (not hasattr(sys, "dont_write_bytecode") and os.environ.get("PYTHONDONTWRITEBYTECODE"))): - _dont_write_bytecode = True -else: - _dont_write_bytecode = False diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/archive_util.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/archive_util.py deleted file mode 100755 index ab786f3d..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/archive_util.py +++ /dev/null @@ -1,208 +0,0 @@ -"""Utilities for extracting common archive formats""" - - -__all__ = [ - "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter", - "UnrecognizedFormat", "extraction_drivers", "unpack_directory", -] - -import zipfile, tarfile, os, shutil -from pkg_resources import ensure_directory -from distutils.errors import DistutilsError - -class UnrecognizedFormat(DistutilsError): - """Couldn't recognize the archive type""" - -def default_filter(src,dst): - """The default progress/filter callback; returns True for all files""" - return dst - - - - - - - - - - - - - - - - - - - - - - - -def unpack_archive(filename, extract_dir, progress_filter=default_filter, - drivers=None -): - """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat`` - - `progress_filter` is a function taking two arguments: a source path - internal to the archive ('/'-separated), and a filesystem path where it - will be extracted. The callback must return the desired extract path - (which may be the same as the one passed in), or else ``None`` to skip - that file or directory. The callback can thus be used to report on the - progress of the extraction, as well as to filter the items extracted or - alter their extraction paths. 
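A filter that both reports progress and prunes part of an archive might look
like this; the archive and target paths are placeholders::

    from setuptools.archive_util import unpack_archive

    def skip_docs(src, dst):
        if src.startswith('docs/'):
            return None            # omit this entry entirely
        print('extracting %s' % src)
        return dst                 # extract to the proposed location

    unpack_archive('Example-1.0.egg', '/tmp/example', progress_filter=skip_docs)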
- - `drivers`, if supplied, must be a non-empty sequence of functions with the - same signature as this function (minus the `drivers` argument), that raise - ``UnrecognizedFormat`` if they do not support extracting the designated - archive type. The `drivers` are tried in sequence until one is found that - does not raise an error, or until all are exhausted (in which case - ``UnrecognizedFormat`` is raised). If you do not supply a sequence of - drivers, the module's ``extraction_drivers`` constant will be used, which - means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that - order. - """ - for driver in drivers or extraction_drivers: - try: - driver(filename, extract_dir, progress_filter) - except UnrecognizedFormat: - continue - else: - return - else: - raise UnrecognizedFormat( - "Not a recognized archive type: %s" % filename - ) - - - - - - - -def unpack_directory(filename, extract_dir, progress_filter=default_filter): - """"Unpack" a directory, using the same interface as for archives - - Raises ``UnrecognizedFormat`` if `filename` is not a directory - """ - if not os.path.isdir(filename): - raise UnrecognizedFormat("%s is not a directory" % (filename,)) - - paths = {filename:('',extract_dir)} - for base, dirs, files in os.walk(filename): - src,dst = paths[base] - for d in dirs: - paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d) - for f in files: - name = src+f - target = os.path.join(dst,f) - target = progress_filter(src+f, target) - if not target: - continue # skip non-files - ensure_directory(target) - f = os.path.join(base,f) - shutil.copyfile(f, target) - shutil.copystat(f, target) - - - - - - - - - - - - - - - - - - -def unpack_zipfile(filename, extract_dir, progress_filter=default_filter): - """Unpack zip `filename` to `extract_dir` - - Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined - by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation - of the `progress_filter` argument. - """ - - if not zipfile.is_zipfile(filename): - raise UnrecognizedFormat("%s is not a zip file" % (filename,)) - - z = zipfile.ZipFile(filename) - try: - for info in z.infolist(): - name = info.filename - - # don't extract absolute paths or ones with .. in them - if name.startswith('/') or '..' in name: - continue - - target = os.path.join(extract_dir, *name.split('/')) - target = progress_filter(name, target) - if not target: - continue - if name.endswith('/'): - # directory - ensure_directory(target) - else: - # file - ensure_directory(target) - data = z.read(info.filename) - f = open(target,'wb') - try: - f.write(data) - finally: - f.close() - del data - finally: - z.close() - - -def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): - """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` - - Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined - by ``tarfile.open()``). See ``unpack_archive()`` for an explanation - of the `progress_filter` argument. - """ - - try: - tarobj = tarfile.open(filename) - except tarfile.TarError: - raise UnrecognizedFormat( - "%s is not a compressed or uncompressed tar file" % (filename,) - ) - - try: - tarobj.chown = lambda *args: None # don't do any chowning! - for member in tarobj: - if member.isfile() or member.isdir(): - name = member.name - # don't extract absolute paths or ones with .. in them - if not name.startswith('/') and '..' 
not in name: - dst = os.path.join(extract_dir, *name.split('/')) - dst = progress_filter(name, dst) - if dst: - if dst.endswith(os.sep): - dst = dst[:-1] - try: - tarobj._extract_member(member,dst) # XXX Ugh - except tarfile.ExtractError: - pass # chown/chmod/mkfifo/mknode/makedev failed - return True - finally: - tarobj.close() - - - - -extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile - - - - - diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/cli.exe b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/cli.exe deleted file mode 100644 index 8906ff77..00000000 Binary files a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/cli.exe and /dev/null differ diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/__init__.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/__init__.py deleted file mode 100755 index 152406b3..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -__all__ = [ - 'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop', - 'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts', - 'sdist', 'setopt', 'test', 'upload', 'install_egg_info', 'install_scripts', - 'register', 'bdist_wininst', 'upload_docs', -] - -from setuptools.command import install_scripts -import sys - -if sys.version>='2.5': - # In Python 2.5 and above, distutils includes its own upload command - __all__.remove('upload') - -from distutils.command.bdist import bdist - - -if 'egg' not in bdist.format_commands: - bdist.format_command['egg'] = ('bdist_egg', "Python .egg file") - bdist.format_commands.append('egg') - -del bdist, sys diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/alias.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/alias.py deleted file mode 100755 index f5368b29..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/alias.py +++ /dev/null @@ -1,82 +0,0 @@ -import distutils, os -from setuptools import Command -from distutils.util import convert_path -from distutils import log -from distutils.errors import * -from setuptools.command.setopt import edit_config, option_base, config_file - -def shquote(arg): - """Quote an argument for later parsing by shlex.split()""" - for c in '"', "'", "\\", "#": - if c in arg: return repr(arg) - if arg.split()<>[arg]: - return repr(arg) - return arg - - -class alias(option_base): - """Define a shortcut that invokes one or more commands""" - - description = "define a shortcut to invoke one or more commands" - command_consumes_arguments = True - - user_options = [ - ('remove', 'r', 'remove (unset) the alias'), - ] + option_base.user_options - - boolean_options = option_base.boolean_options + ['remove'] - - def initialize_options(self): - option_base.initialize_options(self) - self.args = None - self.remove = None - - def finalize_options(self): - option_base.finalize_options(self) - if self.remove and len(self.args)<>1: - raise DistutilsOptionError( - "Must specify exactly one argument (the alias name) when " - "using --remove" - ) - - def run(self): - aliases = self.distribution.get_option_dict('aliases') - - if not self.args: - print "Command Aliases" - print "---------------" - for alias in aliases: - print "setup.py alias", format_alias(alias, aliases) - return - - elif len(self.args)==1: - alias, 
= self.args - if self.remove: - command = None - elif alias in aliases: - print "setup.py alias", format_alias(alias, aliases) - return - else: - print "No alias definition found for %r" % alias - return - else: - alias = self.args[0] - command = ' '.join(map(shquote,self.args[1:])) - - edit_config(self.filename, {'aliases': {alias:command}}, self.dry_run) - - -def format_alias(name, aliases): - source, command = aliases[name] - if source == config_file('global'): - source = '--global-config ' - elif source == config_file('user'): - source = '--user-config ' - elif source == config_file('local'): - source = '' - else: - source = '--filename=%r' % source - return source+name+' '+command - - - diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_egg.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_egg.py deleted file mode 100755 index 90e1f525..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_egg.py +++ /dev/null @@ -1,540 +0,0 @@ -"""setuptools.command.bdist_egg - -Build .egg distributions""" - -# This module should be kept compatible with Python 2.3 -import sys, os, marshal -from setuptools import Command -from distutils.dir_util import remove_tree, mkpath -try: - from distutils.sysconfig import get_python_version, get_python_lib -except ImportError: - from sysconfig import get_python_version - from distutils.sysconfig import get_python_lib - -from distutils import log -from distutils.errors import DistutilsSetupError -from pkg_resources import get_build_platform, Distribution, ensure_directory -from pkg_resources import EntryPoint -from types import CodeType -from setuptools.extension import Library - -def strip_module(filename): - if '.' 
in filename: - filename = os.path.splitext(filename)[0] - if filename.endswith('module'): - filename = filename[:-6] - return filename - -def write_stub(resource, pyfile): - f = open(pyfile,'w') - f.write('\n'.join([ - "def __bootstrap__():", - " global __bootstrap__, __loader__, __file__", - " import sys, pkg_resources, imp", - " __file__ = pkg_resources.resource_filename(__name__,%r)" - % resource, - " __loader__ = None; del __bootstrap__, __loader__", - " imp.load_dynamic(__name__,__file__)", - "__bootstrap__()", - "" # terminal \n - ])) - f.close() - -# stub __init__.py for packages distributed without one -NS_PKG_STUB = '__import__("pkg_resources").declare_namespace(__name__)' - -class bdist_egg(Command): - - description = "create an \"egg\" distribution" - - user_options = [ - ('bdist-dir=', 'b', - "temporary directory for creating the distribution"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_build_platform()), - ('exclude-source-files', None, - "remove all .py files from the generated egg"), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ] - - boolean_options = [ - 'keep-temp', 'skip-build', 'exclude-source-files' - ] - - - - - - - - - - - - - - - - - - def initialize_options (self): - self.bdist_dir = None - self.plat_name = None - self.keep_temp = 0 - self.dist_dir = None - self.skip_build = 0 - self.egg_output = None - self.exclude_source_files = None - - - def finalize_options(self): - ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info") - self.egg_info = ei_cmd.egg_info - - if self.bdist_dir is None: - bdist_base = self.get_finalized_command('bdist').bdist_base - self.bdist_dir = os.path.join(bdist_base, 'egg') - - if self.plat_name is None: - self.plat_name = get_build_platform() - - self.set_undefined_options('bdist',('dist_dir', 'dist_dir')) - - if self.egg_output is None: - - # Compute filename of the output egg - basename = Distribution( - None, None, ei_cmd.egg_name, ei_cmd.egg_version, - get_python_version(), - self.distribution.has_ext_modules() and self.plat_name - ).egg_name() - - self.egg_output = os.path.join(self.dist_dir, basename+'.egg') - - - - - - - - - def do_install_data(self): - # Hack for packages that install data to install's --install-lib - self.get_finalized_command('install').install_lib = self.bdist_dir - - site_packages = os.path.normcase(os.path.realpath(get_python_lib())) - old, self.distribution.data_files = self.distribution.data_files,[] - - for item in old: - if isinstance(item,tuple) and len(item)==2: - if os.path.isabs(item[0]): - realpath = os.path.realpath(item[0]) - normalized = os.path.normcase(realpath) - if normalized==site_packages or normalized.startswith( - site_packages+os.sep - ): - item = realpath[len(site_packages)+1:], item[1] - # XXX else: raise ??? 
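# [editor's sketch; not part of the original patch] The branch above rewrites an
# absolute data-file path that lives under site-packages into a path relative
# to it, so the data ends up inside the egg. A standalone illustration with
# hypothetical paths, assuming a symlink-free POSIX layout:
import os
site_packages = os.path.normcase(os.path.realpath('/usr/lib/python2.7/site-packages'))
item = ('/usr/lib/python2.7/site-packages/mypkg/data', ['a.txt'])  # hypothetical
realpath = os.path.realpath(item[0])
if os.path.normcase(realpath).startswith(site_packages + os.sep):
    # strip the site-packages prefix, keeping only the relative part
    item = (realpath[len(site_packages) + 1:], item[1])
# item is now ('mypkg/data', ['a.txt'])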
- self.distribution.data_files.append(item) - - try: - log.info("installing package data to %s" % self.bdist_dir) - self.call_command('install_data', force=0, root=None) - finally: - self.distribution.data_files = old - - - def get_outputs(self): - return [self.egg_output] - - - def call_command(self,cmdname,**kw): - """Invoke reinitialized command `cmdname` with keyword args""" - for dirname in INSTALL_DIRECTORY_ATTRS: - kw.setdefault(dirname,self.bdist_dir) - kw.setdefault('skip_build',self.skip_build) - kw.setdefault('dry_run', self.dry_run) - cmd = self.reinitialize_command(cmdname, **kw) - self.run_command(cmdname) - return cmd - - - def run(self): - # Generate metadata first - self.run_command("egg_info") - - # We run install_lib before install_data, because some data hacks - # pull their data path from the install_lib command. - log.info("installing library code to %s" % self.bdist_dir) - instcmd = self.get_finalized_command('install') - old_root = instcmd.root; instcmd.root = None - cmd = self.call_command('install_lib', warn_dir=0) - instcmd.root = old_root - - all_outputs, ext_outputs = self.get_ext_outputs() - self.stubs = [] - to_compile = [] - for (p,ext_name) in enumerate(ext_outputs): - filename,ext = os.path.splitext(ext_name) - pyfile = os.path.join(self.bdist_dir, strip_module(filename)+'.py') - self.stubs.append(pyfile) - log.info("creating stub loader for %s" % ext_name) - if not self.dry_run: - write_stub(os.path.basename(ext_name), pyfile) - to_compile.append(pyfile) - ext_outputs[p] = ext_name.replace(os.sep,'/') - - to_compile.extend(self.make_init_files()) - if to_compile: - cmd.byte_compile(to_compile) - - if self.distribution.data_files: - self.do_install_data() - - # Make the EGG-INFO directory - archive_root = self.bdist_dir - egg_info = os.path.join(archive_root,'EGG-INFO') - self.mkpath(egg_info) - if self.distribution.scripts: - script_dir = os.path.join(egg_info, 'scripts') - log.info("installing scripts to %s" % script_dir) - self.call_command('install_scripts',install_dir=script_dir,no_ep=1) - - self.copy_metadata_to(egg_info) - native_libs = os.path.join(egg_info, "native_libs.txt") - if all_outputs: - log.info("writing %s" % native_libs) - if not self.dry_run: - ensure_directory(native_libs) - libs_file = open(native_libs, 'wt') - libs_file.write('\n'.join(all_outputs)) - libs_file.write('\n') - libs_file.close() - elif os.path.isfile(native_libs): - log.info("removing %s" % native_libs) - if not self.dry_run: - os.unlink(native_libs) - - write_safety_flag( - os.path.join(archive_root,'EGG-INFO'), self.zip_safe() - ) - - if os.path.exists(os.path.join(self.egg_info,'depends.txt')): - log.warn( - "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n" - "Use the install_requires/extras_require setup() args instead." 
- ) - - if self.exclude_source_files: - self.zap_pyfiles() - - # Make the archive - make_zipfile(self.egg_output, archive_root, verbose=self.verbose, - dry_run=self.dry_run, mode=self.gen_header()) - if not self.keep_temp: - remove_tree(self.bdist_dir, dry_run=self.dry_run) - - # Add to 'Distribution.dist_files' so that the "upload" command works - getattr(self.distribution,'dist_files',[]).append( - ('bdist_egg',get_python_version(),self.egg_output)) - - - - - def zap_pyfiles(self): - log.info("Removing .py files from temporary directory") - for base,dirs,files in walk_egg(self.bdist_dir): - for name in files: - if name.endswith('.py'): - path = os.path.join(base,name) - log.debug("Deleting %s", path) - os.unlink(path) - - def zip_safe(self): - safe = getattr(self.distribution,'zip_safe',None) - if safe is not None: - return safe - log.warn("zip_safe flag not set; analyzing archive contents...") - return analyze_egg(self.bdist_dir, self.stubs) - - def make_init_files(self): - """Create missing package __init__ files""" - init_files = [] - for base,dirs,files in walk_egg(self.bdist_dir): - if base==self.bdist_dir: - # don't put an __init__ in the root - continue - for name in files: - if name.endswith('.py'): - if '__init__.py' not in files: - pkg = base[len(self.bdist_dir)+1:].replace(os.sep,'.') - if self.distribution.has_contents_for(pkg): - log.warn("Creating missing __init__.py for %s",pkg) - filename = os.path.join(base,'__init__.py') - if not self.dry_run: - f = open(filename,'w'); f.write(NS_PKG_STUB) - f.close() - init_files.append(filename) - break - else: - # not a package, don't traverse to subdirectories - dirs[:] = [] - - return init_files - - def gen_header(self): - epm = EntryPoint.parse_map(self.distribution.entry_points or '') - ep = epm.get('setuptools.installation',{}).get('eggsecutable') - if ep is None: - return 'w' # not an eggsecutable, do it the usual way. 
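# [editor's sketch; not part of the original patch] Everything below this early
# return handles "eggsecutable" eggs: gen_header() only emits the
# self-executing shell header when the distribution declares an 'eggsecutable'
# entry point in the 'setuptools.installation' group. A hypothetical setup.py
# that would take the 'a' (append) branch:
from setuptools import setup

setup(
    name='mypkg',              # hypothetical project
    version='1.0',
    py_modules=['mypkg'],
    entry_points={
        'setuptools.installation': [
            # must reference a callable attribute, with no extras
            'eggsecutable = mypkg:main',
        ],
    },
)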
- - if not ep.attrs or ep.extras: - raise DistutilsSetupError( - "eggsecutable entry point (%r) cannot have 'extras' " - "or refer to a module" % (ep,) - ) - - pyver = sys.version[:3] - pkg = ep.module_name - full = '.'.join(ep.attrs) - base = ep.attrs[0] - basename = os.path.basename(self.egg_output) - - header = ( - "#!/bin/sh\n" - 'if [ `basename $0` = "%(basename)s" ]\n' - 'then exec python%(pyver)s -c "' - "import sys, os; sys.path.insert(0, os.path.abspath('$0')); " - "from %(pkg)s import %(base)s; sys.exit(%(full)s())" - '" "$@"\n' - 'else\n' - ' echo $0 is not the correct name for this egg file.\n' - ' echo Please rename it back to %(basename)s and try again.\n' - ' exec false\n' - 'fi\n' - - ) % locals() - - if not self.dry_run: - mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run) - f = open(self.egg_output, 'w') - f.write(header) - f.close() - return 'a' - - - def copy_metadata_to(self, target_dir): - prefix = os.path.join(self.egg_info,'') - for path in self.ei_cmd.filelist.files: - if path.startswith(prefix): - target = os.path.join(target_dir, path[len(prefix):]) - ensure_directory(target) - self.copy_file(path, target) - - def get_ext_outputs(self): - """Get a list of relative paths to C extensions in the output distro""" - - all_outputs = [] - ext_outputs = [] - - paths = {self.bdist_dir:''} - for base, dirs, files in os.walk(self.bdist_dir): - for filename in files: - if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS: - all_outputs.append(paths[base]+filename) - for filename in dirs: - paths[os.path.join(base,filename)] = paths[base]+filename+'/' - - if self.distribution.has_ext_modules(): - build_cmd = self.get_finalized_command('build_ext') - for ext in build_cmd.extensions: - if isinstance(ext,Library): - continue - fullname = build_cmd.get_ext_fullname(ext.name) - filename = build_cmd.get_ext_filename(fullname) - if not os.path.basename(filename).startswith('dl-'): - if os.path.exists(os.path.join(self.bdist_dir,filename)): - ext_outputs.append(filename) - - return all_outputs, ext_outputs - - -NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split()) - - - - -def walk_egg(egg_dir): - """Walk an unpacked egg's contents, skipping the metadata directory""" - walker = os.walk(egg_dir) - base,dirs,files = walker.next() - if 'EGG-INFO' in dirs: - dirs.remove('EGG-INFO') - yield base,dirs,files - for bdf in walker: - yield bdf - -def analyze_egg(egg_dir, stubs): - # check for existing flag in EGG-INFO - for flag,fn in safety_flags.items(): - if os.path.exists(os.path.join(egg_dir,'EGG-INFO',fn)): - return flag - if not can_scan(): return False - safe = True - for base, dirs, files in walk_egg(egg_dir): - for name in files: - if name.endswith('.py') or name.endswith('.pyw'): - continue - elif name.endswith('.pyc') or name.endswith('.pyo'): - # always scan, even if we already know we're not safe - safe = scan_module(egg_dir, base, name, stubs) and safe - return safe - -def write_safety_flag(egg_dir, safe): - # Write or remove zip safety flag file(s) - for flag,fn in safety_flags.items(): - fn = os.path.join(egg_dir, fn) - if os.path.exists(fn): - if safe is None or bool(safe)<>flag: - os.unlink(fn) - elif safe is not None and bool(safe)==flag: - f=open(fn,'wt'); f.write('\n'); f.close() - -safety_flags = { - True: 'zip-safe', - False: 'not-zip-safe', -} - -def scan_module(egg_dir, base, name, stubs): - """Check whether module possibly uses unsafe-for-zipfile stuff""" - - filename = os.path.join(base,name) - if filename[:-1] in stubs: - return 
True # Extension module
-    pkg = base[len(egg_dir)+1:].replace(os.sep,'.')
-    module = pkg+(pkg and '.' or '')+os.path.splitext(name)[0]
-    f = open(filename,'rb'); f.read(8) # skip magic & date
-    code = marshal.load(f); f.close()
-    safe = True
-    symbols = dict.fromkeys(iter_symbols(code))
-    for bad in ['__file__', '__path__']:
-        if bad in symbols:
-            log.warn("%s: module references %s", module, bad)
-            safe = False
-    if 'inspect' in symbols:
-        for bad in [
-            'getsource', 'getabsfile', 'getsourcefile', 'getfile',
-            'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
-            'getinnerframes', 'getouterframes', 'stack', 'trace'
-        ]:
-            if bad in symbols:
-                log.warn("%s: module MAY be using inspect.%s", module, bad)
-                safe = False
-    if '__name__' in symbols and '__main__' in symbols and '.' not in module:
-        if sys.version[:3]=="2.4": # -m works w/zipfiles in 2.5
-            log.warn("%s: top-level module may be 'python -m' script", module)
-            safe = False
-    return safe
-
-def iter_symbols(code):
-    """Yield names and strings used by `code` and its nested code objects"""
-    for name in code.co_names: yield name
-    for const in code.co_consts:
-        if isinstance(const,basestring):
-            yield const
-        elif isinstance(const,CodeType):
-            for name in iter_symbols(const):
-                yield name
-
-def can_scan():
-    if not sys.platform.startswith('java') and sys.platform != 'cli':
-        # CPython, PyPy, etc.
-        return True
-    log.warn("Unable to analyze compiled code on this platform.")
-    log.warn("Please ask the author to include a 'zip_safe'"
-             " setting (either True or False) in the package's setup.py")
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-# Attribute names of options for commands that might need to be convinced to
-# install to the egg build directory
-
-INSTALL_DIRECTORY_ATTRS = [
-    'install_lib', 'install_dir', 'install_data', 'install_base'
-]
-
-def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
-    mode='w'
-):
-    """Create a zip file from all the files under 'base_dir'. The output
-    zip file will be named 'zip_filename'. Uses either the "zipfile"
-    Python module (if available) or the InfoZIP "zip" utility (if installed
-    and found on the default search path). If neither tool is available,
-    raises DistutilsExecError. Returns the name of the output zip file.
- """ - import zipfile - mkpath(os.path.dirname(zip_filename), dry_run=dry_run) - log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir) - - def visit(z, dirname, names): - for name in names: - path = os.path.normpath(os.path.join(dirname, name)) - if os.path.isfile(path): - p = path[len(base_dir)+1:] - if not dry_run: - z.write(path, p) - log.debug("adding '%s'" % p) - - if compress is None: - compress = (sys.version>="2.4") # avoid 2.3 zipimport bug when 64 bits - - compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)] - if not dry_run: - z = zipfile.ZipFile(zip_filename, mode, compression=compression) - for dirname, dirs, files in os.walk(base_dir): - visit(z, dirname, files) - z.close() - else: - for dirname, dirs, files in os.walk(base_dir): - visit(None, dirname, file) - return zip_filename -# diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_rpm.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_rpm.py deleted file mode 100755 index 8c48da35..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_rpm.py +++ /dev/null @@ -1,82 +0,0 @@ -# This is just a kludge so that bdist_rpm doesn't guess wrong about the -# distribution name and version, if the egg_info command is going to alter -# them, another kludge to allow you to build old-style non-egg RPMs, and -# finally, a kludge to track .rpm files for uploading when run on Python <2.5. - -from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm -import sys, os - -class bdist_rpm(_bdist_rpm): - - def initialize_options(self): - _bdist_rpm.initialize_options(self) - self.no_egg = None - - if sys.version<"2.5": - # Track for uploading any .rpm file(s) moved to self.dist_dir - def move_file(self, src, dst, level=1): - _bdist_rpm.move_file(self, src, dst, level) - if dst==self.dist_dir and src.endswith('.rpm'): - getattr(self.distribution,'dist_files',[]).append( - ('bdist_rpm', - src.endswith('.src.rpm') and 'any' or sys.version[:3], - os.path.join(dst, os.path.basename(src))) - ) - - def run(self): - self.run_command('egg_info') # ensure distro name is up-to-date - _bdist_rpm.run(self) - - - - - - - - - - - - - - def _make_spec_file(self): - version = self.distribution.get_version() - rpmversion = version.replace('-','_') - spec = _bdist_rpm._make_spec_file(self) - line23 = '%define version '+version - line24 = '%define version '+rpmversion - spec = [ - line.replace( - "Source0: %{name}-%{version}.tar", - "Source0: %{name}-%{unmangled_version}.tar" - ).replace( - "setup.py install ", - "setup.py install --single-version-externally-managed " - ).replace( - "%setup", - "%setup -n %{name}-%{unmangled_version}" - ).replace(line23,line24) - for line in spec - ] - spec.insert(spec.index(line24)+1, "%define unmangled_version "+version) - return spec - - - - - - - - - - - - - - - - - - - - diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_wininst.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_wininst.py deleted file mode 100755 index 93e6846d..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_wininst.py +++ /dev/null @@ -1,41 +0,0 @@ -from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst -import os, sys - -class bdist_wininst(_bdist_wininst): - - def create_exe(self, arcname, fullname, bitmap=None): - _bdist_wininst.create_exe(self, 
arcname, fullname, bitmap) - dist_files = getattr(self.distribution, 'dist_files', []) - - if self.target_version: - installer_name = os.path.join(self.dist_dir, - "%s.win32-py%s.exe" % - (fullname, self.target_version)) - pyversion = self.target_version - - # fix 2.5 bdist_wininst ignoring --target-version spec - bad = ('bdist_wininst','any',installer_name) - if bad in dist_files: - dist_files.remove(bad) - else: - installer_name = os.path.join(self.dist_dir, - "%s.win32.exe" % fullname) - pyversion = 'any' - good = ('bdist_wininst', pyversion, installer_name) - if good not in dist_files: - dist_files.append(good) - - def reinitialize_command (self, command, reinit_subcommands=0): - cmd = self.distribution.reinitialize_command( - command, reinit_subcommands) - if command in ('install', 'install_lib'): - cmd.install_lib = None # work around distutils bug - return cmd - - def run(self): - self._is_running = True - try: - _bdist_wininst.run(self) - finally: - self._is_running = False - diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_ext.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_ext.py deleted file mode 100755 index 4a94572c..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_ext.py +++ /dev/null @@ -1,294 +0,0 @@ -from distutils.command.build_ext import build_ext as _du_build_ext -try: - # Attempt to use Pyrex for building extensions, if available - from Pyrex.Distutils.build_ext import build_ext as _build_ext -except ImportError: - _build_ext = _du_build_ext - -import os, sys -from distutils.file_util import copy_file -from setuptools.extension import Library -from distutils.ccompiler import new_compiler -from distutils.sysconfig import customize_compiler, get_config_var -get_config_var("LDSHARED") # make sure _config_vars is initialized -from distutils.sysconfig import _config_vars -from distutils import log -from distutils.errors import * - -have_rtld = False -use_stubs = False -libtype = 'shared' - -if sys.platform == "darwin": - use_stubs = True -elif os.name != 'nt': - try: - from dl import RTLD_NOW - have_rtld = True - use_stubs = True - except ImportError: - pass - -def if_dl(s): - if have_rtld: - return s - return '' - - - - - - -class build_ext(_build_ext): - def run(self): - """Build extensions in build directory, then copy if --inplace""" - old_inplace, self.inplace = self.inplace, 0 - _build_ext.run(self) - self.inplace = old_inplace - if old_inplace: - self.copy_extensions_to_source() - - def copy_extensions_to_source(self): - build_py = self.get_finalized_command('build_py') - for ext in self.extensions: - fullname = self.get_ext_fullname(ext.name) - filename = self.get_ext_filename(fullname) - modpath = fullname.split('.') - package = '.'.join(modpath[:-1]) - package_dir = build_py.get_package_dir(package) - dest_filename = os.path.join(package_dir,os.path.basename(filename)) - src_filename = os.path.join(self.build_lib,filename) - - # Always copy, even if source is older than destination, to ensure - # that the right extensions for the current Python/platform are - # used. 
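# [editor's note; not part of the original patch] The unconditional copy_file()
# call that follows implements the comment above: after an in-place build,
# every built extension is copied back next to its source even when the
# destination's timestamp looks current, e.g. when a (hypothetical) project
# runs:
#
#     python setup.py build_ext --inplace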
- copy_file( - src_filename, dest_filename, verbose=self.verbose, - dry_run=self.dry_run - ) - if ext._needs_stub: - self.write_stub(package_dir or os.curdir, ext, True) - - - if _build_ext is not _du_build_ext and not hasattr(_build_ext,'pyrex_sources'): - # Workaround for problems using some Pyrex versions w/SWIG and/or 2.4 - def swig_sources(self, sources, *otherargs): - # first do any Pyrex processing - sources = _build_ext.swig_sources(self, sources) or sources - # Then do any actual SWIG stuff on the remainder - return _du_build_ext.swig_sources(self, sources, *otherargs) - - - - def get_ext_filename(self, fullname): - filename = _build_ext.get_ext_filename(self,fullname) - if fullname not in self.ext_map: - return filename - ext = self.ext_map[fullname] - if isinstance(ext,Library): - fn, ext = os.path.splitext(filename) - return self.shlib_compiler.library_filename(fn,libtype) - elif use_stubs and ext._links_to_dynamic: - d,fn = os.path.split(filename) - return os.path.join(d,'dl-'+fn) - else: - return filename - - def initialize_options(self): - _build_ext.initialize_options(self) - self.shlib_compiler = None - self.shlibs = [] - self.ext_map = {} - - def finalize_options(self): - _build_ext.finalize_options(self) - self.extensions = self.extensions or [] - self.check_extensions_list(self.extensions) - self.shlibs = [ext for ext in self.extensions - if isinstance(ext,Library)] - if self.shlibs: - self.setup_shlib_compiler() - for ext in self.extensions: - ext._full_name = self.get_ext_fullname(ext.name) - for ext in self.extensions: - fullname = ext._full_name - self.ext_map[fullname] = ext - - # distutils 3.1 will also ask for module names - # XXX what to do with conflicts? - self.ext_map[fullname.split('.')[-1]] = ext - - ltd = ext._links_to_dynamic = \ - self.shlibs and self.links_to_dynamic(ext) or False - ext._needs_stub = ltd and use_stubs and not isinstance(ext,Library) - filename = ext._file_name = self.get_ext_filename(fullname) - libdir = os.path.dirname(os.path.join(self.build_lib,filename)) - if ltd and libdir not in ext.library_dirs: - ext.library_dirs.append(libdir) - if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs: - ext.runtime_library_dirs.append(os.curdir) - - def setup_shlib_compiler(self): - compiler = self.shlib_compiler = new_compiler( - compiler=self.compiler, dry_run=self.dry_run, force=self.force - ) - if sys.platform == "darwin": - tmp = _config_vars.copy() - try: - # XXX Help! I don't have any idea whether these are right... 
- _config_vars['LDSHARED'] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup" - _config_vars['CCSHARED'] = " -dynamiclib" - _config_vars['SO'] = ".dylib" - customize_compiler(compiler) - finally: - _config_vars.clear() - _config_vars.update(tmp) - else: - customize_compiler(compiler) - - if self.include_dirs is not None: - compiler.set_include_dirs(self.include_dirs) - if self.define is not None: - # 'define' option is a list of (name,value) tuples - for (name,value) in self.define: - compiler.define_macro(name, value) - if self.undef is not None: - for macro in self.undef: - compiler.undefine_macro(macro) - if self.libraries is not None: - compiler.set_libraries(self.libraries) - if self.library_dirs is not None: - compiler.set_library_dirs(self.library_dirs) - if self.rpath is not None: - compiler.set_runtime_library_dirs(self.rpath) - if self.link_objects is not None: - compiler.set_link_objects(self.link_objects) - - # hack so distutils' build_extension() builds a library instead - compiler.link_shared_object = link_shared_object.__get__(compiler) - - - - def get_export_symbols(self, ext): - if isinstance(ext,Library): - return ext.export_symbols - return _build_ext.get_export_symbols(self,ext) - - def build_extension(self, ext): - _compiler = self.compiler - try: - if isinstance(ext,Library): - self.compiler = self.shlib_compiler - _build_ext.build_extension(self,ext) - if ext._needs_stub: - self.write_stub( - self.get_finalized_command('build_py').build_lib, ext - ) - finally: - self.compiler = _compiler - - def links_to_dynamic(self, ext): - """Return true if 'ext' links to a dynamic lib in the same package""" - # XXX this should check to ensure the lib is actually being built - # XXX as dynamic, and not just using a locally-found version or a - # XXX static-compiled version - libnames = dict.fromkeys([lib._full_name for lib in self.shlibs]) - pkg = '.'.join(ext._full_name.split('.')[:-1]+['']) - for libname in ext.libraries: - if pkg+libname in libnames: return True - return False - - def get_outputs(self): - outputs = _build_ext.get_outputs(self) - optimize = self.get_finalized_command('build_py').optimize - for ext in self.extensions: - if ext._needs_stub: - base = os.path.join(self.build_lib, *ext._full_name.split('.')) - outputs.append(base+'.py') - outputs.append(base+'.pyc') - if optimize: - outputs.append(base+'.pyo') - return outputs - - def write_stub(self, output_dir, ext, compile=False): - log.info("writing stub loader for %s to %s",ext._full_name, output_dir) - stub_file = os.path.join(output_dir, *ext._full_name.split('.'))+'.py' - if compile and os.path.exists(stub_file): - raise DistutilsError(stub_file+" already exists! 
Please delete.") - if not self.dry_run: - f = open(stub_file,'w') - f.write('\n'.join([ - "def __bootstrap__():", - " global __bootstrap__, __file__, __loader__", - " import sys, os, pkg_resources, imp"+if_dl(", dl"), - " __file__ = pkg_resources.resource_filename(__name__,%r)" - % os.path.basename(ext._file_name), - " del __bootstrap__", - " if '__loader__' in globals():", - " del __loader__", - if_dl(" old_flags = sys.getdlopenflags()"), - " old_dir = os.getcwd()", - " try:", - " os.chdir(os.path.dirname(__file__))", - if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"), - " imp.load_dynamic(__name__,__file__)", - " finally:", - if_dl(" sys.setdlopenflags(old_flags)"), - " os.chdir(old_dir)", - "__bootstrap__()", - "" # terminal \n - ])) - f.close() - if compile: - from distutils.util import byte_compile - byte_compile([stub_file], optimize=0, - force=True, dry_run=self.dry_run) - optimize = self.get_finalized_command('install_lib').optimize - if optimize > 0: - byte_compile([stub_file], optimize=optimize, - force=True, dry_run=self.dry_run) - if os.path.exists(stub_file) and not self.dry_run: - os.unlink(stub_file) - - -if use_stubs or os.name=='nt': - # Build shared libraries - # - def link_shared_object(self, objects, output_libname, output_dir=None, - libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None - ): self.link( - self.SHARED_LIBRARY, objects, output_libname, - output_dir, libraries, library_dirs, runtime_library_dirs, - export_symbols, debug, extra_preargs, extra_postargs, - build_temp, target_lang - ) -else: - # Build static libraries everywhere else - libtype = 'static' - - def link_shared_object(self, objects, output_libname, output_dir=None, - libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None - ): - # XXX we need to either disallow these attrs on Library instances, - # or warn/abort here if set, or something... 
-        #libraries=None, library_dirs=None, runtime_library_dirs=None,
-        #export_symbols=None, extra_preargs=None, extra_postargs=None,
-        #build_temp=None
-
-        assert output_dir is None # distutils build_ext doesn't pass this
-        output_dir,filename = os.path.split(output_libname)
-        basename, ext = os.path.splitext(filename)
-        if self.library_filename("x").startswith('lib'):
-            # strip 'lib' prefix; this is kludgy if some platform uses
-            # a different prefix
-            basename = basename[3:]
-
-        self.create_static_lib(
-            objects, basename, output_dir, debug, target_lang
-        )
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_py.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_py.py
deleted file mode 100755
index a01e2843..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_py.py
+++ /dev/null
@@ -1,268 +0,0 @@
-import os.path, sys, fnmatch
-from distutils.command.build_py import build_py as _build_py
-from distutils.util import convert_path
-from glob import glob
-
-try:
-    from distutils.util import Mixin2to3 as _Mixin2to3
-    # add support for converting doctests that is missing in 3.1 distutils
-    from distutils import log
-    from lib2to3.refactor import RefactoringTool, get_fixers_from_package
-    import setuptools
-    class DistutilsRefactoringTool(RefactoringTool):
-        def log_error(self, msg, *args, **kw):
-            log.error(msg, *args)
-
-        def log_message(self, msg, *args):
-            log.info(msg, *args)
-
-        def log_debug(self, msg, *args):
-            log.debug(msg, *args)
-
-    class Mixin2to3(_Mixin2to3):
-        def run_2to3(self, files, doctests = False):
-            # See if the distribution option has been set, otherwise check the
-            # setuptools default.
-            if self.distribution.use_2to3 is not True:
-                return
-            if not files:
-                return
-            log.info("Fixing "+" ".join(files))
-            if not self.fixer_names:
-                self.fixer_names = []
-                for p in setuptools.lib2to3_fixer_packages:
-                    self.fixer_names.extend(get_fixers_from_package(p))
-                if self.distribution.use_2to3_fixers is not None:
-                    for p in self.distribution.use_2to3_fixers:
-                        self.fixer_names.extend(get_fixers_from_package(p))
-            if doctests:
-                if setuptools.run_2to3_on_doctests:
-                    r = DistutilsRefactoringTool(self.fixer_names)
-                    r.refactor(files, write=True, doctests_only=True)
-            else:
-                _Mixin2to3.run_2to3(self, files)
-
-except ImportError:
-    class Mixin2to3:
-        def run_2to3(self, files, doctests=True):
-            # Nothing done in 2.x
-            pass
-
-class build_py(_build_py, Mixin2to3):
-    """Enhanced 'build_py' command that includes data files with packages
-
-    The data files are specified via a 'package_data' argument to 'setup()'.
-    See 'setuptools.dist.Distribution' for more details.
-
-    Also, this version of the 'build_py' command allows you to specify both
-    'py_modules' and 'packages' in the same setup operation.
- """ - def finalize_options(self): - _build_py.finalize_options(self) - self.package_data = self.distribution.package_data - self.exclude_package_data = self.distribution.exclude_package_data or {} - if 'data_files' in self.__dict__: del self.__dict__['data_files'] - self.__updated_files = [] - self.__doctests_2to3 = [] - - def run(self): - """Build modules, packages, and copy data files to build directory""" - if not self.py_modules and not self.packages: - return - - if self.py_modules: - self.build_modules() - - if self.packages: - self.build_packages() - self.build_package_data() - - self.run_2to3(self.__updated_files, False) - self.run_2to3(self.__updated_files, True) - self.run_2to3(self.__doctests_2to3, True) - - # Only compile actual .py files, using our base class' idea of what our - # output files are. - self.byte_compile(_build_py.get_outputs(self, include_bytecode=0)) - - def __getattr__(self,attr): - if attr=='data_files': # lazily compute data files - self.data_files = files = self._get_data_files(); return files - return _build_py.__getattr__(self,attr) - - def build_module(self, module, module_file, package): - outfile, copied = _build_py.build_module(self, module, module_file, package) - if copied: - self.__updated_files.append(outfile) - return outfile, copied - - def _get_data_files(self): - """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" - self.analyze_manifest() - data = [] - for package in self.packages or (): - # Locate package source directory - src_dir = self.get_package_dir(package) - - # Compute package build directory - build_dir = os.path.join(*([self.build_lib] + package.split('.'))) - - # Length of path to strip from found files - plen = len(src_dir)+1 - - # Strip directory from globbed filenames - filenames = [ - file[plen:] for file in self.find_data_files(package, src_dir) - ] - data.append( (package, src_dir, build_dir, filenames) ) - return data - - def find_data_files(self, package, src_dir): - """Return filenames for package's data files in 'src_dir'""" - globs = (self.package_data.get('', []) - + self.package_data.get(package, [])) - files = self.manifest_files.get(package, [])[:] - for pattern in globs: - # Each pattern has to be converted to a platform-specific path - files.extend(glob(os.path.join(src_dir, convert_path(pattern)))) - return self.exclude_data_files(package, src_dir, files) - - def build_package_data(self): - """Copy data files into build directory""" - lastdir = None - for package, src_dir, build_dir, filenames in self.data_files: - for filename in filenames: - target = os.path.join(build_dir, filename) - self.mkpath(os.path.dirname(target)) - srcfile = os.path.join(src_dir, filename) - outf, copied = self.copy_file(srcfile, target) - srcfile = os.path.abspath(srcfile) - if copied and srcfile in self.distribution.convert_2to3_doctests: - self.__doctests_2to3.append(outf) - - - def analyze_manifest(self): - self.manifest_files = mf = {} - if not self.distribution.include_package_data: - return - src_dirs = {} - for package in self.packages or (): - # Locate package source directory - src_dirs[assert_relative(self.get_package_dir(package))] = package - - self.run_command('egg_info') - ei_cmd = self.get_finalized_command('egg_info') - for path in ei_cmd.filelist.files: - d,f = os.path.split(assert_relative(path)) - prev = None - oldf = f - while d and d!=prev and d not in src_dirs: - prev = d - d, df = os.path.split(d) - f = os.path.join(df, f) - if d in src_dirs: - if path.endswith('.py') and f==oldf: - continue 
# it's a module, not data - mf.setdefault(src_dirs[d],[]).append(path) - - def get_data_files(self): pass # kludge 2.4 for lazy computation - - if sys.version<"2.4": # Python 2.4 already has this code - def get_outputs(self, include_bytecode=1): - """Return complete list of files copied to the build directory - - This includes both '.py' files and data files, as well as '.pyc' - and '.pyo' files if 'include_bytecode' is true. (This method is - needed for the 'install_lib' command to do its job properly, and to - generate a correct installation manifest.) - """ - return _build_py.get_outputs(self, include_bytecode) + [ - os.path.join(build_dir, filename) - for package, src_dir, build_dir,filenames in self.data_files - for filename in filenames - ] - - def check_package(self, package, package_dir): - """Check namespace packages' __init__ for declare_namespace""" - try: - return self.packages_checked[package] - except KeyError: - pass - - init_py = _build_py.check_package(self, package, package_dir) - self.packages_checked[package] = init_py - - if not init_py or not self.distribution.namespace_packages: - return init_py - - for pkg in self.distribution.namespace_packages: - if pkg==package or pkg.startswith(package+'.'): - break - else: - return init_py - - f = open(init_py,'rU') - if 'declare_namespace' not in f.read(): - from distutils import log - log.warn( - "WARNING: %s is a namespace package, but its __init__.py does\n" - "not declare_namespace(); setuptools 0.7 will REQUIRE this!\n" - '(See the setuptools manual under "Namespace Packages" for ' - "details.)\n", package - ) - f.close() - return init_py - - def initialize_options(self): - self.packages_checked={} - _build_py.initialize_options(self) - - - def get_package_dir(self, package): - res = _build_py.get_package_dir(self, package) - if self.distribution.src_root is not None: - return os.path.join(self.distribution.src_root, res) - return res - - - def exclude_data_files(self, package, src_dir, files): - """Filter filenames for package's data files in 'src_dir'""" - globs = (self.exclude_package_data.get('', []) - + self.exclude_package_data.get(package, [])) - bad = [] - for pattern in globs: - bad.extend( - fnmatch.filter( - files, os.path.join(src_dir, convert_path(pattern)) - ) - ) - bad = dict.fromkeys(bad) - seen = {} - return [ - f for f in files if f not in bad - and f not in seen and seen.setdefault(f,1) # ditch dupes - ] - - -def assert_relative(path): - if not os.path.isabs(path): - return path - from distutils.errors import DistutilsSetupError - raise DistutilsSetupError( -"""Error: setup script specifies an absolute path: - - %s - -setup() arguments must *always* be /-separated paths relative to the -setup.py directory, *never* absolute paths. 
-""" % path - ) - - - - - - - - - diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/develop.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/develop.py deleted file mode 100755 index 93b7773c..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/develop.py +++ /dev/null @@ -1,141 +0,0 @@ -from setuptools.command.easy_install import easy_install -from distutils.util import convert_path, subst_vars -from pkg_resources import Distribution, PathMetadata, normalize_path -from distutils import log -from distutils.errors import DistutilsError, DistutilsOptionError -import os, setuptools, glob - -class develop(easy_install): - """Set up package for development""" - - description = "install package in 'development mode'" - - user_options = easy_install.user_options + [ - ("uninstall", "u", "Uninstall this source package"), - ("egg-path=", None, "Set the path to be used in the .egg-link file"), - ] - - boolean_options = easy_install.boolean_options + ['uninstall'] - - command_consumes_arguments = False # override base - - def run(self): - if self.uninstall: - self.multi_version = True - self.uninstall_link() - else: - self.install_for_development() - self.warn_deprecated_options() - - def initialize_options(self): - self.uninstall = None - self.egg_path = None - easy_install.initialize_options(self) - self.setup_path = None - self.always_copy_from = '.' # always copy eggs installed in curdir - - - - def finalize_options(self): - ei = self.get_finalized_command("egg_info") - if ei.broken_egg_info: - raise DistutilsError( - "Please rename %r to %r before using 'develop'" - % (ei.egg_info, ei.broken_egg_info) - ) - self.args = [ei.egg_name] - - - - - easy_install.finalize_options(self) - self.expand_basedirs() - self.expand_dirs() - # pick up setup-dir .egg files only: no .egg-info - self.package_index.scan(glob.glob('*.egg')) - - self.egg_link = os.path.join(self.install_dir, ei.egg_name+'.egg-link') - self.egg_base = ei.egg_base - if self.egg_path is None: - self.egg_path = os.path.abspath(ei.egg_base) - - target = normalize_path(self.egg_base) - if normalize_path(os.path.join(self.install_dir, self.egg_path)) != target: - raise DistutilsOptionError( - "--egg-path must be a relative path from the install" - " directory to "+target - ) - - # Make a distribution for the package's source - self.dist = Distribution( - target, - PathMetadata(target, os.path.abspath(ei.egg_info)), - project_name = ei.egg_name - ) - - p = self.egg_base.replace(os.sep,'/') - if p!= os.curdir: - p = '../' * (p.count('/')+1) - self.setup_path = p - p = normalize_path(os.path.join(self.install_dir, self.egg_path, p)) - if p != normalize_path(os.curdir): - raise DistutilsOptionError( - "Can't get a consistent path to setup script from" - " installation directory", p, normalize_path(os.curdir)) - - def install_for_development(self): - # Ensure metadata is up-to-date - self.run_command('egg_info') - # Build extensions in-place - self.reinitialize_command('build_ext', inplace=1) - self.run_command('build_ext') - self.install_site_py() # ensure that target dir is site-safe - if setuptools.bootstrap_install_from: - self.easy_install(setuptools.bootstrap_install_from) - setuptools.bootstrap_install_from = None - - # create an .egg-link in the installation dir, pointing to our egg - log.info("Creating %s (link to %s)", self.egg_link, self.egg_base) - if not self.dry_run: - f = open(self.egg_link,"w") - f.write(self.egg_path + "\n" 
+ self.setup_path) - f.close() - # postprocess the installed distro, fixing up .pth, installing scripts, - # and handling requirements - self.process_distribution(None, self.dist, not self.no_deps) - - - def uninstall_link(self): - if os.path.exists(self.egg_link): - log.info("Removing %s (link to %s)", self.egg_link, self.egg_base) - contents = [line.rstrip() for line in open(self.egg_link)] - if contents not in ([self.egg_path], [self.egg_path, self.setup_path]): - log.warn("Link points to %s: uninstall aborted", contents) - return - if not self.dry_run: - os.unlink(self.egg_link) - if not self.dry_run: - self.update_pth(self.dist) # remove any .pth link to us - if self.distribution.scripts: - # XXX should also check for entry point scripts! - log.warn("Note: you must uninstall or replace scripts manually!") - - def install_egg_scripts(self, dist): - if dist is not self.dist: - # Installing a dependency, so fall back to normal behavior - return easy_install.install_egg_scripts(self,dist) - - # create wrapper scripts in the script dir, pointing to dist.scripts - - # new-style... - self.install_wrapper_scripts(dist) - - # ...and old-style - for script_name in self.distribution.scripts or []: - script_path = os.path.abspath(convert_path(script_name)) - script_name = os.path.basename(script_path) - f = open(script_path,'rU') - script_text = f.read() - f.close() - self.install_script(dist, script_name, script_text, script_path) - diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/easy_install.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/easy_install.py deleted file mode 100755 index 27fd00c7..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/easy_install.py +++ /dev/null @@ -1,1865 +0,0 @@ -#!python -"""\ -Easy Install ------------- - -A tool for doing automatic download/extract/build of distutils-based Python -packages. For detailed documentation, see the accompanying EasyInstall.txt -file, or visit the `EasyInstall home page`__. 
- -__ http://packages.python.org/distribute/easy_install.html - -""" -import sys, os.path, zipimport, shutil, tempfile, zipfile, re, stat, random -from glob import glob -from setuptools import Command, _dont_write_bytecode -from setuptools.sandbox import run_setup -from distutils import log, dir_util -from distutils.util import convert_path, subst_vars -from distutils.sysconfig import get_python_lib, get_config_vars -from distutils.errors import DistutilsArgError, DistutilsOptionError, \ - DistutilsError, DistutilsPlatformError -from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS -from setuptools.archive_util import unpack_archive -from setuptools.package_index import PackageIndex -from setuptools.package_index import URL_SCHEME -from setuptools.command import bdist_egg, egg_info -from pkg_resources import yield_lines, normalize_path, resource_string, \ - ensure_directory, get_distribution, find_distributions, \ - Environment, Requirement, Distribution, \ - PathMetadata, EggMetadata, WorkingSet, \ - DistributionNotFound, VersionConflict, \ - DEVELOP_DIST - -sys_executable = os.path.normpath(sys.executable) - -__all__ = [ - 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg', - 'main', 'get_exe_prefixes', -] - -import site -HAS_USER_SITE = not sys.version < "2.6" and site.ENABLE_USER_SITE - -def samefile(p1,p2): - if hasattr(os.path,'samefile') and ( - os.path.exists(p1) and os.path.exists(p2) - ): - return os.path.samefile(p1,p2) - return ( - os.path.normpath(os.path.normcase(p1)) == - os.path.normpath(os.path.normcase(p2)) - ) - -if sys.version_info <= (3,): - def _to_ascii(s): - return s - def isascii(s): - try: - unicode(s, 'ascii') - return True - except UnicodeError: - return False -else: - def _to_ascii(s): - return s.encode('ascii') - def isascii(s): - try: - s.encode('ascii') - return True - except UnicodeError: - return False - -class easy_install(Command): - """Manage a download/build/install process""" - description = "Find/get/install Python packages" - command_consumes_arguments = True - - user_options = [ - ('prefix=', None, "installation prefix"), - ("zip-ok", "z", "install package as a zipfile"), - ("multi-version", "m", "make apps have to require() a version"), - ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"), - ("install-dir=", "d", "install package to DIR"), - ("script-dir=", "s", "install scripts to DIR"), - ("exclude-scripts", "x", "Don't install scripts"), - ("always-copy", "a", "Copy all needed packages to install dir"), - ("index-url=", "i", "base URL of Python Package Index"), - ("find-links=", "f", "additional URL(s) to search for packages"), - ("delete-conflicting", "D", "no longer needed; don't use this"), - ("ignore-conflicts-at-my-risk", None, - "no longer needed; don't use this"), - ("build-directory=", "b", - "download/extract/build in DIR; keep the results"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), - ('record=', None, - "filename in which to record list of installed files"), - ('always-unzip', 'Z', "don't install as a zipfile, no matter what"), - ('site-dirs=','S',"list of directories where .pth files work"), - ('editable', 'e', "Install specified packages in editable form"), - ('no-deps', 'N', "don't install dependencies"), - ('allow-hosts=', 'H', "pattern(s) that hostnames must match"), - ('local-snapshots-ok', 'l', "allow building eggs from local checkouts"), - ('version', None, "print version 
information and exit"), - ('no-find-links', None, - "Don't load find-links defined in packages being installed") - ] - boolean_options = [ - 'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy', - 'delete-conflicting', 'ignore-conflicts-at-my-risk', 'editable', - 'no-deps', 'local-snapshots-ok', 'version' - ] - - if HAS_USER_SITE: - user_options.append(('user', None, - "install in user site-package '%s'" % site.USER_SITE)) - boolean_options.append('user') - - - negative_opt = {'always-unzip': 'zip-ok'} - create_index = PackageIndex - - def initialize_options(self): - if HAS_USER_SITE: - whereami = os.path.abspath(__file__) - self.user = whereami.startswith(site.USER_SITE) - else: - self.user = 0 - - self.zip_ok = self.local_snapshots_ok = None - self.install_dir = self.script_dir = self.exclude_scripts = None - self.index_url = None - self.find_links = None - self.build_directory = None - self.args = None - self.optimize = self.record = None - self.upgrade = self.always_copy = self.multi_version = None - self.editable = self.no_deps = self.allow_hosts = None - self.root = self.prefix = self.no_report = None - self.version = None - self.install_purelib = None # for pure module distributions - self.install_platlib = None # non-pure (dists w/ extensions) - self.install_headers = None # for C/C++ headers - self.install_lib = None # set to either purelib or platlib - self.install_scripts = None - self.install_data = None - self.install_base = None - self.install_platbase = None - if HAS_USER_SITE: - self.install_userbase = site.USER_BASE - self.install_usersite = site.USER_SITE - else: - self.install_userbase = None - self.install_usersite = None - self.no_find_links = None - - # Options not specifiable via command line - self.package_index = None - self.pth_file = self.always_copy_from = None - self.delete_conflicting = None - self.ignore_conflicts_at_my_risk = None - self.site_dirs = None - self.installed_projects = {} - self.sitepy_installed = False - # Always read easy_install options, even if we are subclassed, or have - # an independent instance created. This ensures that defaults will - # always come from the standard configuration file(s)' "easy_install" - # section, even if this is a "develop" or "install" command, or some - # other embedding. 
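# [editor's note; not part of the original patch] Because the defaults always
# come from the "easy_install" section, a project-level setup.cfg such as the
# following (hypothetical values) affects this command even when it runs
# embedded in a 'develop' or 'install' invocation:
#
#     [easy_install]
#     index_url = http://pypi.python.org/simple
#     find_links = http://example.com/packages
#     zip_ok = 0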
- self._dry_run = None - self.verbose = self.distribution.verbose - self.distribution._set_command_options( - self, self.distribution.get_option_dict('easy_install') - ) - - def delete_blockers(self, blockers): - for filename in blockers: - if os.path.exists(filename) or os.path.islink(filename): - log.info("Deleting %s", filename) - if not self.dry_run: - if os.path.isdir(filename) and not os.path.islink(filename): - rmtree(filename) - else: - os.unlink(filename) - - def finalize_options(self): - if self.version: - print 'distribute %s' % get_distribution('distribute').version - sys.exit() - - py_version = sys.version.split()[0] - prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix') - - self.config_vars = {'dist_name': self.distribution.get_name(), - 'dist_version': self.distribution.get_version(), - 'dist_fullname': self.distribution.get_fullname(), - 'py_version': py_version, - 'py_version_short': py_version[0:3], - 'py_version_nodot': py_version[0] + py_version[2], - 'sys_prefix': prefix, - 'prefix': prefix, - 'sys_exec_prefix': exec_prefix, - 'exec_prefix': exec_prefix, - } - - if HAS_USER_SITE: - self.config_vars['userbase'] = self.install_userbase - self.config_vars['usersite'] = self.install_usersite - - # fix the install_dir if "--user" was used - #XXX: duplicate of the code in the setup command - if self.user and HAS_USER_SITE: - self.create_home_path() - if self.install_userbase is None: - raise DistutilsPlatformError( - "User base directory is not specified") - self.install_base = self.install_platbase = self.install_userbase - if os.name == 'posix': - self.select_scheme("unix_user") - else: - self.select_scheme(os.name + "_user") - - self.expand_basedirs() - self.expand_dirs() - - self._expand('install_dir','script_dir','build_directory','site_dirs') - # If a non-default installation directory was specified, default the - # script directory to match it. - if self.script_dir is None: - self.script_dir = self.install_dir - - if self.no_find_links is None: - self.no_find_links = False - - # Let install_dir get set by install_lib command, which in turn - # gets its info from the install command, and takes into account - # --prefix and --home and all that other crud. 
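The call that follows uses a distutils idiom worth spelling out: set_undefined_options(src_command, (src_option, dst_option), ...) finalizes the named source command and copies each of its option values onto this command wherever ours is still None. A simplified sketch of the behaviour (not the actual distutils implementation):

    # roughly what distutils.cmd.Command.set_undefined_options does
    def set_undefined_options(self, src_cmd, *option_pairs):
        src = self.distribution.get_command_obj(src_cmd)
        src.ensure_finalized()
        for src_option, dst_option in option_pairs:
            if getattr(self, dst_option) is None:
                setattr(self, dst_option, getattr(src, src_option))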
- self.set_undefined_options('install_lib', - ('install_dir','install_dir') - ) - # Likewise, set default script_dir from 'install_scripts.install_dir' - self.set_undefined_options('install_scripts', - ('install_dir', 'script_dir') - ) - - if self.user and self.install_purelib: - self.install_dir = self.install_purelib - self.script_dir = self.install_scripts - # default --record from the install command - self.set_undefined_options('install', ('record', 'record')) - normpath = map(normalize_path, sys.path) - self.all_site_dirs = get_site_dirs() - if self.site_dirs is not None: - site_dirs = [ - os.path.expanduser(s.strip()) for s in self.site_dirs.split(',') - ] - for d in site_dirs: - if not os.path.isdir(d): - log.warn("%s (in --site-dirs) does not exist", d) - elif normalize_path(d) not in normpath: - raise DistutilsOptionError( - d+" (in --site-dirs) is not on sys.path" - ) - else: - self.all_site_dirs.append(normalize_path(d)) - if not self.editable: self.check_site_dir() - self.index_url = self.index_url or "http://pypi.python.org/simple" - self.shadow_path = self.all_site_dirs[:] - for path_item in self.install_dir, normalize_path(self.script_dir): - if path_item not in self.shadow_path: - self.shadow_path.insert(0, path_item) - - if self.allow_hosts is not None: - hosts = [s.strip() for s in self.allow_hosts.split(',')] - else: - hosts = ['*'] - if self.package_index is None: - self.package_index = self.create_index( - self.index_url, search_path = self.shadow_path, hosts=hosts, - ) - self.local_index = Environment(self.shadow_path+sys.path) - - if self.find_links is not None: - if isinstance(self.find_links, basestring): - self.find_links = self.find_links.split() - else: - self.find_links = [] - if self.local_snapshots_ok: - self.package_index.scan_egg_links(self.shadow_path+sys.path) - if not self.no_find_links: - self.package_index.add_find_links(self.find_links) - self.set_undefined_options('install_lib', ('optimize','optimize')) - if not isinstance(self.optimize,int): - try: - self.optimize = int(self.optimize) - if not (0 <= self.optimize <= 2): raise ValueError - except ValueError: - raise DistutilsOptionError("--optimize must be 0, 1, or 2") - - if self.delete_conflicting and self.ignore_conflicts_at_my_risk: - raise DistutilsOptionError( - "Can't use both --delete-conflicting and " - "--ignore-conflicts-at-my-risk at the same time" - ) - if self.editable and not self.build_directory: - raise DistutilsArgError( - "Must specify a build directory (-b) when using --editable" - ) - if not self.args: - raise DistutilsArgError( - "No urls, filenames, or requirements specified (see --help)") - - self.outputs = [] - - - def _expand_attrs(self, attrs): - for attr in attrs: - val = getattr(self, attr) - if val is not None: - if os.name == 'posix' or os.name == 'nt': - val = os.path.expanduser(val) - val = subst_vars(val, self.config_vars) - setattr(self, attr, val) - - def expand_basedirs(self): - """Calls `os.path.expanduser` on install_base, install_platbase and - root.""" - self._expand_attrs(['install_base', 'install_platbase', 'root']) - - def expand_dirs(self): - """Calls `os.path.expanduser` on install dirs.""" - self._expand_attrs(['install_purelib', 'install_platlib', - 'install_lib', 'install_headers', - 'install_scripts', 'install_data',]) - - def run(self): - if self.verbose != self.distribution.verbose: - log.set_verbosity(self.verbose) - try: - for spec in self.args: - self.easy_install(spec, not self.no_deps) - if self.record: - outputs = self.outputs - if self.root: 
# strip any package prefix - root_len = len(self.root) - for counter in xrange(len(outputs)): - outputs[counter] = outputs[counter][root_len:] - from distutils import file_util - self.execute( - file_util.write_file, (self.record, outputs), - "writing list of installed files to '%s'" % - self.record - ) - self.warn_deprecated_options() - finally: - log.set_verbosity(self.distribution.verbose) - - def pseudo_tempname(self): - """Return a pseudo-tempname base in the install directory. - This code is intentionally naive; if a malicious party can write to - the target directory you're already in deep doodoo. - """ - try: - pid = os.getpid() - except: - pid = random.randint(0,sys.maxint) - return os.path.join(self.install_dir, "test-easy-install-%s" % pid) - - def warn_deprecated_options(self): - if self.delete_conflicting or self.ignore_conflicts_at_my_risk: - log.warn( - "Note: The -D, --delete-conflicting and" - " --ignore-conflicts-at-my-risk no longer have any purpose" - " and should not be used." - ) - - def check_site_dir(self): - """Verify that self.install_dir is .pth-capable dir, if needed""" - print 'install_dir', self.install_dir - instdir = normalize_path(self.install_dir) - pth_file = os.path.join(instdir,'easy-install.pth') - - # Is it a configured, PYTHONPATH, implicit, or explicit site dir? - is_site_dir = instdir in self.all_site_dirs - - if not is_site_dir: - # No? Then directly test whether it does .pth file processing - is_site_dir = self.check_pth_processing() - else: - # make sure we can write to target dir - testfile = self.pseudo_tempname()+'.write-test' - test_exists = os.path.exists(testfile) - try: - if test_exists: os.unlink(testfile) - open(testfile,'w').close() - os.unlink(testfile) - except (OSError,IOError): - self.cant_write_to_target() - - if not is_site_dir and not self.multi_version: - # Can't install non-multi to non-site dir - raise DistutilsError(self.no_default_version_msg()) - - if is_site_dir: - if self.pth_file is None: - self.pth_file = PthDistributions(pth_file, self.all_site_dirs) - else: - self.pth_file = None - - PYTHONPATH = os.environ.get('PYTHONPATH','').split(os.pathsep) - if instdir not in map(normalize_path, filter(None,PYTHONPATH)): - # only PYTHONPATH dirs need a site.py, so pretend it's there - self.sitepy_installed = True - elif self.multi_version and not os.path.exists(pth_file): - self.sitepy_installed = True # don't need site.py in this case - self.pth_file = None # and don't create a .pth file - self.install_dir = instdir - - def cant_write_to_target(self): - msg = """can't create or remove files in install directory - -The following error occurred while trying to add or remove files in the -installation directory: - - %s - -The installation directory you specified (via --install-dir, --prefix, or -the distutils default setting) was: - - %s -""" % (sys.exc_info()[1], self.install_dir,) - - if not os.path.exists(self.install_dir): - msg += """ -This directory does not currently exist. Please create it and try again, or -choose a different installation directory (using the -d or --install-dir -option). -""" - else: - msg += """ -Perhaps your account does not have write access to this directory? If the -installation directory is a system-owned directory, you may need to sign in -as the administrator or "root" account. If you do not have administrative -access to this machine, you may wish to choose a different installation -directory, preferably one that is listed in your PYTHONPATH environment -variable. 
- -For information on other options, you may wish to consult the -documentation at: - - http://packages.python.org/distribute/easy_install.html - -Please make the appropriate changes for your system and try again. -""" - raise DistutilsError(msg) - - - - - def check_pth_processing(self): - """Empirically verify whether .pth files are supported in inst. dir""" - instdir = self.install_dir - log.info("Checking .pth file support in %s", instdir) - pth_file = self.pseudo_tempname()+".pth" - ok_file = pth_file+'.ok' - ok_exists = os.path.exists(ok_file) - try: - if ok_exists: os.unlink(ok_file) - dirname = os.path.dirname(ok_file) - if not os.path.exists(dirname): - os.makedirs(dirname) - f = open(pth_file,'w') - except (OSError,IOError): - self.cant_write_to_target() - else: - try: - f.write("import os;open(%r,'w').write('OK')\n" % (ok_file,)) - f.close(); f=None - executable = sys.executable - if os.name=='nt': - dirname,basename = os.path.split(executable) - alt = os.path.join(dirname,'pythonw.exe') - if basename.lower()=='python.exe' and os.path.exists(alt): - # use pythonw.exe to avoid opening a console window - executable = alt - - from distutils.spawn import spawn - spawn([executable,'-E','-c','pass'],0) - - if os.path.exists(ok_file): - log.info( - "TEST PASSED: %s appears to support .pth files", - instdir - ) - return True - finally: - if f: f.close() - if os.path.exists(ok_file): os.unlink(ok_file) - if os.path.exists(pth_file): os.unlink(pth_file) - if not self.multi_version: - log.warn("TEST FAILED: %s does NOT support .pth files", instdir) - return False - - def install_egg_scripts(self, dist): - """Write all the scripts for `dist`, unless scripts are excluded""" - if not self.exclude_scripts and dist.metadata_isdir('scripts'): - for script_name in dist.metadata_listdir('scripts'): - self.install_script( - dist, script_name, - dist.get_metadata('scripts/'+script_name) - ) - self.install_wrapper_scripts(dist) - - def add_output(self, path): - if os.path.isdir(path): - for base, dirs, files in os.walk(path): - for filename in files: - self.outputs.append(os.path.join(base,filename)) - else: - self.outputs.append(path) - - def not_editable(self, spec): - if self.editable: - raise DistutilsArgError( - "Invalid argument %r: you can't use filenames or URLs " - "with --editable (except via the --find-links option)." 
- % (spec,) - ) - - def check_editable(self,spec): - if not self.editable: - return - - if os.path.exists(os.path.join(self.build_directory, spec.key)): - raise DistutilsArgError( - "%r already exists in %s; can't do a checkout there" % - (spec.key, self.build_directory) - ) - - - - - - - def easy_install(self, spec, deps=False): - tmpdir = tempfile.mkdtemp(prefix="easy_install-") - download = None - if not self.editable: self.install_site_py() - - try: - if not isinstance(spec,Requirement): - if URL_SCHEME(spec): - # It's a url, download it to tmpdir and process - self.not_editable(spec) - download = self.package_index.download(spec, tmpdir) - return self.install_item(None, download, tmpdir, deps, True) - - elif os.path.exists(spec): - # Existing file or directory, just process it directly - self.not_editable(spec) - return self.install_item(None, spec, tmpdir, deps, True) - else: - spec = parse_requirement_arg(spec) - - self.check_editable(spec) - dist = self.package_index.fetch_distribution( - spec, tmpdir, self.upgrade, self.editable, not self.always_copy, - self.local_index - ) - - if dist is None: - msg = "Could not find suitable distribution for %r" % spec - if self.always_copy: - msg+=" (--always-copy skips system and development eggs)" - raise DistutilsError(msg) - elif dist.precedence==DEVELOP_DIST: - # .egg-info dists don't need installing, just process deps - self.process_distribution(spec, dist, deps, "Using") - return dist - else: - return self.install_item(spec, dist.location, tmpdir, deps) - - finally: - if os.path.exists(tmpdir): - rmtree(tmpdir) - - def install_item(self, spec, download, tmpdir, deps, install_needed=False): - - # Installation is also needed if file in tmpdir or is not an egg - install_needed = install_needed or self.always_copy - install_needed = install_needed or os.path.dirname(download) == tmpdir - install_needed = install_needed or not download.endswith('.egg') - install_needed = install_needed or ( - self.always_copy_from is not None and - os.path.dirname(normalize_path(download)) == - normalize_path(self.always_copy_from) - ) - - if spec and not install_needed: - # at this point, we know it's a local .egg, we just don't know if - # it's already installed. - for dist in self.local_index[spec.project_name]: - if dist.location==download: - break - else: - install_needed = True # it's not in the local index - - log.info("Processing %s", os.path.basename(download)) - - if install_needed: - dists = self.install_eggs(spec, download, tmpdir) - for dist in dists: - self.process_distribution(spec, dist, deps) - else: - dists = [self.check_conflicts(self.egg_distribution(download))] - self.process_distribution(spec, dists[0], deps, "Using") - - if spec is not None: - for dist in dists: - if dist in spec: - return dist - - - - def select_scheme(self, name): - """Sets the install directories by applying the install schemes.""" - # it's the caller's problem if they supply a bad name! 
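The select_scheme() body that follows indexes distutils' INSTALL_SCHEMES table: a dict of scheme names ('unix_prefix', 'unix_user', 'nt', ...), each mapping the SCHEME_KEYS ('purelib', 'platlib', 'headers', 'scripts', 'data') to $-substituted path templates. For orientation, the 'unix_user' scheme roughly as shipped in CPython 2.7's distutils:

    INSTALL_SCHEMES['unix_user'] = {
        'purelib': '$usersite',
        'platlib': '$usersite',
        'headers': '$userbase/include/python$py_version_short/$dist_name',
        'scripts': '$userbase/bin',
        'data'   : '$userbase',
    }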
-        scheme = INSTALL_SCHEMES[name]
-        for key in SCHEME_KEYS:
-            attrname = 'install_' + key
-            if getattr(self, attrname) is None:
-                setattr(self, attrname, scheme[key])
-
-
-
-
-    def process_distribution(self, requirement, dist, deps=True, *info):
-        self.update_pth(dist)
-        self.package_index.add(dist)
-        self.local_index.add(dist)
-        if not self.editable:
-            self.install_egg_scripts(dist)
-        self.installed_projects[dist.key] = dist
-        log.info(self.installation_report(requirement, dist, *info))
-        if (dist.has_metadata('dependency_links.txt') and
-            not self.no_find_links):
-            self.package_index.add_find_links(
-                dist.get_metadata_lines('dependency_links.txt')
-            )
-        if not deps and not self.always_copy:
-            return
-        elif requirement is not None and dist.key != requirement.key:
-            log.warn("Skipping dependencies for %s", dist)
-            return # XXX this is not the distribution we were looking for
-        elif requirement is None or dist not in requirement:
-            # if we wound up with a different version, resolve what we've got
-            distreq = dist.as_requirement()
-            requirement = requirement or distreq
-            requirement = Requirement(
-                distreq.project_name, distreq.specs, requirement.extras
-            )
-        log.info("Processing dependencies for %s", requirement)
-        try:
-            distros = WorkingSet([]).resolve(
-                [requirement], self.local_index, self.easy_install
-            )
-        except DistributionNotFound, e:
-            raise DistutilsError(
-                "Could not find required distribution %s" % e.args
-            )
-        except VersionConflict, e:
-            raise DistutilsError(
-                "Installed distribution %s conflicts with requirement %s"
-                % e.args
-            )
-        if self.always_copy or self.always_copy_from:
-            # Force all the relevant distros to be copied or activated
-            for dist in distros:
-                if dist.key not in self.installed_projects:
-                    self.easy_install(dist.as_requirement())
-        log.info("Finished processing dependencies for %s", requirement)
-
-    def should_unzip(self, dist):
-        if self.zip_ok is not None:
-            return not self.zip_ok
-        if dist.has_metadata('not-zip-safe'):
-            return True
-        if not dist.has_metadata('zip-safe'):
-            return True
-        return False
-
-    def maybe_move(self, spec, dist_filename, setup_base):
-        dst = os.path.join(self.build_directory, spec.key)
-        if os.path.exists(dst):
-            log.warn(
-                "%r already exists in %s; build directory %s will not be kept",
-                spec.key, self.build_directory, setup_base
-            )
-            return setup_base
-        if os.path.isdir(dist_filename):
-            setup_base = dist_filename
-        else:
-            if os.path.dirname(dist_filename)==setup_base:
-                os.unlink(dist_filename)   # get it out of the tmp dir
-            contents = os.listdir(setup_base)
-            if len(contents)==1:
-                dist_filename = os.path.join(setup_base,contents[0])
-                if os.path.isdir(dist_filename):
-                    # if the only thing there is a directory, move it instead
-                    setup_base = dist_filename
-        ensure_directory(dst); shutil.move(setup_base, dst)
-        return dst
-
-    def install_wrapper_scripts(self, dist):
-        if not self.exclude_scripts:
-            for args in get_script_args(dist):
-                self.write_script(*args)
-
-
-
-    def install_script(self, dist, script_name, script_text, dev_path=None):
-        """Generate a legacy script wrapper and install it"""
-        spec = str(dist.as_requirement())
-        is_script = is_python_script(script_text, script_name)
-
-        if is_script and dev_path:
-            script_text = get_script_header(script_text) + (
-                "# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r\n"
-                "__requires__ = %(spec)r\n"
-                "from pkg_resources import require; require(%(spec)r)\n"
-                "del require\n"
-                "__file__ = %(dev_path)r\n"
-                "execfile(__file__)\n"
-            ) % locals()
-        elif is_script:
-
script_text = get_script_header(script_text) + ( - "# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r\n" - "__requires__ = %(spec)r\n" - "import pkg_resources\n" - "pkg_resources.run_script(%(spec)r, %(script_name)r)\n" - ) % locals() - self.write_script(script_name, _to_ascii(script_text), 'b') - - def write_script(self, script_name, contents, mode="t", blockers=()): - """Write an executable file to the scripts directory""" - self.delete_blockers( # clean up old .py/.pyw w/o a script - [os.path.join(self.script_dir,x) for x in blockers]) - log.info("Installing %s script to %s", script_name, self.script_dir) - target = os.path.join(self.script_dir, script_name) - self.add_output(target) - - if not self.dry_run: - ensure_directory(target) - f = open(target,"w"+mode) - f.write(contents) - f.close() - chmod(target,0755) - - - - - def install_eggs(self, spec, dist_filename, tmpdir): - # .egg dirs or files are already built, so just return them - if dist_filename.lower().endswith('.egg'): - return [self.install_egg(dist_filename, tmpdir)] - elif dist_filename.lower().endswith('.exe'): - return [self.install_exe(dist_filename, tmpdir)] - - # Anything else, try to extract and build - setup_base = tmpdir - if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'): - unpack_archive(dist_filename, tmpdir, self.unpack_progress) - elif os.path.isdir(dist_filename): - setup_base = os.path.abspath(dist_filename) - - if (setup_base.startswith(tmpdir) # something we downloaded - and self.build_directory and spec is not None - ): - setup_base = self.maybe_move(spec, dist_filename, setup_base) - - # Find the setup.py file - setup_script = os.path.join(setup_base, 'setup.py') - - if not os.path.exists(setup_script): - setups = glob(os.path.join(setup_base, '*', 'setup.py')) - if not setups: - raise DistutilsError( - "Couldn't find a setup script in %s" % os.path.abspath(dist_filename) - ) - if len(setups)>1: - raise DistutilsError( - "Multiple setup scripts in %s" % os.path.abspath(dist_filename) - ) - setup_script = setups[0] - - # Now run it, and return the result - if self.editable: - log.info(self.report_editable(spec, setup_script)) - return [] - else: - return self.build_and_install(setup_script, setup_base) - - def egg_distribution(self, egg_path): - if os.path.isdir(egg_path): - metadata = PathMetadata(egg_path,os.path.join(egg_path,'EGG-INFO')) - else: - metadata = EggMetadata(zipimport.zipimporter(egg_path)) - return Distribution.from_filename(egg_path,metadata=metadata) - - def install_egg(self, egg_path, tmpdir): - destination = os.path.join(self.install_dir,os.path.basename(egg_path)) - destination = os.path.abspath(destination) - if not self.dry_run: - ensure_directory(destination) - - dist = self.egg_distribution(egg_path) - self.check_conflicts(dist) - if not samefile(egg_path, destination): - if os.path.isdir(destination) and not os.path.islink(destination): - dir_util.remove_tree(destination, dry_run=self.dry_run) - elif os.path.exists(destination): - self.execute(os.unlink,(destination,),"Removing "+destination) - uncache_zipdir(destination) - if os.path.isdir(egg_path): - if egg_path.startswith(tmpdir): - f,m = shutil.move, "Moving" - else: - f,m = shutil.copytree, "Copying" - elif self.should_unzip(dist): - self.mkpath(destination) - f,m = self.unpack_and_compile, "Extracting" - elif egg_path.startswith(tmpdir): - f,m = shutil.move, "Moving" - else: - f,m = shutil.copy2, "Copying" - - self.execute(f, (egg_path, destination), - (m+" %s to %s") % - 
(os.path.basename(egg_path),os.path.dirname(destination))) - - self.add_output(destination) - return self.egg_distribution(destination) - - def install_exe(self, dist_filename, tmpdir): - # See if it's valid, get data - cfg = extract_wininst_cfg(dist_filename) - if cfg is None: - raise DistutilsError( - "%s is not a valid distutils Windows .exe" % dist_filename - ) - # Create a dummy distribution object until we build the real distro - dist = Distribution(None, - project_name=cfg.get('metadata','name'), - version=cfg.get('metadata','version'), platform="win32" - ) - - # Convert the .exe to an unpacked egg - egg_path = dist.location = os.path.join(tmpdir, dist.egg_name()+'.egg') - egg_tmp = egg_path+'.tmp' - egg_info = os.path.join(egg_tmp, 'EGG-INFO') - pkg_inf = os.path.join(egg_info, 'PKG-INFO') - ensure_directory(pkg_inf) # make sure EGG-INFO dir exists - dist._provider = PathMetadata(egg_tmp, egg_info) # XXX - self.exe_to_egg(dist_filename, egg_tmp) - - # Write EGG-INFO/PKG-INFO - if not os.path.exists(pkg_inf): - f = open(pkg_inf,'w') - f.write('Metadata-Version: 1.0\n') - for k,v in cfg.items('metadata'): - if k<>'target_version': - f.write('%s: %s\n' % (k.replace('_','-').title(), v)) - f.close() - script_dir = os.path.join(egg_info,'scripts') - self.delete_blockers( # delete entry-point scripts to avoid duping - [os.path.join(script_dir,args[0]) for args in get_script_args(dist)] - ) - # Build .egg file from tmpdir - bdist_egg.make_zipfile( - egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run - ) - # install the .egg - return self.install_egg(egg_path, tmpdir) - - def exe_to_egg(self, dist_filename, egg_tmp): - """Extract a bdist_wininst to the directories an egg would use""" - # Check for .pth file and set up prefix translations - prefixes = get_exe_prefixes(dist_filename) - to_compile = [] - native_libs = [] - top_level = {} - def process(src,dst): - s = src.lower() - for old,new in prefixes: - if s.startswith(old): - src = new+src[len(old):] - parts = src.split('/') - dst = os.path.join(egg_tmp, *parts) - dl = dst.lower() - if dl.endswith('.pyd') or dl.endswith('.dll'): - parts[-1] = bdist_egg.strip_module(parts[-1]) - top_level[os.path.splitext(parts[0])[0]] = 1 - native_libs.append(src) - elif dl.endswith('.py') and old!='SCRIPTS/': - top_level[os.path.splitext(parts[0])[0]] = 1 - to_compile.append(dst) - return dst - if not src.endswith('.pth'): - log.warn("WARNING: can't process %s", src) - return None - # extract, tracking .pyd/.dll->native_libs and .py -> to_compile - unpack_archive(dist_filename, egg_tmp, process) - stubs = [] - for res in native_libs: - if res.lower().endswith('.pyd'): # create stubs for .pyd's - parts = res.split('/') - resource = parts[-1] - parts[-1] = bdist_egg.strip_module(parts[-1])+'.py' - pyfile = os.path.join(egg_tmp, *parts) - to_compile.append(pyfile); stubs.append(pyfile) - bdist_egg.write_stub(resource, pyfile) - self.byte_compile(to_compile) # compile .py's - bdist_egg.write_safety_flag(os.path.join(egg_tmp,'EGG-INFO'), - bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag - - for name in 'top_level','native_libs': - if locals()[name]: - txt = os.path.join(egg_tmp, 'EGG-INFO', name+'.txt') - if not os.path.exists(txt): - f = open(txt,'w') - f.write('\n'.join(locals()[name])+'\n') - f.close() - - def check_conflicts(self, dist): - """Verify that there are no conflicting "old-style" packages""" - - return dist # XXX temporarily disable until new strategy is stable - from imp import find_module, get_suffixes - from glob 
import glob - - blockers = [] - names = dict.fromkeys(dist._get_metadata('top_level.txt')) # XXX private attr - - exts = {'.pyc':1, '.pyo':1} # get_suffixes() might leave one out - for ext,mode,typ in get_suffixes(): - exts[ext] = 1 - - for path,files in expand_paths([self.install_dir]+self.all_site_dirs): - for filename in files: - base,ext = os.path.splitext(filename) - if base in names: - if not ext: - # no extension, check for package - try: - f, filename, descr = find_module(base, [path]) - except ImportError: - continue - else: - if f: f.close() - if filename not in blockers: - blockers.append(filename) - elif ext in exts and base!='site': # XXX ugh - blockers.append(os.path.join(path,filename)) - if blockers: - self.found_conflicts(dist, blockers) - - return dist - - def found_conflicts(self, dist, blockers): - if self.delete_conflicting: - log.warn("Attempting to delete conflicting packages:") - return self.delete_blockers(blockers) - - msg = """\ -------------------------------------------------------------------------- -CONFLICT WARNING: - -The following modules or packages have the same names as modules or -packages being installed, and will be *before* the installed packages in -Python's search path. You MUST remove all of the relevant files and -directories before you will be able to use the package(s) you are -installing: - - %s - -""" % '\n '.join(blockers) - - if self.ignore_conflicts_at_my_risk: - msg += """\ -(Note: you can run EasyInstall on '%s' with the ---delete-conflicting option to attempt deletion of the above files -and/or directories.) -""" % dist.project_name - else: - msg += """\ -Note: you can attempt this installation again with EasyInstall, and use -either the --delete-conflicting (-D) option or the ---ignore-conflicts-at-my-risk option, to either delete the above files -and directories, or to ignore the conflicts, respectively. Note that if -you ignore the conflicts, the installed package(s) may not work. -""" - msg += """\ -------------------------------------------------------------------------- -""" - sys.stderr.write(msg) - sys.stderr.flush() - if not self.ignore_conflicts_at_my_risk: - raise DistutilsError("Installation aborted due to conflicts") - - def installation_report(self, req, dist, what="Installed"): - """Helpful installation message for display to package users""" - msg = "\n%(what)s %(eggloc)s%(extras)s" - if self.multi_version and not self.no_report: - msg += """ - -Because this distribution was installed --multi-version, before you can -import modules from this package in an application, you will need to -'import pkg_resources' and then use a 'require()' call similar to one of -these examples, in order to select the desired version: - - pkg_resources.require("%(name)s") # latest installed version - pkg_resources.require("%(name)s==%(version)s") # this exact version - pkg_resources.require("%(name)s>=%(version)s") # this version or higher -""" - if self.install_dir not in map(normalize_path,sys.path): - msg += """ - -Note also that the installation directory must be on sys.path at runtime for -this to work. (e.g. by being the application's script directory, by being on -PYTHONPATH, or by being added to sys.path by your code.) 
-""" - eggloc = dist.location - name = dist.project_name - version = dist.version - extras = '' # TODO: self.report_extras(req, dist) - return msg % locals() - - def report_editable(self, spec, setup_script): - dirname = os.path.dirname(setup_script) - python = sys.executable - return """\nExtracted editable version of %(spec)s to %(dirname)s - -If it uses setuptools in its setup script, you can activate it in -"development" mode by going to that directory and running:: - - %(python)s setup.py develop - -See the setuptools documentation for the "develop" command for more info. -""" % locals() - - def run_setup(self, setup_script, setup_base, args): - sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg) - sys.modules.setdefault('distutils.command.egg_info', egg_info) - - args = list(args) - if self.verbose>2: - v = 'v' * (self.verbose - 1) - args.insert(0,'-'+v) - elif self.verbose<2: - args.insert(0,'-q') - if self.dry_run: - args.insert(0,'-n') - log.info( - "Running %s %s", setup_script[len(setup_base)+1:], ' '.join(args) - ) - try: - run_setup(setup_script, args) - except SystemExit, v: - raise DistutilsError("Setup script exited with %s" % (v.args[0],)) - - def build_and_install(self, setup_script, setup_base): - args = ['bdist_egg', '--dist-dir'] - dist_dir = tempfile.mkdtemp( - prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script) - ) - try: - args.append(dist_dir) - self.run_setup(setup_script, setup_base, args) - all_eggs = Environment([dist_dir]) - eggs = [] - for key in all_eggs: - for dist in all_eggs[key]: - eggs.append(self.install_egg(dist.location, setup_base)) - if not eggs and not self.dry_run: - log.warn("No eggs found in %s (setup script problem?)", - dist_dir) - return eggs - finally: - rmtree(dist_dir) - log.set_verbosity(self.verbose) # restore our log verbosity - - def update_pth(self,dist): - if self.pth_file is None: - return - - for d in self.pth_file[dist.key]: # drop old entries - if self.multi_version or d.location != dist.location: - log.info("Removing %s from easy-install.pth file", d) - self.pth_file.remove(d) - if d.location in self.shadow_path: - self.shadow_path.remove(d.location) - - if not self.multi_version: - if dist.location in self.pth_file.paths: - log.info( - "%s is already the active version in easy-install.pth", - dist - ) - else: - log.info("Adding %s to easy-install.pth file", dist) - self.pth_file.add(dist) # add new entry - if dist.location not in self.shadow_path: - self.shadow_path.append(dist.location) - - if not self.dry_run: - - self.pth_file.save() - if dist.key=='distribute': - # Ensure that setuptools itself never becomes unavailable! - # XXX should this check for latest version? 
- filename = os.path.join(self.install_dir,'setuptools.pth') - if os.path.islink(filename): os.unlink(filename) - f = open(filename, 'wt') - f.write(self.pth_file.make_relative(dist.location)+'\n') - f.close() - - def unpack_progress(self, src, dst): - # Progress filter for unpacking - log.debug("Unpacking %s to %s", src, dst) - return dst # only unpack-and-compile skips files for dry run - - def unpack_and_compile(self, egg_path, destination): - to_compile = []; to_chmod = [] - - def pf(src,dst): - if dst.endswith('.py') and not src.startswith('EGG-INFO/'): - to_compile.append(dst) - to_chmod.append(dst) - elif dst.endswith('.dll') or dst.endswith('.so'): - to_chmod.append(dst) - self.unpack_progress(src,dst) - return not self.dry_run and dst or None - - unpack_archive(egg_path, destination, pf) - self.byte_compile(to_compile) - if not self.dry_run: - for f in to_chmod: - mode = ((os.stat(f)[stat.ST_MODE]) | 0555) & 07755 - chmod(f, mode) - - def byte_compile(self, to_compile): - if _dont_write_bytecode: - self.warn('byte-compiling is disabled, skipping.') - return - - from distutils.util import byte_compile - try: - # try to make the byte compile messages quieter - log.set_verbosity(self.verbose - 1) - - byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run) - if self.optimize: - byte_compile( - to_compile, optimize=self.optimize, force=1, - dry_run=self.dry_run - ) - finally: - log.set_verbosity(self.verbose) # restore original verbosity - - - - - - - - - def no_default_version_msg(self): - return """bad install directory or PYTHONPATH - -You are attempting to install a package to a directory that is not -on PYTHONPATH and which Python does not read ".pth" files from. The -installation directory you specified (via --install-dir, --prefix, or -the distutils default setting) was: - - %s - -and your PYTHONPATH environment variable currently contains: - - %r - -Here are some of your options for correcting the problem: - -* You can choose a different installation directory, i.e., one that is - on PYTHONPATH or supports .pth files - -* You can add the installation directory to the PYTHONPATH environment - variable. (It must then also be on PYTHONPATH whenever you run - Python and want to use the package(s) you are installing.) - -* You can set up the installation directory to support ".pth" files by - using one of the approaches described here: - - http://packages.python.org/distribute/easy_install.html#custom-installation-locations - -Please make the appropriate changes for your system and try again.""" % ( - self.install_dir, os.environ.get('PYTHONPATH','') - ) - - - - - - - - - - - def install_site_py(self): - """Make sure there's a site.py in the target dir, if needed""" - - if self.sitepy_installed: - return # already did it, or don't need to - - sitepy = os.path.join(self.install_dir, "site.py") - source = resource_string(Requirement.parse("distribute"), "site.py") - current = "" - - if os.path.exists(sitepy): - log.debug("Checking existing site.py in %s", self.install_dir) - f = open(sitepy,'rb') - current = f.read() - # we want str, not bytes - if sys.version_info >= (3,): - current = current.decode() - - f.close() - if not current.startswith('def __boot():'): - raise DistutilsError( - "%s is not a setuptools-generated site.py; please" - " remove it." 
% sitepy - ) - - if current != source: - log.info("Creating %s", sitepy) - if not self.dry_run: - ensure_directory(sitepy) - f = open(sitepy,'wb') - f.write(source) - f.close() - self.byte_compile([sitepy]) - - self.sitepy_installed = True - - - - - def create_home_path(self): - """Create directories under ~.""" - if not self.user: - return - home = convert_path(os.path.expanduser("~")) - for name, path in self.config_vars.iteritems(): - if path.startswith(home) and not os.path.isdir(path): - self.debug_print("os.makedirs('%s', 0700)" % path) - os.makedirs(path, 0700) - - - - - - - - INSTALL_SCHEMES = dict( - posix = dict( - install_dir = '$base/lib/python$py_version_short/site-packages', - script_dir = '$base/bin', - ), - ) - - DEFAULT_SCHEME = dict( - install_dir = '$base/Lib/site-packages', - script_dir = '$base/Scripts', - ) - - def _expand(self, *attrs): - config_vars = self.get_finalized_command('install').config_vars - - if self.prefix: - # Set default install_dir/scripts from --prefix - config_vars = config_vars.copy() - config_vars['base'] = self.prefix - scheme = self.INSTALL_SCHEMES.get(os.name,self.DEFAULT_SCHEME) - for attr,val in scheme.items(): - if getattr(self,attr,None) is None: - setattr(self,attr,val) - - from distutils.util import subst_vars - for attr in attrs: - val = getattr(self, attr) - if val is not None: - val = subst_vars(val, config_vars) - if os.name == 'posix': - val = os.path.expanduser(val) - setattr(self, attr, val) - - - - - - - - - -def get_site_dirs(): - # return a list of 'site' dirs - sitedirs = filter(None,os.environ.get('PYTHONPATH','').split(os.pathsep)) - prefixes = [sys.prefix] - if sys.exec_prefix != sys.prefix: - prefixes.append(sys.exec_prefix) - for prefix in prefixes: - if prefix: - if sys.platform in ('os2emx', 'riscos'): - sitedirs.append(os.path.join(prefix, "Lib", "site-packages")) - elif os.sep == '/': - sitedirs.extend([os.path.join(prefix, - "lib", - "python" + sys.version[:3], - "site-packages"), - os.path.join(prefix, "lib", "site-python")]) - else: - sitedirs.extend( - [prefix, os.path.join(prefix, "lib", "site-packages")] - ) - if sys.platform == 'darwin': - # for framework builds *only* we add the standard Apple - # locations. 
Currently only per-user, but /Library and - # /Network/Library could be added too - if 'Python.framework' in prefix: - home = os.environ.get('HOME') - if home: - sitedirs.append( - os.path.join(home, - 'Library', - 'Python', - sys.version[:3], - 'site-packages')) - for plat_specific in (0,1): - site_lib = get_python_lib(plat_specific) - if site_lib not in sitedirs: sitedirs.append(site_lib) - - if HAS_USER_SITE: - sitedirs.append(site.USER_SITE) - - sitedirs = map(normalize_path, sitedirs) - - return sitedirs - - -def expand_paths(inputs): - """Yield sys.path directories that might contain "old-style" packages""" - - seen = {} - - for dirname in inputs: - dirname = normalize_path(dirname) - if dirname in seen: - continue - - seen[dirname] = 1 - if not os.path.isdir(dirname): - continue - - files = os.listdir(dirname) - yield dirname, files - - for name in files: - if not name.endswith('.pth'): - # We only care about the .pth files - continue - if name in ('easy-install.pth','setuptools.pth'): - # Ignore .pth files that we control - continue - - # Read the .pth file - f = open(os.path.join(dirname,name)) - lines = list(yield_lines(f)) - f.close() - - # Yield existing non-dupe, non-import directory lines from it - for line in lines: - if not line.startswith("import"): - line = normalize_path(line.rstrip()) - if line not in seen: - seen[line] = 1 - if not os.path.isdir(line): - continue - yield line, os.listdir(line) - - -def extract_wininst_cfg(dist_filename): - """Extract configuration data from a bdist_wininst .exe - - Returns a ConfigParser.RawConfigParser, or None - """ - f = open(dist_filename,'rb') - try: - endrec = zipfile._EndRecData(f) - if endrec is None: - return None - - prepended = (endrec[9] - endrec[5]) - endrec[6] - if prepended < 12: # no wininst data here - return None - f.seek(prepended-12) - - import struct, StringIO, ConfigParser - tag, cfglen, bmlen = struct.unpack("<iii",f.read(12)) - if tag not in (0x1234567A, 0x1234567B): - return None # not a valid tag - - f.seek(prepended-(12+cfglen)) - cfg = ConfigParser.RawConfigParser({'version':'','target_version':''}) - try: - cfg.readfp(StringIO.StringIO(f.read(cfglen).split(chr(0),1)[0])) - except ConfigParser.Error: - return None - if not cfg.has_section('metadata') or not cfg.has_section('Setup'): - return None - return cfg - - finally: - f.close() - - - - - - - - -def get_exe_prefixes(exe_filename): - """Get exe->egg path translations for a given .exe file""" - - prefixes = [ - ('PURELIB/', ''), ('PLATLIB/pywin32_system32', ''), - ('PLATLIB/', ''), - ('SCRIPTS/', 'EGG-INFO/scripts/') - ] - z = zipfile.ZipFile(exe_filename) - try: - for info in z.infolist(): - name = info.filename - parts = name.split('/') - if len(parts)==3 and parts[2]=='PKG-INFO': - if parts[1].endswith('.egg-info'): - prefixes.insert(0,('/'.join(parts[:2]), 'EGG-INFO/')) - break - if len(parts)<>2 or not name.endswith('.pth'): - continue - if name.endswith('-nspkg.pth'): - continue - if parts[0].upper() in ('PURELIB','PLATLIB'): - for pth in yield_lines(z.read(name)): - pth = pth.strip().replace('\\','/') - if not pth.startswith('import'): - prefixes.append((('%s/%s/' % (parts[0],pth)), '')) - finally: - z.close() - prefixes = [(x.lower(),y) for x, y in prefixes] - prefixes.sort(); prefixes.reverse() - return prefixes - - -def parse_requirement_arg(spec): - try: - return Requirement.parse(spec) - except ValueError: - raise DistutilsError( - "Not a URL, existing file, or requirement spec: %r" % (spec,) - ) - -class PthDistributions(Environment): - """A 
.pth file with Distribution paths in it""" - - dirty = False - - def __init__(self, filename, sitedirs=()): - self.filename = filename; self.sitedirs=map(normalize_path, sitedirs) - self.basedir = normalize_path(os.path.dirname(self.filename)) - self._load(); Environment.__init__(self, [], None, None) - for path in yield_lines(self.paths): - map(self.add, find_distributions(path, True)) - - def _load(self): - self.paths = [] - saw_import = False - seen = dict.fromkeys(self.sitedirs) - if os.path.isfile(self.filename): - f = open(self.filename,'rt') - for line in f: - if line.startswith('import'): - saw_import = True - continue - path = line.rstrip() - self.paths.append(path) - if not path.strip() or path.strip().startswith('#'): - continue - # skip non-existent paths, in case somebody deleted a package - # manually, and duplicate paths as well - path = self.paths[-1] = normalize_path( - os.path.join(self.basedir,path) - ) - if not os.path.exists(path) or path in seen: - self.paths.pop() # skip it - self.dirty = True # we cleaned up, so we're dirty now :) - continue - seen[path] = 1 - f.close() - - if self.paths and not saw_import: - self.dirty = True # ensure anything we touch has import wrappers - while self.paths and not self.paths[-1].strip(): - self.paths.pop() - - def save(self): - """Write changed .pth file back to disk""" - if not self.dirty: - return - - data = '\n'.join(map(self.make_relative,self.paths)) - if data: - log.debug("Saving %s", self.filename) - data = ( - "import sys; sys.__plen = len(sys.path)\n" - "%s\n" - "import sys; new=sys.path[sys.__plen:];" - " del sys.path[sys.__plen:];" - " p=getattr(sys,'__egginsert',0); sys.path[p:p]=new;" - " sys.__egginsert = p+len(new)\n" - ) % data - - if os.path.islink(self.filename): - os.unlink(self.filename) - f = open(self.filename,'wt') - f.write(data); f.close() - - elif os.path.exists(self.filename): - log.debug("Deleting empty %s", self.filename) - os.unlink(self.filename) - - self.dirty = False - - def add(self,dist): - """Add `dist` to the distribution map""" - if (dist.location not in self.paths and ( - dist.location not in self.sitedirs or - dist.location == os.getcwd() #account for '.' being in PYTHONPATH - )): - self.paths.append(dist.location) - self.dirty = True - Environment.add(self,dist) - - def remove(self,dist): - """Remove `dist` from the distribution map""" - while dist.location in self.paths: - self.paths.remove(dist.location); self.dirty = True - Environment.remove(self,dist) - - - def make_relative(self,path): - npath, last = os.path.split(normalize_path(path)) - baselen = len(self.basedir) - parts = [last] - sep = os.altsep=='/' and '/' or os.sep - while len(npath)>=baselen: - if npath==self.basedir: - parts.append(os.curdir) - parts.reverse() - return sep.join(parts) - npath, last = os.path.split(npath) - parts.append(last) - else: - return path - -def get_script_header(script_text, executable=sys_executable, wininst=False): - """Create a #! 
line, getting options (if any) from script_text""" - from distutils.command.build_scripts import first_line_re - first = (script_text+'\n').splitlines()[0] - match = first_line_re.match(first) - options = '' - if match: - options = match.group(1) or '' - if options: options = ' '+options - if wininst: - executable = "python.exe" - else: - executable = nt_quote_arg(executable) - hdr = "#!%(executable)s%(options)s\n" % locals() - if not isascii(hdr): - # Non-ascii path to sys.executable, use -x to prevent warnings - if options: - if options.strip().startswith('-'): - options = ' -x'+options.strip()[1:] - # else: punt, we can't do it, let the warning happen anyway - else: - options = ' -x' - executable = fix_jython_executable(executable, options) - hdr = "#!%(executable)s%(options)s\n" % locals() - return hdr - -def auto_chmod(func, arg, exc): - if func is os.remove and os.name=='nt': - chmod(arg, stat.S_IWRITE) - return func(arg) - exc = sys.exc_info() - raise exc[0], (exc[1][0], exc[1][1] + (" %s %s" % (func,arg))) - -def uncache_zipdir(path): - """Ensure that the importer caches dont have stale info for `path`""" - from zipimport import _zip_directory_cache as zdc - _uncache(path, zdc) - _uncache(path, sys.path_importer_cache) - -def _uncache(path, cache): - if path in cache: - del cache[path] - else: - path = normalize_path(path) - for p in cache: - if normalize_path(p)==path: - del cache[p] - return - -def is_python(text, filename='<string>'): - "Is this string a valid Python script?" - try: - compile(text, filename, 'exec') - except (SyntaxError, TypeError): - return False - else: - return True - -def is_sh(executable): - """Determine if the specified executable is a .sh (contains a #! line)""" - try: - fp = open(executable) - magic = fp.read(2) - fp.close() - except (OSError,IOError): return executable - return magic == '#!' - -def nt_quote_arg(arg): - """Quote a command line argument according to Windows parsing rules""" - - result = [] - needquote = False - nb = 0 - - needquote = (" " in arg) or ("\t" in arg) - if needquote: - result.append('"') - - for c in arg: - if c == '\\': - nb += 1 - elif c == '"': - # double preceding backslashes, then add a \" - result.append('\\' * (nb*2) + '\\"') - nb = 0 - else: - if nb: - result.append('\\' * nb) - nb = 0 - result.append(c) - - if nb: - result.append('\\' * nb) - - if needquote: - result.append('\\' * nb) # double the trailing backslashes - result.append('"') - - return ''.join(result) - - - - - - - - - -def is_python_script(script_text, filename): - """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc. - """ - if filename.endswith('.py') or filename.endswith('.pyw'): - return True # extension says it's Python - if is_python(script_text, filename): - return True # it's syntactically valid Python - if script_text.startswith('#!'): - # It begins with a '#!' 
line, so check if 'python' is in it somewhere - return 'python' in script_text.splitlines()[0].lower() - - return False # Not any Python I can recognize - -try: - from os import chmod as _chmod -except ImportError: - # Jython compatibility - def _chmod(*args): pass - -def chmod(path, mode): - log.debug("changing mode of %s to %o", path, mode) - try: - _chmod(path, mode) - except os.error, e: - log.debug("chmod failed: %s", e) - -def fix_jython_executable(executable, options): - if sys.platform.startswith('java') and is_sh(executable): - # Workaround Jython's sys.executable being a .sh (an invalid - # shebang line interpreter) - if options: - # Can't apply the workaround, leave it broken - log.warn("WARNING: Unable to adapt shebang line for Jython," - " the following script is NOT executable\n" - " see http://bugs.jython.org/issue1112 for" - " more information.") - else: - return '/usr/bin/env %s' % executable - return executable - - -def get_script_args(dist, executable=sys_executable, wininst=False): - """Yield write_script() argument tuples for a distribution's entrypoints""" - spec = str(dist.as_requirement()) - header = get_script_header("", executable, wininst) - for group in 'console_scripts', 'gui_scripts': - for name, ep in dist.get_entry_map(group).items(): - script_text = ( - "# EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r\n" - "__requires__ = %(spec)r\n" - "import sys\n" - "from pkg_resources import load_entry_point\n" - "\n" - "if __name__ == '__main__':" - "\n" - " sys.exit(\n" - " load_entry_point(%(spec)r, %(group)r, %(name)r)()\n" - " )\n" - ) % locals() - if sys.platform=='win32' or wininst: - # On Windows/wininst, add a .py extension and an .exe launcher - if group=='gui_scripts': - ext, launcher = '-script.pyw', 'gui.exe' - old = ['.pyw'] - new_header = re.sub('(?i)python.exe','pythonw.exe',header) - else: - ext, launcher = '-script.py', 'cli.exe' - old = ['.py','.pyc','.pyo'] - new_header = re.sub('(?i)pythonw.exe','python.exe',header) - - if os.path.exists(new_header[2:-1]) or sys.platform!='win32': - hdr = new_header - else: - hdr = header - yield (name+ext, hdr+script_text, 't', [name+x for x in old]) - yield ( - name+'.exe', resource_string('setuptools', launcher), - 'b' # write in binary mode - ) - else: - # On other platforms, we assume the right thing to do is to - # just write the stub with no extension. - yield (name, header+script_text) - -def rmtree(path, ignore_errors=False, onerror=auto_chmod): - """Recursively delete a directory tree. - - This code is taken from the Python 2.4 version of 'shutil', because - the 2.3 version doesn't really work right. 
- """ - if ignore_errors: - def onerror(*args): - pass - elif onerror is None: - def onerror(*args): - raise - names = [] - try: - names = os.listdir(path) - except os.error, err: - onerror(os.listdir, path, sys.exc_info()) - for name in names: - fullname = os.path.join(path, name) - try: - mode = os.lstat(fullname).st_mode - except os.error: - mode = 0 - if stat.S_ISDIR(mode): - rmtree(fullname, ignore_errors, onerror) - else: - try: - os.remove(fullname) - except os.error, err: - onerror(os.remove, fullname, sys.exc_info()) - try: - os.rmdir(path) - except os.error: - onerror(os.rmdir, path, sys.exc_info()) - -def bootstrap(): - # This function is called when setuptools*.egg is run using /bin/sh - import setuptools; argv0 = os.path.dirname(setuptools.__path__[0]) - sys.argv[0] = argv0; sys.argv.append(argv0); main() - -def main(argv=None, **kw): - from setuptools import setup - from setuptools.dist import Distribution - import distutils.core - - USAGE = """\ -usage: %(script)s [options] requirement_or_url ... - or: %(script)s --help -""" - - def gen_usage (script_name): - script = os.path.basename(script_name) - return USAGE % vars() - - def with_ei_usage(f): - old_gen_usage = distutils.core.gen_usage - try: - distutils.core.gen_usage = gen_usage - return f() - finally: - distutils.core.gen_usage = old_gen_usage - - class DistributionWithoutHelpCommands(Distribution): - common_usage = "" - - def _show_help(self,*args,**kw): - with_ei_usage(lambda: Distribution._show_help(self,*args,**kw)) - - def find_config_files(self): - files = Distribution.find_config_files(self) - if 'setup.cfg' in files: - files.remove('setup.cfg') - return files - - if argv is None: - argv = sys.argv[1:] - - with_ei_usage(lambda: - setup( - script_args = ['-q','easy_install', '-v']+argv, - script_name = sys.argv[0] or 'easy_install', - distclass=DistributionWithoutHelpCommands, **kw - ) - ) - - - - diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/egg_info.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/egg_info.py deleted file mode 100755 index 46cdf4e0..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/egg_info.py +++ /dev/null @@ -1,457 +0,0 @@ -"""setuptools.command.egg_info - -Create a distribution's .egg-info directory and contents""" - -# This module should be kept compatible with Python 2.3 -import os, re, sys -from setuptools import Command -from distutils.errors import * -from distutils import log -from setuptools.command.sdist import sdist -from distutils.util import convert_path -from distutils.filelist import FileList -from pkg_resources import parse_requirements, safe_name, parse_version, \ - safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename -from sdist import walk_revctrl - -class egg_info(Command): - description = "create a distribution's .egg-info directory" - - user_options = [ - ('egg-base=', 'e', "directory containing .egg-info directories" - " (default: top of the source tree)"), - ('tag-svn-revision', 'r', - "Add subversion revision ID to version number"), - ('tag-date', 'd', "Add date stamp (e.g. 
20050528) to version number"), - ('tag-build=', 'b', "Specify explicit tag to add to version number"), - ('no-svn-revision', 'R', - "Don't add subversion revision ID [default]"), - ('no-date', 'D', "Don't include date stamp [default]"), - ] - - boolean_options = ['tag-date', 'tag-svn-revision'] - negative_opt = {'no-svn-revision': 'tag-svn-revision', - 'no-date': 'tag-date'} - - - - - - - - def initialize_options(self): - self.egg_name = None - self.egg_version = None - self.egg_base = None - self.egg_info = None - self.tag_build = None - self.tag_svn_revision = 0 - self.tag_date = 0 - self.broken_egg_info = False - self.vtags = None - - def save_version_info(self, filename): - from setopt import edit_config - edit_config( - filename, - {'egg_info': - {'tag_svn_revision':0, 'tag_date': 0, 'tag_build': self.tags()} - } - ) - - - - - - - - - - - - - - - - - - - - - - - def finalize_options (self): - self.egg_name = safe_name(self.distribution.get_name()) - self.vtags = self.tags() - self.egg_version = self.tagged_version() - - try: - list( - parse_requirements('%s==%s' % (self.egg_name,self.egg_version)) - ) - except ValueError: - raise DistutilsOptionError( - "Invalid distribution name or version syntax: %s-%s" % - (self.egg_name,self.egg_version) - ) - - if self.egg_base is None: - dirs = self.distribution.package_dir - self.egg_base = (dirs or {}).get('',os.curdir) - - self.ensure_dirname('egg_base') - self.egg_info = to_filename(self.egg_name)+'.egg-info' - if self.egg_base != os.curdir: - self.egg_info = os.path.join(self.egg_base, self.egg_info) - if '-' in self.egg_name: self.check_broken_egg_info() - - # Set package version for the benefit of dumber commands - # (e.g. sdist, bdist_wininst, etc.) - # - self.distribution.metadata.version = self.egg_version - - # If we bootstrapped around the lack of a PKG-INFO, as might be the - # case in a fresh checkout, make sure that any special tags get added - # to the version info - # - pd = self.distribution._patched_dist - if pd is not None and pd.key==self.egg_name.lower(): - pd._version = self.egg_version - pd._parsed_version = parse_version(self.egg_version) - self.distribution._patched_dist = None - - - def write_or_delete_file(self, what, filename, data, force=False): - """Write `data` to `filename` or delete if empty - - If `data` is non-empty, this routine is the same as ``write_file()``. - If `data` is empty but not ``None``, this is the same as calling - ``delete_file(filename)`. If `data` is ``None``, then this is a no-op - unless `filename` exists, in which case a warning is issued about the - orphaned file (if `force` is false), or deleted (if `force` is true). - """ - if data: - self.write_file(what, filename, data) - elif os.path.exists(filename): - if data is None and not force: - log.warn( - "%s not set in setup(), but %s exists", what, filename - ) - return - else: - self.delete_file(filename) - - def write_file(self, what, filename, data): - """Write `data` to `filename` (if not a dry run) after announcing it - - `what` is used in a log message to identify what is being written - to the file. 
- """ - log.info("writing %s to %s", what, filename) - if sys.version_info >= (3,): - data = data.encode("utf-8") - if not self.dry_run: - f = open(filename, 'wb') - f.write(data) - f.close() - - def delete_file(self, filename): - """Delete `filename` (if not a dry run) after announcing it""" - log.info("deleting %s", filename) - if not self.dry_run: - os.unlink(filename) - - def tagged_version(self): - return safe_version(self.distribution.get_version() + self.vtags) - - def run(self): - self.mkpath(self.egg_info) - installer = self.distribution.fetch_build_egg - for ep in iter_entry_points('egg_info.writers'): - writer = ep.load(installer=installer) - writer(self, ep.name, os.path.join(self.egg_info,ep.name)) - - # Get rid of native_libs.txt if it was put there by older bdist_egg - nl = os.path.join(self.egg_info, "native_libs.txt") - if os.path.exists(nl): - self.delete_file(nl) - - self.find_sources() - - def tags(self): - version = '' - if self.tag_build: - version+=self.tag_build - if self.tag_svn_revision and ( - os.path.exists('.svn') or os.path.exists('PKG-INFO') - ): version += '-r%s' % self.get_svn_revision() - if self.tag_date: - import time; version += time.strftime("-%Y%m%d") - return version - - - - - - - - - - - - - - - - - - def get_svn_revision(self): - revision = 0 - urlre = re.compile('url="([^"]+)"') - revre = re.compile('committed-rev="(\d+)"') - - for base,dirs,files in os.walk(os.curdir): - if '.svn' not in dirs: - dirs[:] = [] - continue # no sense walking uncontrolled subdirs - dirs.remove('.svn') - f = open(os.path.join(base,'.svn','entries')) - data = f.read() - f.close() - - if data.startswith('10') or data.startswith('9') or data.startswith('8'): - data = map(str.splitlines,data.split('\n\x0c\n')) - del data[0][0] # get rid of the '8' or '9' or '10' - dirurl = data[0][3] - localrev = max([int(d[9]) for d in data if len(d)>9 and d[9]]+[0]) - elif data.startswith('<?xml'): - dirurl = urlre.search(data).group(1) # get repository URL - localrev = max([int(m.group(1)) for m in revre.finditer(data)]+[0]) - else: - log.warn("unrecognized .svn/entries format; skipping %s", base) - dirs[:] = [] - continue - if base==os.curdir: - base_url = dirurl+'/' # save the root url - elif not dirurl.startswith(base_url): - dirs[:] = [] - continue # not part of the same svn tree, skip it - revision = max(revision, localrev) - - return str(revision or get_pkg_info_revision()) - - - - - - - - def find_sources(self): - """Generate SOURCES.txt manifest file""" - manifest_filename = os.path.join(self.egg_info,"SOURCES.txt") - mm = manifest_maker(self.distribution) - mm.manifest = manifest_filename - mm.run() - self.filelist = mm.filelist - - def check_broken_egg_info(self): - bei = self.egg_name+'.egg-info' - if self.egg_base != os.curdir: - bei = os.path.join(self.egg_base, bei) - if os.path.exists(bei): - log.warn( - "-"*78+'\n' - "Note: Your current .egg-info directory has a '-' in its name;" - '\nthis will not work correctly with "setup.py develop".\n\n' - 'Please rename %s to %s to correct this problem.\n'+'-'*78, - bei, self.egg_info - ) - self.broken_egg_info = self.egg_info - self.egg_info = bei # make it work for now - -class FileList(FileList): - """File list that accepts only existing, platform-independent paths""" - - def append(self, item): - if item.endswith('\r'): # Fix older sdists built on Windows - item = item[:-1] - path = convert_path(item) - if os.path.exists(path): - self.files.append(path) - - - - - - - - - -class manifest_maker(sdist): - - template = 
"MANIFEST.in" - - def initialize_options (self): - self.use_defaults = 1 - self.prune = 1 - self.manifest_only = 1 - self.force_manifest = 1 - - def finalize_options(self): - pass - - def run(self): - self.filelist = FileList() - if not os.path.exists(self.manifest): - self.write_manifest() # it must exist so it'll get in the list - self.filelist.findall() - self.add_defaults() - if os.path.exists(self.template): - self.read_template() - self.prune_file_list() - self.filelist.sort() - self.filelist.remove_duplicates() - self.write_manifest() - - def write_manifest (self): - """Write the file list in 'self.filelist' (presumably as filled in - by 'add_defaults()' and 'read_template()') to the manifest file - named by 'self.manifest'. - """ - files = self.filelist.files - if os.sep!='/': - files = [f.replace(os.sep,'/') for f in files] - self.execute(write_file, (self.manifest, files), - "writing manifest file '%s'" % self.manifest) - - def warn(self, msg): # suppress missing-file warnings from sdist - if not msg.startswith("standard file not found:"): - sdist.warn(self, msg) - - def add_defaults(self): - sdist.add_defaults(self) - self.filelist.append(self.template) - self.filelist.append(self.manifest) - rcfiles = list(walk_revctrl()) - if rcfiles: - self.filelist.extend(rcfiles) - elif os.path.exists(self.manifest): - self.read_manifest() - ei_cmd = self.get_finalized_command('egg_info') - self.filelist.include_pattern("*", prefix=ei_cmd.egg_info) - - def prune_file_list (self): - build = self.get_finalized_command('build') - base_dir = self.distribution.get_fullname() - self.filelist.exclude_pattern(None, prefix=build.build_base) - self.filelist.exclude_pattern(None, prefix=base_dir) - sep = re.escape(os.sep) - self.filelist.exclude_pattern(sep+r'(RCS|CVS|\.svn)'+sep, is_regex=1) - - -def write_file (filename, contents): - """Create a file with the specified name and write 'contents' (a - sequence of strings without line terminators) to it. - """ - contents = "\n".join(contents) - if sys.version_info >= (3,): - contents = contents.encode("utf-8") - f = open(filename, "wb") # always write POSIX-style manifest - f.write(contents) - f.close() - - - - - - - - - - - - - -def write_pkg_info(cmd, basename, filename): - log.info("writing %s", filename) - if not cmd.dry_run: - metadata = cmd.distribution.metadata - metadata.version, oldver = cmd.egg_version, metadata.version - metadata.name, oldname = cmd.egg_name, metadata.name - try: - # write unescaped data to PKG-INFO, so older pkg_resources - # can still parse it - metadata.write_pkg_info(cmd.egg_info) - finally: - metadata.name, metadata.version = oldname, oldver - - safe = getattr(cmd.distribution,'zip_safe',None) - import bdist_egg; bdist_egg.write_safety_flag(cmd.egg_info, safe) - -def warn_depends_obsolete(cmd, basename, filename): - if os.path.exists(filename): - log.warn( - "WARNING: 'depends.txt' is not used by setuptools 0.6!\n" - "Use the install_requires/extras_require setup() args instead." 
- ) - - -def write_requirements(cmd, basename, filename): - dist = cmd.distribution - data = ['\n'.join(yield_lines(dist.install_requires or ()))] - for extra,reqs in (dist.extras_require or {}).items(): - data.append('\n\n[%s]\n%s' % (extra, '\n'.join(yield_lines(reqs)))) - cmd.write_or_delete_file("requirements", filename, ''.join(data)) - -def write_toplevel_names(cmd, basename, filename): - pkgs = dict.fromkeys( - [k.split('.',1)[0] - for k in cmd.distribution.iter_distribution_names() - ] - ) - cmd.write_file("top-level names", filename, '\n'.join(pkgs)+'\n') - - - -def overwrite_arg(cmd, basename, filename): - write_arg(cmd, basename, filename, True) - -def write_arg(cmd, basename, filename, force=False): - argname = os.path.splitext(basename)[0] - value = getattr(cmd.distribution, argname, None) - if value is not None: - value = '\n'.join(value)+'\n' - cmd.write_or_delete_file(argname, filename, value, force) - -def write_entries(cmd, basename, filename): - ep = cmd.distribution.entry_points - - if isinstance(ep,basestring) or ep is None: - data = ep - elif ep is not None: - data = [] - for section, contents in ep.items(): - if not isinstance(contents,basestring): - contents = EntryPoint.parse_group(section, contents) - contents = '\n'.join(map(str,contents.values())) - data.append('[%s]\n%s\n\n' % (section,contents)) - data = ''.join(data) - - cmd.write_or_delete_file('entry points', filename, data, True) - -def get_pkg_info_revision(): - # See if we can get a -r### off of PKG-INFO, in case this is an sdist of - # a subversion revision - # - if os.path.exists('PKG-INFO'): - f = open('PKG-INFO','rU') - for line in f: - match = re.match(r"Version:.*-r(\d+)\s*$", line) - if match: - return int(match.group(1)) - f.close() - return 0 - - - -# diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install.py deleted file mode 100755 index 247c4f25..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install.py +++ /dev/null @@ -1,124 +0,0 @@ -import setuptools, sys, glob -from distutils.command.install import install as _install -from distutils.errors import DistutilsArgError - -class install(_install): - """Use easy_install to install the package, w/dependencies""" - - user_options = _install.user_options + [ - ('old-and-unmanageable', None, "Try not to use this!"), - ('single-version-externally-managed', None, - "used by system package builders to create 'flat' eggs"), - ] - boolean_options = _install.boolean_options + [ - 'old-and-unmanageable', 'single-version-externally-managed', - ] - new_commands = [ - ('install_egg_info', lambda self: True), - ('install_scripts', lambda self: True), - ] - _nc = dict(new_commands) - - def initialize_options(self): - _install.initialize_options(self) - self.old_and_unmanageable = None - self.single_version_externally_managed = None - self.no_compile = None # make DISTUTILS_DEBUG work right! 
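Each writer above (write_pkg_info, write_requirements, write_toplevel_names, write_arg, write_entries) takes the same (cmd, basename, filename) arguments that egg_info.run() passes when it iterates the 'egg_info.writers' entry point group. A sketch of a third-party writer under that contract; the 'build_stamp' attribute and 'mymodule' names are invented for illustration:

    def write_build_stamp(cmd, basename, filename):
        # cmd is the running egg_info command; reusing its helper keeps
        # logging, dry-run handling, and orphan-file warnings consistent
        # with the stock writers above.
        value = getattr(cmd.distribution, 'build_stamp', None)
        cmd.write_or_delete_file('build stamp', filename, value)

    # A plugin would register it in its own setup.py:
    # entry_points={'egg_info.writers':
    #               ['build_stamp.txt = mymodule:write_build_stamp']}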
- - def finalize_options(self): - _install.finalize_options(self) - if self.root: - self.single_version_externally_managed = True - elif self.single_version_externally_managed: - if not self.root and not self.record: - raise DistutilsArgError( - "You must specify --record or --root when building system" - " packages" - ) - - def handle_extra_path(self): - if self.root or self.single_version_externally_managed: - # explicit backward-compatibility mode, allow extra_path to work - return _install.handle_extra_path(self) - - # Ignore extra_path when installing an egg (or being run by another - # command without --root or --single-version-externally-managed - self.path_file = None - self.extra_dirs = '' - - - def run(self): - # Explicit request for old-style install? Just do it - if self.old_and_unmanageable or self.single_version_externally_managed: - return _install.run(self) - - # Attempt to detect whether we were called from setup() or by another - # command. If we were called by setup(), our caller will be the - # 'run_command' method in 'distutils.dist', and *its* caller will be - # the 'run_commands' method. If we were called any other way, our - # immediate caller *might* be 'run_command', but it won't have been - # called by 'run_commands'. This is slightly kludgy, but seems to - # work. - # - caller = sys._getframe(2) - caller_module = caller.f_globals.get('__name__','') - caller_name = caller.f_code.co_name - - if caller_module != 'distutils.dist' or caller_name!='run_commands': - # We weren't called from the command line or setup(), so we - # should run in backward-compatibility mode to support bdist_* - # commands. - _install.run(self) - else: - self.do_egg_install() - - - - - - - def do_egg_install(self): - - easy_install = self.distribution.get_command_class('easy_install') - - cmd = easy_install( - self.distribution, args="x", root=self.root, record=self.record, - ) - cmd.ensure_finalized() # finalize before bdist_egg munges install cmd - cmd.always_copy_from = '.' 
# make sure local-dir eggs get installed - - # pick up setup-dir .egg files only: no .egg-info - cmd.package_index.scan(glob.glob('*.egg')) - - self.run_command('bdist_egg') - args = [self.distribution.get_command_obj('bdist_egg').egg_output] - - if setuptools.bootstrap_install_from: - # Bootstrap self-installation of setuptools - args.insert(0, setuptools.bootstrap_install_from) - - cmd.args = args - cmd.run() - setuptools.bootstrap_install_from = None - -# XXX Python 3.1 doesn't see _nc if this is inside the class -install.sub_commands = [ - cmd for cmd in _install.sub_commands if cmd[0] not in install._nc - ] + install.new_commands - - - - - - - - - - - - - - - - -# diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_egg_info.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_egg_info.py deleted file mode 100755 index dd95552e..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_egg_info.py +++ /dev/null @@ -1,123 +0,0 @@ -from setuptools import Command -from setuptools.archive_util import unpack_archive -from distutils import log, dir_util -import os, shutil, pkg_resources - -class install_egg_info(Command): - """Install an .egg-info directory for the package""" - - description = "Install an .egg-info directory for the package" - - user_options = [ - ('install-dir=', 'd', "directory to install to"), - ] - - def initialize_options(self): - self.install_dir = None - - def finalize_options(self): - self.set_undefined_options('install_lib',('install_dir','install_dir')) - ei_cmd = self.get_finalized_command("egg_info") - basename = pkg_resources.Distribution( - None, None, ei_cmd.egg_name, ei_cmd.egg_version - ).egg_name()+'.egg-info' - self.source = ei_cmd.egg_info - self.target = os.path.join(self.install_dir, basename) - self.outputs = [self.target] - - def run(self): - self.run_command('egg_info') - target = self.target - if os.path.isdir(self.target) and not os.path.islink(self.target): - dir_util.remove_tree(self.target, dry_run=self.dry_run) - elif os.path.exists(self.target): - self.execute(os.unlink,(self.target,),"Removing "+self.target) - if not self.dry_run: - pkg_resources.ensure_directory(self.target) - self.execute(self.copytree, (), - "Copying %s to %s" % (self.source, self.target) - ) - self.install_namespaces() - - def get_outputs(self): - return self.outputs - - def copytree(self): - # Copy the .egg-info tree to site-packages - def skimmer(src,dst): - # filter out source-control directories; note that 'src' is always - # a '/'-separated path, regardless of platform. 'dst' is a - # platform-specific path. - for skip in '.svn/','CVS/': - if src.startswith(skip) or '/'+skip in src: - return None - self.outputs.append(dst) - log.debug("Copying %s to %s", src, dst) - return dst - unpack_archive(self.source, self.target, skimmer) - - - - - - - - - - - - - - - - - - - - - - - - - - def install_namespaces(self): - nsp = self._get_all_ns_packages() - if not nsp: return - filename,ext = os.path.splitext(self.target) - filename += '-nspkg.pth'; self.outputs.append(filename) - log.info("Installing %s",filename) - if not self.dry_run: - f = open(filename,'wt') - for pkg in nsp: - pth = tuple(pkg.split('.')) - trailer = '\n' - if '.' 
in pkg: - trailer = ( - "; m and setattr(sys.modules[%r], %r, m)\n" - % ('.'.join(pth[:-1]), pth[-1]) - ) - f.write( - "import sys,types,os; " - "p = os.path.join(sys._getframe(1).f_locals['sitedir'], " - "*%(pth)r); " - "ie = os.path.exists(os.path.join(p,'__init__.py')); " - "m = not ie and " - "sys.modules.setdefault(%(pkg)r,types.ModuleType(%(pkg)r)); " - "mp = (m or []) and m.__dict__.setdefault('__path__',[]); " - "(p not in mp) and mp.append(p)%(trailer)s" - % locals() - ) - f.close() - - def _get_all_ns_packages(self): - nsp = {} - for pkg in self.distribution.namespace_packages or []: - pkg = pkg.split('.') - while pkg: - nsp['.'.join(pkg)] = 1 - pkg.pop() - nsp=list(nsp) - nsp.sort() # set up shorter names first - return nsp - - diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_lib.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_lib.py deleted file mode 100755 index 82afa142..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_lib.py +++ /dev/null @@ -1,82 +0,0 @@ -from distutils.command.install_lib import install_lib as _install_lib -import os - -class install_lib(_install_lib): - """Don't add compiled flags to filenames of non-Python files""" - - def _bytecode_filenames (self, py_filenames): - bytecode_files = [] - for py_file in py_filenames: - if not py_file.endswith('.py'): - continue - if self.compile: - bytecode_files.append(py_file + "c") - if self.optimize > 0: - bytecode_files.append(py_file + "o") - - return bytecode_files - - def run(self): - self.build() - outfiles = self.install() - if outfiles is not None: - # always compile, in case we have any extension stubs to deal with - self.byte_compile(outfiles) - - def get_exclusions(self): - exclude = {} - nsp = self.distribution.namespace_packages - - if (nsp and self.get_finalized_command('install') - .single_version_externally_managed - ): - for pkg in nsp: - parts = pkg.split('.') - while parts: - pkgdir = os.path.join(self.install_dir, *parts) - for f in '__init__.py', '__init__.pyc', '__init__.pyo': - exclude[os.path.join(pkgdir,f)] = 1 - parts.pop() - return exclude - - def copy_tree( - self, infile, outfile, - preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1 - ): - assert preserve_mode and preserve_times and not preserve_symlinks - exclude = self.get_exclusions() - - if not exclude: - return _install_lib.copy_tree(self, infile, outfile) - - # Exclude namespace package __init__.py* files from the output - - from setuptools.archive_util import unpack_directory - from distutils import log - - outfiles = [] - - def pf(src, dst): - if dst in exclude: - log.warn("Skipping installation of %s (namespace package)",dst) - return False - - log.info("copying %s -> %s", src, os.path.dirname(dst)) - outfiles.append(dst) - return dst - - unpack_directory(infile, outfile, pf) - return outfiles - - def get_outputs(self): - outputs = _install_lib.get_outputs(self) - exclude = self.get_exclusions() - if exclude: - return [f for f in outputs if f not in exclude] - return outputs - - - - - - diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_scripts.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_scripts.py deleted file mode 100755 index 6ce1b993..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_scripts.py +++ /dev/null @@ -1,53 +0,0 @@ -from 
distutils.command.install_scripts import install_scripts \ - as _install_scripts -from pkg_resources import Distribution, PathMetadata, ensure_directory -import os -from distutils import log - -class install_scripts(_install_scripts): - """Do normal script install, plus any egg_info wrapper scripts""" - - def initialize_options(self): - _install_scripts.initialize_options(self) - self.no_ep = False - - def run(self): - from setuptools.command.easy_install import get_script_args - from setuptools.command.easy_install import sys_executable - - self.run_command("egg_info") - if self.distribution.scripts: - _install_scripts.run(self) # run first to set up self.outfiles - else: - self.outfiles = [] - if self.no_ep: - # don't install entry point scripts into .egg file! - return - - ei_cmd = self.get_finalized_command("egg_info") - dist = Distribution( - ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info), - ei_cmd.egg_name, ei_cmd.egg_version, - ) - bs_cmd = self.get_finalized_command('build_scripts') - executable = getattr(bs_cmd,'executable',sys_executable) - is_wininst = getattr( - self.get_finalized_command("bdist_wininst"), '_is_running', False - ) - for args in get_script_args(dist, executable, is_wininst): - self.write_script(*args) - - def write_script(self, script_name, contents, mode="t", *ignored): - """Write an executable file to the scripts directory""" - from setuptools.command.easy_install import chmod - log.info("Installing %s script to %s", script_name, self.install_dir) - target = os.path.join(self.install_dir, script_name) - self.outfiles.append(target) - - if not self.dry_run: - ensure_directory(target) - f = open(target,"w"+mode) - f.write(contents) - f.close() - chmod(target,0755) - diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/register.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/register.py deleted file mode 100755 index 3b2e0859..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/register.py +++ /dev/null @@ -1,10 +0,0 @@ -from distutils.command.register import register as _register - -class register(_register): - __doc__ = _register.__doc__ - - def run(self): - # Make sure that we are using valid current name/version info - self.run_command('egg_info') - _register.run(self) - diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/rotate.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/rotate.py deleted file mode 100755 index 11b6eae8..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/rotate.py +++ /dev/null @@ -1,82 +0,0 @@ -import distutils, os -from setuptools import Command -from distutils.util import convert_path -from distutils import log -from distutils.errors import * - -class rotate(Command): - """Delete older distributions""" - - description = "delete older distributions, keeping N newest files" - user_options = [ - ('match=', 'm', "patterns to match (required)"), - ('dist-dir=', 'd', "directory where the distributions are"), - ('keep=', 'k', "number of matching distributions to keep"), - ] - - boolean_options = [] - - def initialize_options(self): - self.match = None - self.dist_dir = None - self.keep = None - - def finalize_options(self): - if self.match is None: - raise DistutilsOptionError( - "Must specify one or more (comma-separated) match patterns " - "(e.g. 
'.zip' or '.egg')" ) - if self.keep is None: - raise DistutilsOptionError("Must specify number of files to keep") - try: - self.keep = int(self.keep) - except ValueError: - raise DistutilsOptionError("--keep must be an integer") - if isinstance(self.match, basestring): - self.match = [ - convert_path(p.strip()) for p in self.match.split(',') - ] - self.set_undefined_options('bdist',('dist_dir', 'dist_dir')) - - def run(self): - self.run_command("egg_info") - from glob import glob - for pattern in self.match: - pattern = self.distribution.get_name()+'*'+pattern - files = glob(os.path.join(self.dist_dir,pattern)) - files = [(os.path.getmtime(f),f) for f in files] - files.sort() - files.reverse() - - log.info("%d file(s) matching %s", len(files), pattern) - files = files[self.keep:] - for (t,f) in files: - log.info("Deleting %s", f) - if not self.dry_run: - os.unlink(f) - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/saveopts.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/saveopts.py deleted mode 100755 index 1180a440..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/saveopts.py +++ /dev/null @@ -1,25 +0,0 @@ -import distutils, os -from setuptools import Command -from setuptools.command.setopt import edit_config, option_base - -class saveopts(option_base): - """Save command-line options to a file""" - - description = "save supplied options to setup.cfg or other config file" - - def run(self): - dist = self.distribution - commands = dist.command_options.keys() - settings = {} - - for cmd in commands: - - if cmd=='saveopts': - continue # don't save our own options! - - for opt,(src,val) in dist.get_option_dict(cmd).items(): - if src=="command line": - settings.setdefault(cmd,{})[opt] = val - - edit_config(self.filename, settings, self.dry_run) - diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/sdist.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/sdist.py deleted mode 100755 index 3442fe4b..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/sdist.py +++ /dev/null @@ -1,252 +0,0 @@ -from distutils.command.sdist import sdist as _sdist -from distutils.util import convert_path -from distutils import log -import os, re, sys, pkg_resources -from glob import glob - -entities = [ - ("&lt;","<"), ("&gt;", ">"), ("&quot;", '"'), ("&apos;", "'"), - ("&amp;", "&") -] - -def unescape(data): - for old,new in entities: - data = data.replace(old,new) - return data - -def re_finder(pattern, postproc=None): - def find(dirname, filename): - f = open(filename,'rU') - data = f.read() - f.close() - for match in pattern.finditer(data): - path = match.group(1) - if postproc: - path = postproc(path) - yield joinpath(dirname,path) - return find - -def joinpath(prefix,suffix): - if not prefix: - return suffix - return os.path.join(prefix,suffix) - - - - - - - - - - -def walk_revctrl(dirname=''): - """Find all files under revision control""" - for ep in pkg_resources.iter_entry_points('setuptools.file_finders'): - for item in ep.load()(dirname): - yield item - -def _default_revctrl(dirname=''): - for path, finder in finders: - path = joinpath(dirname,path) - if os.path.isfile(path): - for path in finder(dirname,path): - if os.path.isfile(path): - yield path - elif os.path.isdir(path): - for item in _default_revctrl(path): - yield item - -def
externals_finder(dirname, filename): - """Find any 'svn:externals' directories""" - found = False - f = open(filename,'rt') - for line in iter(f.readline, ''): # can't use direct iter! - parts = line.split() - if len(parts)==2: - kind,length = parts - data = f.read(int(length)) - if kind=='K' and data=='svn:externals': - found = True - elif kind=='V' and found: - f.close() - break - else: - f.close() - return - - for line in data.splitlines(): - parts = line.split() - if parts: - yield joinpath(dirname, parts[0]) - - -entries_pattern = re.compile(r'name="([^"]+)"(?![^>]+deleted="true")', re.I) - -def entries_finder(dirname, filename): - f = open(filename,'rU') - data = f.read() - f.close() - if data.startswith('10') or data.startswith('9') or data.startswith('8'): - for record in map(str.splitlines, data.split('\n\x0c\n')[1:]): - # subversion 1.6/1.5/1.4 - if not record or len(record)>=6 and record[5]=="delete": - continue # skip deleted - yield joinpath(dirname, record[0]) - elif data.startswith('<?xml'): - for match in entries_pattern.finditer(data): - yield joinpath(dirname,unescape(match.group(1))) - else: - log.warn("unrecognized .svn/entries format in %s", dirname) - - -finders = [ - (convert_path('CVS/Entries'), - re_finder(re.compile(r"^\w?/([^/]+)/", re.M))), - (convert_path('.svn/entries'), entries_finder), - (convert_path('.svn/dir-props'), externals_finder), - (convert_path('.svn/dir-prop-base'), externals_finder), # svn 1.4 -] - - - - - - - - - - - - - - - - -class sdist(_sdist): - """Smart sdist that finds anything supported by revision control""" - - user_options = [ - ('formats=', None, - "formats for source distribution (comma-separated list)"), - ('keep-temp', 'k', - "keep the distribution tree around after creating " + - "archive file(s)"), - ('dist-dir=', 'd', - "directory to put the source distribution archive(s) in " - "[default: dist]"), - ] - - negative_opt = {} - - def run(self): - self.run_command('egg_info') - ei_cmd = self.get_finalized_command('egg_info') - self.filelist = ei_cmd.filelist - self.filelist.append(os.path.join(ei_cmd.egg_info,'SOURCES.txt')) - self.check_readme() - self.check_metadata() - self.make_distribution() - - dist_files = getattr(self.distribution,'dist_files',[]) - for file in self.archive_files: - data = ('sdist', '', file) - if data not in dist_files: - dist_files.append(data) - - def add_defaults(self): - standards = [('README', 'README.txt'), - self.distribution.script_name] - for fn in standards: - if isinstance(fn, tuple): - alts = fn - got_it = 0 - for fn in alts: - if os.path.exists(fn): - got_it = 1 - self.filelist.append(fn) - break - - if not got_it: - self.warn("standard file not found: should have one of " + - ', '.join(alts)) - else: - if os.path.exists(fn): - self.filelist.append(fn) - else: - self.warn("standard file '%s' not found" % fn) - - optional = ['test/test*.py', 'setup.cfg'] - for pattern in optional: - files = filter(os.path.isfile, glob(pattern)) - if files: - self.filelist.extend(files) - - # getting python files - if self.distribution.has_pure_modules(): - build_py = self.get_finalized_command('build_py') - self.filelist.extend(build_py.get_source_files()) - - if self.distribution.has_ext_modules(): - build_ext = self.get_finalized_command('build_ext') - self.filelist.extend(build_ext.get_source_files()) - - if self.distribution.has_c_libraries(): - build_clib = self.get_finalized_command('build_clib') - self.filelist.extend(build_clib.get_source_files()) - - if self.distribution.has_scripts(): - build_scripts 
= self.get_finalized_command('build_scripts') - self.filelist.extend(build_scripts.get_source_files()) - - def read_template(self): - try: - _sdist.read_template(self) - except: - # grody hack to close the template file (MANIFEST.in) - # this prevents easy_install's attempt at deleting the file from - # dying and thus masking the real error - sys.exc_info()[2].tb_next.tb_frame.f_locals['template'].close() - raise - - def check_readme(self): - alts = ("README", "README.txt") - for f in alts: - if os.path.exists(f): - return - else: - self.warn( - "standard file not found: should have one of " +', '.join(alts) - ) - - - def make_release_tree(self, base_dir, files): - _sdist.make_release_tree(self, base_dir, files) - - # Save any egg_info command line options used to create this sdist - dest = os.path.join(base_dir, 'setup.cfg') - if hasattr(os,'link') and os.path.exists(dest): - # unlink and re-copy, since it might be hard-linked, and - # we don't want to change the source version - os.unlink(dest) - self.copy_file('setup.cfg', dest) - - self.get_finalized_command('egg_info').save_version_info(dest) - - - - - - - - - - - - - - - - - -# diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/setopt.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/setopt.py deleted file mode 100755 index dbf3a94e..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/setopt.py +++ /dev/null @@ -1,164 +0,0 @@ -import distutils, os -from setuptools import Command -from distutils.util import convert_path -from distutils import log -from distutils.errors import * - -__all__ = ['config_file', 'edit_config', 'option_base', 'setopt'] - - -def config_file(kind="local"): - """Get the filename of the distutils, local, global, or per-user config - - `kind` must be one of "local", "global", or "user" - """ - if kind=='local': - return 'setup.cfg' - if kind=='global': - return os.path.join( - os.path.dirname(distutils.__file__),'distutils.cfg' - ) - if kind=='user': - dot = os.name=='posix' and '.' or '' - return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot)) - raise ValueError( - "config_file() type must be 'local', 'global', or 'user'", kind - ) - - - - - - - - - - - - - - - -def edit_config(filename, settings, dry_run=False): - """Edit a configuration file to include `settings` - - `settings` is a dictionary of dictionaries or ``None`` values, keyed by - command/section name. A ``None`` value means to delete the entire section, - while a dictionary lists settings to be changed or deleted in that section. - A setting of ``None`` means to delete that setting. 
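Given the semantics spelled out above, a single edit_config() call can set one option, delete another, and drop an entire section; the section and option names below are examples only:

    edit_config('setup.cfg', {
        'egg_info': {'tag_build': '.dev',       # set or overwrite an option
                     'tag_svn_revision': None}, # None deletes just this option
        'aliases': None,                        # None deletes the whole section
    })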
- """ - from ConfigParser import RawConfigParser - log.debug("Reading configuration from %s", filename) - opts = RawConfigParser() - opts.read([filename]) - for section, options in settings.items(): - if options is None: - log.info("Deleting section [%s] from %s", section, filename) - opts.remove_section(section) - else: - if not opts.has_section(section): - log.debug("Adding new section [%s] to %s", section, filename) - opts.add_section(section) - for option,value in options.items(): - if value is None: - log.debug("Deleting %s.%s from %s", - section, option, filename - ) - opts.remove_option(section,option) - if not opts.options(section): - log.info("Deleting empty [%s] section from %s", - section, filename) - opts.remove_section(section) - else: - log.debug( - "Setting %s.%s to %r in %s", - section, option, value, filename - ) - opts.set(section,option,value) - - log.info("Writing %s", filename) - if not dry_run: - f = open(filename,'w'); opts.write(f); f.close() - -class option_base(Command): - """Abstract base class for commands that mess with config files""" - - user_options = [ - ('global-config', 'g', - "save options to the site-wide distutils.cfg file"), - ('user-config', 'u', - "save options to the current user's pydistutils.cfg file"), - ('filename=', 'f', - "configuration file to use (default=setup.cfg)"), - ] - - boolean_options = [ - 'global-config', 'user-config', - ] - - def initialize_options(self): - self.global_config = None - self.user_config = None - self.filename = None - - def finalize_options(self): - filenames = [] - if self.global_config: - filenames.append(config_file('global')) - if self.user_config: - filenames.append(config_file('user')) - if self.filename is not None: - filenames.append(self.filename) - if not filenames: - filenames.append(config_file('local')) - if len(filenames)>1: - raise DistutilsOptionError( - "Must specify only one configuration file option", - filenames - ) - self.filename, = filenames - - - - -class setopt(option_base): - """Save command-line options to a file""" - - description = "set an option in setup.cfg or another config file" - - user_options = [ - ('command=', 'c', 'command to set an option for'), - ('option=', 'o', 'option to set'), - ('set-value=', 's', 'value of the option'), - ('remove', 'r', 'remove (unset) the value'), - ] + option_base.user_options - - boolean_options = option_base.boolean_options + ['remove'] - - def initialize_options(self): - option_base.initialize_options(self) - self.command = None - self.option = None - self.set_value = None - self.remove = None - - def finalize_options(self): - option_base.finalize_options(self) - if self.command is None or self.option is None: - raise DistutilsOptionError("Must specify --command *and* --option") - if self.set_value is None and not self.remove: - raise DistutilsOptionError("Must specify --set-value or --remove") - - def run(self): - edit_config( - self.filename, { - self.command: {self.option.replace('-','_'):self.set_value} - }, - self.dry_run - ) - - - - - - diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/test.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/test.py deleted file mode 100755 index b7aef969..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/test.py +++ /dev/null @@ -1,180 +0,0 @@ -from setuptools import Command -from distutils.errors import DistutilsOptionError -import sys -from pkg_resources import * -from unittest import TestLoader, 
main - -class ScanningLoader(TestLoader): - - def loadTestsFromModule(self, module): - """Return a suite of all tests cases contained in the given module - - If the module is a package, load tests from all the modules in it. - If the module has an ``additional_tests`` function, call it and add - the return value to the tests. - """ - tests = [] - if module.__name__!='setuptools.tests.doctest': # ugh - tests.append(TestLoader.loadTestsFromModule(self,module)) - - if hasattr(module, "additional_tests"): - tests.append(module.additional_tests()) - - if hasattr(module, '__path__'): - for file in resource_listdir(module.__name__, ''): - if file.endswith('.py') and file!='__init__.py': - submodule = module.__name__+'.'+file[:-3] - else: - if resource_exists( - module.__name__, file+'/__init__.py' - ): - submodule = module.__name__+'.'+file - else: - continue - tests.append(self.loadTestsFromName(submodule)) - - if len(tests)!=1: - return self.suiteClass(tests) - else: - return tests[0] # don't create a nested suite for only one return - - -class test(Command): - - """Command to run unit tests after in-place build""" - - description = "run unit tests after in-place build" - - user_options = [ - ('test-module=','m', "Run 'test_suite' in specified module"), - ('test-suite=','s', - "Test suite to run (e.g. 'some_module.test_suite')"), - ] - - def initialize_options(self): - self.test_suite = None - self.test_module = None - self.test_loader = None - - - def finalize_options(self): - - if self.test_suite is None: - if self.test_module is None: - self.test_suite = self.distribution.test_suite - else: - self.test_suite = self.test_module+".test_suite" - elif self.test_module: - raise DistutilsOptionError( - "You may specify a module or a suite, but not both" - ) - - self.test_args = [self.test_suite] - - if self.verbose: - self.test_args.insert(0,'--verbose') - if self.test_loader is None: - self.test_loader = getattr(self.distribution,'test_loader',None) - if self.test_loader is None: - self.test_loader = "setuptools.command.test:ScanningLoader" - - - - def with_project_on_sys_path(self, func): - if getattr(self.distribution, 'use_2to3', False): - # If we run 2to3 we can not do this inplace: - - # Ensure metadata is up-to-date - self.reinitialize_command('build_py', inplace=0) - self.run_command('build_py') - bpy_cmd = self.get_finalized_command("build_py") - build_path = normalize_path(bpy_cmd.build_lib) - - # Build extensions - self.reinitialize_command('egg_info', egg_base=build_path) - self.run_command('egg_info') - - self.reinitialize_command('build_ext', inplace=0) - self.run_command('build_ext') - else: - # Without 2to3 inplace works fine: - self.run_command('egg_info') - - # Build extensions in-place - self.reinitialize_command('build_ext', inplace=1) - self.run_command('build_ext') - - ei_cmd = self.get_finalized_command("egg_info") - - old_path = sys.path[:] - old_modules = sys.modules.copy() - - try: - sys.path.insert(0, normalize_path(ei_cmd.egg_base)) - working_set.__init__() - add_activation_listener(lambda dist: dist.activate()) - require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version)) - func() - finally: - sys.path[:] = old_path - sys.modules.clear() - sys.modules.update(old_modules) - working_set.__init__() - - - def run(self): - if self.distribution.install_requires: - self.distribution.fetch_build_eggs(self.distribution.install_requires) - if self.distribution.tests_require: - self.distribution.fetch_build_eggs(self.distribution.tests_require) - - if self.test_suite: - cmd = ' 
'.join(self.test_args) - if self.dry_run: - self.announce('skipping "unittest %s" (dry run)' % cmd) - else: - self.announce('running "unittest %s"' % cmd) - self.with_project_on_sys_path(self.run_tests) - - - def run_tests(self): - import unittest - loader_ep = EntryPoint.parse("x="+self.test_loader) - loader_class = loader_ep.load(require=False) - unittest.main( - None, None, [unittest.__file__]+self.test_args, - testLoader = loader_class() - ) - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload.py deleted file mode 100755 index 1f49745e..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload.py +++ /dev/null @@ -1,183 +0,0 @@ -"""distutils.command.upload - -Implements the Distutils 'upload' subcommand (upload package to PyPI).""" - -from distutils.errors import * -from distutils.core import Command -from distutils.spawn import spawn -from distutils import log -try: - from hashlib import md5 -except ImportError: - from md5 import md5 -import os -import socket -import platform -import ConfigParser -import httplib -import base64 -import urlparse -import cStringIO as StringIO - -class upload(Command): - - description = "upload binary package to PyPI" - - DEFAULT_REPOSITORY = 'http://pypi.python.org/pypi' - - user_options = [ - ('repository=', 'r', - "url of repository [default: %s]" % DEFAULT_REPOSITORY), - ('show-response', None, - 'display full response text from server'), - ('sign', 's', - 'sign files to upload using gpg'), - ('identity=', 'i', 'GPG identity used to sign files'), - ] - boolean_options = ['show-response', 'sign'] - - def initialize_options(self): - self.username = '' - self.password = '' - self.repository = '' - self.show_response = 0 - self.sign = False - self.identity = None - - def finalize_options(self): - if self.identity and not self.sign: - raise DistutilsOptionError( - "Must use --sign for --identity to have meaning" - ) - if os.environ.has_key('HOME'): - rc = os.path.join(os.environ['HOME'], '.pypirc') - if os.path.exists(rc): - self.announce('Using PyPI login from %s' % rc) - config = ConfigParser.ConfigParser({ - 'username':'', - 'password':'', - 'repository':''}) - config.read(rc) - if not self.repository: - self.repository = config.get('server-login', 'repository') - if not self.username: - self.username = config.get('server-login', 'username') - if not self.password: - self.password = config.get('server-login', 'password') - if not self.repository: - self.repository = self.DEFAULT_REPOSITORY - - def run(self): - if not self.distribution.dist_files: - raise DistutilsOptionError("No dist file created in earlier command") - for command, pyversion, filename in self.distribution.dist_files: - self.upload_file(command, pyversion, filename) - - def upload_file(self, command, pyversion, filename): - # Sign if requested - if self.sign: - gpg_args = ["gpg", "--detach-sign", "-a", filename] - if self.identity: - gpg_args[2:2] = ["--local-user", self.identity] - spawn(gpg_args, - dry_run=self.dry_run) - - # Fill in the data - f = open(filename,'rb') - content = f.read() - f.close() - basename = os.path.basename(filename) - comment = '' - if command=='bdist_egg' and self.distribution.has_ext_modules(): - comment = "built on %s" % platform.platform(terse=1) - data = { - ':action':'file_upload', - 'protcol_version':'1', - 
'name':self.distribution.get_name(), - 'version':self.distribution.get_version(), - 'content':(basename,content), - 'filetype':command, - 'pyversion':pyversion, - 'md5_digest':md5(content).hexdigest(), - } - if command == 'bdist_rpm': - dist, version, id = platform.dist() - if dist: - comment = 'built for %s %s' % (dist, version) - elif command == 'bdist_dumb': - comment = 'built for %s' % platform.platform(terse=1) - data['comment'] = comment - - if self.sign: - data['gpg_signature'] = (os.path.basename(filename) + ".asc", - open(filename+".asc").read()) - - # set up the authentication - auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip() - - # Build up the MIME payload for the POST data - boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' - sep_boundary = '\n--' + boundary - end_boundary = sep_boundary + '--' - body = StringIO.StringIO() - for key, value in data.items(): - # handle multiple entries for the same name - if type(value) != type([]): - value = [value] - for value in value: - if type(value) is tuple: - fn = ';filename="%s"' % value[0] - value = value[1] - else: - fn = "" - value = str(value) - body.write(sep_boundary) - body.write('\nContent-Disposition: form-data; name="%s"'%key) - body.write(fn) - body.write("\n\n") - body.write(value) - if value and value[-1] == '\r': - body.write('\n') # write an extra newline (lurve Macs) - body.write(end_boundary) - body.write("\n") - body = body.getvalue() - - self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO) - - # build the Request - # We can't use urllib2 since we need to send the Basic - # auth right with the first request - schema, netloc, url, params, query, fragments = \ - urlparse.urlparse(self.repository) - assert not params and not query and not fragments - if schema == 'http': - http = httplib.HTTPConnection(netloc) - elif schema == 'https': - http = httplib.HTTPSConnection(netloc) - else: - raise AssertionError, "unsupported schema "+schema - - data = '' - loglevel = log.INFO - try: - http.connect() - http.putrequest("POST", url) - http.putheader('Content-type', - 'multipart/form-data; boundary=%s'%boundary) - http.putheader('Content-length', str(len(body))) - http.putheader('Authorization', auth) - http.endheaders() - http.send(body) - except socket.error, e: - self.announce(str(e), log.ERROR) - return - - r = http.getresponse() - if r.status == 200: - self.announce('Server response (%s): %s' % (r.status, r.reason), - log.INFO) - else: - self.announce('Upload failed (%s): %s' % (r.status, r.reason), - log.ERROR) - if self.show_response: - print '-'*75, r.read(), '-'*75 diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload_docs.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload_docs.py deleted file mode 100755 index 213f7b58..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload_docs.py +++ /dev/null @@ -1,178 +0,0 @@ -# -*- coding: utf-8 -*- -"""upload_docs - -Implements a Distutils 'upload_docs' subcommand (upload documentation to -PyPI's packages.python.org). 
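For a single ordinary field, the loop above serializes to a body of roughly this shape (boundary abbreviated here; sep_boundary prepends two further dashes, and the closing boundary carries a trailing '--'):

    ----------------GHSKFJDLGDS...3254
    Content-Disposition: form-data; name=":action"

    file_upload
    ----------------GHSKFJDLGDS...3254--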
-""" - -import os -import socket -import zipfile -import httplib -import base64 -import urlparse -import tempfile -import sys - -from distutils import log -from distutils.errors import DistutilsOptionError - -try: - from distutils.command.upload import upload -except ImportError: - from setuptools.command.upload import upload - -_IS_PYTHON3 = sys.version > '3' - -try: - bytes -except NameError: - bytes = str - -def b(str_or_bytes): - """Return bytes by either encoding the argument as ASCII or simply return - the argument as-is.""" - if not isinstance(str_or_bytes, bytes): - return str_or_bytes.encode('ascii') - else: - return str_or_bytes - - -class upload_docs(upload): - - description = 'Upload documentation to PyPI' - - user_options = [ - ('repository=', 'r', - "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY), - ('show-response', None, - 'display full response text from server'), - ('upload-dir=', None, 'directory to upload'), - ] - boolean_options = upload.boolean_options - - def initialize_options(self): - upload.initialize_options(self) - self.upload_dir = None - - def finalize_options(self): - upload.finalize_options(self) - if self.upload_dir is None: - build = self.get_finalized_command('build') - self.upload_dir = os.path.join(build.build_base, 'docs') - self.mkpath(self.upload_dir) - self.ensure_dirname('upload_dir') - self.announce('Using upload directory %s' % self.upload_dir) - - def create_zipfile(self): - name = self.distribution.metadata.get_name() - tmp_dir = tempfile.mkdtemp() - tmp_file = os.path.join(tmp_dir, "%s.zip" % name) - zip_file = zipfile.ZipFile(tmp_file, "w") - for root, dirs, files in os.walk(self.upload_dir): - if root == self.upload_dir and not files: - raise DistutilsOptionError( - "no files found in upload directory '%s'" - % self.upload_dir) - for name in files: - full = os.path.join(root, name) - relative = root[len(self.upload_dir):].lstrip(os.path.sep) - dest = os.path.join(relative, name) - zip_file.write(full, dest) - zip_file.close() - return tmp_file - - def run(self): - zip_file = self.create_zipfile() - self.upload_file(zip_file) - - def upload_file(self, filename): - content = open(filename, 'rb').read() - meta = self.distribution.metadata - data = { - ':action': 'doc_upload', - 'name': meta.get_name(), - 'content': (os.path.basename(filename), content), - } - # set up the authentication - credentials = self.username + ':' + self.password - if _IS_PYTHON3: # base64 only works with bytes in Python 3. 
- encoded_creds = base64.encodebytes(credentials.encode('utf8')) - auth = bytes("Basic ") - else: - encoded_creds = base64.encodestring(credentials) - auth = "Basic " - auth += encoded_creds.strip() - - # Build up the MIME payload for the POST data - boundary = b('--------------GHSKFJDLGDS7543FJKLFHRE75642756743254') - sep_boundary = b('\n--') + boundary - end_boundary = sep_boundary + b('--') - body = [] - for key, values in data.items(): - # handle multiple entries for the same name - if type(values) != type([]): - values = [values] - for value in values: - if type(value) is tuple: - fn = b(';filename="%s"' % value[0]) - value = value[1] - else: - fn = b("") - body.append(sep_boundary) - body.append(b('\nContent-Disposition: form-data; name="%s"'%key)) - body.append(fn) - body.append(b("\n\n")) - body.append(b(value)) - if value and value[-1] == b('\r'): - body.append(b('\n')) # write an extra newline (lurve Macs) - body.append(end_boundary) - body.append(b("\n")) - body = b('').join(body) - - self.announce("Submitting documentation to %s" % (self.repository), - log.INFO) - - # build the Request - # We can't use urllib2 since we need to send the Basic - # auth right with the first request - schema, netloc, url, params, query, fragments = \ - urlparse.urlparse(self.repository) - assert not params and not query and not fragments - if schema == 'http': - conn = httplib.HTTPConnection(netloc) - elif schema == 'https': - conn = httplib.HTTPSConnection(netloc) - else: - raise AssertionError("unsupported schema "+schema) - - data = '' - loglevel = log.INFO - try: - conn.connect() - conn.putrequest("POST", url) - conn.putheader('Content-type', - 'multipart/form-data; boundary=%s'%boundary) - conn.putheader('Content-length', str(len(body))) - conn.putheader('Authorization', auth) - conn.endheaders() - conn.send(body) - except socket.error, e: - self.announce(str(e), log.ERROR) - return - - r = conn.getresponse() - if r.status == 200: - self.announce('Server response (%s): %s' % (r.status, r.reason), - log.INFO) - elif r.status == 301: - location = r.getheader('Location') - if location is None: - location = 'http://packages.python.org/%s/' % meta.get_name() - self.announce('Upload successful. 
Visit %s' % location, - log.INFO) - else: - self.announce('Upload failed (%s): %s' % (r.status, r.reason), - log.ERROR) - if self.show_response: - print '-'*75, r.read(), '-'*75 diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/depends.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/depends.py deleted file mode 100755 index 4b7b3437..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/depends.py +++ /dev/null @@ -1,246 +0,0 @@ -from __future__ import generators -import sys, imp, marshal -from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN -from distutils.version import StrictVersion, LooseVersion - -__all__ = [ - 'Require', 'find_module', 'get_module_constant', 'extract_constant' -] - -class Require: - """A prerequisite to building or installing a distribution""" - - def __init__(self,name,requested_version,module,homepage='', - attribute=None,format=None - ): - - if format is None and requested_version is not None: - format = StrictVersion - - if format is not None: - requested_version = format(requested_version) - if attribute is None: - attribute = '__version__' - - self.__dict__.update(locals()) - del self.self - - - def full_name(self): - """Return full package/distribution name, w/version""" - if self.requested_version is not None: - return '%s-%s' % (self.name,self.requested_version) - return self.name - - - def version_ok(self,version): - """Is 'version' sufficiently up-to-date?""" - return self.attribute is None or self.format is None or \ - str(version)<>"unknown" and version >= self.requested_version - - - def get_version(self, paths=None, default="unknown"): - - """Get version number of installed module, 'None', or 'default' - - Search 'paths' for module. If not found, return 'None'. If found, - return the extracted version attribute, or 'default' if no version - attribute was specified, or the value cannot be determined without - importing the module. The version is formatted according to the - requirement's version format (if any), unless it is 'None' or the - supplied 'default'. 
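In the terms defined above, a requirement can be declared and probed roughly like so; the Distutils example is illustrative and the requested version arbitrary:

    req = Require('Distutils', '1.0.3', 'distutils')
    req.full_name()    # -> 'Distutils-1.0.3'
    req.get_version()  # StrictVersion parsed from distutils.__version__,
                       # or None if the module cannot be found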
- """ - - if self.attribute is None: - try: - f,p,i = find_module(self.module,paths) - if f: f.close() - return default - except ImportError: - return None - - v = get_module_constant(self.module,self.attribute,default,paths) - - if v is not None and v is not default and self.format is not None: - return self.format(v) - - return v - - - def is_present(self,paths=None): - """Return true if dependency is present on 'paths'""" - return self.get_version(paths) is not None - - - def is_current(self,paths=None): - """Return true if dependency is present and up-to-date on 'paths'""" - version = self.get_version(paths) - if version is None: - return False - return self.version_ok(version) - - -def _iter_code(code): - - """Yield '(op,arg)' pair for each operation in code object 'code'""" - - from array import array - from dis import HAVE_ARGUMENT, EXTENDED_ARG - - bytes = array('b',code.co_code) - eof = len(code.co_code) - - ptr = 0 - extended_arg = 0 - - while ptr<eof: - - op = bytes[ptr] - - if op>=HAVE_ARGUMENT: - - arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg - ptr += 3 - - if op==EXTENDED_ARG: - extended_arg = arg * 65536L - continue - - else: - arg = None - ptr += 1 - - yield op,arg - - - - - - - - - - -def find_module(module, paths=None): - """Just like 'imp.find_module()', but with package support""" - - parts = module.split('.') - - while parts: - part = parts.pop(0) - f, path, (suffix,mode,kind) = info = imp.find_module(part, paths) - - if kind==PKG_DIRECTORY: - parts = parts or ['__init__'] - paths = [path] - - elif parts: - raise ImportError("Can't find %r in %s" % (parts,module)) - - return info - - - - - - - - - - - - - - - - - - - - - - - - -def get_module_constant(module, symbol, default=-1, paths=None): - - """Find 'module' by searching 'paths', and extract 'symbol' - - Return 'None' if 'module' does not exist on 'paths', or it does not define - 'symbol'. If the module defines 'symbol' as a constant, return the - constant. Otherwise, return 'default'.""" - - try: - f, path, (suffix,mode,kind) = find_module(module,paths) - except ImportError: - # Module doesn't exist - return None - - try: - if kind==PY_COMPILED: - f.read(8) # skip magic & date - code = marshal.load(f) - elif kind==PY_FROZEN: - code = imp.get_frozen_object(module) - elif kind==PY_SOURCE: - code = compile(f.read(), path, 'exec') - else: - # Not something we can parse; we'll have to import it. :( - if module not in sys.modules: - imp.load_module(module,f,path,(suffix,mode,kind)) - return getattr(sys.modules[module],symbol,None) - - finally: - if f: - f.close() - - return extract_constant(code,symbol,default) - - - - - - - - -def extract_constant(code,symbol,default=-1): - """Extract the constant value of 'symbol' from 'code' - - If the name 'symbol' is bound to a constant value by the Python code - object 'code', return that value. If 'symbol' is bound to an expression, - return 'default'. Otherwise, return 'None'. - - Return value is based on the first assignment to 'symbol'. 'symbol' must - be a global, or at least a non-"fast" local in the code block. That is, - only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol' - must be present in 'code.co_names'. 
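A quick check of that contract, compiling an assignment so the first STORE_NAME of the symbol follows a LOAD_CONST:

    code = compile("__version__ = '1.4'\n", '<example>', 'exec')
    extract_constant(code, '__version__')  # -> '1.4'
    extract_constant(code, 'other_name')   # -> None (not in code.co_names)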
- """ - - if symbol not in code.co_names: - # name's not there, can't possibly be an assigment - return None - - name_idx = list(code.co_names).index(symbol) - - STORE_NAME = 90 - STORE_GLOBAL = 97 - LOAD_CONST = 100 - - const = default - - for op, arg in _iter_code(code): - - if op==LOAD_CONST: - const = code.co_consts[arg] - elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL): - return const - else: - const = default - -if sys.platform.startswith('java') or sys.platform == 'cli': - # XXX it'd be better to test assertions about bytecode instead... - del extract_constant, get_module_constant - __all__.remove('extract_constant') - __all__.remove('get_module_constant') - - diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/dist.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/dist.py deleted file mode 100755 index fd4ca66b..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/dist.py +++ /dev/null @@ -1,816 +0,0 @@ -__all__ = ['Distribution'] - -import re -from distutils.core import Distribution as _Distribution -from setuptools.depends import Require -from setuptools.command.install import install -from setuptools.command.sdist import sdist -from setuptools.command.install_lib import install_lib -from distutils.errors import DistutilsOptionError, DistutilsPlatformError -from distutils.errors import DistutilsSetupError -import setuptools, pkg_resources, distutils.core, distutils.dist, distutils.cmd -import os, distutils.log - -def _get_unpatched(cls): - """Protect against re-patching the distutils if reloaded - - Also ensures that no other distutils extension monkeypatched the distutils - first. - """ - while cls.__module__.startswith('setuptools'): - cls, = cls.__bases__ - if not cls.__module__.startswith('distutils'): - raise AssertionError( - "distutils has already been patched by %r" % cls - ) - return cls - -_Distribution = _get_unpatched(_Distribution) - -sequence = tuple, list - -def check_importable(dist, attr, value): - try: - ep = pkg_resources.EntryPoint.parse('x='+value) - assert not ep.extras - except (TypeError,ValueError,AttributeError,AssertionError): - raise DistutilsSetupError( - "%r must be importable 'module:attrs' string (got %r)" - % (attr,value) - ) - - -def assert_string_list(dist, attr, value): - """Verify that value is a string list or None""" - try: - assert ''.join(value)!=value - except (TypeError,ValueError,AttributeError,AssertionError): - raise DistutilsSetupError( - "%r must be a list of strings (got %r)" % (attr,value) - ) - -def check_nsp(dist, attr, value): - """Verify that namespace packages are valid""" - assert_string_list(dist,attr,value) - for nsp in value: - if not dist.has_contents_for(nsp): - raise DistutilsSetupError( - "Distribution contains no modules or packages for " + - "namespace package %r" % nsp - ) - if '.' in nsp: - parent = '.'.join(nsp.split('.')[:-1]) - if parent not in value: - distutils.log.warn( - "%r is declared as a package namespace, but %r is not:" - " please correct this in setup.py", nsp, parent - ) - -def check_extras(dist, attr, value): - """Verify that extras_require mapping is valid""" - try: - for k,v in value.items(): - list(pkg_resources.parse_requirements(v)) - except (TypeError,ValueError,AttributeError): - raise DistutilsSetupError( - "'extras_require' must be a dictionary whose values are " - "strings or lists of strings containing valid project/version " - "requirement specifiers." 
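check_nsp() above also insists that every parent of a dotted namespace package be declared alongside it, so a conforming declaration looks like this (package names invented):

    namespace_packages = ['zope', 'zope.app']  # listing only 'zope.app' would
                                               # draw the missing-parent warning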
- ) - - - - -def assert_bool(dist, attr, value): - """Verify that value is True, False, 0, or 1""" - if bool(value) != value: - raise DistutilsSetupError( - "%r must be a boolean value (got %r)" % (attr,value) - ) -def check_requirements(dist, attr, value): - """Verify that install_requires is a valid requirements list""" - try: - list(pkg_resources.parse_requirements(value)) - except (TypeError,ValueError): - raise DistutilsSetupError( - "%r must be a string or list of strings " - "containing valid project/version requirement specifiers" % (attr,) - ) -def check_entry_points(dist, attr, value): - """Verify that entry_points map is parseable""" - try: - pkg_resources.EntryPoint.parse_map(value) - except ValueError, e: - raise DistutilsSetupError(e) - -def check_test_suite(dist, attr, value): - if not isinstance(value,basestring): - raise DistutilsSetupError("test_suite must be a string") - -def check_package_data(dist, attr, value): - """Verify that value is a dictionary of package names to glob lists""" - if isinstance(value,dict): - for k,v in value.items(): - if not isinstance(k,str): break - try: iter(v) - except TypeError: - break - else: - return - raise DistutilsSetupError( - attr+" must be a dictionary mapping package names to lists of " - "wildcard patterns" - ) - -class Distribution(_Distribution): - """Distribution with support for features, tests, and package data - - This is an enhanced version of 'distutils.dist.Distribution' that - effectively adds the following new optional keyword arguments to 'setup()': - - 'install_requires' -- a string or sequence of strings specifying project - versions that the distribution requires when installed, in the format - used by 'pkg_resources.require()'. They will be installed - automatically when the package is installed. If you wish to use - packages that are not available in PyPI, or want to give your users an - alternate download location, you can add a 'find_links' option to the - '[easy_install]' section of your project's 'setup.cfg' file, and then - setuptools will scan the listed web pages for links that satisfy the - requirements. - - 'extras_require' -- a dictionary mapping names of optional "extras" to the - additional requirement(s) that using those extras incurs. For example, - this:: - - extras_require = dict(reST = ["docutils>=0.3", "reSTedit"]) - - indicates that the distribution can optionally provide an extra - capability called "reST", but it can only be used if docutils and - reSTedit are installed. If the user installs your package using - EasyInstall and requests one of your extras, the corresponding - additional requirements will be installed if needed. - - 'features' -- a dictionary mapping option names to 'setuptools.Feature' - objects. Features are a portion of the distribution that can be - included or excluded based on user options, inter-feature dependencies, - and availability on the current system. Excluded features are omitted - from all setup commands, including source and binary distributions, so - you can create multiple distributions from the same source tree. - Feature names should be valid Python identifiers, except that they may - contain the '-' (minus) sign. Features can be included or excluded - via the command line options '--with-X' and '--without-X', where 'X' is - the name of the feature. Whether a feature is included by default, and - whether you are allowed to control this from the command line, is - determined by the Feature object. See the 'Feature' class for more - information. 
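Pulled together, the keywords documented so far extend an ordinary setup() call; this reuses the docstring's own reST extra with invented project metadata:

    from setuptools import setup

    setup(
        name='example',
        version='0.1',
        install_requires=['docutils>=0.3'],
        extras_require={'reST': ['docutils>=0.3', 'reSTedit']},
    )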
- - 'test_suite' -- the name of a test suite to run for the 'test' command. - If the user runs 'python setup.py test', the package will be installed, - and the named test suite will be run. The format is the same as - would be used on a 'unittest.py' command line. That is, it is the - dotted name of an object to import and call to generate a test suite. - - 'package_data' -- a dictionary mapping package names to lists of filenames - or globs to use to find data files contained in the named packages. - If the dictionary has filenames or globs listed under '""' (the empty - string), those names will be searched for in every package, in addition - to any names for the specific package. Data files found using these - names/globs will be installed along with the package, in the same - location as the package. Note that globs are allowed to reference - the contents of non-package subdirectories, as long as you use '/' as - a path separator. (Globs are automatically converted to - platform-specific paths at runtime.) - - In addition to these new keywords, this class also has several new methods - for manipulating the distribution's contents. For example, the 'include()' - and 'exclude()' methods can be thought of as in-place add and subtract - commands that add or remove packages, modules, extensions, and so on from - the distribution. They are used by the feature subsystem to configure the - distribution for the included and excluded features. - """ - - _patched_dist = None - - def patch_missing_pkg_info(self, attrs): - # Fake up a replacement for the data that would normally come from - # PKG-INFO, but which might not yet be built if this is a fresh - # checkout. - # - if not attrs or 'name' not in attrs or 'version' not in attrs: - return - key = pkg_resources.safe_name(str(attrs['name'])).lower() - dist = pkg_resources.working_set.by_key.get(key) - if dist is not None and not dist.has_metadata('PKG-INFO'): - dist._version = pkg_resources.safe_version(str(attrs['version'])) - self._patched_dist = dist - - def __init__ (self, attrs=None): - have_package_data = hasattr(self, "package_data") - if not have_package_data: - self.package_data = {} - self.require_features = [] - self.features = {} - self.dist_files = [] - self.src_root = attrs and attrs.pop("src_root", None) - self.patch_missing_pkg_info(attrs) - # Make sure we have any eggs needed to interpret 'attrs' - if attrs is not None: - self.dependency_links = attrs.pop('dependency_links', []) - assert_string_list(self,'dependency_links',self.dependency_links) - if attrs and 'setup_requires' in attrs: - self.fetch_build_eggs(attrs.pop('setup_requires')) - for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): - if not hasattr(self,ep.name): - setattr(self,ep.name,None) - _Distribution.__init__(self,attrs) - if isinstance(self.metadata.version, (int,long,float)): - # Some people apparently take "version number" too literally :) - self.metadata.version = str(self.metadata.version) - - def parse_command_line(self): - """Process features after parsing command line options""" - result = _Distribution.parse_command_line(self) - if self.features: - self._finalize_features() - return result - - def _feature_attrname(self,name): - """Convert feature name to corresponding option attribute name""" - return 'with_'+name.replace('-','_') - - def fetch_build_eggs(self, requires): - """Resolve pre-setup requirements""" - from pkg_resources import working_set, parse_requirements - for dist in working_set.resolve( - 
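
The package_data contract described above, concretely (package and file names
invented): the '' key is consulted for every package, and '/' is the portable
separator inside globs:

    from setuptools import setup

    setup(
        name='example',
        version='1.0',
        packages=['mypkg'],
        package_data={
            '': ['*.txt'],            # searched for in every package
            'mypkg': ['data/*.dat'],  # converted to platform paths at runtime
        },
    )
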
parse_requirements(requires), installer=self.fetch_build_egg - ): - working_set.add(dist) - - def finalize_options(self): - _Distribution.finalize_options(self) - if self.features: - self._set_global_opts_from_features() - - for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): - value = getattr(self,ep.name,None) - if value is not None: - ep.require(installer=self.fetch_build_egg) - ep.load()(self, ep.name, value) - if getattr(self, 'convert_2to3_doctests', None): - # XXX may convert to set here when we can rely on set being builtin - self.convert_2to3_doctests = [os.path.abspath(p) for p in self.convert_2to3_doctests] - else: - self.convert_2to3_doctests = [] - - def fetch_build_egg(self, req): - """Fetch an egg needed for building""" - try: - cmd = self._egg_fetcher - except AttributeError: - from setuptools.command.easy_install import easy_install - dist = self.__class__({'script_args':['easy_install']}) - dist.parse_config_files() - opts = dist.get_option_dict('easy_install') - keep = ( - 'find_links', 'site_dirs', 'index_url', 'optimize', - 'site_dirs', 'allow_hosts' - ) - for key in opts.keys(): - if key not in keep: - del opts[key] # don't use any other settings - if self.dependency_links: - links = self.dependency_links[:] - if 'find_links' in opts: - links = opts['find_links'][1].split() + links - opts['find_links'] = ('setup', links) - cmd = easy_install( - dist, args=["x"], install_dir=os.curdir, exclude_scripts=True, - always_copy=False, build_directory=None, editable=False, - upgrade=False, multi_version=True, no_report = True - ) - cmd.ensure_finalized() - self._egg_fetcher = cmd - return cmd.easy_install(req) - - def _set_global_opts_from_features(self): - """Add --with-X/--without-X options based on optional features""" - - go = [] - no = self.negative_opt.copy() - - for name,feature in self.features.items(): - self._set_feature(name,None) - feature.validate(self) - - if feature.optional: - descr = feature.description - incdef = ' (default)' - excdef='' - if not feature.include_by_default(): - excdef, incdef = incdef, excdef - - go.append(('with-'+name, None, 'include '+descr+incdef)) - go.append(('without-'+name, None, 'exclude '+descr+excdef)) - no['without-'+name] = 'with-'+name - - self.global_options = self.feature_options = go + self.global_options - self.negative_opt = self.feature_negopt = no - - - - - - - - - - - - - - - - - - - def _finalize_features(self): - """Add/remove features and resolve dependencies between them""" - - # First, flag all the enabled items (and thus their dependencies) - for name,feature in self.features.items(): - enabled = self.feature_is_included(name) - if enabled or (enabled is None and feature.include_by_default()): - feature.include_in(self) - self._set_feature(name,1) - - # Then disable the rest, so that off-by-default features don't - # get flagged as errors when they're required by an enabled feature - for name,feature in self.features.items(): - if not self.feature_is_included(name): - feature.exclude_from(self) - self._set_feature(name,0) - - - def get_command_class(self, command): - """Pluggable version of get_command_class()""" - if command in self.cmdclass: - return self.cmdclass[command] - - for ep in pkg_resources.iter_entry_points('distutils.commands',command): - ep.require(installer=self.fetch_build_egg) - self.cmdclass[command] = cmdclass = ep.load() - return cmdclass - else: - return _Distribution.get_command_class(self, command) - - def print_commands(self): - for ep in 
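
finalize_options() and get_command_class() above are the two plugin hooks
that make setup() extensible: a third-party egg advertises new keywords and
commands as entry points, and they are loaded on demand. A sketch of the
declaring side (all names hypothetical):

    from setuptools import setup

    setup(
        name='example-plugin',
        version='1.0',
        py_modules=['example_plugin'],
        entry_points={
            'distutils.setup_keywords': [
                # validated/applied by finalize_options() above
                'frobnicate = example_plugin:check_frobnicate',
            ],
            'distutils.commands': [
                # resolved lazily by get_command_class() above
                'frob = example_plugin:frob',
            ],
        },
    )
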
pkg_resources.iter_entry_points('distutils.commands'): - if ep.name not in self.cmdclass: - cmdclass = ep.load(False) # don't require extras, we're not running - self.cmdclass[ep.name] = cmdclass - return _Distribution.print_commands(self) - - - - - - def _set_feature(self,name,status): - """Set feature's inclusion status""" - setattr(self,self._feature_attrname(name),status) - - def feature_is_included(self,name): - """Return 1 if feature is included, 0 if excluded, 'None' if unknown""" - return getattr(self,self._feature_attrname(name)) - - def include_feature(self,name): - """Request inclusion of feature named 'name'""" - - if self.feature_is_included(name)==0: - descr = self.features[name].description - raise DistutilsOptionError( - descr + " is required, but was excluded or is not available" - ) - self.features[name].include_in(self) - self._set_feature(name,1) - - def include(self,**attrs): - """Add items to distribution that are named in keyword arguments - - For example, 'dist.exclude(py_modules=["x"])' would add 'x' to - the distribution's 'py_modules' attribute, if it was not already - there. - - Currently, this method only supports inclusion for attributes that are - lists or tuples. If you need to add support for adding to other - attributes in this or a subclass, you can add an '_include_X' method, - where 'X' is the name of the attribute. The method will be called with - the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})' - will try to call 'dist._include_foo({"bar":"baz"})', which can then - handle whatever special inclusion logic is needed. - """ - for k,v in attrs.items(): - include = getattr(self, '_include_'+k, None) - if include: - include(v) - else: - self._include_misc(k,v) - - def exclude_package(self,package): - """Remove packages, modules, and extensions in named package""" - - pfx = package+'.' - if self.packages: - self.packages = [ - p for p in self.packages - if p != package and not p.startswith(pfx) - ] - - if self.py_modules: - self.py_modules = [ - p for p in self.py_modules - if p != package and not p.startswith(pfx) - ] - - if self.ext_modules: - self.ext_modules = [ - p for p in self.ext_modules - if p.name != package and not p.name.startswith(pfx) - ] - - - def has_contents_for(self,package): - """Return true if 'exclude_package(package)' would do something""" - - pfx = package+'.' 
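
The _include_X escape hatch that include()'s docstring describes, sketched for
the docstring's own 'foo' example (the subclass and the merge rule are
invented):

    from setuptools.dist import Distribution

    class MyDist(Distribution):
        def _include_foo(self, value):
            # dist.include(foo={'bar': 'baz'}) routes here instead of
            # _include_misc(), so a dict-valued setting can be merged
            merged = dict(getattr(self, 'foo', None) or {})
            merged.update(value)
            self.foo = merged
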
- - for p in self.iter_distribution_names(): - if p==package or p.startswith(pfx): - return True - - - - - - - - - - - def _exclude_misc(self,name,value): - """Handle 'exclude()' for list/tuple attrs without a special handler""" - if not isinstance(value,sequence): - raise DistutilsSetupError( - "%s: setting must be a list or tuple (%r)" % (name, value) - ) - try: - old = getattr(self,name) - except AttributeError: - raise DistutilsSetupError( - "%s: No such distribution setting" % name - ) - if old is not None and not isinstance(old,sequence): - raise DistutilsSetupError( - name+": this setting cannot be changed via include/exclude" - ) - elif old: - setattr(self,name,[item for item in old if item not in value]) - - def _include_misc(self,name,value): - """Handle 'include()' for list/tuple attrs without a special handler""" - - if not isinstance(value,sequence): - raise DistutilsSetupError( - "%s: setting must be a list (%r)" % (name, value) - ) - try: - old = getattr(self,name) - except AttributeError: - raise DistutilsSetupError( - "%s: No such distribution setting" % name - ) - if old is None: - setattr(self,name,value) - elif not isinstance(old,sequence): - raise DistutilsSetupError( - name+": this setting cannot be changed via include/exclude" - ) - else: - setattr(self,name,old+[item for item in value if item not in old]) - - def exclude(self,**attrs): - """Remove items from distribution that are named in keyword arguments - - For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from - the distribution's 'py_modules' attribute. Excluding packages uses - the 'exclude_package()' method, so all of the package's contained - packages, modules, and extensions are also excluded. - - Currently, this method only supports exclusion from attributes that are - lists or tuples. If you need to add support for excluding from other - attributes in this or a subclass, you can add an '_exclude_X' method, - where 'X' is the name of the attribute. The method will be called with - the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})' - will try to call 'dist._exclude_foo({"bar":"baz"})', which can then - handle whatever special exclusion logic is needed. - """ - for k,v in attrs.items(): - exclude = getattr(self, '_exclude_'+k, None) - if exclude: - exclude(v) - else: - self._exclude_misc(k,v) - - def _exclude_packages(self,packages): - if not isinstance(packages,sequence): - raise DistutilsSetupError( - "packages: setting must be a list or tuple (%r)" % (packages,) - ) - map(self.exclude_package, packages) - - - - - - - - - - - - - def _parse_command_opts(self, parser, args): - # Remove --with-X/--without-X options when processing command args - self.global_options = self.__class__.global_options - self.negative_opt = self.__class__.negative_opt - - # First, expand any aliases - command = args[0] - aliases = self.get_option_dict('aliases') - while command in aliases: - src,alias = aliases[command] - del aliases[command] # ensure each alias can expand only once! 
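
The alias loop above expands entries provided by an [aliases] section of
setup.cfg (or a per-user distutils config); with this invented configuration:

    [aliases]
    release = sdist bdist_egg upload

running 'python setup.py release' behaves like 'python setup.py sdist
bdist_egg upload'. shlex.split() just below honours shell-style quoting inside
the alias, and deleting the consumed entry keeps a self-referential alias from
expanding forever.
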
- import shlex - args[:1] = shlex.split(alias,True) - command = args[0] - - nargs = _Distribution._parse_command_opts(self, parser, args) - - # Handle commands that want to consume all remaining arguments - cmd_class = self.get_command_class(command) - if getattr(cmd_class,'command_consumes_arguments',None): - self.get_option_dict(command)['args'] = ("command line", nargs) - if nargs is not None: - return [] - - return nargs - - - - - - - - - - - - - - - - - def get_cmdline_options(self): - """Return a '{cmd: {opt:val}}' map of all command-line options - - Option names are all long, but do not include the leading '--', and - contain dashes rather than underscores. If the option doesn't take - an argument (e.g. '--quiet'), the 'val' is 'None'. - - Note that options provided by config files are intentionally excluded. - """ - - d = {} - - for cmd,opts in self.command_options.items(): - - for opt,(src,val) in opts.items(): - - if src != "command line": - continue - - opt = opt.replace('_','-') - - if val==0: - cmdobj = self.get_command_obj(cmd) - neg_opt = self.negative_opt.copy() - neg_opt.update(getattr(cmdobj,'negative_opt',{})) - for neg,pos in neg_opt.items(): - if pos==opt: - opt=neg - val=None - break - else: - raise AssertionError("Shouldn't be able to get here") - - elif val==1: - val = None - - d.setdefault(cmd,{})[opt] = val - - return d - - - def iter_distribution_names(self): - """Yield all packages, modules, and extension names in distribution""" - - for pkg in self.packages or (): - yield pkg - - for module in self.py_modules or (): - yield module - - for ext in self.ext_modules or (): - if isinstance(ext,tuple): - name, buildinfo = ext - else: - name = ext.name - if name.endswith('module'): - name = name[:-6] - yield name - -# Install it throughout the distutils -for module in distutils.dist, distutils.core, distutils.cmd: - module.Distribution = Distribution - - - - - - - - - - - - - - - - - - - - -class Feature: - """A subset of the distribution that can be excluded if unneeded/wanted - - Features are created using these keyword arguments: - - 'description' -- a short, human readable description of the feature, to - be used in error messages, and option help messages. - - 'standard' -- if true, the feature is included by default if it is - available on the current system. Otherwise, the feature is only - included if requested via a command line '--with-X' option, or if - another included feature requires it. The default setting is 'False'. - - 'available' -- if true, the feature is available for installation on the - current system. The default setting is 'True'. - - 'optional' -- if true, the feature's inclusion can be controlled from the - command line, using the '--with-X' or '--without-X' options. If - false, the feature's inclusion status is determined automatically, - based on 'availabile', 'standard', and whether any other feature - requires it. The default setting is 'True'. - - 'require_features' -- a string or sequence of strings naming features - that should also be included if this feature is included. Defaults to - empty list. May also contain 'Require' objects that should be - added/removed from the distribution. - - 'remove' -- a string or list of strings naming packages to be removed - from the distribution if this feature is *not* included. If the - feature *is* included, this argument is ignored. 
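
(A runnable sketch of the Feature keywords described so far, before the
discussion of 'remove' continues below; the feature and package names are
invented:)

    from setuptools import setup, Feature

    setup(
        name='example',
        version='1.0',
        packages=['pkg', 'pkg.tests'],
        features={
            'tests': Feature(
                "the test suite",
                standard=False,        # excluded unless --with-tests is given
                remove=['pkg.tests'],  # stripped out whenever excluded
            ),
        },
    )

    # python setup.py --with-tests sdist   -> pkg.tests is shipped
    # python setup.py sdist                -> pkg.tests is removed
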
This argument exists - to support removing features that "crosscut" a distribution, such as - defining a 'tests' feature that removes all the 'tests' subpackages - provided by other features. The default for this argument is an empty - list. (Note: the named package(s) or modules must exist in the base - distribution when the 'setup()' function is initially called.) - - other keywords -- any other keyword arguments are saved, and passed to - the distribution's 'include()' and 'exclude()' methods when the - feature is included or excluded, respectively. So, for example, you - could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be - added or removed from the distribution as appropriate. - - A feature must include at least one 'requires', 'remove', or other - keyword argument. Otherwise, it can't affect the distribution in any way. - Note also that you can subclass 'Feature' to create your own specialized - feature types that modify the distribution in other ways when included or - excluded. See the docstrings for the various methods here for more detail. - Aside from the methods, the only feature attributes that distributions look - at are 'description' and 'optional'. - """ - def __init__(self, description, standard=False, available=True, - optional=True, require_features=(), remove=(), **extras - ): - - self.description = description - self.standard = standard - self.available = available - self.optional = optional - if isinstance(require_features,(str,Require)): - require_features = require_features, - - self.require_features = [ - r for r in require_features if isinstance(r,str) - ] - er = [r for r in require_features if not isinstance(r,str)] - if er: extras['require_features'] = er - - if isinstance(remove,str): - remove = remove, - self.remove = remove - self.extras = extras - - if not remove and not require_features and not extras: - raise DistutilsSetupError( - "Feature %s: must define 'require_features', 'remove', or at least one" - " of 'packages', 'py_modules', etc." - ) - - def include_by_default(self): - """Should this feature be included by default?""" - return self.available and self.standard - - def include_in(self,dist): - - """Ensure feature and its requirements are included in distribution - - You may override this in a subclass to perform additional operations on - the distribution. Note that this method may be called more than once - per feature, and so should be idempotent. - - """ - - if not self.available: - raise DistutilsPlatformError( - self.description+" is required," - "but is not available on this platform" - ) - - dist.include(**self.extras) - - for f in self.require_features: - dist.include_feature(f) - - - - def exclude_from(self,dist): - - """Ensure feature is excluded from distribution - - You may override this in a subclass to perform additional operations on - the distribution. This method will be called at most once per - feature, and only after all included features have been asked to - include themselves. - """ - - dist.exclude(**self.extras) - - if self.remove: - for item in self.remove: - dist.exclude_package(item) - - - - def validate(self,dist): - - """Verify that feature makes sense in context of distribution - - This method is called by the distribution just before it parses its - command line. It checks to ensure that the 'remove' attribute, if any, - contains only valid package/module names that are present in the base - distribution when 'setup()' is called. 
You may override it in a
-        subclass to perform any other required validation of the feature
-        against a target distribution.
-        """
-
-        for item in self.remove:
-            if not dist.has_contents_for(item):
-                raise DistutilsSetupError(
-                    "%s wants to be able to remove %s, but the distribution"
-                    " doesn't contain any packages or modules under %s"
-                    % (self.description, item, item)
-                )
-
-
-
-def check_packages(dist, attr, value):
-    for pkgname in value:
-        if not re.match(r'\w+(\.\w+)*', pkgname):
-            distutils.log.warn(
-                "WARNING: %r not a valid package name; please use only"
-                ".-separated package names in setup.py", pkgname
-            )
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/extension.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/extension.py
deleted file mode 100755
index d186c7a2..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/extension.py
+++ /dev/null
@@ -1,36 +0,0 @@
-from distutils.core import Extension as _Extension
-from setuptools.dist import _get_unpatched
-_Extension = _get_unpatched(_Extension)
-
-try:
-    from Pyrex.Distutils.build_ext import build_ext
-except ImportError:
-    have_pyrex = False
-else:
-    have_pyrex = True
-
-
-class Extension(_Extension):
-    """Extension that uses '.c' files in place of '.pyx' files"""
-
-    if not have_pyrex:
-        # convert .pyx extensions to .c
-        def __init__(self,*args,**kw):
-            _Extension.__init__(self,*args,**kw)
-            sources = []
-            for s in self.sources:
-                if s.endswith('.pyx'):
-                    sources.append(s[:-3]+'c')
-                else:
-                    sources.append(s)
-            self.sources = sources
-
-class Library(Extension):
-    """Just like a regular Extension, but built as a library instead"""
-
-import sys, distutils.core, distutils.extension
-distutils.core.Extension = Extension
-distutils.extension.Extension = Extension
-if 'distutils.command.build_ext' in sys.modules:
-    sys.modules['distutils.command.build_ext'].Extension = Extension
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/gui.exe b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/gui.exe
deleted file mode 100644
index 474838d5..00000000
Binary files a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/gui.exe and /dev/null differ
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/package_index.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/package_index.py
deleted file mode 100755
index 1d467f78..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/package_index.py
+++ /dev/null
@@ -1,830 +0,0 @@
-"""PyPI and direct package downloading"""
-import sys, os.path, re, urlparse, urllib2, shutil, random, socket, cStringIO
-import httplib
-from pkg_resources import *
-from distutils import log
-from distutils.errors import DistutilsError
-try:
-    from hashlib import md5
-except ImportError:
-    from md5 import md5
-from fnmatch import translate
-
-EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$')
-HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)
-# this is here to fix emacs' cruddy broken syntax highlighting
-PYPI_MD5 = re.compile(
-    '<a href="([^"#]+)">([^<]+)</a>\n\s+\\(<a (?:title="MD5 hash"\n\s+)'
-    'href="[^?]+\?:action=show_md5&digest=([0-9a-f]{32})">md5</a>\\)'
-)
-URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match
-EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()
-
-__all__ = [
-    'PackageIndex', 'distros_for_url', 'parse_bdist_wininst',
-    'interpret_distro_name',
-]
-
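
The Extension subclass in extension.py above swaps '.pyx' sources for '.c' at
construction time, so a Pyrex-using project still builds from its shipped C
files on machines without Pyrex; a sketch (module name invented):

    from setuptools import Extension

    ext = Extension('fast_mod', sources=['fast_mod.pyx'])
    # without Pyrex installed: ext.sources == ['fast_mod.c'], so distutils
    # compiles the pre-generated C file from the sdist instead of failing
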
-_SOCKET_TIMEOUT = 15 - -def parse_bdist_wininst(name): - """Return (base,pyversion) or (None,None) for possible .exe name""" - - lower = name.lower() - base, py_ver = None, None - - if lower.endswith('.exe'): - if lower.endswith('.win32.exe'): - base = name[:-10] - elif lower.startswith('.win32-py',-16): - py_ver = name[-7:-4] - base = name[:-16] - - return base,py_ver - -def egg_info_for_url(url): - scheme, server, path, parameters, query, fragment = urlparse.urlparse(url) - base = urllib2.unquote(path.split('/')[-1]) - if '#' in base: base, fragment = base.split('#',1) - return base,fragment - -def distros_for_url(url, metadata=None): - """Yield egg or source distribution objects that might be found at a URL""" - base, fragment = egg_info_for_url(url) - for dist in distros_for_location(url, base, metadata): yield dist - if fragment: - match = EGG_FRAGMENT.match(fragment) - if match: - for dist in interpret_distro_name( - url, match.group(1), metadata, precedence = CHECKOUT_DIST - ): - yield dist - -def distros_for_location(location, basename, metadata=None): - """Yield egg or source distribution objects based on basename""" - if basename.endswith('.egg.zip'): - basename = basename[:-4] # strip the .zip - if basename.endswith('.egg') and '-' in basename: - # only one, unambiguous interpretation - return [Distribution.from_location(location, basename, metadata)] - - if basename.endswith('.exe'): - win_base, py_ver = parse_bdist_wininst(basename) - if win_base is not None: - return interpret_distro_name( - location, win_base, metadata, py_ver, BINARY_DIST, "win32" - ) - - # Try source distro extensions (.zip, .tgz, etc.) - # - for ext in EXTENSIONS: - if basename.endswith(ext): - basename = basename[:-len(ext)] - return interpret_distro_name(location, basename, metadata) - return [] # no extension matched - -def distros_for_filename(filename, metadata=None): - """Yield possible egg or source distribution objects based on a filename""" - return distros_for_location( - normalize_path(filename), os.path.basename(filename), metadata - ) - - -def interpret_distro_name(location, basename, metadata, - py_version=None, precedence=SOURCE_DIST, platform=None -): - """Generate alternative interpretations of a source distro name - - Note: if `location` is a filesystem filename, you should call - ``pkg_resources.normalize_path()`` on it before passing it to this - routine! - """ - # Generate alternative interpretations of a source distro name - # Because some packages are ambiguous as to name/versions split - # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc. - # So, we generate each possible interepretation (e.g. "adns, python-1.1.0" - # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice, - # the spurious interpretations should be ignored, because in the event - # there's also an "adns" package, the spurious "python-1.1.0" version will - # compare lower than any numeric version number, and is therefore unlikely - # to match a request for it. It's still a potential problem, though, and - # in the long run PyPI and the distutils should go for "safe" names and - # versions in distribution archive names (sdist and bdist). 
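
A concrete trace of the ambiguity just described, using the same split the
loop below performs ('adns-python-1.1.0' is the comment's own example):

    basename = 'adns-python-1.1.0'
    parts = basename.split('-')
    splits = [('-'.join(parts[:p]), '-'.join(parts[p:]))
              for p in range(1, len(parts) + 1)]
    # [('adns', 'python-1.1.0'),
    #  ('adns-python', '1.1.0'),
    #  ('adns-python-1.1.0', '')]

Version comparison is what later demotes the spurious candidates.
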
- - parts = basename.split('-') - if not py_version: - for i,p in enumerate(parts[2:]): - if len(p)==5 and p.startswith('py2.'): - return # It's a bdist_dumb, not an sdist -- bail out - - for p in range(1,len(parts)+1): - yield Distribution( - location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]), - py_version=py_version, precedence = precedence, - platform = platform - ) - -REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I) -# this line is here to fix emacs' cruddy broken syntax highlighting - -def find_external_links(url, page): - """Find rel="homepage" and rel="download" links in `page`, yielding URLs""" - - for match in REL.finditer(page): - tag, rel = match.groups() - rels = map(str.strip, rel.lower().split(',')) - if 'homepage' in rels or 'download' in rels: - for match in HREF.finditer(tag): - yield urlparse.urljoin(url, htmldecode(match.group(1))) - - for tag in ("<th>Home Page", "<th>Download URL"): - pos = page.find(tag) - if pos!=-1: - match = HREF.search(page,pos) - if match: - yield urlparse.urljoin(url, htmldecode(match.group(1))) - -user_agent = "Python-urllib/%s distribute/%s" % ( - sys.version[:3], require('distribute')[0].version -) - - -class PackageIndex(Environment): - """A distribution index that scans web pages for download URLs""" - - def __init__(self, index_url="http://pypi.python.org/simple", hosts=('*',), - *args, **kw - ): - Environment.__init__(self,*args,**kw) - self.index_url = index_url + "/"[:not index_url.endswith('/')] - self.scanned_urls = {} - self.fetched_urls = {} - self.package_pages = {} - self.allows = re.compile('|'.join(map(translate,hosts))).match - self.to_scan = [] - - - - def process_url(self, url, retrieve=False): - """Evaluate a URL as a possible download, and maybe retrieve it""" - if url in self.scanned_urls and not retrieve: - return - self.scanned_urls[url] = True - if not URL_SCHEME(url): - self.process_filename(url) - return - else: - dists = list(distros_for_url(url)) - if dists: - if not self.url_ok(url): - return - self.debug("Found link: %s", url) - - if dists or not retrieve or url in self.fetched_urls: - map(self.add, dists) - return # don't need the actual page - - if not self.url_ok(url): - self.fetched_urls[url] = True - return - - self.info("Reading %s", url) - f = self.open_url(url, "Download error: %s -- Some packages may not be found!") - if f is None: return - self.fetched_urls[url] = self.fetched_urls[f.url] = True - - if 'html' not in f.headers.get('content-type', '').lower(): - f.close() # not html, we can't process it - return - - base = f.url # handle redirects - page = f.read() - if sys.version_info >= (3,): - charset = f.headers.get_param('charset') or 'latin-1' - page = page.decode(charset, "ignore") - f.close() - for match in HREF.finditer(page): - link = urlparse.urljoin(base, htmldecode(match.group(1))) - self.process_url(link) - if url.startswith(self.index_url) and getattr(f,'code',None)!=404: - page = self.process_index(url, page) - - def process_filename(self, fn, nested=False): - # process filenames or directories - if not os.path.exists(fn): - self.warn("Not found: %s", fn) - return - - if os.path.isdir(fn) and not nested: - path = os.path.realpath(fn) - for item in os.listdir(path): - self.process_filename(os.path.join(path,item), True) - - dists = distros_for_filename(fn) - if dists: - self.debug("Found: %s", fn) - map(self.add, dists) - - def url_ok(self, url, fatal=False): - s = URL_SCHEME(url) - if (s and s.group(1).lower()=='file') or 
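
PackageIndex's hosts argument (the backing for --allow-hosts) is compiled into
a single matcher in __init__ above; the same check in isolation, with invented
hostnames:

    import re
    from fnmatch import translate

    hosts = ('*.python.org', 'localhost')
    allows = re.compile('|'.join(map(translate, hosts))).match

    bool(allows('pypi.python.org'))    # True  -> url_ok() lets the link through
    bool(allows('evil.example.com'))   # False -> the link is blocked
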
self.allows(urlparse.urlparse(url)[1]): - return True - msg = "\nLink to % s ***BLOCKED*** by --allow-hosts\n" - if fatal: - raise DistutilsError(msg % url) - else: - self.warn(msg, url) - - def scan_egg_links(self, search_path): - for item in search_path: - if os.path.isdir(item): - for entry in os.listdir(item): - if entry.endswith('.egg-link'): - self.scan_egg_link(item, entry) - - def scan_egg_link(self, path, entry): - lines = filter(None, map(str.strip, open(os.path.join(path, entry)))) - if len(lines)==2: - for dist in find_distributions(os.path.join(path, lines[0])): - dist.location = os.path.join(path, *lines) - dist.precedence = SOURCE_DIST - self.add(dist) - - def process_index(self,url,page): - """Process the contents of a PyPI page""" - def scan(link): - # Process a URL to see if it's for a package page - if link.startswith(self.index_url): - parts = map( - urllib2.unquote, link[len(self.index_url):].split('/') - ) - if len(parts)==2 and '#' not in parts[1]: - # it's a package page, sanitize and index it - pkg = safe_name(parts[0]) - ver = safe_version(parts[1]) - self.package_pages.setdefault(pkg.lower(),{})[link] = True - return to_filename(pkg), to_filename(ver) - return None, None - - # process an index page into the package-page index - for match in HREF.finditer(page): - try: - scan( urlparse.urljoin(url, htmldecode(match.group(1))) ) - except ValueError: - pass - - pkg, ver = scan(url) # ensure this page is in the page index - if pkg: - # process individual package page - for new_url in find_external_links(url, page): - # Process the found URL - base, frag = egg_info_for_url(new_url) - if base.endswith('.py') and not frag: - if ver: - new_url+='#egg=%s-%s' % (pkg,ver) - else: - self.need_version_info(url) - self.scan_url(new_url) - - return PYPI_MD5.sub( - lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1,3,2), page - ) - else: - return "" # no sense double-scanning non-package pages - - - - def need_version_info(self, url): - self.scan_all( - "Page at %s links to .py file(s) without version info; an index " - "scan is required.", url - ) - - def scan_all(self, msg=None, *args): - if self.index_url not in self.fetched_urls: - if msg: self.warn(msg,*args) - self.info( - "Scanning index of all packages (this may take a while)" - ) - self.scan_url(self.index_url) - - def find_packages(self, requirement): - self.scan_url(self.index_url + requirement.unsafe_name+'/') - - if not self.package_pages.get(requirement.key): - # Fall back to safe version of the name - self.scan_url(self.index_url + requirement.project_name+'/') - - if not self.package_pages.get(requirement.key): - # We couldn't find the target package, so search the index page too - self.not_found_in_index(requirement) - - for url in list(self.package_pages.get(requirement.key,())): - # scan each page that might be related to the desired package - self.scan_url(url) - - def obtain(self, requirement, installer=None): - self.prescan(); self.find_packages(requirement) - for dist in self[requirement.key]: - if dist in requirement: - return dist - self.debug("%s does not match %s", requirement, dist) - return super(PackageIndex, self).obtain(requirement,installer) - - - - - - def check_md5(self, cs, info, filename, tfp): - if re.match('md5=[0-9a-f]{32}$', info): - self.debug("Validating md5 checksum for %s", filename) - if cs.hexdigest()<>info[4:]: - tfp.close() - os.unlink(filename) - raise DistutilsError( - "MD5 validation failed for "+os.path.basename(filename)+ - "; possible download problem?" 
- ) - - def add_find_links(self, urls): - """Add `urls` to the list that will be prescanned for searches""" - for url in urls: - if ( - self.to_scan is None # if we have already "gone online" - or not URL_SCHEME(url) # or it's a local file/directory - or url.startswith('file:') - or list(distros_for_url(url)) # or a direct package link - ): - # then go ahead and process it now - self.scan_url(url) - else: - # otherwise, defer retrieval till later - self.to_scan.append(url) - - def prescan(self): - """Scan urls scheduled for prescanning (e.g. --find-links)""" - if self.to_scan: - map(self.scan_url, self.to_scan) - self.to_scan = None # from now on, go ahead and process immediately - - def not_found_in_index(self, requirement): - if self[requirement.key]: # we've seen at least one distro - meth, msg = self.info, "Couldn't retrieve index page for %r" - else: # no distros seen for this name, might be misspelled - meth, msg = (self.warn, - "Couldn't find index page for %r (maybe misspelled?)") - meth(msg, requirement.unsafe_name) - self.scan_all() - - def download(self, spec, tmpdir): - """Locate and/or download `spec` to `tmpdir`, returning a local path - - `spec` may be a ``Requirement`` object, or a string containing a URL, - an existing local filename, or a project/version requirement spec - (i.e. the string form of a ``Requirement`` object). If it is the URL - of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one - that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is - automatically created alongside the downloaded file. - - If `spec` is a ``Requirement`` object or a string containing a - project/version requirement spec, this method returns the location of - a matching distribution (possibly after downloading it to `tmpdir`). - If `spec` is a locally existing file or directory name, it is simply - returned unchanged. If `spec` is a URL, it is downloaded to a subpath - of `tmpdir`, and the local filename is returned. Various errors may be - raised if a problem occurs during downloading. - """ - if not isinstance(spec,Requirement): - scheme = URL_SCHEME(spec) - if scheme: - # It's a url, download it to tmpdir - found = self._download_url(scheme.group(1), spec, tmpdir) - base, fragment = egg_info_for_url(spec) - if base.endswith('.py'): - found = self.gen_setup(found,fragment,tmpdir) - return found - elif os.path.exists(spec): - # Existing file or directory, just return it - return spec - else: - try: - spec = Requirement.parse(spec) - except ValueError: - raise DistutilsError( - "Not a URL, existing file, or requirement spec: %r" % - (spec,) - ) - return getattr(self.fetch_distribution(spec, tmpdir),'location',None) - - - def fetch_distribution(self, - requirement, tmpdir, force_scan=False, source=False, develop_ok=False, - local_index=None - ): - """Obtain a distribution suitable for fulfilling `requirement` - - `requirement` must be a ``pkg_resources.Requirement`` instance. - If necessary, or if the `force_scan` flag is set, the requirement is - searched for in the (online) package index as well as the locally - installed packages. If a distribution matching `requirement` is found, - the returned distribution's ``location`` is the value you would have - gotten from calling the ``download()`` method with the matching - distribution's URL or filename. If no matching distribution is found, - ``None`` is returned. - - If the `source` flag is set, only source distributions and source - checkout links will be considered. 
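
The three spec forms download() accepts, per its docstring above (the URL,
path, and project name are placeholders, and the calls touch the network or
filesystem):

    from setuptools.package_index import PackageIndex

    pi = PackageIndex()
    tmpdir = '/tmp/dl'

    pi.download('http://example.com/foo-1.0.tar.gz', tmpdir)  # URL: fetched into tmpdir
    pi.download('/home/me/foo-1.0.tar.gz', tmpdir)            # existing path: returned unchanged
    pi.download('foo>=1.0', tmpdir)                           # requirement spec: index lookup
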
Unless the `develop_ok` flag is - set, development and system eggs (i.e., those using the ``.egg-info`` - format) will be ignored. - """ - - # process a Requirement - self.info("Searching for %s", requirement) - skipped = {} - dist = None - - def find(req, env=None): - if env is None: - env = self - # Find a matching distribution; may be called more than once - - for dist in env[req.key]: - - if dist.precedence==DEVELOP_DIST and not develop_ok: - if dist not in skipped: - self.warn("Skipping development or system egg: %s",dist) - skipped[dist] = 1 - continue - - if dist in req and (dist.precedence<=SOURCE_DIST or not source): - self.info("Best match: %s", dist) - return dist.clone( - location=self.download(dist.location, tmpdir) - ) - - if force_scan: - self.prescan() - self.find_packages(requirement) - dist = find(requirement) - - if local_index is not None: - dist = dist or find(requirement, local_index) - - if dist is None and self.to_scan is not None: - self.prescan() - dist = find(requirement) - - if dist is None and not force_scan: - self.find_packages(requirement) - dist = find(requirement) - - if dist is None: - self.warn( - "No local packages or download links found for %s%s", - (source and "a source distribution of " or ""), - requirement, - ) - return dist - - def fetch(self, requirement, tmpdir, force_scan=False, source=False): - """Obtain a file suitable for fulfilling `requirement` - - DEPRECATED; use the ``fetch_distribution()`` method now instead. For - backward compatibility, this routine is identical but returns the - ``location`` of the downloaded distribution instead of a distribution - object. - """ - dist = self.fetch_distribution(requirement,tmpdir,force_scan,source) - if dist is not None: - return dist.location - return None - - - - - - - - - def gen_setup(self, filename, fragment, tmpdir): - match = EGG_FRAGMENT.match(fragment) - dists = match and [d for d in - interpret_distro_name(filename, match.group(1), None) if d.version - ] or [] - - if len(dists)==1: # unambiguous ``#egg`` fragment - basename = os.path.basename(filename) - - # Make sure the file has been downloaded to the temp dir. - if os.path.dirname(filename) != tmpdir: - dst = os.path.join(tmpdir, basename) - from setuptools.command.easy_install import samefile - if not samefile(filename, dst): - shutil.copy2(filename, dst) - filename=dst - - file = open(os.path.join(tmpdir, 'setup.py'), 'w') - file.write( - "from setuptools import setup\n" - "setup(name=%r, version=%r, py_modules=[%r])\n" - % ( - dists[0].project_name, dists[0].version, - os.path.splitext(basename)[0] - ) - ) - file.close() - return filename - - elif match: - raise DistutilsError( - "Can't unambiguously interpret project/version identifier %r; " - "any dashes in the name or version should be escaped using " - "underscores. %r" % (fragment,dists) - ) - else: - raise DistutilsError( - "Can't process plain .py files without an '#egg=name-version'" - " suffix to enable automatic setup script generation." 
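
Concretely, for a link like http://example.com/foo.py#egg=foo-1.0 (URL
invented), gen_setup() above leaves the downloaded module untouched and writes
this wrapper beside it in tmpdir, which is what makes a bare .py link
installable:

    # tmpdir/setup.py, exactly as generated:
    from setuptools import setup
    setup(name='foo', version='1.0', py_modules=['foo'])
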
- ) - - dl_blocksize = 8192 - def _download_to(self, url, filename): - self.info("Downloading %s", url) - # Download the file - fp, tfp, info = None, None, None - try: - if '#' in url: - url, info = url.split('#', 1) - fp = self.open_url(url) - if isinstance(fp, urllib2.HTTPError): - raise DistutilsError( - "Can't download %s: %s %s" % (url, fp.code,fp.msg) - ) - cs = md5() - headers = fp.info() - blocknum = 0 - bs = self.dl_blocksize - size = -1 - if "content-length" in headers: - size = int(headers["Content-Length"]) - self.reporthook(url, filename, blocknum, bs, size) - tfp = open(filename,'wb') - while True: - block = fp.read(bs) - if block: - cs.update(block) - tfp.write(block) - blocknum += 1 - self.reporthook(url, filename, blocknum, bs, size) - else: - break - if info: self.check_md5(cs, info, filename, tfp) - return headers - finally: - if fp: fp.close() - if tfp: tfp.close() - - def reporthook(self, url, filename, blocknum, blksize, size): - pass # no-op - - - def open_url(self, url, warning=None): - if url.startswith('file:'): - return local_open(url) - try: - return open_with_auth(url) - except (ValueError, httplib.InvalidURL), v: - msg = ' '.join([str(arg) for arg in v.args]) - if warning: - self.warn(warning, msg) - else: - raise DistutilsError('%s %s' % (url, msg)) - except urllib2.HTTPError, v: - return v - except urllib2.URLError, v: - if warning: - self.warn(warning, v.reason) - else: - raise DistutilsError("Download error for %s: %s" - % (url, v.reason)) - except httplib.BadStatusLine, v: - if warning: - self.warn(warning, v.line) - else: - raise DistutilsError('%s returned a bad status line. ' - 'The server might be down, %s' % \ - (url, v.line)) - except httplib.HTTPException, v: - if warning: - self.warn(warning, v) - else: - raise DistutilsError("Download error for %s: %s" - % (url, v)) - - def _download_url(self, scheme, url, tmpdir): - # Determine download filename - # - name = filter(None,urlparse.urlparse(url)[2].split('/')) - if name: - name = name[-1] - while '..' 
in name: - name = name.replace('..','.').replace('\\','_') - else: - name = "__downloaded__" # default if URL has no path contents - - if name.endswith('.egg.zip'): - name = name[:-4] # strip the extra .zip before download - - filename = os.path.join(tmpdir,name) - - # Download the file - # - if scheme=='svn' or scheme.startswith('svn+'): - return self._download_svn(url, filename) - elif scheme=='file': - return urllib2.url2pathname(urlparse.urlparse(url)[2]) - else: - self.url_ok(url, True) # raises error if not allowed - return self._attempt_download(url, filename) - - - - def scan_url(self, url): - self.process_url(url, True) - - - def _attempt_download(self, url, filename): - headers = self._download_to(url, filename) - if 'html' in headers.get('content-type','').lower(): - return self._download_html(url, headers, filename) - else: - return filename - - def _download_html(self, url, headers, filename): - file = open(filename) - for line in file: - if line.strip(): - # Check for a subversion index page - if re.search(r'<title>([^- ]+ - )?Revision \d+:', line): - # it's a subversion index page: - file.close() - os.unlink(filename) - return self._download_svn(url, filename) - break # not an index page - file.close() - os.unlink(filename) - raise DistutilsError("Unexpected HTML page found at "+url) - - def _download_svn(self, url, filename): - url = url.split('#',1)[0] # remove any fragment for svn's sake - self.info("Doing subversion checkout from %s to %s", url, filename) - os.system("svn checkout -q %s %s" % (url, filename)) - return filename - - def debug(self, msg, *args): - log.debug(msg, *args) - - def info(self, msg, *args): - log.info(msg, *args) - - def warn(self, msg, *args): - log.warn(msg, *args) - -# This pattern matches a character entity reference (a decimal numeric -# references, a hexadecimal numeric reference, or a named reference). 
-entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub - -def uchr(c): - if not isinstance(c, int): - return c - if c>255: return unichr(c) - return chr(c) - -def decode_entity(match): - what = match.group(1) - if what.startswith('#x'): - what = int(what[2:], 16) - elif what.startswith('#'): - what = int(what[1:]) - else: - from htmlentitydefs import name2codepoint - what = name2codepoint.get(what, match.group(0)) - return uchr(what) - -def htmldecode(text): - """Decode HTML entities in the given text.""" - return entity_sub(decode_entity, text) - - - - - - - - - - - - - - - -def socket_timeout(timeout=15): - def _socket_timeout(func): - def _socket_timeout(*args, **kwargs): - old_timeout = socket.getdefaulttimeout() - socket.setdefaulttimeout(timeout) - try: - return func(*args, **kwargs) - finally: - socket.setdefaulttimeout(old_timeout) - return _socket_timeout - return _socket_timeout - - -def open_with_auth(url): - """Open a urllib2 request, handling HTTP authentication""" - - scheme, netloc, path, params, query, frag = urlparse.urlparse(url) - - if scheme in ('http', 'https'): - auth, host = urllib2.splituser(netloc) - else: - auth = None - - if auth: - auth = "Basic " + urllib2.unquote(auth).encode('base64').strip() - new_url = urlparse.urlunparse((scheme,host,path,params,query,frag)) - request = urllib2.Request(new_url) - request.add_header("Authorization", auth) - else: - request = urllib2.Request(url) - - request.add_header('User-Agent', user_agent) - fp = urllib2.urlopen(request) - - if auth: - # Put authentication info back into request URL if same host, - # so that links found on the page will work - s2, h2, path2, param2, query2, frag2 = urlparse.urlparse(fp.url) - if s2==scheme and h2==host: - fp.url = urlparse.urlunparse((s2,netloc,path2,param2,query2,frag2)) - - return fp - -# adding a timeout to avoid freezing package_index -open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth) - - - - - - - - - - - -def fix_sf_url(url): - return url # backward compatibility - -def local_open(url): - """Read a local path, with special support for directories""" - scheme, server, path, param, query, frag = urlparse.urlparse(url) - filename = urllib2.url2pathname(path) - if os.path.isfile(filename): - return urllib2.urlopen(url) - elif path.endswith('/') and os.path.isdir(filename): - files = [] - for f in os.listdir(filename): - if f=='index.html': - fp = open(os.path.join(filename,f),'rb') - body = fp.read() - fp.close() - break - elif os.path.isdir(os.path.join(filename,f)): - f+='/' - files.append("<a href=%r>%s</a>" % (f,f)) - else: - body = ("<html><head><title>%s" % url) + \ - "%s" % '\n'.join(files) - status, message = 200, "OK" - else: - status, message, body = 404, "Path not found", "Not found" - - return urllib2.HTTPError(url, status, message, - {'content-type':'text/html'}, cStringIO.StringIO(body)) - - - - - - - - - - - - - -# this line is a kludge to keep the trailing blank lines for pje's editor diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/sandbox.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/sandbox.py deleted file mode 100755 index a06d4483..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/sandbox.py +++ /dev/null @@ -1,282 +0,0 @@ -import os, sys, __builtin__, tempfile, operator -_os = sys.modules[os.name] -try: - _file = file -except NameError: - _file = None -_open = open -from distutils.errors import DistutilsError -__all__ = [ - "AbstractSandbox", 
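
The socket_timeout decorator above is a plain save/set/restore wrapper,
applied to open_with_auth at import time; it can guard any blocking network
call the same way. A sketch, assuming the module is importable (the function
and URL are invented):

    import urllib2
    from setuptools.package_index import socket_timeout

    @socket_timeout(30)
    def fetch(url):
        # the default socket timeout is 30s inside this call and is
        # restored afterwards, even if urlopen() raises
        return urllib2.urlopen(url).read()
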
"DirectorySandbox", "SandboxViolation", "run_setup", -] -def run_setup(setup_script, args): - """Run a distutils setup script, sandboxed in its directory""" - old_dir = os.getcwd() - save_argv = sys.argv[:] - save_path = sys.path[:] - setup_dir = os.path.abspath(os.path.dirname(setup_script)) - temp_dir = os.path.join(setup_dir,'temp') - if not os.path.isdir(temp_dir): os.makedirs(temp_dir) - save_tmp = tempfile.tempdir - save_modules = sys.modules.copy() - try: - tempfile.tempdir = temp_dir - os.chdir(setup_dir) - try: - sys.argv[:] = [setup_script]+list(args) - sys.path.insert(0, setup_dir) - DirectorySandbox(setup_dir).run( - lambda: execfile( - "setup.py", - {'__file__':setup_script, '__name__':'__main__'} - ) - ) - except SystemExit, v: - if v.args and v.args[0]: - raise - # Normal exit, just return - finally: - sys.modules.update(save_modules) - for key in list(sys.modules): - if key not in save_modules: del sys.modules[key] - os.chdir(old_dir) - sys.path[:] = save_path - sys.argv[:] = save_argv - tempfile.tempdir = save_tmp - -class AbstractSandbox: - """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts""" - - _active = False - - def __init__(self): - self._attrs = [ - name for name in dir(_os) - if not name.startswith('_') and hasattr(self,name) - ] - - def _copy(self, source): - for name in self._attrs: - setattr(os, name, getattr(source,name)) - - def run(self, func): - """Run 'func' under os sandboxing""" - try: - self._copy(self) - if _file: - __builtin__.file = self._file - __builtin__.open = self._open - self._active = True - return func() - finally: - self._active = False - if _file: - __builtin__.file = _file - __builtin__.open = _open - self._copy(_os) - - - def _mk_dual_path_wrapper(name): - original = getattr(_os,name) - def wrap(self,src,dst,*args,**kw): - if self._active: - src,dst = self._remap_pair(name,src,dst,*args,**kw) - return original(src,dst,*args,**kw) - return wrap - - - for name in ["rename", "link", "symlink"]: - if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name) - - - def _mk_single_path_wrapper(name, original=None): - original = original or getattr(_os,name) - def wrap(self,path,*args,**kw): - if self._active: - path = self._remap_input(name,path,*args,**kw) - return original(path,*args,**kw) - return wrap - - if _file: - _file = _mk_single_path_wrapper('file', _file) - _open = _mk_single_path_wrapper('open', _open) - for name in [ - "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir", - "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat", - "startfile", "mkfifo", "mknod", "pathconf", "access" - ]: - if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name) - - - def _mk_single_with_return(name): - original = getattr(_os,name) - def wrap(self,path,*args,**kw): - if self._active: - path = self._remap_input(name,path,*args,**kw) - return self._remap_output(name, original(path,*args,**kw)) - return original(path,*args,**kw) - return wrap - - for name in ['readlink', 'tempnam']: - if hasattr(_os,name): locals()[name] = _mk_single_with_return(name) - - def _mk_query(name): - original = getattr(_os,name) - def wrap(self,*args,**kw): - retval = original(*args,**kw) - if self._active: - return self._remap_output(name, retval) - return retval - return wrap - - for name in ['getcwd', 'tmpnam']: - if hasattr(_os,name): locals()[name] = _mk_query(name) - - def _validate_path(self,path): - """Called to remap or validate any path, whether input or output""" - return path - - def 
_remap_input(self,operation,path,*args,**kw): - """Called for path inputs""" - return self._validate_path(path) - - def _remap_output(self,operation,path): - """Called for path outputs""" - return self._validate_path(path) - - def _remap_pair(self,operation,src,dst,*args,**kw): - """Called for path pairs like rename, link, and symlink operations""" - return ( - self._remap_input(operation+'-from',src,*args,**kw), - self._remap_input(operation+'-to',dst,*args,**kw) - ) - - -if hasattr(os, 'devnull'): - _EXCEPTIONS = [os.devnull,] -else: - _EXCEPTIONS = [] - -try: - from win32com.client.gencache import GetGeneratePath - _EXCEPTIONS.append(GetGeneratePath()) - del GetGeneratePath -except ImportError: - # it appears pywin32 is not installed, so no need to exclude. - pass - -class DirectorySandbox(AbstractSandbox): - """Restrict operations to a single subdirectory - pseudo-chroot""" - - write_ops = dict.fromkeys([ - "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir", - "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam", - ]) - - def __init__(self, sandbox, exceptions=_EXCEPTIONS): - self._sandbox = os.path.normcase(os.path.realpath(sandbox)) - self._prefix = os.path.join(self._sandbox,'') - self._exceptions = [os.path.normcase(os.path.realpath(path)) for path in exceptions] - AbstractSandbox.__init__(self) - - def _violation(self, operation, *args, **kw): - raise SandboxViolation(operation, args, kw) - - if _file: - def _file(self, path, mode='r', *args, **kw): - if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): - self._violation("file", path, mode, *args, **kw) - return _file(path,mode,*args,**kw) - - def _open(self, path, mode='r', *args, **kw): - if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): - self._violation("open", path, mode, *args, **kw) - return _open(path,mode,*args,**kw) - - def tmpnam(self): - self._violation("tmpnam") - - def _ok(self,path): - active = self._active - try: - self._active = False - realpath = os.path.normcase(os.path.realpath(path)) - if (self._exempted(realpath) or realpath == self._sandbox - or realpath.startswith(self._prefix)): - return True - finally: - self._active = active - - def _exempted(self, filepath): - exception_matches = map(filepath.startswith, self._exceptions) - return True in exception_matches - - def _remap_input(self,operation,path,*args,**kw): - """Called for path inputs""" - if operation in self.write_ops and not self._ok(path): - self._violation(operation, os.path.realpath(path), *args, **kw) - return path - - def _remap_pair(self,operation,src,dst,*args,**kw): - """Called for path pairs like rename, link, and symlink operations""" - if not self._ok(src) or not self._ok(dst): - self._violation(operation, src, dst, *args, **kw) - return (src,dst) - - def open(self, file, flags, mode=0777): - """Called for low-level os.open()""" - if flags & WRITE_FLAGS and not self._ok(file): - self._violation("os.open", file, flags, mode) - return _os.open(file,flags,mode) - - -WRITE_FLAGS = reduce( - operator.or_, - [getattr(_os, a, 0) for a in - "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()] -) - - - - -class SandboxViolation(DistutilsError): - """A setup script attempted to modify the filesystem outside the sandbox""" - - def __str__(self): - return """SandboxViolation: %s%r %s - -The package setup script has attempted to modify files on your system -that are not within the EasyInstall build area, and has been aborted. 
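
How the pseudo-chroot behaves in practice (paths invented): reads anywhere are
fine, but a write outside the sandboxed directory trips _open()/_remap_input()
above and surfaces as the exception whose message follows:

    from setuptools.sandbox import DirectorySandbox, SandboxViolation

    def touch_outside():
        open('/tmp/outside.txt', 'w').write('x')   # not under the sandbox

    try:
        DirectorySandbox('/tmp/build').run(touch_outside)
    except SandboxViolation:
        pass  # the offending operation and arguments are in the message
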
- -This package cannot be safely installed by EasyInstall, and may not -support alternate installation locations even if you run its setup -script by hand. Please inform the package's author and the EasyInstall -maintainers to find out if a fix or workaround is available.""" % self.args - - - - - - - - - - - - - - - - - - - - - - - - - - - -# diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/__init__.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/__init__.py deleted file mode 100755 index 9af44a88..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/__init__.py +++ /dev/null @@ -1,370 +0,0 @@ -"""Tests for the 'setuptools' package""" -from unittest import TestSuite, TestCase, makeSuite, defaultTestLoader -import distutils.core, distutils.cmd -from distutils.errors import DistutilsOptionError, DistutilsPlatformError -from distutils.errors import DistutilsSetupError -import setuptools, setuptools.dist -from setuptools import Feature -from distutils.core import Extension -extract_constant, get_module_constant = None, None -from setuptools.depends import * -from distutils.version import StrictVersion, LooseVersion -from distutils.util import convert_path -import sys, os.path - -def additional_tests(): - import doctest, unittest - suite = unittest.TestSuite(( - doctest.DocFileSuite( - os.path.join('tests', 'api_tests.txt'), - optionflags=doctest.ELLIPSIS, package='pkg_resources', - ), - )) - if sys.platform == 'win32': - suite.addTest(doctest.DocFileSuite('win_script_wrapper.txt')) - return suite - -def makeSetup(**args): - """Return distribution from 'setup(**args)', without executing commands""" - - distutils.core._setup_stop_after = "commandline" - - # Don't let system command line leak into tests! 
- args.setdefault('script_args',['install']) - - try: - return setuptools.setup(**args) - finally: - distutils.core_setup_stop_after = None - - - - -class DependsTests(TestCase): - - def testExtractConst(self): - if not extract_constant: return # skip on non-bytecode platforms - - def f1(): - global x,y,z - x = "test" - y = z - - # unrecognized name - self.assertEqual(extract_constant(f1.func_code,'q', -1), None) - - # constant assigned - self.assertEqual(extract_constant(f1.func_code,'x', -1), "test") - - # expression assigned - self.assertEqual(extract_constant(f1.func_code,'y', -1), -1) - - # recognized name, not assigned - self.assertEqual(extract_constant(f1.func_code,'z', -1), None) - - - def testFindModule(self): - self.assertRaises(ImportError, find_module, 'no-such.-thing') - self.assertRaises(ImportError, find_module, 'setuptools.non-existent') - f,p,i = find_module('setuptools.tests'); f.close() - - def testModuleExtract(self): - if not get_module_constant: return # skip on non-bytecode platforms - from email import __version__ - self.assertEqual( - get_module_constant('email','__version__'), __version__ - ) - self.assertEqual( - get_module_constant('sys','version'), sys.version - ) - self.assertEqual( - get_module_constant('setuptools.tests','__doc__'),__doc__ - ) - - def testRequire(self): - if not extract_constant: return # skip on non-bytecode platforms - - req = Require('Email','1.0.3','email') - - self.assertEqual(req.name, 'Email') - self.assertEqual(req.module, 'email') - self.assertEqual(req.requested_version, '1.0.3') - self.assertEqual(req.attribute, '__version__') - self.assertEqual(req.full_name(), 'Email-1.0.3') - - from email import __version__ - self.assertEqual(req.get_version(), __version__) - self.assert_(req.version_ok('1.0.9')) - self.assert_(not req.version_ok('0.9.1')) - self.assert_(not req.version_ok('unknown')) - - self.assert_(req.is_present()) - self.assert_(req.is_current()) - - req = Require('Email 3000','03000','email',format=LooseVersion) - self.assert_(req.is_present()) - self.assert_(not req.is_current()) - self.assert_(not req.version_ok('unknown')) - - req = Require('Do-what-I-mean','1.0','d-w-i-m') - self.assert_(not req.is_present()) - self.assert_(not req.is_current()) - - req = Require('Tests', None, 'tests', homepage="http://example.com") - self.assertEqual(req.format, None) - self.assertEqual(req.attribute, None) - self.assertEqual(req.requested_version, None) - self.assertEqual(req.full_name(), 'Tests') - self.assertEqual(req.homepage, 'http://example.com') - - paths = [os.path.dirname(p) for p in __path__] - self.assert_(req.is_present(paths)) - self.assert_(req.is_current(paths)) - - -class DistroTests(TestCase): - - def setUp(self): - self.e1 = Extension('bar.ext',['bar.c']) - self.e2 = Extension('c.y', ['y.c']) - - self.dist = makeSetup( - packages=['a', 'a.b', 'a.b.c', 'b', 'c'], - py_modules=['b.d','x'], - ext_modules = (self.e1, self.e2), - package_dir = {}, - ) - - - def testDistroType(self): - self.assert_(isinstance(self.dist,setuptools.dist.Distribution)) - - - def testExcludePackage(self): - self.dist.exclude_package('a') - self.assertEqual(self.dist.packages, ['b','c']) - - self.dist.exclude_package('b') - self.assertEqual(self.dist.packages, ['c']) - self.assertEqual(self.dist.py_modules, ['x']) - self.assertEqual(self.dist.ext_modules, [self.e1, self.e2]) - - self.dist.exclude_package('c') - self.assertEqual(self.dist.packages, []) - self.assertEqual(self.dist.py_modules, ['x']) - self.assertEqual(self.dist.ext_modules, 
[self.e1]) - - # test removals from unspecified options - makeSetup().exclude_package('x') - - - - - - - - def testIncludeExclude(self): - # remove an extension - self.dist.exclude(ext_modules=[self.e1]) - self.assertEqual(self.dist.ext_modules, [self.e2]) - - # add it back in - self.dist.include(ext_modules=[self.e1]) - self.assertEqual(self.dist.ext_modules, [self.e2, self.e1]) - - # should not add duplicate - self.dist.include(ext_modules=[self.e1]) - self.assertEqual(self.dist.ext_modules, [self.e2, self.e1]) - - def testExcludePackages(self): - self.dist.exclude(packages=['c','b','a']) - self.assertEqual(self.dist.packages, []) - self.assertEqual(self.dist.py_modules, ['x']) - self.assertEqual(self.dist.ext_modules, [self.e1]) - - def testEmpty(self): - dist = makeSetup() - dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2]) - dist = makeSetup() - dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2]) - - def testContents(self): - self.assert_(self.dist.has_contents_for('a')) - self.dist.exclude_package('a') - self.assert_(not self.dist.has_contents_for('a')) - - self.assert_(self.dist.has_contents_for('b')) - self.dist.exclude_package('b') - self.assert_(not self.dist.has_contents_for('b')) - - self.assert_(self.dist.has_contents_for('c')) - self.dist.exclude_package('c') - self.assert_(not self.dist.has_contents_for('c')) - - - - - def testInvalidIncludeExclude(self): - self.assertRaises(DistutilsSetupError, - self.dist.include, nonexistent_option='x' - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, nonexistent_option='x' - ) - self.assertRaises(DistutilsSetupError, - self.dist.include, packages={'x':'y'} - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, packages={'x':'y'} - ) - self.assertRaises(DistutilsSetupError, - self.dist.include, ext_modules={'x':'y'} - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, ext_modules={'x':'y'} - ) - - self.assertRaises(DistutilsSetupError, - self.dist.include, package_dir=['q'] - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, package_dir=['q'] - ) - - - - - - - - - - - - - - - -class FeatureTests(TestCase): - - def setUp(self): - self.req = Require('Distutils','1.0.3','distutils') - self.dist = makeSetup( - features={ - 'foo': Feature("foo",standard=True,require_features=['baz',self.req]), - 'bar': Feature("bar", standard=True, packages=['pkg.bar'], - py_modules=['bar_et'], remove=['bar.ext'], - ), - 'baz': Feature( - "baz", optional=False, packages=['pkg.baz'], - scripts = ['scripts/baz_it'], - libraries=[('libfoo','foo/foofoo.c')] - ), - 'dwim': Feature("DWIM", available=False, remove='bazish'), - }, - script_args=['--without-bar', 'install'], - packages = ['pkg.bar', 'pkg.foo'], - py_modules = ['bar_et', 'bazish'], - ext_modules = [Extension('bar.ext',['bar.c'])] - ) - - def testDefaults(self): - self.assert_(not - Feature( - "test",standard=True,remove='x',available=False - ).include_by_default() - ) - self.assert_( - Feature("test",standard=True,remove='x').include_by_default() - ) - # Feature must have either kwargs, removes, or require_features - self.assertRaises(DistutilsSetupError, Feature, "test") - - def testAvailability(self): - self.assertRaises( - DistutilsPlatformError, - self.dist.features['dwim'].include_in, self.dist - ) - - def testFeatureOptions(self): - dist = self.dist - self.assert_( - ('with-dwim',None,'include DWIM') in dist.feature_options - ) - self.assert_( - ('without-dwim',None,'exclude DWIM (default)') in 
dist.feature_options - ) - self.assert_( - ('with-bar',None,'include bar (default)') in dist.feature_options - ) - self.assert_( - ('without-bar',None,'exclude bar') in dist.feature_options - ) - self.assertEqual(dist.feature_negopt['without-foo'],'with-foo') - self.assertEqual(dist.feature_negopt['without-bar'],'with-bar') - self.assertEqual(dist.feature_negopt['without-dwim'],'with-dwim') - self.assert_(not 'without-baz' in dist.feature_negopt) - - def testUseFeatures(self): - dist = self.dist - self.assertEqual(dist.with_foo,1) - self.assertEqual(dist.with_bar,0) - self.assertEqual(dist.with_baz,1) - self.assert_(not 'bar_et' in dist.py_modules) - self.assert_(not 'pkg.bar' in dist.packages) - self.assert_('pkg.baz' in dist.packages) - self.assert_('scripts/baz_it' in dist.scripts) - self.assert_(('libfoo','foo/foofoo.c') in dist.libraries) - self.assertEqual(dist.ext_modules,[]) - self.assertEqual(dist.require_features, [self.req]) - - # If we ask for bar, it should fail because we explicitly disabled - # it on the command line - self.assertRaises(DistutilsOptionError, dist.include_feature, 'bar') - - def testFeatureWithInvalidRemove(self): - self.assertRaises( - SystemExit, makeSetup, features = {'x':Feature('x', remove='y')} - ) - -class TestCommandTests(TestCase): - - def testTestIsCommand(self): - test_cmd = makeSetup().get_command_obj('test') - self.assert_(isinstance(test_cmd, distutils.cmd.Command)) - - def testLongOptSuiteWNoDefault(self): - ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite']) - ts1 = ts1.get_command_obj('test') - ts1.ensure_finalized() - self.assertEqual(ts1.test_suite, 'foo.tests.suite') - - def testDefaultSuite(self): - ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test') - ts2.ensure_finalized() - self.assertEqual(ts2.test_suite, 'bar.tests.suite') - - def testDefaultWModuleOnCmdLine(self): - ts3 = makeSetup( - test_suite='bar.tests', - script_args=['test','-m','foo.tests'] - ).get_command_obj('test') - ts3.ensure_finalized() - self.assertEqual(ts3.test_module, 'foo.tests') - self.assertEqual(ts3.test_suite, 'foo.tests.test_suite') - - def testConflictingOptions(self): - ts4 = makeSetup( - script_args=['test','-m','bar.tests', '-s','foo.tests.suite'] - ).get_command_obj('test') - self.assertRaises(DistutilsOptionError, ts4.ensure_finalized) - - def testNoSuite(self): - ts5 = makeSetup().get_command_obj('test') - ts5.ensure_finalized() - self.assertEqual(ts5.test_suite, None) - - - - - diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/doctest.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/doctest.py deleted file mode 100755 index be399a9d..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/doctest.py +++ /dev/null @@ -1,2679 +0,0 @@ -# Module doctest. -# Released to the public domain 16-Jan-2001, by Tim Peters (tim@python.org). -# Major enhancements and refactoring by: -# Jim Fulton -# Edward Loper - -# Provided as-is; use at your own risk; no warranty; no promises; enjoy! - -try: - basestring -except NameError: - basestring = str,unicode - -try: - enumerate -except NameError: - def enumerate(seq): - return zip(range(len(seq)),seq) - -r"""Module doctest -- a framework for running examples in docstrings. 
- -In simplest use, end each module M to be tested with: - -def _test(): - import doctest - doctest.testmod() - -if __name__ == "__main__": - _test() - -Then running the module as a script will cause the examples in the -docstrings to get executed and verified: - -python M.py - -This won't display anything unless an example fails, in which case the -failing example(s) and the cause(s) of the failure(s) are printed to stdout -(why not stderr? because stderr is a lame hack <0.2 wink>), and the final -line of output is "Test failed.". - -Run it with the -v switch instead: - -python M.py -v - -and a detailed report of all examples tried is printed to stdout, along -with assorted summaries at the end. - -You can force verbose mode by passing "verbose=True" to testmod, or prohibit -it by passing "verbose=False". In either of those cases, sys.argv is not -examined by testmod. - -There are a variety of other ways to run doctests, including integration -with the unittest framework, and support for running non-Python text -files containing doctests. There are also many ways to override parts -of doctest's default behaviors. See the Library Reference Manual for -details. -""" - -__docformat__ = 'reStructuredText en' - -__all__ = [ - # 0, Option Flags - 'register_optionflag', - 'DONT_ACCEPT_TRUE_FOR_1', - 'DONT_ACCEPT_BLANKLINE', - 'NORMALIZE_WHITESPACE', - 'ELLIPSIS', - 'IGNORE_EXCEPTION_DETAIL', - 'COMPARISON_FLAGS', - 'REPORT_UDIFF', - 'REPORT_CDIFF', - 'REPORT_NDIFF', - 'REPORT_ONLY_FIRST_FAILURE', - 'REPORTING_FLAGS', - # 1. Utility Functions - 'is_private', - # 2. Example & DocTest - 'Example', - 'DocTest', - # 3. Doctest Parser - 'DocTestParser', - # 4. Doctest Finder - 'DocTestFinder', - # 5. Doctest Runner - 'DocTestRunner', - 'OutputChecker', - 'DocTestFailure', - 'UnexpectedException', - 'DebugRunner', - # 6. Test Functions - 'testmod', - 'testfile', - 'run_docstring_examples', - # 7. Tester - 'Tester', - # 8. Unittest Support - 'DocTestSuite', - 'DocFileSuite', - 'set_unittest_reportflags', - # 9. Debugging Support - 'script_from_examples', - 'testsource', - 'debug_src', - 'debug', -] - -import __future__ - -import sys, traceback, inspect, linecache, os, re, types -import unittest, difflib, pdb, tempfile -import warnings -from StringIO import StringIO - -# Don't whine about the deprecated is_private function in this -# module's tests. -warnings.filterwarnings("ignore", "is_private", DeprecationWarning, - __name__, 0) - -# There are 4 basic classes: -# - Example: a pair, plus an intra-docstring line number. -# - DocTest: a collection of examples, parsed from a docstring, plus -# info about where the docstring came from (name, filename, lineno). -# - DocTestFinder: extracts DocTests from a given object's docstring and -# its contained objects' docstrings. -# - DocTestRunner: runs DocTest cases, and accumulates statistics. -# -# So the basic picture is: -# -# list of: -# +------+ +---------+ +-------+ -# |object| --DocTestFinder-> | DocTest | --DocTestRunner-> |results| -# +------+ +---------+ +-------+ -# | Example | -# | ... | -# | Example | -# +---------+ - -# Option constants. 
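The option constants defined next are single-bit integers, so callers combine
them with bitwise OR. A minimal usage sketch against the public API described
in the module docstring above (standard-library doctest offers the same names
as this vendored copy):

    import doctest
    # ELLIPSIS lets "..." in expected output stand for any substring;
    # NORMALIZE_WHITESPACE treats all runs of whitespace as equivalent.
    doctest.testmod(optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
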
-
-OPTIONFLAGS_BY_NAME = {}
-def register_optionflag(name):
-    flag = 1 << len(OPTIONFLAGS_BY_NAME)
-    OPTIONFLAGS_BY_NAME[name] = flag
-    return flag
-
-DONT_ACCEPT_TRUE_FOR_1 = register_optionflag('DONT_ACCEPT_TRUE_FOR_1')
-DONT_ACCEPT_BLANKLINE = register_optionflag('DONT_ACCEPT_BLANKLINE')
-NORMALIZE_WHITESPACE = register_optionflag('NORMALIZE_WHITESPACE')
-ELLIPSIS = register_optionflag('ELLIPSIS')
-IGNORE_EXCEPTION_DETAIL = register_optionflag('IGNORE_EXCEPTION_DETAIL')
-
-COMPARISON_FLAGS = (DONT_ACCEPT_TRUE_FOR_1 |
-                    DONT_ACCEPT_BLANKLINE |
-                    NORMALIZE_WHITESPACE |
-                    ELLIPSIS |
-                    IGNORE_EXCEPTION_DETAIL)
-
-REPORT_UDIFF = register_optionflag('REPORT_UDIFF')
-REPORT_CDIFF = register_optionflag('REPORT_CDIFF')
-REPORT_NDIFF = register_optionflag('REPORT_NDIFF')
-REPORT_ONLY_FIRST_FAILURE = register_optionflag('REPORT_ONLY_FIRST_FAILURE')
-
-REPORTING_FLAGS = (REPORT_UDIFF |
-                   REPORT_CDIFF |
-                   REPORT_NDIFF |
-                   REPORT_ONLY_FIRST_FAILURE)
-
-# Special string markers for use in `want` strings:
-BLANKLINE_MARKER = '<BLANKLINE>'
-ELLIPSIS_MARKER = '...'
-
-######################################################################
-## Table of Contents
-######################################################################
-#  1. Utility Functions
-#  2. Example & DocTest -- store test cases
-#  3. DocTest Parser -- extracts examples from strings
-#  4. DocTest Finder -- extracts test cases from objects
-#  5. DocTest Runner -- runs test cases
-#  6. Test Functions -- convenient wrappers for testing
-#  7. Tester Class -- for backwards compatibility
-#  8. Unittest Support
-#  9. Debugging Support
-# 10. Example Usage
-
-######################################################################
-## 1. Utility Functions
-######################################################################
-
-def is_private(prefix, base):
-    """prefix, base -> true iff name prefix + "." + base is "private".
-
-    Prefix may be an empty string, and base does not contain a period.
-    Prefix is ignored (although functions you write conforming to this
-    protocol may make use of it).
-    Return true iff base begins with an (at least one) underscore, but
-    does not both begin and end with (at least) two underscores.
-
-    >>> is_private("a.b", "my_func")
-    False
-    >>> is_private("____", "_my_func")
-    True
-    >>> is_private("someclass", "__init__")
-    False
-    >>> is_private("sometypo", "__init_")
-    True
-    >>> is_private("x.y.z", "_")
-    True
-    >>> is_private("_x.y.z", "__")
-    False
-    >>> is_private("", "")  # senseless but consistent
-    False
-    """
-    warnings.warn("is_private is deprecated; it wasn't useful; "
-                  "examine DocTestFinder.find() lists instead",
-                  DeprecationWarning, stacklevel=2)
-    return base[:1] == "_" and not base[:2] == "__" == base[-2:]
-
-def _extract_future_flags(globs):
-    """
-    Return the compiler-flags associated with the future features that
-    have been imported into the given namespace (globs).
-    """
-    flags = 0
-    for fname in __future__.all_feature_names:
-        feature = globs.get(fname, None)
-        if feature is getattr(__future__, fname):
-            flags |= feature.compiler_flag
-    return flags
-
-def _normalize_module(module, depth=2):
-    """
-    Return the module specified by `module`.  In particular:
-      - If `module` is a module, then return module.
-      - If `module` is a string, then import and return the
-        module with that name.
-      - If `module` is None, then return the calling module.
-        The calling module is assumed to be the module of
-        the stack frame at the given depth in the call stack.
- """ - if inspect.ismodule(module): - return module - elif isinstance(module, (str, unicode)): - return __import__(module, globals(), locals(), ["*"]) - elif module is None: - return sys.modules[sys._getframe(depth).f_globals['__name__']] - else: - raise TypeError("Expected a module, string, or None") - -def _indent(s, indent=4): - """ - Add the given number of space characters to the beginning every - non-blank line in `s`, and return the result. - """ - # This regexp matches the start of non-blank lines: - return re.sub('(?m)^(?!$)', indent*' ', s) - -def _exception_traceback(exc_info): - """ - Return a string containing a traceback message for the given - exc_info tuple (as returned by sys.exc_info()). - """ - # Get a traceback message. - excout = StringIO() - exc_type, exc_val, exc_tb = exc_info - traceback.print_exception(exc_type, exc_val, exc_tb, file=excout) - return excout.getvalue() - -# Override some StringIO methods. -class _SpoofOut(StringIO): - def getvalue(self): - result = StringIO.getvalue(self) - # If anything at all was written, make sure there's a trailing - # newline. There's no way for the expected output to indicate - # that a trailing newline is missing. - if result and not result.endswith("\n"): - result += "\n" - # Prevent softspace from screwing up the next test case, in - # case they used print with a trailing comma in an example. - if hasattr(self, "softspace"): - del self.softspace - return result - - def truncate(self, size=None): - StringIO.truncate(self, size) - if hasattr(self, "softspace"): - del self.softspace - -# Worst-case linear-time ellipsis matching. -def _ellipsis_match(want, got): - """ - Essentially the only subtle case: - >>> _ellipsis_match('aa...aa', 'aaa') - False - """ - if want.find(ELLIPSIS_MARKER)==-1: - return want == got - - # Find "the real" strings. - ws = want.split(ELLIPSIS_MARKER) - assert len(ws) >= 2 - - # Deal with exact matches possibly needed at one or both ends. - startpos, endpos = 0, len(got) - w = ws[0] - if w: # starts with exact match - if got.startswith(w): - startpos = len(w) - del ws[0] - else: - return False - w = ws[-1] - if w: # ends with exact match - if got.endswith(w): - endpos -= len(w) - del ws[-1] - else: - return False - - if startpos > endpos: - # Exact end matches required more characters than we have, as in - # _ellipsis_match('aa...aa', 'aaa') - return False - - # For the rest, we only need to find the leftmost non-overlapping - # match for each piece. If there's no overall match that way alone, - # there's no overall match period. - for w in ws: - # w may be '' at times, if there are consecutive ellipses, or - # due to an ellipsis at the start or end of `want`. That's OK. - # Search for an empty string succeeds, and doesn't change startpos. - startpos = got.find(w, startpos, endpos) - if startpos < 0: - return False - startpos += len(w) - - return True - -def _comment_line(line): - "Return a commented form of the given line" - line = line.rstrip() - if line: - return '# '+line - else: - return '#' - -class _OutputRedirectingPdb(pdb.Pdb): - """ - A specialized version of the python debugger that redirects stdout - to a given stream when interacting with the user. Stdout is *not* - redirected when traced code is executed. - """ - def __init__(self, out): - self.__out = out - pdb.Pdb.__init__(self) - - def trace_dispatch(self, *args): - # Redirect stdout to the given stream. - save_stdout = sys.stdout - sys.stdout = self.__out - # Call Pdb's trace dispatch method. 
- try: - return pdb.Pdb.trace_dispatch(self, *args) - finally: - sys.stdout = save_stdout - -# [XX] Normalize with respect to os.path.pardir? -def _module_relative_path(module, path): - if not inspect.ismodule(module): - raise TypeError, 'Expected a module: %r' % module - if path.startswith('/'): - raise ValueError, 'Module-relative files may not have absolute paths' - - # Find the base directory for the path. - if hasattr(module, '__file__'): - # A normal module/package - basedir = os.path.split(module.__file__)[0] - elif module.__name__ == '__main__': - # An interactive session. - if len(sys.argv)>0 and sys.argv[0] != '': - basedir = os.path.split(sys.argv[0])[0] - else: - basedir = os.curdir - else: - # A module w/o __file__ (this includes builtins) - raise ValueError("Can't resolve paths relative to the module " + - module + " (it has no __file__)") - - # Combine the base directory and the path. - return os.path.join(basedir, *(path.split('/'))) - -###################################################################### -## 2. Example & DocTest -###################################################################### -## - An "example" is a pair, where "source" is a -## fragment of source code, and "want" is the expected output for -## "source." The Example class also includes information about -## where the example was extracted from. -## -## - A "doctest" is a collection of examples, typically extracted from -## a string (such as an object's docstring). The DocTest class also -## includes information about where the string was extracted from. - -class Example: - """ - A single doctest example, consisting of source code and expected - output. `Example` defines the following attributes: - - - source: A single Python statement, always ending with a newline. - The constructor adds a newline if needed. - - - want: The expected output from running the source code (either - from stdout, or a traceback in case of exception). `want` ends - with a newline unless it's empty, in which case it's an empty - string. The constructor adds a newline if needed. - - - exc_msg: The exception message generated by the example, if - the example is expected to generate an exception; or `None` if - it is not expected to generate an exception. This exception - message is compared against the return value of - `traceback.format_exception_only()`. `exc_msg` ends with a - newline unless it's `None`. The constructor adds a newline - if needed. - - - lineno: The line number within the DocTest string containing - this Example where the Example begins. This line number is - zero-based, with respect to the beginning of the DocTest. - - - indent: The example's indentation in the DocTest string. - I.e., the number of space characters that preceed the - example's first prompt. - - - options: A dictionary mapping from option flags to True or - False, which is used to override default options for this - example. Any option flags not contained in this dictionary - are left at their default value (as specified by the - DocTestRunner's optionflags). By default, no options are set. - """ - def __init__(self, source, want, exc_msg=None, lineno=0, indent=0, - options=None): - # Normalize inputs. - if not source.endswith('\n'): - source += '\n' - if want and not want.endswith('\n'): - want += '\n' - if exc_msg is not None and not exc_msg.endswith('\n'): - exc_msg += '\n' - # Store properties. 
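Given the normalization performed just above, constructing an Example by hand
always yields newline-terminated source and want strings, e.g. (a sketch using
the standard-library doctest module, which exposes the same class):

    from doctest import Example
    ex = Example('print 1', '1')       # no trailing newlines supplied
    assert ex.source == 'print 1\n'    # ...but both come back normalized
    assert ex.want == '1\n'
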
-        self.source = source
-        self.want = want
-        self.lineno = lineno
-        self.indent = indent
-        if options is None: options = {}
-        self.options = options
-        self.exc_msg = exc_msg
-
-class DocTest:
-    """
-    A collection of doctest examples that should be run in a single
-    namespace.  Each `DocTest` defines the following attributes:
-
-      - examples: the list of examples.
-
-      - globs: The namespace (aka globals) that the examples should
-        be run in.
-
-      - name: A name identifying the DocTest (typically, the name of
-        the object whose docstring this DocTest was extracted from).
-
-      - filename: The name of the file that this DocTest was extracted
-        from, or `None` if the filename is unknown.
-
-      - lineno: The line number within filename where this DocTest
-        begins, or `None` if the line number is unavailable.  This
-        line number is zero-based, with respect to the beginning of
-        the file.
-
-      - docstring: The string that the examples were extracted from,
-        or `None` if the string is unavailable.
-    """
-    def __init__(self, examples, globs, name, filename, lineno, docstring):
-        """
-        Create a new DocTest containing the given examples.  The
-        DocTest's globals are initialized with a copy of `globs`.
-        """
-        assert not isinstance(examples, basestring), \
-               "DocTest no longer accepts str; use DocTestParser instead"
-        self.examples = examples
-        self.docstring = docstring
-        self.globs = globs.copy()
-        self.name = name
-        self.filename = filename
-        self.lineno = lineno
-
-    def __repr__(self):
-        if len(self.examples) == 0:
-            examples = 'no examples'
-        elif len(self.examples) == 1:
-            examples = '1 example'
-        else:
-            examples = '%d examples' % len(self.examples)
-        return ('<DocTest %s from %s:%s (%s)>' %
-                (self.name, self.filename, self.lineno, examples))
-
-
-    # This lets us sort tests by name:
-    def __cmp__(self, other):
-        if not isinstance(other, DocTest):
-            return -1
-        return cmp((self.name, self.filename, self.lineno, id(self)),
-                   (other.name, other.filename, other.lineno, id(other)))
-
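Taken together with the DocTestParser defined in the next section, the two
containers round-trip as follows (a sketch via the standard-library module;
'demo' and the snippet are arbitrary):

    from doctest import DocTestParser
    test = DocTestParser().get_doctest('>>> 1 + 1\n2\n', {}, 'demo', None, 0)
    print repr(test)                 # <DocTest demo from None:0 (1 example)>
    print test.examples[0].source    # '1 + 1\n' (prompt stripped)
    print test.examples[0].want      # '2\n'
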
-######################################################################
-## 3. DocTestParser
-######################################################################
-
-class DocTestParser:
-    """
-    A class used to parse strings containing doctest examples.
-    """
-    # This regular expression is used to find doctest examples in a
-    # string.  It defines three groups: `source` is the source code
-    # (including leading indentation and prompts); `indent` is the
-    # indentation of the first (PS1) line of the source code; and
-    # `want` is the expected output (including leading indentation).
-    _EXAMPLE_RE = re.compile(r'''
-        # Source consists of a PS1 line followed by zero or more PS2 lines.
-        (?P<source>
-            (?:^(?P<indent> [ ]*) >>>    .*)    # PS1 line
-            (?:\n           [ ]*  \.\.\. .*)*)  # PS2 lines
-        \n?
-        # Want consists of any non-blank lines that do not start with PS1.
-        (?P<want> (?:(?![ ]*$)    # Not a blank line
-                     (?![ ]*>>>)  # Not a line starting with PS1
-                     .*$\n?       # But any other line
-                  )*)
-        ''', re.MULTILINE | re.VERBOSE)
-
-    # A regular expression for handling `want` strings that contain
-    # expected exceptions.  It divides `want` into three pieces:
-    #    - the traceback header line (`hdr`)
-    #    - the traceback stack (`stack`)
-    #    - the exception message (`msg`), as generated by
-    #      traceback.format_exception_only()
-    # `msg` may have multiple lines.  We assume/require that the
-    # exception message is the first non-indented line starting with a word
-    # character following the traceback header line.
-    _EXCEPTION_RE = re.compile(r"""
-        # Grab the traceback header.  Different versions of Python have
-        # said different things on the first traceback line.
-        ^(?P<hdr> Traceback\ \(
-            (?: most\ recent\ call\ last
-            |   innermost\ last
-            ) \) :
-        )
-        \s* $                # toss trailing whitespace on the header.
-        (?P<stack> .*?)      # don't blink: absorb stuff until...
-        ^ (?P<msg> \w+ .*)   #     a line *starts* with alphanum.
-        """, re.VERBOSE | re.MULTILINE | re.DOTALL)
-
-    # A callable returning a true value iff its argument is a blank line
-    # or contains a single comment.
-    _IS_BLANK_OR_COMMENT = re.compile(r'^[ ]*(#.*)?$').match
-
-    def parse(self, string, name='<string>'):
-        """
-        Divide the given string into examples and intervening text,
-        and return them as a list of alternating Examples and strings.
-        Line numbers for the Examples are 0-based.  The optional
-        argument `name` is a name identifying this string, and is only
-        used for error messages.
-        """
-        string = string.expandtabs()
-        # If all lines begin with the same indentation, then strip it.
-        min_indent = self._min_indent(string)
-        if min_indent > 0:
-            string = '\n'.join([l[min_indent:] for l in string.split('\n')])
-
-        output = []
-        charno, lineno = 0, 0
-        # Find all doctest examples in the string:
-        for m in self._EXAMPLE_RE.finditer(string):
-            # Add the pre-example text to `output`.
-            output.append(string[charno:m.start()])
-            # Update lineno (lines before this example)
-            lineno += string.count('\n', charno, m.start())
-            # Extract info from the regexp match.
-            (source, options, want, exc_msg) = \
-                     self._parse_example(m, name, lineno)
-            # Create an Example, and add it to the list.
-            if not self._IS_BLANK_OR_COMMENT(source):
-                output.append( Example(source, want, exc_msg,
-                                       lineno=lineno,
-                                       indent=min_indent+len(m.group('indent')),
-                                       options=options) )
-            # Update lineno (lines inside this example)
-            lineno += string.count('\n', m.start(), m.end())
-            # Update charno.
-            charno = m.end()
-        # Add any remaining post-example text to `output`.
-        output.append(string[charno:])
-        return output
-
-    def get_doctest(self, string, globs, name, filename, lineno):
-        """
-        Extract all doctest examples from the given string, and
-        collect them into a `DocTest` object.
-
-        `globs`, `name`, `filename`, and `lineno` are attributes for
-        the new `DocTest` object.  See the documentation for `DocTest`
-        for more information.
-        """
-        return DocTest(self.get_examples(string, name), globs,
-                       name, filename, lineno, string)
-
-    def get_examples(self, string, name='<string>'):
-        """
-        Extract all doctest examples from the given string, and return
-        them as a list of `Example` objects.  Line numbers are
-        0-based, because it's most common in doctests that nothing
-        interesting appears on the same line as opening triple-quote,
-        and so the first interesting line is called \"line 1\" then.
-
-        The optional argument `name` is a name identifying this
-        string, and is only used for error messages.
-        """
-        return [x for x in self.parse(string, name)
-                if isinstance(x, Example)]
-
-    def _parse_example(self, m, name, lineno):
-        """
-        Given a regular expression match from `_EXAMPLE_RE` (`m`),
-        return a pair `(source, want)`, where `source` is the matched
-        example's source code (with prompts and indentation stripped);
-        and `want` is the example's expected output (with indentation
-        stripped).
-
-        `name` is the string's name, and `lineno` is the line number
-        where the example starts; both are used for error messages.
-        """
-        # Get the example's indentation level.
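The slicing that follows assumes each source line carries the common
indentation plus a four-character prompt ('>>> ' or '... '), so sl[indent+4:]
removes exactly both. For instance:

    line = '    >>> print x'     # indent == 4
    assert line[4+4:] == 'print x'
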
- indent = len(m.group('indent')) - - # Divide source into lines; check that they're properly - # indented; and then strip their indentation & prompts. - source_lines = m.group('source').split('\n') - self._check_prompt_blank(source_lines, indent, name, lineno) - self._check_prefix(source_lines[1:], ' '*indent + '.', name, lineno) - source = '\n'.join([sl[indent+4:] for sl in source_lines]) - - # Divide want into lines; check that it's properly indented; and - # then strip the indentation. Spaces before the last newline should - # be preserved, so plain rstrip() isn't good enough. - want = m.group('want') - want_lines = want.split('\n') - if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]): - del want_lines[-1] # forget final newline & spaces after it - self._check_prefix(want_lines, ' '*indent, name, - lineno + len(source_lines)) - want = '\n'.join([wl[indent:] for wl in want_lines]) - - # If `want` contains a traceback message, then extract it. - m = self._EXCEPTION_RE.match(want) - if m: - exc_msg = m.group('msg') - else: - exc_msg = None - - # Extract options from the source. - options = self._find_options(source, name, lineno) - - return source, options, want, exc_msg - - # This regular expression looks for option directives in the - # source code of an example. Option directives are comments - # starting with "doctest:". Warning: this may give false - # positives for string-literals that contain the string - # "#doctest:". Eliminating these false positives would require - # actually parsing the string; but we limit them by ignoring any - # line containing "#doctest:" that is *followed* by a quote mark. - _OPTION_DIRECTIVE_RE = re.compile(r'#\s*doctest:\s*([^\n\'"]*)$', - re.MULTILINE) - - def _find_options(self, source, name, lineno): - """ - Return a dictionary containing option overrides extracted from - option directives in the given source string. - - `name` is the string's name, and `lineno` is the line number - where the example starts; both are used for error messages. - """ - options = {} - # (note: with the current regexp, this will match at most once:) - for m in self._OPTION_DIRECTIVE_RE.finditer(source): - option_strings = m.group(1).replace(',', ' ').split() - for option in option_strings: - if (option[0] not in '+-' or - option[1:] not in OPTIONFLAGS_BY_NAME): - raise ValueError('line %r of the doctest for %s ' - 'has an invalid option: %r' % - (lineno+1, name, option)) - flag = OPTIONFLAGS_BY_NAME[option[1:]] - options[flag] = (option[0] == '+') - if options and self._IS_BLANK_OR_COMMENT(source): - raise ValueError('line %r of the doctest for %s has an option ' - 'directive on a line with no example: %r' % - (lineno, name, source)) - return options - - # This regular expression finds the indentation of every non-blank - # line in a string. - _INDENT_RE = re.compile('^([ ]*)(?=\S)', re.MULTILINE) - - def _min_indent(self, s): - "Return the minimum indentation of any non-blank line in `s`" - indents = [len(indent) for indent in self._INDENT_RE.findall(s)] - if len(indents) > 0: - return min(indents) - else: - return 0 - - def _check_prompt_blank(self, lines, indent, name, lineno): - """ - Given the lines of a source string (including prompts and - leading indentation), check to make sure that every prompt is - followed by a space character. If any line is not followed by - a space character, then raise ValueError. 
- """ - for i, line in enumerate(lines): - if len(line) >= indent+4 and line[indent+3] != ' ': - raise ValueError('line %r of the docstring for %s ' - 'lacks blank after %s: %r' % - (lineno+i+1, name, - line[indent:indent+3], line)) - - def _check_prefix(self, lines, prefix, name, lineno): - """ - Check that every line in the given list starts with the given - prefix; if any line does not, then raise a ValueError. - """ - for i, line in enumerate(lines): - if line and not line.startswith(prefix): - raise ValueError('line %r of the docstring for %s has ' - 'inconsistent leading whitespace: %r' % - (lineno+i+1, name, line)) - - -###################################################################### -## 4. DocTest Finder -###################################################################### - -class DocTestFinder: - """ - A class used to extract the DocTests that are relevant to a given - object, from its docstring and the docstrings of its contained - objects. Doctests can currently be extracted from the following - object types: modules, functions, classes, methods, staticmethods, - classmethods, and properties. - """ - - def __init__(self, verbose=False, parser=DocTestParser(), - recurse=True, _namefilter=None, exclude_empty=True): - """ - Create a new doctest finder. - - The optional argument `parser` specifies a class or - function that should be used to create new DocTest objects (or - objects that implement the same interface as DocTest). The - signature for this factory function should match the signature - of the DocTest constructor. - - If the optional argument `recurse` is false, then `find` will - only examine the given object, and not any contained objects. - - If the optional argument `exclude_empty` is false, then `find` - will include tests for objects with empty docstrings. - """ - self._parser = parser - self._verbose = verbose - self._recurse = recurse - self._exclude_empty = exclude_empty - # _namefilter is undocumented, and exists only for temporary backward- - # compatibility support of testmod's deprecated isprivate mess. - self._namefilter = _namefilter - - def find(self, obj, name=None, module=None, globs=None, - extraglobs=None): - """ - Return a list of the DocTests that are defined by the given - object's docstring, or by any of its contained objects' - docstrings. - - The optional parameter `module` is the module that contains - the given object. If the module is not specified or is None, then - the test finder will attempt to automatically determine the - correct module. The object's module is used: - - - As a default namespace, if `globs` is not specified. - - To prevent the DocTestFinder from extracting DocTests - from objects that are imported from other modules. - - To find the name of the file containing the object. - - To help find the line number of the object within its - file. - - Contained objects whose module does not match `module` are ignored. - - If `module` is False, no attempt to find the module will be made. - This is obscure, of use mostly in tests: if `module` is False, or - is None but cannot be found automatically, then all objects are - considered to belong to the (non-existent) module, so all contained - objects will (recursively) be searched for doctests. - - The globals for each DocTest is formed by combining `globs` - and `extraglobs` (bindings in `extraglobs` override bindings - in `globs`). A new copy of the globals dictionary is created - for each DocTest. 
If `globs` is not specified, then it - defaults to the module's `__dict__`, if specified, or {} - otherwise. If `extraglobs` is not specified, then it defaults - to {}. - - """ - # If name was not specified, then extract it from the object. - if name is None: - name = getattr(obj, '__name__', None) - if name is None: - raise ValueError("DocTestFinder.find: name must be given " - "when obj.__name__ doesn't exist: %r" % - (type(obj),)) - - # Find the module that contains the given object (if obj is - # a module, then module=obj.). Note: this may fail, in which - # case module will be None. - if module is False: - module = None - elif module is None: - module = inspect.getmodule(obj) - - # Read the module's source code. This is used by - # DocTestFinder._find_lineno to find the line number for a - # given object's docstring. - try: - file = inspect.getsourcefile(obj) or inspect.getfile(obj) - source_lines = linecache.getlines(file) - if not source_lines: - source_lines = None - except TypeError: - source_lines = None - - # Initialize globals, and merge in extraglobs. - if globs is None: - if module is None: - globs = {} - else: - globs = module.__dict__.copy() - else: - globs = globs.copy() - if extraglobs is not None: - globs.update(extraglobs) - - # Recursively expore `obj`, extracting DocTests. - tests = [] - self._find(tests, obj, name, module, source_lines, globs, {}) - return tests - - def _filter(self, obj, prefix, base): - """ - Return true if the given object should not be examined. - """ - return (self._namefilter is not None and - self._namefilter(prefix, base)) - - def _from_module(self, module, object): - """ - Return true if the given object is defined in the given - module. - """ - if module is None: - return True - elif inspect.isfunction(object): - return module.__dict__ is object.func_globals - elif inspect.isclass(object): - return module.__name__ == object.__module__ - elif inspect.getmodule(object) is not None: - return module is inspect.getmodule(object) - elif hasattr(object, '__module__'): - return module.__name__ == object.__module__ - elif isinstance(object, property): - return True # [XX] no way not be sure. - else: - raise ValueError("object must be a class or function") - - def _find(self, tests, obj, name, module, source_lines, globs, seen): - """ - Find tests for the given object and any contained objects, and - add them to `tests`. - """ - if self._verbose: - print 'Finding tests in %s' % name - - # If we've already processed this object, then ignore it. - if id(obj) in seen: - return - seen[id(obj)] = 1 - - # Find a test for this object, and add it to the list of tests. - test = self._get_test(obj, name, module, globs, source_lines) - if test is not None: - tests.append(test) - - # Look for tests in a module's contained objects. - if inspect.ismodule(obj) and self._recurse: - for valname, val in obj.__dict__.items(): - # Check if this contained object should be ignored. - if self._filter(val, name, valname): - continue - valname = '%s.%s' % (name, valname) - # Recurse to functions & classes. - if ((inspect.isfunction(val) or inspect.isclass(val)) and - self._from_module(module, val)): - self._find(tests, val, valname, module, source_lines, - globs, seen) - - # Look for tests in a module's __test__ dictionary. 
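As the checks below spell out, __test__ keys must be strings, and values may
be strings, functions, methods, classes, or modules. A minimal module-level
example of the convention (hypothetical contents):

    __test__ = {
        'addition': """
            >>> 1 + 1
            2
        """,
    }
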
- if inspect.ismodule(obj) and self._recurse: - for valname, val in getattr(obj, '__test__', {}).items(): - if not isinstance(valname, basestring): - raise ValueError("DocTestFinder.find: __test__ keys " - "must be strings: %r" % - (type(valname),)) - if not (inspect.isfunction(val) or inspect.isclass(val) or - inspect.ismethod(val) or inspect.ismodule(val) or - isinstance(val, basestring)): - raise ValueError("DocTestFinder.find: __test__ values " - "must be strings, functions, methods, " - "classes, or modules: %r" % - (type(val),)) - valname = '%s.__test__.%s' % (name, valname) - self._find(tests, val, valname, module, source_lines, - globs, seen) - - # Look for tests in a class's contained objects. - if inspect.isclass(obj) and self._recurse: - for valname, val in obj.__dict__.items(): - # Check if this contained object should be ignored. - if self._filter(val, name, valname): - continue - # Special handling for staticmethod/classmethod. - if isinstance(val, staticmethod): - val = getattr(obj, valname) - if isinstance(val, classmethod): - val = getattr(obj, valname).im_func - - # Recurse to methods, properties, and nested classes. - if ((inspect.isfunction(val) or inspect.isclass(val) or - isinstance(val, property)) and - self._from_module(module, val)): - valname = '%s.%s' % (name, valname) - self._find(tests, val, valname, module, source_lines, - globs, seen) - - def _get_test(self, obj, name, module, globs, source_lines): - """ - Return a DocTest for the given object, if it defines a docstring; - otherwise, return None. - """ - # Extract the object's docstring. If it doesn't have one, - # then return None (no test for this object). - if isinstance(obj, basestring): - docstring = obj - else: - try: - if obj.__doc__ is None: - docstring = '' - else: - docstring = obj.__doc__ - if not isinstance(docstring, basestring): - docstring = str(docstring) - except (TypeError, AttributeError): - docstring = '' - - # Find the docstring's location in the file. - lineno = self._find_lineno(obj, source_lines) - - # Don't bother if the docstring is empty. - if self._exclude_empty and not docstring: - return None - - # Return a DocTest for this object. - if module is None: - filename = None - else: - filename = getattr(module, '__file__', module.__name__) - if filename[-4:] in (".pyc", ".pyo"): - filename = filename[:-1] - return self._parser.get_doctest(docstring, globs, name, - filename, lineno) - - def _find_lineno(self, obj, source_lines): - """ - Return a line number of the given object's docstring. Note: - this method assumes that the object has a docstring. - """ - lineno = None - - # Find the line number for modules. - if inspect.ismodule(obj): - lineno = 0 - - # Find the line number for classes. - # Note: this could be fooled if a class is defined multiple - # times in a single file. - if inspect.isclass(obj): - if source_lines is None: - return None - pat = re.compile(r'^\s*class\s*%s\b' % - getattr(obj, '__name__', '-')) - for i, line in enumerate(source_lines): - if pat.match(line): - lineno = i - break - - # Find the line number for functions & methods. - if inspect.ismethod(obj): obj = obj.im_func - if inspect.isfunction(obj): obj = obj.func_code - if inspect.istraceback(obj): obj = obj.tb_frame - if inspect.isframe(obj): obj = obj.f_code - if inspect.iscode(obj): - lineno = getattr(obj, 'co_firstlineno', None)-1 - - # Find the line number where the docstring starts. Assume - # that it's the first line that begins with a quote mark. 
- # Note: this could be fooled by a multiline function - # signature, where a continuation line begins with a quote - # mark. - if lineno is not None: - if source_lines is None: - return lineno+1 - pat = re.compile('(^|.*:)\s*\w*("|\')') - for lineno in range(lineno, len(source_lines)): - if pat.match(source_lines[lineno]): - return lineno - - # We couldn't find the line number. - return None - -###################################################################### -## 5. DocTest Runner -###################################################################### - -class DocTestRunner: - """ - A class used to run DocTest test cases, and accumulate statistics. - The `run` method is used to process a single DocTest case. It - returns a tuple `(f, t)`, where `t` is the number of test cases - tried, and `f` is the number of test cases that failed. - - >>> tests = DocTestFinder().find(_TestClass) - >>> runner = DocTestRunner(verbose=False) - >>> for test in tests: - ... print runner.run(test) - (0, 2) - (0, 1) - (0, 2) - (0, 2) - - The `summarize` method prints a summary of all the test cases that - have been run by the runner, and returns an aggregated `(f, t)` - tuple: - - >>> runner.summarize(verbose=1) - 4 items passed all tests: - 2 tests in _TestClass - 2 tests in _TestClass.__init__ - 2 tests in _TestClass.get - 1 tests in _TestClass.square - 7 tests in 4 items. - 7 passed and 0 failed. - Test passed. - (0, 7) - - The aggregated number of tried examples and failed examples is - also available via the `tries` and `failures` attributes: - - >>> runner.tries - 7 - >>> runner.failures - 0 - - The comparison between expected outputs and actual outputs is done - by an `OutputChecker`. This comparison may be customized with a - number of option flags; see the documentation for `testmod` for - more information. If the option flags are insufficient, then the - comparison may also be customized by passing a subclass of - `OutputChecker` to the constructor. - - The test runner's display output can be controlled in two ways. - First, an output function (`out) can be passed to - `TestRunner.run`; this function will be called with strings that - should be displayed. It defaults to `sys.stdout.write`. If - capturing the output is not sufficient, then the display output - can be also customized by subclassing DocTestRunner, and - overriding the methods `report_start`, `report_success`, - `report_unexpected_exception`, and `report_failure`. - """ - # This divider string is used to separate failure messages, and to - # separate sections of the summary. - DIVIDER = "*" * 70 - - def __init__(self, checker=None, verbose=None, optionflags=0): - """ - Create a new test runner. - - Optional keyword arg `checker` is the `OutputChecker` that - should be used to compare the expected outputs and actual - outputs of doctest examples. - - Optional keyword arg 'verbose' prints lots of stuff if true, - only failures if false; by default, it's true iff '-v' is in - sys.argv. - - Optional argument `optionflags` can be used to control how the - test runner compares expected output to actual output, and how - it displays failures. See the documentation for `testmod` for - more information. - """ - self._checker = checker or OutputChecker() - if verbose is None: - verbose = '-v' in sys.argv - self._verbose = verbose - self.optionflags = optionflags - self.original_optionflags = optionflags - - # Keep track of the examples we've run. 
- self.tries = 0 - self.failures = 0 - self._name2ft = {} - - # Create a fake output target for capturing doctest output. - self._fakeout = _SpoofOut() - - #///////////////////////////////////////////////////////////////// - # Reporting methods - #///////////////////////////////////////////////////////////////// - - def report_start(self, out, test, example): - """ - Report that the test runner is about to process the given - example. (Only displays a message if verbose=True) - """ - if self._verbose: - if example.want: - out('Trying:\n' + _indent(example.source) + - 'Expecting:\n' + _indent(example.want)) - else: - out('Trying:\n' + _indent(example.source) + - 'Expecting nothing\n') - - def report_success(self, out, test, example, got): - """ - Report that the given example ran successfully. (Only - displays a message if verbose=True) - """ - if self._verbose: - out("ok\n") - - def report_failure(self, out, test, example, got): - """ - Report that the given example failed. - """ - out(self._failure_header(test, example) + - self._checker.output_difference(example, got, self.optionflags)) - - def report_unexpected_exception(self, out, test, example, exc_info): - """ - Report that the given example raised an unexpected exception. - """ - out(self._failure_header(test, example) + - 'Exception raised:\n' + _indent(_exception_traceback(exc_info))) - - def _failure_header(self, test, example): - out = [self.DIVIDER] - if test.filename: - if test.lineno is not None and example.lineno is not None: - lineno = test.lineno + example.lineno + 1 - else: - lineno = '?' - out.append('File "%s", line %s, in %s' % - (test.filename, lineno, test.name)) - else: - out.append('Line %s, in %s' % (example.lineno+1, test.name)) - out.append('Failed example:') - source = example.source - out.append(_indent(source)) - return '\n'.join(out) - - #///////////////////////////////////////////////////////////////// - # DocTest Running - #///////////////////////////////////////////////////////////////// - - def __run(self, test, compileflags, out): - """ - Run the examples in `test`. Write the outcome of each example - with one of the `DocTestRunner.report_*` methods, using the - writer function `out`. `compileflags` is the set of compiler - flags that should be used to execute examples. Return a tuple - `(f, t)`, where `t` is the number of examples tried, and `f` - is the number of examples that failed. The examples are run - in the namespace `test.globs`. - """ - # Keep track of the number of failures and tries. - failures = tries = 0 - - # Save the option flags (since option directives can be used - # to modify them). - original_optionflags = self.optionflags - - SUCCESS, FAILURE, BOOM = range(3) # `outcome` state - - check = self._checker.check_output - - # Process each example. - for examplenum, example in enumerate(test.examples): - - # If REPORT_ONLY_FIRST_FAILURE is set, then supress - # reporting after the first failure. - quiet = (self.optionflags & REPORT_ONLY_FIRST_FAILURE and - failures > 0) - - # Merge in the example's options. - self.optionflags = original_optionflags - if example.options: - for (optionflag, val) in example.options.items(): - if val: - self.optionflags |= optionflag - else: - self.optionflags &= ~optionflag - - # Record that we started this example. - tries += 1 - if not quiet: - self.report_start(out, test, example) - - # Use a special filename for compile(), so we can retrieve - # the source code during interactive debugging (see - # __patched_linecache_getlines). 
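The synthetic filename built below encodes the test name and the example's
index, e.g.:

    assert '<doctest %s[%d]>' % ('demo', 2) == '<doctest demo[2]>'

__patched_linecache_getlines later matches this exact shape to recover the
example's source from inside the debugger.
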
-            filename = '<doctest %s[%d]>' % (test.name, examplenum)
-
-            # Run the example in the given context (globs), and record
-            # any exception that gets raised.  (But don't intercept
-            # keyboard interrupts.)
-            try:
-                # Don't blink!  This is where the user's code gets run.
-                exec compile(example.source, filename, "single",
-                             compileflags, 1) in test.globs
-                self.debugger.set_continue() # ==== Example Finished ====
-                exception = None
-            except KeyboardInterrupt:
-                raise
-            except:
-                exception = sys.exc_info()
-                self.debugger.set_continue() # ==== Example Finished ====
-
-            got = self._fakeout.getvalue()  # the actual output
-            self._fakeout.truncate(0)
-            outcome = FAILURE   # guilty until proved innocent or insane
-
-            # If the example executed without raising any exceptions,
-            # verify its output.
-            if exception is None:
-                if check(example.want, got, self.optionflags):
-                    outcome = SUCCESS
-
-            # The example raised an exception:  check if it was expected.
-            else:
-                exc_info = sys.exc_info()
-                exc_msg = traceback.format_exception_only(*exc_info[:2])[-1]
-                if not quiet:
-                    got += _exception_traceback(exc_info)
-
-                # If `example.exc_msg` is None, then we weren't expecting
-                # an exception.
-                if example.exc_msg is None:
-                    outcome = BOOM
-
-                # We expected an exception:  see whether it matches.
-                elif check(example.exc_msg, exc_msg, self.optionflags):
-                    outcome = SUCCESS
-
-                # Another chance if they didn't care about the detail.
-                elif self.optionflags & IGNORE_EXCEPTION_DETAIL:
-                    m1 = re.match(r'[^:]*:', example.exc_msg)
-                    m2 = re.match(r'[^:]*:', exc_msg)
-                    if m1 and m2 and check(m1.group(0), m2.group(0),
-                                           self.optionflags):
-                        outcome = SUCCESS
-
-            # Report the outcome.
-            if outcome is SUCCESS:
-                if not quiet:
-                    self.report_success(out, test, example, got)
-            elif outcome is FAILURE:
-                if not quiet:
-                    self.report_failure(out, test, example, got)
-                failures += 1
-            elif outcome is BOOM:
-                if not quiet:
-                    self.report_unexpected_exception(out, test, example,
-                                                     exc_info)
-                failures += 1
-            else:
-                assert False, ("unknown outcome", outcome)
-
-        # Restore the option flags (in case they were modified)
-        self.optionflags = original_optionflags
-
-        # Record and return the number of failures and tries.
-        self.__record_outcome(test, failures, tries)
-        return failures, tries
-
-    def __record_outcome(self, test, f, t):
-        """
-        Record the fact that the given DocTest (`test`) generated `f`
-        failures out of `t` tried examples.
-        """
-        f2, t2 = self._name2ft.get(test.name, (0,0))
-        self._name2ft[test.name] = (f+f2, t+t2)
-        self.failures += f
-        self.tries += t
-
-    __LINECACHE_FILENAME_RE = re.compile(r'<doctest '
-                                         r'(?P<name>[\w\.]+)'
-                                         r'\[(?P<examplenum>\d+)\]>$')
-    def __patched_linecache_getlines(self, filename, module_globals=None):
-        m = self.__LINECACHE_FILENAME_RE.match(filename)
-        if m and m.group('name') == self.test.name:
-            example = self.test.examples[int(m.group('examplenum'))]
-            return example.source.splitlines(True)
-        elif self.save_linecache_getlines.func_code.co_argcount > 1:
-            return self.save_linecache_getlines(filename, module_globals)
-        else:
-            return self.save_linecache_getlines(filename)
-
-    def run(self, test, compileflags=None, out=None, clear_globs=True):
-        """
-        Run the examples in `test`, and display the results using the
-        writer function `out`.
-
-        The examples are run in the namespace `test.globs`.  If
-        `clear_globs` is true (the default), then this namespace will
-        be cleared after the test runs, to help with garbage
-        collection.  If you would like to examine the namespace after
-        the test completes, then use `clear_globs=False`.
- - `compileflags` gives the set of flags that should be used by - the Python compiler when running the examples. If not - specified, then it will default to the set of future-import - flags that apply to `globs`. - - The output of each example is checked using - `DocTestRunner.check_output`, and the results are formatted by - the `DocTestRunner.report_*` methods. - """ - self.test = test - - if compileflags is None: - compileflags = _extract_future_flags(test.globs) - - save_stdout = sys.stdout - if out is None: - out = save_stdout.write - sys.stdout = self._fakeout - - # Patch pdb.set_trace to restore sys.stdout during interactive - # debugging (so it's not still redirected to self._fakeout). - # Note that the interactive output will go to *our* - # save_stdout, even if that's not the real sys.stdout; this - # allows us to write test cases for the set_trace behavior. - save_set_trace = pdb.set_trace - self.debugger = _OutputRedirectingPdb(save_stdout) - self.debugger.reset() - pdb.set_trace = self.debugger.set_trace - - # Patch linecache.getlines, so we can see the example's source - # when we're inside the debugger. - self.save_linecache_getlines = linecache.getlines - linecache.getlines = self.__patched_linecache_getlines - - try: - return self.__run(test, compileflags, out) - finally: - sys.stdout = save_stdout - pdb.set_trace = save_set_trace - linecache.getlines = self.save_linecache_getlines - if clear_globs: - test.globs.clear() - - #///////////////////////////////////////////////////////////////// - # Summarization - #///////////////////////////////////////////////////////////////// - def summarize(self, verbose=None): - """ - Print a summary of all the test cases that have been run by - this DocTestRunner, and return a tuple `(f, t)`, where `f` is - the total number of failed examples, and `t` is the total - number of tried examples. - - The optional `verbose` argument controls how detailed the - summary is. If the verbosity is not specified, then the - DocTestRunner's verbosity is used. - """ - if verbose is None: - verbose = self._verbose - notests = [] - passed = [] - failed = [] - totalt = totalf = 0 - for x in self._name2ft.items(): - name, (f, t) = x - assert f <= t - totalt += t - totalf += f - if t == 0: - notests.append(name) - elif f == 0: - passed.append( (name, t) ) - else: - failed.append(x) - if verbose: - if notests: - print len(notests), "items had no tests:" - notests.sort() - for thing in notests: - print " ", thing - if passed: - print len(passed), "items passed all tests:" - passed.sort() - for thing, count in passed: - print " %3d tests in %s" % (count, thing) - if failed: - print self.DIVIDER - print len(failed), "items had failures:" - failed.sort() - for thing, (f, t) in failed: - print " %3d of %3d in %s" % (f, t, thing) - if verbose: - print totalt, "tests in", len(self._name2ft), "items." - print totalt - totalf, "passed and", totalf, "failed." - if totalf: - print "***Test Failed***", totalf, "failures." - elif verbose: - print "Test passed." - return totalf, totalt - - #///////////////////////////////////////////////////////////////// - # Backward compatibility cruft to maintain doctest.master. - #///////////////////////////////////////////////////////////////// - def merge(self, other): - d = self._name2ft - for name, (f, t) in other._name2ft.items(): - if name in d: - print "*** DocTestRunner.merge: '" + name + "' in both" \ - " testers; summing outcomes." 
-                f2, t2 = d[name]
-                f = f + f2
-                t = t + t2
-            d[name] = f, t
-
-class OutputChecker:
-    """
-    A class used to check whether the actual output from a doctest
-    example matches the expected output.  `OutputChecker` defines two
-    methods: `check_output`, which compares a given pair of outputs,
-    and returns true if they match; and `output_difference`, which
-    returns a string describing the differences between two outputs.
-    """
-    def check_output(self, want, got, optionflags):
-        """
-        Return True iff the actual output from an example (`got`)
-        matches the expected output (`want`).  These strings are
-        always considered to match if they are identical; but
-        depending on what option flags the test runner is using,
-        several non-exact match types are also possible.  See the
-        documentation for `TestRunner` for more information about
-        option flags.
-        """
-        # Handle the common case first, for efficiency:
-        # if they're string-identical, always return true.
-        if got == want:
-            return True
-
-        # The values True and False replaced 1 and 0 as the return
-        # value for boolean comparisons in Python 2.3.
-        if not (optionflags & DONT_ACCEPT_TRUE_FOR_1):
-            if (got,want) == ("True\n", "1\n"):
-                return True
-            if (got,want) == ("False\n", "0\n"):
-                return True
-
-        # <BLANKLINE> can be used as a special sequence to signify a
-        # blank line, unless the DONT_ACCEPT_BLANKLINE flag is used.
-        if not (optionflags & DONT_ACCEPT_BLANKLINE):
-            # Replace <BLANKLINE> in want with a blank line.
-            want = re.sub('(?m)^%s\s*?$' % re.escape(BLANKLINE_MARKER),
-                          '', want)
-            # If a line in got contains only spaces, then remove the
-            # spaces.
-            got = re.sub('(?m)^\s*?$', '', got)
-            if got == want:
-                return True
-
-        # This flag causes doctest to ignore any differences in the
-        # contents of whitespace strings.  Note that this can be used
-        # in conjunction with the ELLIPSIS flag.
-        if optionflags & NORMALIZE_WHITESPACE:
-            got = ' '.join(got.split())
-            want = ' '.join(want.split())
-            if got == want:
-                return True
-
-        # The ELLIPSIS flag says to let the sequence "..." in `want`
-        # match any substring in `got`.
-        if optionflags & ELLIPSIS:
-            if _ellipsis_match(want, got):
-                return True
-
-        # We didn't find any match; return false.
-        return False
-
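A quick illustration of the matching rules above (a sketch using the
standard-library names, which this vendored copy mirrors):

    from doctest import OutputChecker, ELLIPSIS
    checker = OutputChecker()
    assert checker.check_output('a...z\n', 'abcdefz\n', ELLIPSIS)
    assert not checker.check_output('a...z\n', 'abcdefz\n', 0)
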
-        if not (optionflags & DONT_ACCEPT_BLANKLINE):
-            got = re.sub('(?m)^[ ]*(?=\n)', BLANKLINE_MARKER, got)
-
-        # Check if we should use diff.
-        if self._do_a_fancy_diff(want, got, optionflags):
-            # Split want & got into lines.
-            want_lines = want.splitlines(True)  # True == keep line ends
-            got_lines = got.splitlines(True)
-            # Use difflib to find their differences.
-            if optionflags & REPORT_UDIFF:
-                diff = difflib.unified_diff(want_lines, got_lines, n=2)
-                diff = list(diff)[2:] # strip the diff header
-                kind = 'unified diff with -expected +actual'
-            elif optionflags & REPORT_CDIFF:
-                diff = difflib.context_diff(want_lines, got_lines, n=2)
-                diff = list(diff)[2:] # strip the diff header
-                kind = 'context diff with expected followed by actual'
-            elif optionflags & REPORT_NDIFF:
-                engine = difflib.Differ(charjunk=difflib.IS_CHARACTER_JUNK)
-                diff = list(engine.compare(want_lines, got_lines))
-                kind = 'ndiff with -expected +actual'
-            else:
-                assert 0, 'Bad diff option'
-            # Remove trailing whitespace on diff output.
-            diff = [line.rstrip() + '\n' for line in diff]
-            return 'Differences (%s):\n' % kind + _indent(''.join(diff))
-
-        # If we're not using diff, then simply list the expected
-        # output followed by the actual output.
-        if want and got:
-            return 'Expected:\n%sGot:\n%s' % (_indent(want), _indent(got))
-        elif want:
-            return 'Expected:\n%sGot nothing\n' % _indent(want)
-        elif got:
-            return 'Expected nothing\nGot:\n%s' % _indent(got)
-        else:
-            return 'Expected nothing\nGot nothing\n'
-
-class DocTestFailure(Exception):
-    """A DocTest example has failed in debugging mode.
-
-    The exception instance has variables:
-
-    - test: the DocTest object being run
-
-    - example: the Example object that failed
-
-    - got: the actual output
-    """
-    def __init__(self, test, example, got):
-        self.test = test
-        self.example = example
-        self.got = got
-
-    def __str__(self):
-        return str(self.test)
-
-class UnexpectedException(Exception):
-    """A DocTest example has encountered an unexpected exception
-
-    The exception instance has variables:
-
-    - test: the DocTest object being run
-
-    - example: the Example object that failed
-
-    - exc_info: the exception info
-    """
-    def __init__(self, test, example, exc_info):
-        self.test = test
-        self.example = example
-        self.exc_info = exc_info
-
-    def __str__(self):
-        return str(self.test)
-
-class DebugRunner(DocTestRunner):
-    r"""Run doc tests but raise an exception as soon as there is a failure.
-
-       If an unexpected exception occurs, an UnexpectedException is raised.
-       It contains the test, the example, and the original exception:
-
-         >>> runner = DebugRunner(verbose=False)
-         >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42',
-         ...                                    {}, 'foo', 'foo.py', 0)
-         >>> try:
-         ...     runner.run(test)
-         ... except UnexpectedException, failure:
-         ...     pass
-
-         >>> failure.test is test
-         True
-
-         >>> failure.example.want
-         '42\n'
-
-         >>> exc_info = failure.exc_info
-         >>> raise exc_info[0], exc_info[1], exc_info[2]
-         Traceback (most recent call last):
-         ...
-         KeyError
-
-       We wrap the original exception to give the calling application
-       access to the test and example information.
-
-       If the output doesn't match, then a DocTestFailure is raised:
-
-         >>> test = DocTestParser().get_doctest('''
-         ...      >>> x = 1
-         ...      >>> x
-         ...      2
-         ...      ''', {}, 'foo', 'foo.py', 0)
-
-         >>> try:
-         ...     runner.run(test)
-         ... except DocTestFailure, failure:
-         ...     pass
-
-       DocTestFailure objects provide access to the test:
-
-         >>> failure.test is test
-         True
-
-       As well as to the example:
-
-         >>> failure.example.want
-         '2\n'
-
-       and the actual output:
-
-         >>> failure.got
-         '1\n'
-
-       If a failure or error occurs, the globals are left intact:
-
-         >>> del test.globs['__builtins__']
-         >>> test.globs
-         {'x': 1}
-
-         >>> test = DocTestParser().get_doctest('''
-         ...      >>> x = 2
-         ...      >>> raise KeyError
-         ...      ''', {}, 'foo', 'foo.py', 0)
-
-         >>> runner.run(test)
-         Traceback (most recent call last):
-         ...
-         UnexpectedException:
-
-         >>> del test.globs['__builtins__']
-         >>> test.globs
-         {'x': 2}
-
-       But the globals are cleared if there is no error:
-
-         >>> test = DocTestParser().get_doctest('''
-         ...      >>> x = 2
-         ...      ''', {}, 'foo', 'foo.py', 0)
-
-         >>> runner.run(test)
-         (0, 1)
-
-         >>> test.globs
-         {}
-
-       """
-
-    def run(self, test, compileflags=None, out=None, clear_globs=True):
-        r = DocTestRunner.run(self, test, compileflags, out, False)
-        if clear_globs:
-            test.globs.clear()
-        return r
-
-    def report_unexpected_exception(self, out, test, example, exc_info):
-        raise UnexpectedException(test, example, exc_info)
-
-    def report_failure(self, out, test, example, got):
-        raise DocTestFailure(test, example, got)
-
-######################################################################
-## 6. Test Functions
-######################################################################
-# These should be backwards compatible.
-
-# For backward compatibility, a global instance of a DocTestRunner
-# class, updated by testmod.
-master = None
-
-def testmod(m=None, name=None, globs=None, verbose=None, isprivate=None,
-            report=True, optionflags=0, extraglobs=None,
-            raise_on_error=False, exclude_empty=False):
-    """m=None, name=None, globs=None, verbose=None, isprivate=None,
-       report=True, optionflags=0, extraglobs=None, raise_on_error=False,
-       exclude_empty=False
-
-    Test examples in docstrings in functions and classes reachable
-    from module m (or the current module if m is not supplied), starting
-    with m.__doc__.  Unless isprivate is specified, private names
-    are not skipped.
-
-    Also test examples reachable from dict m.__test__ if it exists and is
-    not None.  m.__test__ maps names to functions, classes and strings;
-    function and class docstrings are tested even if the name is private;
-    strings are tested directly, as if they were docstrings.
-
-    Return (#failures, #tests).
-
-    See doctest.__doc__ for an overview.
-
-    Optional keyword arg "name" gives the name of the module; by default
-    use m.__name__.
-
-    Optional keyword arg "globs" gives a dict to be used as the globals
-    when executing examples; by default, use m.__dict__.  A copy of this
-    dict is actually used for each docstring, so that each docstring's
-    examples start with a clean slate.
-
-    Optional keyword arg "extraglobs" gives a dictionary that should be
-    merged into the globals that are used to execute examples.  By
-    default, no extra globals are used.  This is new in 2.4.
-
-    Optional keyword arg "verbose" prints lots of stuff if true, prints
-    only failures if false; by default, it's true iff "-v" is in sys.argv.
-
-    Optional keyword arg "report" prints a summary at the end when true,
-    else prints nothing at the end.  In verbose mode, the summary is
-    detailed, else very brief (in fact, empty if all tests passed).
-
-    Optional keyword arg "optionflags" or's together module constants,
-    and defaults to 0.  This is new in 2.3.  Possible values (see the
-    docs for details):
-
-        DONT_ACCEPT_TRUE_FOR_1
-        DONT_ACCEPT_BLANKLINE
-        NORMALIZE_WHITESPACE
-        ELLIPSIS
-        IGNORE_EXCEPTION_DETAIL
-        REPORT_UDIFF
-        REPORT_CDIFF
-        REPORT_NDIFF
-        REPORT_ONLY_FIRST_FAILURE
-
-    Optional keyword arg "raise_on_error" raises an exception on the
-    first unexpected exception or failure.  This allows failures to be
-    post-mortem debugged.
-
-    Deprecated in Python 2.4:
-    Optional keyword arg "isprivate" specifies a function used to
-    determine whether a name is private.  The default function is
-    to treat all functions as public.  Optionally, "isprivate" can be
-    set to doctest.is_private to skip over functions marked as private
-    using the underscore naming convention; see its docs for details.
-
-    Advanced tomfoolery:  testmod runs methods of a local instance of
-    class doctest.Tester, then merges the results into (or creates)
-    global Tester instance doctest.master.  Methods of doctest.master
-    can be called directly too, if you want to do something unusual.
-    Passing report=0 to testmod is especially useful then, to delay
-    displaying a summary.  Invoke doctest.master.summarize(verbose)
-    when you're done fiddling.
-    """
-    global master
-
-    if isprivate is not None:
-        warnings.warn("the isprivate argument is deprecated; "
-                      "examine DocTestFinder.find() lists instead",
-                      DeprecationWarning)
-
-    # If no module was given, then use __main__.
-    if m is None:
-        # DWA - m will still be None if this wasn't invoked from the command
-        # line, in which case the following TypeError is about as good an error
-        # as we should expect
-        m = sys.modules.get('__main__')
-
-    # Check that we were actually given a module.
-    if not inspect.ismodule(m):
-        raise TypeError("testmod: module required; %r" % (m,))
-
-    # If no name was given, then use the module's name.
-    if name is None:
-        name = m.__name__
-
-    # Find, parse, and run all tests in the given module.
-    finder = DocTestFinder(_namefilter=isprivate, exclude_empty=exclude_empty)
-
-    if raise_on_error:
-        runner = DebugRunner(verbose=verbose, optionflags=optionflags)
-    else:
-        runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
-
-    for test in finder.find(m, name, globs=globs, extraglobs=extraglobs):
-        runner.run(test)
-
-    if report:
-        runner.summarize()
-
-    if master is None:
-        master = runner
-    else:
-        master.merge(runner)
-
-    return runner.failures, runner.tries
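A minimal testmod invocation, for reference while reading the API above (the
module and its docstring are hypothetical, not part of this patch):

    # mymath.py -- hypothetical example module
    def double(n):
        """Return n doubled.

        >>> double(21)
        42
        """
        return n * 2

    if __name__ == '__main__':
        import doctest
        doctest.testmod()  # returns (#failures, #tests)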
-
-def testfile(filename, module_relative=True, name=None, package=None,
-             globs=None, verbose=None, report=True, optionflags=0,
-             extraglobs=None, raise_on_error=False, parser=DocTestParser()):
-    """
-    Test examples in the given file.  Return (#failures, #tests).
-
-    Optional keyword arg "module_relative" specifies how filenames
-    should be interpreted:
-
-      - If "module_relative" is True (the default), then "filename"
-        specifies a module-relative path.  By default, this path is
-        relative to the calling module's directory; but if the
-        "package" argument is specified, then it is relative to that
-        package.  To ensure os-independence, "filename" should use
-        "/" characters to separate path segments, and should not
-        be an absolute path (i.e., it may not begin with "/").
-
-      - If "module_relative" is False, then "filename" specifies an
-        os-specific path.  The path may be absolute or relative (to
-        the current working directory).
-
-    Optional keyword arg "name" gives the name of the test; by default
-    use the file's basename.
-
-    Optional keyword argument "package" is a Python package or the
-    name of a Python package whose directory should be used as the
-    base directory for a module relative filename.  If no package is
-    specified, then the calling module's directory is used as the base
-    directory for module relative filenames.  It is an error to
-    specify "package" if "module_relative" is False.
-
-    Optional keyword arg "globs" gives a dict to be used as the globals
-    when executing examples; by default, use {}.  A copy of this dict
-    is actually used for each docstring, so that each docstring's
-    examples start with a clean slate.
-
-    Optional keyword arg "extraglobs" gives a dictionary that should be
-    merged into the globals that are used to execute examples.  By
-    default, no extra globals are used.
-
-    Optional keyword arg "verbose" prints lots of stuff if true, prints
-    only failures if false; by default, it's true iff "-v" is in sys.argv.
-
-    Optional keyword arg "report" prints a summary at the end when true,
-    else prints nothing at the end.  In verbose mode, the summary is
-    detailed, else very brief (in fact, empty if all tests passed).
-
-    Optional keyword arg "optionflags" or's together module constants,
-    and defaults to 0.  Possible values (see the docs for details):
-
-        DONT_ACCEPT_TRUE_FOR_1
-        DONT_ACCEPT_BLANKLINE
-        NORMALIZE_WHITESPACE
-        ELLIPSIS
-        IGNORE_EXCEPTION_DETAIL
-        REPORT_UDIFF
-        REPORT_CDIFF
-        REPORT_NDIFF
-        REPORT_ONLY_FIRST_FAILURE
-
-    Optional keyword arg "raise_on_error" raises an exception on the
-    first unexpected exception or failure.  This allows failures to be
-    post-mortem debugged.
-
-    Optional keyword arg "parser" specifies a DocTestParser (or
-    subclass) that should be used to extract tests from the files.
-
-    Advanced tomfoolery:  testmod runs methods of a local instance of
-    class doctest.Tester, then merges the results into (or creates)
-    global Tester instance doctest.master.  Methods of doctest.master
-    can be called directly too, if you want to do something unusual.
-    Passing report=0 to testmod is especially useful then, to delay
-    displaying a summary.  Invoke doctest.master.summarize(verbose)
-    when you're done fiddling.
-    """
-    global master
-
-    if package and not module_relative:
-        raise ValueError("Package may only be specified for module-"
-                         "relative paths.")
-
-    # Relativize the path
-    if module_relative:
-        package = _normalize_module(package)
-        filename = _module_relative_path(package, filename)
-
-    # If no name was given, then use the file's name.
-    if name is None:
-        name = os.path.basename(filename)
-
-    # Assemble the globals.
-    if globs is None:
-        globs = {}
-    else:
-        globs = globs.copy()
-    if extraglobs is not None:
-        globs.update(extraglobs)
-
-    if raise_on_error:
-        runner = DebugRunner(verbose=verbose, optionflags=optionflags)
-    else:
-        runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
-
-    # Read the file, convert it to a test, and run it.
-    s = open(filename).read()
-    test = parser.get_doctest(s, globs, name, filename, 0)
-    runner.run(test)
-
-    if report:
-        runner.summarize()
-
-    if master is None:
-        master = runner
-    else:
-        master.merge(runner)
-
-    return runner.failures, runner.tries
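And the file-based counterpart of testmod, for comparison (the file name is
hypothetical, not part of this patch):

    import doctest
    # 'example.txt' is resolved relative to the calling module, since
    # module_relative defaults to True.
    failures, tests = doctest.testfile('example.txt',
                                       optionflags=doctest.ELLIPSIS)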
-
-def run_docstring_examples(f, globs, verbose=False, name="NoName",
-                           compileflags=None, optionflags=0):
-    """
-    Test examples in the given object's docstring (`f`), using `globs`
-    as globals.  Optional argument `name` is used in failure messages.
-    If the optional argument `verbose` is true, then generate output
-    even if there are no failures.
-
-    `compileflags` gives the set of flags that should be used by the
-    Python compiler when running the examples.  If not specified, then
-    it will default to the set of future-import flags that apply to
-    `globs`.
-
-    Optional keyword arg `optionflags` specifies options for the
-    testing and output.  See the documentation for `testmod` for more
-    information.
-    """
-    # Find, parse, and run all tests in the given module.
-    finder = DocTestFinder(verbose=verbose, recurse=False)
-    runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
-    for test in finder.find(f, name, globs=globs):
-        runner.run(test, compileflags=compileflags)
-
-######################################################################
-## 7. Tester
-######################################################################
-# This is provided only for backwards compatibility.  It's not
-# actually used in any way.
-
-class Tester:
-    def __init__(self, mod=None, globs=None, verbose=None,
-                 isprivate=None, optionflags=0):
-
-        warnings.warn("class Tester is deprecated; "
-                      "use class doctest.DocTestRunner instead",
-                      DeprecationWarning, stacklevel=2)
-        if mod is None and globs is None:
-            raise TypeError("Tester.__init__: must specify mod or globs")
-        if mod is not None and not inspect.ismodule(mod):
-            raise TypeError("Tester.__init__: mod must be a module; %r" %
-                            (mod,))
-        if globs is None:
-            globs = mod.__dict__
-        self.globs = globs
-
-        self.verbose = verbose
-        self.isprivate = isprivate
-        self.optionflags = optionflags
-        self.testfinder = DocTestFinder(_namefilter=isprivate)
-        self.testrunner = DocTestRunner(verbose=verbose,
-                                        optionflags=optionflags)
-
-    def runstring(self, s, name):
-        test = DocTestParser().get_doctest(s, self.globs, name, None, None)
-        if self.verbose:
-            print "Running string", name
-        (f,t) = self.testrunner.run(test)
-        if self.verbose:
-            print f, "of", t, "examples failed in string", name
-        return (f,t)
-
-    def rundoc(self, object, name=None, module=None):
-        f = t = 0
-        tests = self.testfinder.find(object, name, module=module,
-                                     globs=self.globs)
-        for test in tests:
-            (f2, t2) = self.testrunner.run(test)
-            (f,t) = (f+f2, t+t2)
-        return (f,t)
-
-    def rundict(self, d, name, module=None):
-        import types
-        m = types.ModuleType(name)
-        m.__dict__.update(d)
-        if module is None:
-            module = False
-        return self.rundoc(m, name, module)
-
-    def run__test__(self, d, name):
-        import types
-        m = types.ModuleType(name)
-        m.__test__ = d
-        return self.rundoc(m, name)
-
-    def summarize(self, verbose=None):
-        return self.testrunner.summarize(verbose)
-
-    def merge(self, other):
-        self.testrunner.merge(other.testrunner)
-
-######################################################################
-## 8. Unittest Support
-######################################################################
-
-_unittest_reportflags = 0
-
-def set_unittest_reportflags(flags):
-    """Sets the unittest option flags.
-
-    The old flag is returned so that a runner could restore the old
-    value if it wished to:
-
-      >>> old = _unittest_reportflags
-      >>> set_unittest_reportflags(REPORT_NDIFF |
-      ...                          REPORT_ONLY_FIRST_FAILURE) == old
-      True
-
-      >>> import doctest
-      >>> doctest._unittest_reportflags == (REPORT_NDIFF |
-      ...                                   REPORT_ONLY_FIRST_FAILURE)
-      True
-
-    Only reporting flags can be set:
-
-      >>> set_unittest_reportflags(ELLIPSIS)
-      Traceback (most recent call last):
-      ...
-      ValueError: ('Only reporting flags allowed', 8)
-
-      >>> set_unittest_reportflags(old) == (REPORT_NDIFF |
-      ...                                   REPORT_ONLY_FIRST_FAILURE)
-      True
-    """
-    global _unittest_reportflags
-
-    if (flags & REPORTING_FLAGS) != flags:
-        raise ValueError("Only reporting flags allowed", flags)
-    old = _unittest_reportflags
-    _unittest_reportflags = flags
-    return old
-
-
-class DocTestCase(unittest.TestCase):
-
-    def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
-                 checker=None):
-
-        unittest.TestCase.__init__(self)
-        self._dt_optionflags = optionflags
-        self._dt_checker = checker
-        self._dt_test = test
-        self._dt_setUp = setUp
-        self._dt_tearDown = tearDown
-
-    def setUp(self):
-        test = self._dt_test
-
-        if self._dt_setUp is not None:
-            self._dt_setUp(test)
-
-    def tearDown(self):
-        test = self._dt_test
-
-        if self._dt_tearDown is not None:
-            self._dt_tearDown(test)
-
-        test.globs.clear()
-
-    def runTest(self):
-        test = self._dt_test
-        old = sys.stdout
-        new = StringIO()
-        optionflags = self._dt_optionflags
-
-        if not (optionflags & REPORTING_FLAGS):
-            # The option flags don't include any reporting flags,
-            # so add the default reporting flags
-            optionflags |= _unittest_reportflags
-
-        runner = DocTestRunner(optionflags=optionflags,
-                               checker=self._dt_checker, verbose=False)
-
-        try:
-            runner.DIVIDER = "-"*70
-            failures, tries = runner.run(
-                test, out=new.write, clear_globs=False)
-        finally:
-            sys.stdout = old
-
-        if failures:
-            raise self.failureException(self.format_failure(new.getvalue()))
-
-    def format_failure(self, err):
-        test = self._dt_test
-        if test.lineno is None:
-            lineno = 'unknown line number'
-        else:
-            lineno = '%s' % test.lineno
-        lname = '.'.join(test.name.split('.')[-1:])
-        return ('Failed doctest test for %s\n'
-                '  File "%s", line %s, in %s\n\n%s'
-                % (test.name, test.filename, lineno, lname, err)
-                )
-
-    def debug(self):
-        r"""Run the test case without results and without catching exceptions
-
-           The unit test framework includes a debug method on test cases
-           and test suites to support post-mortem debugging.  The test code
-           is run in such a way that errors are not caught.  This way a
-           caller can catch the errors and initiate post-mortem debugging.
-
-           The DocTestCase provides a debug method that raises
-           UnexpectedException errors if there is an unexpected
-           exception:
-
-             >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42',
-             ...                                    {}, 'foo', 'foo.py', 0)
-             >>> case = DocTestCase(test)
-             >>> try:
-             ...     case.debug()
-             ... except UnexpectedException, failure:
-             ...     pass
-
-           The UnexpectedException contains the test, the example, and
-           the original exception:
-
-             >>> failure.test is test
-             True
-
-             >>> failure.example.want
-             '42\n'
-
-             >>> exc_info = failure.exc_info
-             >>> raise exc_info[0], exc_info[1], exc_info[2]
-             Traceback (most recent call last):
-             ...
-             KeyError
-
-           If the output doesn't match, then a DocTestFailure is raised:
-
-             >>> test = DocTestParser().get_doctest('''
-             ...      >>> x = 1
-             ...      >>> x
-             ...      2
-             ...      ''', {}, 'foo', 'foo.py', 0)
-             >>> case = DocTestCase(test)
-
-             >>> try:
-             ...     case.debug()
-             ... except DocTestFailure, failure:
-             ...     pass
-
-           DocTestFailure objects provide access to the test:
-
-             >>> failure.test is test
-             True
-
-           As well as to the example:
-
-             >>> failure.example.want
-             '2\n'
-
-           and the actual output:
-
-             >>> failure.got
-             '1\n'
-
-           """
-
-        self.setUp()
-        runner = DebugRunner(optionflags=self._dt_optionflags,
-                             checker=self._dt_checker, verbose=False)
-        runner.run(self._dt_test)
-        self.tearDown()
-
-    def id(self):
-        return self._dt_test.name
-
-    def __repr__(self):
-        name = self._dt_test.name.split('.')
-        return "%s (%s)" % (name[-1], '.'.join(name[:-1]))
-
-    __str__ = __repr__
-
-    def shortDescription(self):
-        return "Doctest: " + self._dt_test.name
-
-def DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None,
-                 **options):
-    """
-    Convert doctest tests for a module to a unittest test suite.
-
-    This converts each documentation string in a module that
-    contains doctest tests to a unittest test case.  If any of the
-    tests in a doc string fail, then the test case fails.  An exception
-    is raised showing the name of the file containing the test and a
-    (sometimes approximate) line number.
-
-    The `module` argument provides the module to be tested.  The argument
-    can be either a module or a module name.
-
-    If no argument is given, the calling module is used.
-
-    A number of options may be provided as keyword arguments:
-
-    setUp
-      A set-up function.  This is called before running the
-      tests in each file. The setUp function will be passed a DocTest
-      object.  The setUp function can access the test globals as the
-      globs attribute of the test passed.
-
-    tearDown
-      A tear-down function.  This is called after running the
-      tests in each file.  The tearDown function will be passed a DocTest
-      object.  The tearDown function can access the test globals as the
-      globs attribute of the test passed.
-
-    globs
-      A dictionary containing initial global variables for the tests.
-
-    optionflags
-      A set of doctest option flags expressed as an integer.
-    """
-
-    if test_finder is None:
-        test_finder = DocTestFinder()
-
-    module = _normalize_module(module)
-    tests = test_finder.find(module, globs=globs, extraglobs=extraglobs)
-    if globs is None:
-        globs = module.__dict__
-    if not tests:
-        # Why do we want to do this? Because it reveals a bug that might
-        # otherwise be hidden.
-        raise ValueError(module, "has no tests")
-
-    tests.sort()
-    suite = unittest.TestSuite()
-    for test in tests:
-        if len(test.examples) == 0:
-            continue
-        if not test.filename:
-            filename = module.__file__
-            if filename[-4:] in (".pyc", ".pyo"):
-                filename = filename[:-1]
-            test.filename = filename
-        suite.addTest(DocTestCase(test, **options))
-
-    return suite
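A sketch of wiring DocTestSuite into a plain unittest run (the module name is
hypothetical, not part of this patch):

    import unittest, doctest
    import mymodule  # hypothetical module whose docstrings hold tests

    suite = unittest.TestSuite()
    suite.addTest(doctest.DocTestSuite(mymodule))
    unittest.TextTestRunner(verbosity=1).run(suite)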
-
-class DocFileCase(DocTestCase):
-
-    def id(self):
-        return '_'.join(self._dt_test.name.split('.'))
-
-    def __repr__(self):
-        return self._dt_test.filename
-    __str__ = __repr__
-
-    def format_failure(self, err):
-        return ('Failed doctest test for %s\n  File "%s", line 0\n\n%s'
-                % (self._dt_test.name, self._dt_test.filename, err)
-                )
-
-def DocFileTest(path, module_relative=True, package=None,
-                globs=None, parser=DocTestParser(), **options):
-    if globs is None:
-        globs = {}
-
-    if package and not module_relative:
-        raise ValueError("Package may only be specified for module-"
-                         "relative paths.")
-
-    # Relativize the path.
-    if module_relative:
-        package = _normalize_module(package)
-        path = _module_relative_path(package, path)
-
-    # Find the file and read it.
-    name = os.path.basename(path)
-    doc = open(path).read()
-
-    # Convert it to a test, and wrap it in a DocFileCase.
-    test = parser.get_doctest(doc, globs, name, path, 0)
-    return DocFileCase(test, **options)
-
-def DocFileSuite(*paths, **kw):
-    """A unittest suite for one or more doctest files.
-
-    The path to each doctest file is given as a string; the
-    interpretation of that string depends on the keyword argument
-    "module_relative".
-
-    A number of options may be provided as keyword arguments:
-
-    module_relative
-      If "module_relative" is True, then the given file paths are
-      interpreted as os-independent module-relative paths.  By
-      default, these paths are relative to the calling module's
-      directory; but if the "package" argument is specified, then
-      they are relative to that package.  To ensure os-independence,
-      "filename" should use "/" characters to separate path
-      segments, and may not be an absolute path (i.e., it may not
-      begin with "/").
-
-      If "module_relative" is False, then the given file paths are
-      interpreted as os-specific paths.  These paths may be absolute
-      or relative (to the current working directory).
-
-    package
-      A Python package or the name of a Python package whose directory
-      should be used as the base directory for module relative paths.
-      If "package" is not specified, then the calling module's
-      directory is used as the base directory for module relative
-      filenames.  It is an error to specify "package" if
-      "module_relative" is False.
-
-    setUp
-      A set-up function.  This is called before running the
-      tests in each file. The setUp function will be passed a DocTest
-      object.  The setUp function can access the test globals as the
-      globs attribute of the test passed.
-
-    tearDown
-      A tear-down function.  This is called after running the
-      tests in each file.  The tearDown function will be passed a DocTest
-      object.  The tearDown function can access the test globals as the
-      globs attribute of the test passed.
-
-    globs
-      A dictionary containing initial global variables for the tests.
-
-    optionflags
-      A set of doctest option flags expressed as an integer.
-
-    parser
-      A DocTestParser (or subclass) that should be used to extract
-      tests from the files.
-    """
-    suite = unittest.TestSuite()
-
-    # We do this here so that _normalize_module is called at the right
-    # level.  If it were called in DocFileTest, then this function
-    # would be the caller and we might guess the package incorrectly.
-    if kw.get('module_relative', True):
-        kw['package'] = _normalize_module(kw.get('package'))
-
-    for path in paths:
-        suite.addTest(DocFileTest(path, **kw))
-
-    return suite
-
-######################################################################
-## 9. Debugging Support
-######################################################################
-
-def script_from_examples(s):
-    r"""Extract script from text with examples.
-
-       Converts text with examples to a Python script.  Example input is
-       converted to regular code.  Example output and all other words
-       are converted to comments:
-
-       >>> text = '''
-       ...       Here are examples of simple math.
-       ...
-       ...           Python has super accurate integer addition
-       ...
-       ...           >>> 2 + 2
-       ...           5
-       ...
-       ...           And very friendly error messages:
-       ...
-       ...           >>> 1/0
-       ...           To Infinity
-       ...           And
-       ...           Beyond
-       ...
-       ...           You can use logic if you want:
-       ...
-       ...           >>> if 0:
-       ...           ...    blah
-       ...           ...    blah
-       ...           ...
-       ...
-       ...           Ho hum
-       ...           '''
-
-       >>> print script_from_examples(text)
-       # Here are examples of simple math.
-       #
-       #     Python has super accurate integer addition
-       #
-       2 + 2
-       # Expected:
-       ## 5
-       #
-       #     And very friendly error messages:
-       #
-       1/0
-       # Expected:
-       ## To Infinity
-       ## And
-       ## Beyond
-       #
-       #     You can use logic if you want:
-       #
-       if 0:
-          blah
-          blah
-       #
-       #     Ho hum
-       """
-    output = []
-    for piece in DocTestParser().parse(s):
-        if isinstance(piece, Example):
-            # Add the example's source code (strip trailing NL)
-            output.append(piece.source[:-1])
-            # Add the expected output:
-            want = piece.want
-            if want:
-                output.append('# Expected:')
-                output += ['## '+l for l in want.split('\n')[:-1]]
-        else:
-            # Add non-example text.
-            output += [_comment_line(l)
-                       for l in piece.split('\n')[:-1]]
-
-    # Trim junk on both ends.
-    while output and output[-1] == '#':
-        output.pop()
-    while output and output[0] == '#':
-        output.pop(0)
-    # Combine the output, and return it.
-    return '\n'.join(output)
-
-def testsource(module, name):
-    """Extract the test sources from a doctest docstring as a script.
-
-    Provide the module (or dotted name of the module) containing the
-    test to be debugged and the name (within the module) of the object
-    with the doc string with tests to be debugged.
-    """
-    module = _normalize_module(module)
-    tests = DocTestFinder().find(module)
-    test = [t for t in tests if t.name == name]
-    if not test:
-        raise ValueError(name, "not found in tests")
-    test = test[0]
-    testsrc = script_from_examples(test.docstring)
-    return testsrc
-
-def debug_src(src, pm=False, globs=None):
-    """Debug a single doctest docstring, in argument `src`"""
-    testsrc = script_from_examples(src)
-    debug_script(testsrc, pm, globs)
-
-def debug_script(src, pm=False, globs=None):
-    "Debug a test script.  `src` is the script, as a string."
-    import pdb
-
-    # Note that tempfile.NamedTemporaryFile() cannot be used.  As the
-    # docs say, a file so created cannot be opened by name a second time
-    # on modern Windows boxes, and execfile() needs to open it.
-    srcfilename = tempfile.mktemp(".py", "doctestdebug")
-    f = open(srcfilename, 'w')
-    f.write(src)
-    f.close()
-
-    try:
-        if globs:
-            globs = globs.copy()
-        else:
-            globs = {}
-
-        if pm:
-            try:
-                execfile(srcfilename, globs, globs)
-            except:
-                print sys.exc_info()[1]
-                pdb.post_mortem(sys.exc_info()[2])
-        else:
-            # Note that %r is vital here.  '%s' instead can, e.g., cause
-            # backslashes to get treated as metacharacters on Windows.
-            pdb.run("execfile(%r)" % srcfilename, globs, globs)
-
-    finally:
-        os.remove(srcfilename)
-
-def debug(module, name, pm=False):
-    """Debug a single doctest docstring.
-
-    Provide the module (or dotted name of the module) containing the
-    test to be debugged and the name (within the module) of the object
-    with the docstring with tests to be debugged.
-    """
-    module = _normalize_module(module)
-    testsrc = testsource(module, name)
-    debug_script(testsrc, pm, module.__dict__)
-
-######################################################################
-## 10. Example Usage
-######################################################################
-class _TestClass:
-    """
-    A pointless class, for sanity-checking of docstring testing.
-
-    Methods:
-        square()
-        get()
-
-    >>> _TestClass(13).get() + _TestClass(-12).get()
-    1
-    >>> hex(_TestClass(13).square().get())
-    '0xa9'
-    """
-
-    def __init__(self, val):
-        """val -> _TestClass object with associated value val.
-
-        >>> t = _TestClass(123)
-        >>> print t.get()
-        123
-        """
-
-        self.val = val
-
-    def square(self):
-        """square() -> square TestClass's associated value
-
-        >>> _TestClass(13).square().get()
-        169
-        """
-
-        self.val = self.val ** 2
-        return self
-
-    def get(self):
-        """get() -> return TestClass's associated value.
-
-        >>> x = _TestClass(-42)
-        >>> print x.get()
-        -42
-        """
-
-        return self.val
-
-__test__ = {"_TestClass": _TestClass,
-            "string": r"""
-                      Example of a string object, searched as-is.
-                      >>> x = 1; y = 2
-                      >>> x + y, x * y
-                      (3, 2)
-                      """,
-
-            "bool-int equivalence": r"""
-                                    In 2.2, boolean expressions displayed
-                                    0 or 1.  By default, we still accept
-                                    them.  This can be disabled by passing
-                                    DONT_ACCEPT_TRUE_FOR_1 to the new
-                                    optionflags argument.
-                                    >>> 4 == 4
-                                    1
-                                    >>> 4 == 4
-                                    True
-                                    >>> 4 > 4
-                                    0
-                                    >>> 4 > 4
-                                    False
-                                    """,
-
-            "blank lines": r"""
-                Blank lines can be marked with <BLANKLINE>:
-                    >>> print 'foo\n\nbar\n'
-                    foo
-                    <BLANKLINE>
-                    bar
-                    <BLANKLINE>
-            """,
-
-            "ellipsis": r"""
-                If the ellipsis flag is used, then '...' can be used to
-                elide substrings in the desired output:
-                    >>> print range(1000) #doctest: +ELLIPSIS
-                    [0, 1, 2, ..., 999]
-            """,
-
-            "whitespace normalization": r"""
-                If the whitespace normalization flag is used, then
-                differences in whitespace are ignored.
-                    >>> print range(30) #doctest: +NORMALIZE_WHITESPACE
-                    [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
-                     15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
-                     27, 28, 29]
-            """,
-           }
-
-def _test():
-    r = unittest.TextTestRunner()
-    r.run(DocTestSuite())
-
-if __name__ == "__main__":
-    _test()
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/server.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/server.py
deleted file mode 100755
index f4aaaa1c..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/server.py
+++ /dev/null
@@ -1,48 +0,0 @@
-"""Basic http server for tests to simulate PyPI or custom indexes
-"""
-import urllib2
-import sys
-from threading import Thread
-from BaseHTTPServer import HTTPServer
-from SimpleHTTPServer import SimpleHTTPRequestHandler
-
-class IndexServer(HTTPServer):
-    """Basic single-threaded http server simulating a package index
-
-    You can use this server in unittest like this::
-        s = IndexServer()
-        s.start()
-        index_url = s.base_url() + 'mytestindex'
-        # do some test requests to the index
-        # The index files should be located in setuptools/tests/indexes
-        s.stop()
-    """
-    def __init__(self):
-        HTTPServer.__init__(self, ('', 0), SimpleHTTPRequestHandler)
-        self._run = True
-
-    def serve(self):
-        while True:
-            self.handle_request()
-            if not self._run: break
-
-    def start(self):
-        self.thread = Thread(target=self.serve)
-        self.thread.start()
-
-    def stop(self):
-        """self.shutdown is not supported on python < 2.6"""
-        self._run = False
-        try:
-            if sys.version > '2.6':
-                urllib2.urlopen('http://127.0.0.1:%s/' % self.server_port,
-                                None, 5)
-            else:
-                urllib2.urlopen('http://127.0.0.1:%s/' % self.server_port)
-        except urllib2.URLError:
-            pass
-        self.thread.join()
-
-    def base_url(self):
-        port = self.server_port
-        return 'http://127.0.0.1:%s/setuptools/tests/indexes/' % port
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_build_ext.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_build_ext.py
deleted file mode 100755
index a520ced9..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_build_ext.py
+++ /dev/null
@@ -1,20 +0,0 @@
-"""build_ext tests
-"""
-import os, shutil, tempfile, unittest
-from distutils.command.build_ext import build_ext as distutils_build_ext
-from setuptools.command.build_ext import build_ext
-from setuptools.dist import Distribution
-
-class TestBuildExtTest(unittest.TestCase):
-
-    def test_get_ext_filename(self):
-        # setuptools needs to give back the same
-        # result as distutils, even if the fullname
-        # is not in ext_map
-        dist = Distribution()
-        cmd = build_ext(dist)
-        cmd.ext_map['foo/bar'] = ''
-        res = cmd.get_ext_filename('foo')
-        wanted = distutils_build_ext.get_ext_filename(cmd, 'foo')
-        assert res == wanted
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_develop.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_develop.py
deleted file mode 100755
index a567dd5a..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_develop.py
+++ /dev/null
@@ -1,82 +0,0 @@
-"""develop tests
-"""
-import sys
-import os, shutil, tempfile, unittest
-import tempfile
-import site
-from StringIO import StringIO
-
-from distutils.errors import DistutilsError
-from setuptools.command.develop import develop
-from setuptools.command import easy_install as easy_install_pkg
-from setuptools.dist import Distribution
-
-SETUP_PY = """\
-from setuptools import setup
-
-setup(name='foo')
-"""
-
-class TestDevelopTest(unittest.TestCase):
-
-    def setUp(self):
-        self.dir = tempfile.mkdtemp()
-        setup = os.path.join(self.dir, 'setup.py')
-        f = open(setup, 'w')
-        f.write(SETUP_PY)
-        f.close()
-        self.old_cwd = os.getcwd()
-        os.chdir(self.dir)
-        if sys.version >= "2.6":
-            self.old_base = site.USER_BASE
-            site.USER_BASE = tempfile.mkdtemp()
-            self.old_site = site.USER_SITE
-            site.USER_SITE = tempfile.mkdtemp()
-
-    def tearDown(self):
-        os.chdir(self.old_cwd)
-        shutil.rmtree(self.dir)
-        if sys.version >= "2.6":
-            shutil.rmtree(site.USER_BASE)
-            shutil.rmtree(site.USER_SITE)
-            site.USER_BASE = self.old_base
-            site.USER_SITE = self.old_site
-
-    def test_develop(self):
-        if sys.version < "2.6":
-            return
-        dist = Distribution()
-        dist.script_name = 'setup.py'
-        cmd = develop(dist)
-        cmd.user = 1
-        cmd.ensure_finalized()
-        cmd.install_dir = site.USER_SITE
-        cmd.user = 1
-        old_stdout = sys.stdout
-        sys.stdout = StringIO()
-        try:
-            cmd.run()
-        finally:
-            sys.stdout = old_stdout
-
-        # let's see if we got our egg link at the right place
-        content = os.listdir(site.USER_SITE)
-        content.sort()
-        self.assertEquals(content, ['UNKNOWN.egg-link', 'easy-install.pth'])
-
-    def test_develop_with_setup_requires(self):
-
-        wanted = ("Could not find suitable distribution for "
-                  "Requirement.parse('I-DONT-EXIST')")
-        old_dir = os.getcwd()
-        os.chdir(self.dir)
-        try:
-            try:
-                dist = Distribution({'setup_requires': ['I_DONT_EXIST']})
-            except DistutilsError, e:
-                error = str(e)
-                if error == wanted:
-                    pass
-        finally:
-            os.chdir(old_dir)
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_easy_install.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_easy_install.py
deleted file mode 100755
index 85616605..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_easy_install.py
+++ /dev/null
@@ -1,243 +0,0 @@
-"""Easy install Tests
-"""
-import sys
-import os, shutil, tempfile, unittest
-import site
-from StringIO import StringIO
-from setuptools.command.easy_install import easy_install, get_script_args, main
-from setuptools.command.easy_install import PthDistributions
-from setuptools.command import easy_install as easy_install_pkg
-from setuptools.dist import Distribution
-from pkg_resources import Distribution as PRDistribution
-
-try:
-    import multiprocessing
-    import logging
-    _LOG = logging.getLogger('test_easy_install')
-    logging.basicConfig(level=logging.INFO, stream=sys.stderr)
-    _MULTIPROC = True
-except ImportError:
-    _MULTIPROC = False
-    _LOG = None
-
-class FakeDist(object):
-    def get_entry_map(self, group):
-        if group != 'console_scripts':
-            return {}
-        return {'name': 'ep'}
-
-    def as_requirement(self):
-        return 'spec'
-
-WANTED = """\
-#!%s
-# EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name'
-__requires__ = 'spec'
-import sys
-from pkg_resources import load_entry_point
-
-if __name__ == '__main__':
-    sys.exit(
-        load_entry_point('spec', 'console_scripts', 'name')()
-    )
-""" % sys.executable
-
-SETUP_PY = """\
-from setuptools import setup
-
-setup(name='foo')
-"""
-
-class TestEasyInstallTest(unittest.TestCase):
-
-    def test_install_site_py(self):
-        dist = Distribution()
-        cmd = easy_install(dist)
-        cmd.sitepy_installed = False
-        cmd.install_dir = tempfile.mkdtemp()
-        try:
-            cmd.install_site_py()
-            sitepy = os.path.join(cmd.install_dir, 'site.py')
-            self.assert_(os.path.exists(sitepy))
-        finally:
-            shutil.rmtree(cmd.install_dir)
-
-    def test_get_script_args(self):
-        dist = FakeDist()
-
-        old_platform = sys.platform
-        try:
-            name, script = get_script_args(dist).next()
-        finally:
-            sys.platform = old_platform
-
-        self.assertEquals(script, WANTED)
-
-    def test_no_setup_cfg(self):
-        # makes sure easy_install as a command (main)
-        # doesn't use a setup.cfg file that is located
-        # in the current working directory
-        dir = tempfile.mkdtemp()
-        setup_cfg = open(os.path.join(dir, 'setup.cfg'), 'w')
-        setup_cfg.write('[easy_install]\nfind_links = http://example.com')
-        setup_cfg.close()
-        setup_py = open(os.path.join(dir, 'setup.py'), 'w')
-        setup_py.write(SETUP_PY)
-        setup_py.close()
-
-        from setuptools.dist import Distribution
-
-        def _parse_command_line(self):
-            msg = 'Error: a local setup.cfg was used'
-            opts = self.command_options
-            if 'easy_install' in opts:
-                assert 'find_links' not in opts['easy_install'], msg
-            return self._old_parse_command_line
-
-        Distribution._old_parse_command_line = Distribution.parse_command_line
-        Distribution.parse_command_line = _parse_command_line
-
-        old_wd = os.getcwd()
-        try:
-            os.chdir(dir)
-            main([])
-        finally:
-            os.chdir(old_wd)
-            shutil.rmtree(dir)
-
-    def test_no_find_links(self):
-        # new option '--no-find-links', that blocks find-links added at
-        # the project level
-        dist = Distribution()
-        cmd = easy_install(dist)
-        cmd.check_pth_processing = lambda : True
-        cmd.no_find_links = True
-        cmd.find_links = ['link1', 'link2']
-        cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
-        cmd.args = ['ok']
-        cmd.ensure_finalized()
-        self.assertEquals(cmd.package_index.scanned_urls, {})
-
-        # let's try without it (default behavior)
-        cmd = easy_install(dist)
-        cmd.check_pth_processing = lambda : True
-        cmd.find_links = ['link1', 'link2']
-        cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
-        cmd.args = ['ok']
-        cmd.ensure_finalized()
-        keys = cmd.package_index.scanned_urls.keys()
-        keys.sort()
-        self.assertEquals(keys, ['link1', 'link2'])
-
-
-class TestPTHFileWriter(unittest.TestCase):
-    def test_add_from_cwd_site_sets_dirty(self):
-        '''a pth file manager should set dirty
-        if a distribution is in site but also the cwd
-        '''
-        pth = PthDistributions('does-not_exist', [os.getcwd()])
-        self.assert_(not pth.dirty)
-        pth.add(PRDistribution(os.getcwd()))
-        self.assert_(pth.dirty)
-
-    def test_add_from_site_is_ignored(self):
-        pth = PthDistributions('does-not_exist', ['/test/location/does-not-have-to-exist'])
-        self.assert_(not pth.dirty)
-        pth.add(PRDistribution('/test/location/does-not-have-to-exist'))
-        self.assert_(not pth.dirty)
-
-
-class TestUserInstallTest(unittest.TestCase):
-
-    def setUp(self):
-        self.dir = tempfile.mkdtemp()
-        setup = os.path.join(self.dir, 'setup.py')
-        f = open(setup, 'w')
-        f.write(SETUP_PY)
-        f.close()
-        self.old_cwd = os.getcwd()
-        os.chdir(self.dir)
-        if sys.version >= "2.6":
-            self.old_has_site = easy_install_pkg.HAS_USER_SITE
-            self.old_file = easy_install_pkg.__file__
-            self.old_base = site.USER_BASE
-            site.USER_BASE = tempfile.mkdtemp()
-            self.old_site = site.USER_SITE
-            site.USER_SITE = tempfile.mkdtemp()
-            easy_install_pkg.__file__ = site.USER_SITE
-
-    def tearDown(self):
-        os.chdir(self.old_cwd)
-        shutil.rmtree(self.dir)
-        if sys.version >= "2.6":
-            shutil.rmtree(site.USER_BASE)
-            shutil.rmtree(site.USER_SITE)
-            site.USER_BASE = self.old_base
-            site.USER_SITE = self.old_site
-            easy_install_pkg.HAS_USER_SITE = self.old_has_site
-            easy_install_pkg.__file__ = self.old_file
-
-    def test_user_install_implied(self):
-        easy_install_pkg.HAS_USER_SITE = True # disabled sometimes
-        #XXX: replace with something meaningful
-        if sys.version < "2.6":
-            return #SKIP
-        dist = Distribution()
-        dist.script_name = 'setup.py'
-        cmd = easy_install(dist)
-        cmd.args = ['py']
-        cmd.ensure_finalized()
-        self.assertTrue(cmd.user, 'user should be implied')
-
-    def test_multiproc_atexit(self):
-        if not _MULTIPROC:
-            return
-        _LOG.info('this should not break')
-
-    def test_user_install_not_implied_without_usersite_enabled(self):
-        easy_install_pkg.HAS_USER_SITE = False # usually enabled
-        #XXX: replace with something meaningful
-        if sys.version < "2.6":
-            return #SKIP
-        dist = Distribution()
-        dist.script_name = 'setup.py'
-        cmd = easy_install(dist)
-        cmd.args = ['py']
-        cmd.initialize_options()
-        self.assertFalse(cmd.user, 'NOT user should be implied')
-
-    def test_local_index(self):
-        # make sure the local index is used
-        # when easy_install looks for installed
-        # packages
-        new_location = tempfile.mkdtemp()
-        target = tempfile.mkdtemp()
-        egg_file = os.path.join(new_location, 'foo-1.0.egg-info')
-        f = open(egg_file, 'w')
-        try:
-            f.write('Name: foo\n')
-        finally:
-            f.close()
-
-        sys.path.append(target)
-        old_ppath = os.environ.get('PYTHONPATH')
-        os.environ['PYTHONPATH'] = ':'.join(sys.path)
-        try:
-            dist = Distribution()
-            dist.script_name = 'setup.py'
-            cmd = easy_install(dist)
-            cmd.install_dir = target
-            cmd.args = ['foo']
-            cmd.ensure_finalized()
-            cmd.local_index.scan([new_location])
-            res = cmd.easy_install('foo')
-            self.assertEquals(res.location, new_location)
-        finally:
-            sys.path.remove(target)
-            shutil.rmtree(new_location)
-            shutil.rmtree(target)
-            if old_ppath is not None:
-                os.environ['PYTHONPATH'] = old_ppath
-            else:
-                del os.environ['PYTHONPATH']
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_packageindex.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_packageindex.py
deleted file mode 100755
index 42cb8c1e..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_packageindex.py
+++ /dev/null
@@ -1,112 +0,0 @@
-"""Package Index Tests
-"""
-# More would be better!
-import sys
-import os, shutil, tempfile, unittest, urllib2
-import pkg_resources
-import setuptools.package_index
-from server import IndexServer
-
-class TestPackageIndex(unittest.TestCase):
-
-    def test_bad_urls(self):
-        index = setuptools.package_index.PackageIndex()
-        url = 'http://127.0.0.1:0/nonesuch/test_package_index'
-        try:
-            v = index.open_url(url)
-        except Exception, v:
-            self.assert_(url in str(v))
-        else:
-            self.assert_(isinstance(v,urllib2.HTTPError))
-
-        # issue 16
-        # easy_install inquant.contentmirror.plone breaks because of a typo
-        # in its home URL
-        index = setuptools.package_index.PackageIndex(
-            hosts=('www.example.com',)
-        )
-
-        url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk'
-        try:
-            v = index.open_url(url)
-        except Exception, v:
-            self.assert_(url in str(v))
-        else:
-            self.assert_(isinstance(v, urllib2.HTTPError))
-
-        def _urlopen(*args):
-            import httplib
-            raise httplib.BadStatusLine('line')
-
-        old_urlopen = urllib2.urlopen
-        urllib2.urlopen = _urlopen
-        url = 'http://example.com'
-        try:
-            try:
-                v = index.open_url(url)
-            except Exception, v:
-                self.assert_('line' in str(v))
-            else:
-                raise AssertionError('Should have raised here!')
-        finally:
-            urllib2.urlopen = old_urlopen
-
-        # issue 20
-        url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk'
-        try:
-            index.open_url(url)
-        except Exception, v:
-            self.assert_('nonnumeric port' in str(v))
-
-
-        # issue #160
-        if sys.version_info[0] == 2 and sys.version_info[1] == 7:
-            # this should not fail
-            url = 'http://example.com'
-            page = (''
-                    '')
-            index.process_index(url, page)
-
-
-    def test_url_ok(self):
-        index = setuptools.package_index.PackageIndex(
-            hosts=('www.example.com',)
-        )
-        url = 'file:///tmp/test_package_index'
-        self.assert_(index.url_ok(url, True))
-
-    def test_links_priority(self):
-        """
-        Download links from the pypi simple index should be used before
-        external download links.
-        http://bitbucket.org/tarek/distribute/issue/163/md5-validation-error
-
-        Usecase :
-        - someone uploads a package on pypi, a md5 is generated
-        - someone manually copies this link (with the md5 in the url) onto an
-          external page accessible from the package page.
-        - someone reuploads the package (with a different md5)
-        - while easy_installing, an MD5 error occurs because the external link
-          is used
-        -> Distribute should use the link from pypi, not the external one.
-        """
-        # start an index server
-        server = IndexServer()
-        server.start()
-        index_url = server.base_url() + 'test_links_priority/simple/'
-
-        # scan a test index
-        pi = setuptools.package_index.PackageIndex(index_url)
-        requirement = pkg_resources.Requirement.parse('foobar')
-        pi.find_packages(requirement)
-        server.stop()
-
-        # the distribution has been found
-        self.assert_('foobar' in pi)
-        # we have only one link, because links are compared without md5
-        self.assert_(len(pi['foobar'])==1)
-        # the link should be from the index
-        self.assert_('correct_md5' in pi['foobar'][0].location)
-
-
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_resources.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_resources.py
deleted file mode 100755
index 883cfad1..00000000
--- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_resources.py
+++ /dev/null
@@ -1,565 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-# NOTE: the shebang and encoding lines are for ScriptHeaderTests; do not remove
-from unittest import TestCase, makeSuite; from pkg_resources import *
-from setuptools.command.easy_install import get_script_header, is_sh
-import os, pkg_resources, sys, StringIO
-try: frozenset
-except NameError:
-    from sets import ImmutableSet as frozenset
-
-class Metadata(EmptyProvider):
-    """Mock object to return metadata as if from an on-disk distribution"""
-
-    def __init__(self,*pairs):
-        self.metadata = dict(pairs)
-
-    def has_metadata(self,name):
-        return name in self.metadata
-
-    def get_metadata(self,name):
-        return self.metadata[name]
-
-    def get_metadata_lines(self,name):
-        return yield_lines(self.get_metadata(name))
-
-class DistroTests(TestCase):
-
-    def testCollection(self):
-        # empty path should produce no distributions
-        ad = Environment([], platform=None, python=None)
-        self.assertEqual(list(ad), [])
-        self.assertEqual(ad['FooPkg'],[])
-        ad.add(Distribution.from_filename("FooPkg-1.3_1.egg"))
-        ad.add(Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg"))
-        ad.add(Distribution.from_filename("FooPkg-1.2-py2.4.egg"))
-
-        # Name is in there now
-        self.assert_(ad['FooPkg'])
-        # But only 1 package
-        self.assertEqual(list(ad), ['foopkg'])
-
-        # Distributions sort by version
-        self.assertEqual(
-            [dist.version for dist in ad['FooPkg']], ['1.4','1.3-1','1.2']
-        )
-        # Removing a distribution leaves sequence alone
-        ad.remove(ad['FooPkg'][1])
-        self.assertEqual(
-            [dist.version for dist in ad['FooPkg']], ['1.4','1.2']
-        )
-        # And inserting adds them in order
-        ad.add(Distribution.from_filename("FooPkg-1.9.egg"))
-        self.assertEqual(
-            [dist.version for dist in ad['FooPkg']], ['1.9','1.4','1.2']
-        )
-
-        ws = WorkingSet([])
-        foo12 = Distribution.from_filename("FooPkg-1.2-py2.4.egg")
-        foo14 = Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg")
-        req, = parse_requirements("FooPkg>=1.3")
-
-        # Nominal case: no distros on path, should yield all applicable
-        self.assertEqual(ad.best_match(req,ws).version, '1.9')
-        # If a matching distro is already installed, should return only that
-        ws.add(foo14); self.assertEqual(ad.best_match(req,ws).version, '1.4')
-
-        # If the first matching distro is unsuitable, it's a version conflict
-        ws = WorkingSet([]); ws.add(foo12); ws.add(foo14)
-        self.assertRaises(VersionConflict, ad.best_match, req, ws)
-
-        # If more than one match on the path, the first one takes precedence
-        ws = WorkingSet([]); ws.add(foo14); ws.add(foo12); ws.add(foo14);
-        self.assertEqual(ad.best_match(req,ws).version, '1.4')
-
-    def checkFooPkg(self,d):
-        self.assertEqual(d.project_name, "FooPkg")
-        self.assertEqual(d.key, "foopkg")
-        self.assertEqual(d.version, "1.3-1")
-        self.assertEqual(d.py_version, "2.4")
-        self.assertEqual(d.platform, "win32")
-        self.assertEqual(d.parsed_version, parse_version("1.3-1"))
-
-    def testDistroBasics(self):
-        d = Distribution(
-            "/some/path",
-            project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32"
-        )
-        self.checkFooPkg(d)
-
-        d = Distribution("/some/path")
-        self.assertEqual(d.py_version, sys.version[:3])
-        self.assertEqual(d.platform, None)
-
-    def testDistroParse(self):
-        d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg")
-        self.checkFooPkg(d)
-        d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg-info")
-        self.checkFooPkg(d)
-
-    def testDistroMetadata(self):
-        d = Distribution(
-            "/some/path", project_name="FooPkg", py_version="2.4", platform="win32",
-            metadata = Metadata(
-                ('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n")
-            )
-        )
-        self.checkFooPkg(d)
-
-
-    def distRequires(self, txt):
-        return Distribution("/foo", metadata=Metadata(('depends.txt', txt)))
-
-    def checkRequires(self, dist, txt, extras=()):
-        self.assertEqual(
-            list(dist.requires(extras)),
-            list(parse_requirements(txt))
-        )
-
-    def testDistroDependsSimple(self):
-        for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0":
-            self.checkRequires(self.distRequires(v), v)
-
-
-    def testResolve(self):
-        ad = Environment([]); ws = WorkingSet([])
-        # Resolving no requirements -> nothing to install
-        self.assertEqual( list(ws.resolve([],ad)), [] )
-        # Request something not in the collection -> DistributionNotFound
-        self.assertRaises(
-            DistributionNotFound, ws.resolve, parse_requirements("Foo"), ad
-        )
-        Foo = Distribution.from_filename(
-            "/foo_dir/Foo-1.2.egg",
-            metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0"))
-        )
-        ad.add(Foo); ad.add(Distribution.from_filename("Foo-0.9.egg"))
-
-        # Request thing(s) that are available -> list to activate
-        for i in range(3):
-            targets = list(ws.resolve(parse_requirements("Foo"), ad))
-            self.assertEqual(targets, [Foo])
-            map(ws.add,targets)
-        self.assertRaises(VersionConflict, ws.resolve,
-            parse_requirements("Foo==0.9"), ad)
-        ws = WorkingSet([]) # reset
-
-        # Request an extra that causes an unresolved dependency for "Baz"
-        self.assertRaises(
-            DistributionNotFound, ws.resolve,parse_requirements("Foo[bar]"), ad
-        )
-        Baz = Distribution.from_filename(
-            "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo"))
-        )
-        ad.add(Baz)
-
-        # Activation list now includes resolved dependency
-        self.assertEqual(
-            list(ws.resolve(parse_requirements("Foo[bar]"), ad)), [Foo,Baz]
-        )
-        # Requests for conflicting versions produce VersionConflict
-        self.assertRaises( VersionConflict,
-            ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad
-        )
-
-    def testDistroDependsOptions(self):
-        d = self.distRequires("""
-            Twisted>=1.5
-            [docgen]
-            ZConfig>=2.0
-            docutils>=0.3
-            [fastcgi]
-            fcgiapp>=0.1""")
-        self.checkRequires(d,"Twisted>=1.5")
-        self.checkRequires(
-            d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"]
-        )
-        self.checkRequires(
-            d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"]
-        )
-        self.checkRequires(
-            d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(),
-            ["docgen","fastcgi"]
-        )
-        self.checkRequires(
-            d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(),
-            ["fastcgi", "docgen"]
-        )
-        self.assertRaises(UnknownExtra, d.requires, ["foo"])
-
-    def testSetuptoolsDistributeCombination(self):
-        # Ensure that installing a 0.7-series setuptools fails. PJE says that
-        # it will not co-exist.
-        ws = WorkingSet([])
-        d = Distribution(
-            "/some/path",
-            project_name="setuptools",
-            version="0.7a1")
-        self.assertRaises(ValueError, ws.add, d)
-        # A 0.6-series is no problem
-        d2 = Distribution(
-            "/some/path",
-            project_name="setuptools",
-            version="0.6c9")
-        ws.add(d2)
-
-        # a nonexistent version needs to work
-        ws = WorkingSet([])
-        d3 = Distribution(
-            "/some/path",
-            project_name="setuptools")
-        ws.add(d3)
-
-
-class EntryPointTests(TestCase):
-
-    def assertfields(self, ep):
-        self.assertEqual(ep.name,"foo")
-        self.assertEqual(ep.module_name,"setuptools.tests.test_resources")
-        self.assertEqual(ep.attrs, ("EntryPointTests",))
-        self.assertEqual(ep.extras, ("x",))
-        self.assert_(ep.load() is EntryPointTests)
-        self.assertEqual(
-            str(ep),
-            "foo = setuptools.tests.test_resources:EntryPointTests [x]"
-        )
-
-    def setUp(self):
-        self.dist = Distribution.from_filename(
-            "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]')))
-
-    def testBasics(self):
-        ep = EntryPoint(
-            "foo", "setuptools.tests.test_resources", ["EntryPointTests"],
-            ["x"], self.dist
-        )
-        self.assertfields(ep)
-
-    def testParse(self):
-        s = "foo = setuptools.tests.test_resources:EntryPointTests [x]"
-        ep = EntryPoint.parse(s, self.dist)
-        self.assertfields(ep)
-
-        ep = EntryPoint.parse("bar baz= spammity[PING]")
-        self.assertEqual(ep.name,"bar baz")
-        self.assertEqual(ep.module_name,"spammity")
-        self.assertEqual(ep.attrs, ())
-        self.assertEqual(ep.extras, ("ping",))
-
-        ep = EntryPoint.parse(" fizzly = wocka:foo")
-        self.assertEqual(ep.name,"fizzly")
-        self.assertEqual(ep.module_name,"wocka")
-        self.assertEqual(ep.attrs, ("foo",))
-        self.assertEqual(ep.extras, ())
-
-    def testRejects(self):
-        for ep in [
-            "foo", "x=1=2", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2",
-        ]:
-            try: EntryPoint.parse(ep)
-            except ValueError: pass
-            else: raise AssertionError("Should've been bad", ep)
-
-    def checkSubMap(self, m):
-        self.assertEqual(len(m), len(self.submap_expect))
-        for key, ep in self.submap_expect.iteritems():
-            self.assertEqual(repr(m.get(key)), repr(ep))
-
-    submap_expect = dict(
-        feature1=EntryPoint('feature1', 'somemodule', ['somefunction']),
-        feature2=EntryPoint('feature2', 'another.module', ['SomeClass'], ['extra1','extra2']),
-        feature3=EntryPoint('feature3', 'this.module', extras=['something'])
-    )
-    submap_str = """
-            # define features for blah blah
-            feature1 = somemodule:somefunction
-            feature2 = another.module:SomeClass [extra1,extra2]
-            feature3 = this.module [something]
-    """
-
-    def testParseList(self):
self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str)) - self.assertRaises(ValueError, EntryPoint.parse_group, "x a", "foo=bar") - self.assertRaises(ValueError, EntryPoint.parse_group, "x", - ["foo=baz", "foo=bar"]) - - def testParseMap(self): - m = EntryPoint.parse_map({'xyz':self.submap_str}) - self.checkSubMap(m['xyz']) - self.assertEqual(m.keys(),['xyz']) - m = EntryPoint.parse_map("[xyz]\n"+self.submap_str) - self.checkSubMap(m['xyz']) - self.assertEqual(m.keys(),['xyz']) - self.assertRaises(ValueError, EntryPoint.parse_map, ["[xyz]", "[xyz]"]) - self.assertRaises(ValueError, EntryPoint.parse_map, self.submap_str) - -class RequirementsTests(TestCase): - - def testBasics(self): - r = Requirement.parse("Twisted>=1.2") - self.assertEqual(str(r),"Twisted>=1.2") - self.assertEqual(repr(r),"Requirement.parse('Twisted>=1.2')") - self.assertEqual(r, Requirement("Twisted", [('>=','1.2')], ())) - self.assertEqual(r, Requirement("twisTed", [('>=','1.2')], ())) - self.assertNotEqual(r, Requirement("Twisted", [('>=','2.0')], ())) - self.assertNotEqual(r, Requirement("Zope", [('>=','1.2')], ())) - self.assertNotEqual(r, Requirement("Zope", [('>=','3.0')], ())) - self.assertNotEqual(r, Requirement.parse("Twisted[extras]>=1.2")) - - def testOrdering(self): - r1 = Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ()) - r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ()) - self.assertEqual(r1,r2) - self.assertEqual(str(r1),str(r2)) - self.assertEqual(str(r2),"Twisted==1.2c1,>=1.2") - - def testBasicContains(self): - r = Requirement("Twisted", [('>=','1.2')], ()) - foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg") - twist11 = Distribution.from_filename("Twisted-1.1.egg") - twist12 = Distribution.from_filename("Twisted-1.2.egg") - self.assert_(parse_version('1.2') in r) - self.assert_(parse_version('1.1') not in r) - self.assert_('1.2' in r) - self.assert_('1.1' not in r) - self.assert_(foo_dist not in r) - self.assert_(twist11 not in r) - self.assert_(twist12 in r) - - def testAdvancedContains(self): - r, = parse_requirements("Foo>=1.2,<=1.3,==1.9,>2.0,!=2.5,<3.0,==4.5") - for v in ('1.2','1.2.2','1.3','1.9','2.0.1','2.3','2.6','3.0c1','4.5'): - self.assert_(v in r, (v,r)) - for v in ('1.2c1','1.3.1','1.5','1.9.1','2.0','2.5','3.0','4.0'): - self.assert_(v not in r, (v,r)) - - - def testOptionsAndHashing(self): - r1 = Requirement.parse("Twisted[foo,bar]>=1.2") - r2 = Requirement.parse("Twisted[bar,FOO]>=1.2") - r3 = Requirement.parse("Twisted[BAR,FOO]>=1.2.0") - self.assertEqual(r1,r2) - self.assertEqual(r1,r3) - self.assertEqual(r1.extras, ("foo","bar")) - self.assertEqual(r2.extras, ("bar","foo")) # extras are normalized - self.assertEqual(hash(r1), hash(r2)) - self.assertEqual( - hash(r1), hash(("twisted", ((">=",parse_version("1.2")),), - frozenset(["foo","bar"]))) - ) - - def testVersionEquality(self): - r1 = Requirement.parse("foo==0.3a2") - r2 = Requirement.parse("foo!=0.3a4") - d = Distribution.from_filename - - self.assert_(d("foo-0.3a4.egg") not in r1) - self.assert_(d("foo-0.3a1.egg") not in r1) - self.assert_(d("foo-0.3a4.egg") not in r2) - - self.assert_(d("foo-0.3a2.egg") in r1) - self.assert_(d("foo-0.3a2.egg") in r2) - self.assert_(d("foo-0.3a3.egg") in r2) - self.assert_(d("foo-0.3a5.egg") in r2) - - def testDistributeSetuptoolsOverride(self): - # Plain setuptools or distribute mean we return distribute. 
- self.assertEqual( - Requirement.parse('setuptools').project_name, 'distribute') - self.assertEqual( - Requirement.parse('distribute').project_name, 'distribute') - # setuptools lower than 0.7 means distribute - self.assertEqual( - Requirement.parse('setuptools==0.6c9').project_name, 'distribute') - self.assertEqual( - Requirement.parse('setuptools==0.6c10').project_name, 'distribute') - self.assertEqual( - Requirement.parse('setuptools>=0.6').project_name, 'distribute') - self.assertEqual( - Requirement.parse('setuptools < 0.7').project_name, 'distribute') - # setuptools 0.7 and higher means setuptools. - self.assertEqual( - Requirement.parse('setuptools == 0.7').project_name, 'setuptools') - self.assertEqual( - Requirement.parse('setuptools == 0.7a1').project_name, 'setuptools') - self.assertEqual( - Requirement.parse('setuptools >= 0.7').project_name, 'setuptools') - - - - - - - - - - - -class ParseTests(TestCase): - - def testEmptyParse(self): - self.assertEqual(list(parse_requirements('')), []) - - def testYielding(self): - for inp,out in [ - ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']), - (['x\n\n','y'], ['x','y']), - ]: - self.assertEqual(list(pkg_resources.yield_lines(inp)),out) - - def testSplitting(self): - self.assertEqual( - list( - pkg_resources.split_sections(""" - x - [Y] - z - - a - [b ] - # foo - c - [ d] - [q] - v - """ - ) - ), - [(None,["x"]), ("Y",["z","a"]), ("b",["c"]), ("d",[]), ("q",["v"])] - ) - self.assertRaises(ValueError,list,pkg_resources.split_sections("[foo")) - - def testSafeName(self): - self.assertEqual(safe_name("adns-python"), "adns-python") - self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils") - self.assertEqual(safe_name("WSGI  Utils"), "WSGI-Utils") - self.assertEqual(safe_name("Money$$$Maker"), "Money-Maker") - self.assertNotEqual(safe_name("peak.web"), "peak-web") - - def testSafeVersion(self): - self.assertEqual(safe_version("1.2-1"), "1.2-1") - self.assertEqual(safe_version("1.2 alpha"), "1.2.alpha") - self.assertEqual(safe_version("2.3.4 20050521"), "2.3.4.20050521") - self.assertEqual(safe_version("Money$$$Maker"), "Money-Maker") - self.assertEqual(safe_version("peak.web"), "peak.web") - - def testSimpleRequirements(self): - self.assertEqual( - list(parse_requirements('Twis-Ted>=1.2-1')), - [Requirement('Twis-Ted',[('>=','1.2-1')], ())] - ) - self.assertEqual( - list(parse_requirements('Twisted >=1.2, \ # more\n<2.0')), - [Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())] - ) - self.assertEqual( - Requirement.parse("FooBar==1.99a3"), - Requirement("FooBar", [('==','1.99a3')], ()) - ) - self.assertRaises(ValueError,Requirement.parse,">=2.3") - self.assertRaises(ValueError,Requirement.parse,"x\\") - self.assertRaises(ValueError,Requirement.parse,"x==2 q") - self.assertRaises(ValueError,Requirement.parse,"X==1\nY==2") - self.assertRaises(ValueError,Requirement.parse,"#") - - def testVersionEquality(self): - def c(s1,s2): - p1, p2 = parse_version(s1),parse_version(s2) - self.assertEqual(p1,p2, (s1,s2,p1,p2)) - - c('1.2-rc1', '1.2rc1') - c('0.4', '0.4.0') - c('0.4.0.0', '0.4.0') - c('0.4.0-0', '0.4-0') - c('0pl1', '0.0pl1') - c('0pre1', '0.0c1') - c('0.0.0preview1', '0c1') - c('0.0c1', '0-rc1') - c('1.2a1', '1.2.a.1'); c('1.2...a', '1.2a') - - def testVersionOrdering(self): - def c(s1,s2): - p1, p2 = parse_version(s1),parse_version(s2) - self.assert_(p1<p2, (s1,s2,p1,p2)) - - -class ScriptHeaderTests(TestCase): - non_ascii_exe = '/Users/José/bin/python' - - def test_get_script_header_jython_workaround(self): - # This test doesn't work with Python 3 in some locales - if (sys.version_info >= (3,) and os.environ.get("LC_CTYPE") - in (None, "C", "POSIX")): - return - platform = sys.platform - sys.platform = 'java1.5.0_13' - stdout = sys.stdout - try: - # A mock 
sys.executable that uses a shebang line (this file) - exe = os.path.normpath(os.path.splitext(__file__)[0] + '.py') - self.assertEqual( - get_script_header('#!/usr/local/bin/python', executable=exe), - '#!/usr/bin/env %s\n' % exe) - - # Ensure we generate what is basically a broken shebang line - # when there are options, with a warning emitted - sys.stdout = sys.stderr = StringIO.StringIO() - self.assertEqual(get_script_header('#!/usr/bin/python -x', - executable=exe), - '#!%s -x\n' % exe) - self.assert_('Unable to adapt shebang line' in sys.stdout.getvalue()) - sys.stdout = sys.stderr = StringIO.StringIO() - self.assertEqual(get_script_header('#!/usr/bin/python', - executable=self.non_ascii_exe), - '#!%s -x\n' % self.non_ascii_exe) - self.assert_('Unable to adapt shebang line' in sys.stdout.getvalue()) - finally: - sys.platform = platform - sys.stdout = stdout - diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_sandbox.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_sandbox.py deleted file mode 100755 index 1609ee86..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_sandbox.py +++ /dev/null @@ -1,66 +0,0 @@ -"""sandbox tests -""" -import sys -import os -import shutil -import unittest -import tempfile - -from setuptools.sandbox import DirectorySandbox, SandboxViolation - -def has_win32com(): - """ - Run this to determine if the local machine has win32com, and if it - does, include additional tests. - """ - if not sys.platform.startswith('win32'): - return False - try: - mod = __import__('win32com') - except ImportError: - return False - return True - -class TestSandbox(unittest.TestCase): - - def setUp(self): - self.dir = tempfile.mkdtemp() - - def tearDown(self): - shutil.rmtree(self.dir) - - def test_devnull(self): - if sys.version < '2.4': - return - sandbox = DirectorySandbox(self.dir) - sandbox.run(self._file_writer(os.devnull)) - - def _file_writer(path): - def do_write(): - f = open(path, 'w') - f.write('xxx') - f.close() - return do_write - - _file_writer = staticmethod(_file_writer) - - if has_win32com(): - def test_win32com(self): - """ - win32com should not be prevented from caching COM interfaces - in gen_py. 
- """ - import win32com - gen_py = win32com.__gen_path__ - target = os.path.join(gen_py, 'test_write') - sandbox = DirectorySandbox(self.dir) - try: - try: - sandbox.run(self._file_writer(target)) - except SandboxViolation: - self.fail("Could not create gen_py file due to SandboxViolation") - finally: - if os.path.exists(target): os.remove(target) - -if __name__ == '__main__': - unittest.main() diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_upload_docs.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_upload_docs.py deleted file mode 100755 index 8b2dc892..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_upload_docs.py +++ /dev/null @@ -1,65 +0,0 @@ -"""build_ext tests -""" -import sys, os, shutil, tempfile, unittest, site, zipfile -from setuptools.command.upload_docs import upload_docs -from setuptools.dist import Distribution - -SETUP_PY = """\ -from setuptools import setup - -setup(name='foo') -""" - -class TestUploadDocsTest(unittest.TestCase): - def setUp(self): - self.dir = tempfile.mkdtemp() - setup = os.path.join(self.dir, 'setup.py') - f = open(setup, 'w') - f.write(SETUP_PY) - f.close() - self.old_cwd = os.getcwd() - os.chdir(self.dir) - - self.upload_dir = os.path.join(self.dir, 'build') - os.mkdir(self.upload_dir) - - # A test document. - f = open(os.path.join(self.upload_dir, 'index.html'), 'w') - f.write("Hello world.") - f.close() - - # An empty folder. - os.mkdir(os.path.join(self.upload_dir, 'empty')) - - if sys.version >= "2.6": - self.old_base = site.USER_BASE - site.USER_BASE = upload_docs.USER_BASE = tempfile.mkdtemp() - self.old_site = site.USER_SITE - site.USER_SITE = upload_docs.USER_SITE = tempfile.mkdtemp() - - def tearDown(self): - os.chdir(self.old_cwd) - shutil.rmtree(self.dir) - if sys.version >= "2.6": - shutil.rmtree(site.USER_BASE) - shutil.rmtree(site.USER_SITE) - site.USER_BASE = self.old_base - site.USER_SITE = self.old_site - - def test_create_zipfile(self): - # Test to make sure zipfile creation handles common cases. - # This explicitly includes a folder containing an empty folder. - - dist = Distribution() - - cmd = upload_docs(dist) - cmd.upload_dir = self.upload_dir - zip_file = cmd.create_zipfile() - - assert zipfile.is_zipfile(zip_file) - - zip_f = zipfile.ZipFile(zip_file) # woh... - - assert zip_f.namelist() == ['index.html'] - - diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/site.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/site.py deleted file mode 100755 index 80e084b2..00000000 --- a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/site.py +++ /dev/null @@ -1,82 +0,0 @@ -def __boot(): - import sys, imp, os, os.path - PYTHONPATH = os.environ.get('PYTHONPATH') - if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH): - PYTHONPATH = [] - else: - PYTHONPATH = PYTHONPATH.split(os.pathsep) - - pic = getattr(sys,'path_importer_cache',{}) - stdpath = sys.path[len(PYTHONPATH):] - mydir = os.path.dirname(__file__) - #print "searching",stdpath,sys.path - - for item in stdpath: - if item==mydir or not item: - continue # skip if current dir. 
on Windows, or my own directory - importer = pic.get(item) - if importer is not None: - loader = importer.find_module('site') - if loader is not None: - # This should actually reload the current module - loader.load_module('site') - break - else: - try: - stream, path, descr = imp.find_module('site',[item]) - except ImportError: - continue - if stream is None: - continue - try: - # This should actually reload the current module - imp.load_module('site',stream,path,descr) - finally: - stream.close() - break - else: - raise ImportError("Couldn't find the real 'site' module") - - #print "loaded", __file__ - - known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp - - oldpos = getattr(sys,'__egginsert',0) # save old insertion position - sys.__egginsert = 0 # and reset the current one - - for item in PYTHONPATH: - addsitedir(item) - - sys.__egginsert += oldpos # restore effective old position - - d,nd = makepath(stdpath[0]) - insert_at = None - new_path = [] - - for item in sys.path: - p,np = makepath(item) - - if np==nd and insert_at is None: - # We've hit the first 'system' path entry, so added entries go here - insert_at = len(new_path) - - if np in known_paths or insert_at is None: - new_path.append(item) - else: - # new path after the insert point, back-insert it - new_path.insert(insert_at, item) - insert_at += 1 - - sys.path[:] = new_path - -if __name__=='site': - __boot() - del __boot - - - - - - - - diff --git a/lib/python2.7/site-packages/easy-install.pth b/lib/python2.7/site-packages/easy-install.pth deleted file mode 100644 index 492c156f..00000000 --- a/lib/python2.7/site-packages/easy-install.pth +++ /dev/null @@ -1,4 +0,0 @@ -import sys; sys.__plen = len(sys.path) -./distribute-0.6.14-py2.7.egg -./pip-0.8.1-py2.7.egg -import sys; new=sys.path[sys.__plen:]; del sys.path[sys.__plen:]; p=getattr(sys,'__egginsert',0); sys.path[p:p]=new; sys.__egginsert = p+len(new) diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/PKG-INFO b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/PKG-INFO deleted file mode 100644 index fb0a3680..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/PKG-INFO +++ /dev/null @@ -1,348 +0,0 @@ -Metadata-Version: 1.0 -Name: pip -Version: 0.8.1 -Summary: pip installs packages. Python packages. An easy_install replacement -Home-page: http://pip.openplans.org -Author: Ian Bicking -Author-email: python-virtualenv@groups.google.com -License: MIT -Description: The main website for pip is `pip.openplans.org - `_. You can also install - the `in-development version `_ - of pip with ``easy_install pip==dev``. - - - Introduction - ------------ - - pip installs packages. Python packages. - - If you use `virtualenv `__ -- a tool - for installing libraries in a local and isolated manner -- you'll - automatically get a copy of pip. Free bonus! - - Once you have pip, you can use it like this:: - - $ pip install SomePackage - - SomePackage is some package you'll find on `PyPI - `_. This installs the package and all - its dependencies. - - pip does other stuff too, with packages, but install is the biggest - one. You can ``pip uninstall`` too. - - You can also install from a URL (that points to a tar or zip file), - install from some version control system (use URLs like - ``hg+http://domain/repo`` -- or prefix ``git+``, ``svn+`` etc). pip - knows a bunch of stuff about revisions and stuff, so if you need to do - things like install a very specific revision from a repository pip can - do that too. 
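To make the anatomy of those VCS URLs concrete, here is a minimal sketch of pulling one apart -- the helper ``split_editable`` is hypothetical, for illustration only, not pip's actual parser (the real handling lives in pip's ``pip/vcs/`` support modules and copes with more edge cases)::

    # Hypothetical illustration: split 'vcs+URL@rev#egg=name' into parts.
    # Caveat: a bare partition('@') would mis-split URLs that contain
    # 'user@host'; pip's real VCS code is more careful about this.
    def split_editable(req):
        vcs, rest = req.split('+', 1)
        rest, _, egg = rest.partition('#egg=')   # optional egg fragment
        url, _, rev = rest.partition('@')        # optional revision
        return vcs, url, rev or None, egg or None

    print split_editable('svn+http://myrepo/svn/MyApp@275#egg=MyApp')
    # -> ('svn', 'http://myrepo/svn/MyApp', '275', 'MyApp')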
- - If you've ever used ``python setup.py develop``, you can do something - like that with ``pip install -e ./`` -- this works with packages that - use ``distutils`` too (usually this only works with Setuptools - projects). - - You can use ``pip install --upgrade SomePackage`` to upgrade to a - newer version, or ``pip install SomePackage==1.0.4`` to install a very - specific version. - - Pip Compared To easy_install - ---------------------------- - - pip is a replacement for `easy_install - `_. It uses mostly the - same techniques for finding packages, so packages that were made - easy_installable should be pip-installable as well. - - pip is meant to improve on easy_install. Some of the improvements: - - * All packages are downloaded before installation. Partially-completed - installation doesn't occur as a result. - - * Care is taken to present useful output on the console. - - * The reasons for actions are kept track of. For instance, if a package is - being installed, pip keeps track of why that package was required. - - * Error messages should be useful. - - * The code is relatively concise and cohesive, making it easier to use - programmatically. - - * Packages don't have to be installed as egg archives, they can be installed - flat (while keeping the egg metadata). - - * Native support for other version control systems (Git, Mercurial and Bazaar) - - * Uninstallation of packages. - - * Simple to define fixed sets of requirements and reliably reproduce a - set of packages. - - pip doesn't do everything that easy_install does. Specifically: - - * It cannot install from eggs. It only installs from source. (In the - future it would be good if it could install binaries from Windows ``.exe`` - or ``.msi`` -- binary install on other platforms is not a priority.) - - * It doesn't understand Setuptools extras (like ``package[test]``). This should - be added eventually. - - * It is incompatible with some packages that extensively customize distutils - or setuptools in their ``setup.py`` files. - - pip is complementary with `virtualenv - `__, and it is encouraged that you use - virtualenv to isolate your installation. - - Community - --------- - - The homepage for pip is temporarily located `on PyPI - `_ -- a more proper homepage will - follow. Bugs can go on the `pip issue tracker - `_. Discussion should happen on the - `virtualenv email group - `_. - - Uninstall - --------- - - pip is able to uninstall most installed packages with ``pip uninstall - package-name``. - - Known exceptions include pure-distutils packages installed with - ``python setup.py install`` (such packages leave behind no metadata allowing - determination of what files were installed), and script wrappers installed - by develop-installs (``python setup.py develop``). - - pip also performs an automatic uninstall of an old version of a package - before upgrading to a newer version, so outdated files (and egg-info data) - from conflicting versions aren't left hanging around to cause trouble. The - old version of the package is automatically restored if the new version - fails to download or install. - - .. _`requirements file`: - - Requirements Files - ------------------ - - When installing software, and Python packages in particular, it's common that - you get a lot of libraries installed. You just did ``easy_install MyPackage`` - and you get a dozen packages. Each of these packages has its own version. - - Maybe you ran that installation and it works. Great! Will it keep working? 
- Did you have to provide special options to get it to find everything? Did you - have to install a bunch of other optional pieces? Most of all, will you be able - to do it again? Requirements files give you a way to create an *environment*: - a *set* of packages that work together. - - If you've ever tried to set up an application on a new system, or with slightly - updated pieces, and had it fail, pip requirements are for you. If you - haven't had this problem then you will eventually, so pip requirements are - for you too -- requirements make explicit, repeatable installation of packages. - - So what are requirements files? They are very simple: lists of packages to - install. Instead of running something like ``pip MyApp`` and getting - whatever libraries come along, you can create a requirements file something like:: - - MyApp - Framework==0.9.4 - Library>=0.2 - - Then, regardless of what MyApp lists in ``setup.py``, you'll get a - specific version of Framework (0.9.4) and at least the 0.2 version of - Library. (You might think you could list these specific versions in - MyApp's ``setup.py`` -- but if you do that you'll have to edit MyApp - if you want to try a new version of Framework, or release a new - version of MyApp if you determine that Library 0.3 doesn't work with - your application.) You can also add optional libraries and support - tools that MyApp doesn't strictly require, giving people a set of - recommended libraries. - - You can also include "editable" packages -- packages that are checked out from - Subversion, Git, Mercurial and Bazaar. These are just like using the ``-e`` - option to pip. They look like:: - - -e svn+http://myrepo/svn/MyApp#egg=MyApp - - You have to start the URL with ``svn+`` (``git+``, ``hg+`` or ``bzr+``), and - you have to include ``#egg=Package`` so pip knows what to expect at that URL. - You can also include ``@rev`` in the URL, e.g., ``@275`` to check out - revision 275. - - Requirement files are mostly *flat*. Maybe ``MyApp`` requires - ``Framework``, and ``Framework`` requires ``Library``. I encourage - you to still list all these in a single requirement file; it is the - nature of Python programs that there are implicit bindings *directly* - between MyApp and Library. For instance, Framework might expose one - of Library's objects, and so if Library is updated it might directly - break MyApp. If that happens you can update the requirements file to - force an earlier version of Library, and you can do that without - having to re-release MyApp at all. - - Read the `requirements file format `_ to - learn about other features. - - Freezing Requirements - --------------------- - - So you have a working set of packages, and you want to be able to install them - elsewhere. `Requirements files`_ let you install exact versions, but they won't - tell you what all the exact versions are. - - To create a new requirements file from a known working environment, use:: - - $ pip freeze > stable-req.txt - - This will write a listing of *all* installed libraries to ``stable-req.txt`` - with exact versions for every library. You may want to edit the file down after - generating (e.g., to eliminate unnecessary libraries), but it'll give you a - stable starting point for constructing your requirements file. - - You can also give it an existing requirements file, and it will use that as a - sort of template for the new file. 
So if you do:: - - $ pip freeze -r devel-req.txt > stable-req.txt - - it will keep the packages listed in ``devel-req.txt`` in order and preserve - comments. - - Bundles - ------- - - Another way to distribute a set of libraries is a bundle format (specific to - pip). This format is not stable at this time (there simply hasn't been - any feedback, nor a great deal of thought). A bundle file contains all the - source for your package, and you can have pip install them all together. - Once you have the bundle file, further network access won't be necessary. To - build a bundle file, do:: - - $ pip bundle MyApp.pybundle MyApp - - (Using a `requirements file`_ would be wise.) Then someone else can get the - file ``MyApp.pybundle`` and run:: - - $ pip install MyApp.pybundle - - This is *not* a binary format. This only packages source. If you have binary - packages, then the person who installs the files will have to have a compiler, - any necessary headers installed, etc. Binary packages are hard; this is - relatively easy. - - Using pip with virtualenv - ------------------------- - - pip is most nutritious when used with `virtualenv - `__. One of the reasons pip - doesn't install "multi-version" eggs is that virtualenv removes much of the need - for it. Because pip is installed by virtualenv, just use - ``path/to/my/environment/bin/pip`` to install things into that - specific environment. - - To tell pip to only run if there is a virtualenv currently activated, - and to bail if not, use:: - - export PIP_REQUIRE_VIRTUALENV=true - - To tell pip to automatically use the currently active virtualenv:: - - export PIP_RESPECT_VIRTUALENV=true - - Providing an environment with ``-E`` will be ignored. - - Using pip with virtualenvwrapper - --------------------------------- - - If you are using `virtualenvwrapper - `_, you might - want pip to automatically create its virtualenvs in your - ``$WORKON_HOME``. - - You can tell pip to do so by defining ``PIP_VIRTUALENV_BASE`` in your - environment and setting it to the same value as that of - ``$WORKON_HOME``. - - Do so by adding the line:: - - export PIP_VIRTUALENV_BASE=$WORKON_HOME - - in your .bashrc under the line starting with ``export WORKON_HOME``. - - Using pip with buildout - ----------------------- - - If you are using `zc.buildout - `_ you should look at - `gp.recipe.pip `_ as an - option to use pip and virtualenv in your buildouts. - - Command line completion - ----------------------- - - pip comes with support for command line completion in bash and zsh and - allows you to tab-complete commands and options. To enable it you simply - need to copy the required shell script to your shell startup file - (e.g. ``.profile`` or ``.zprofile``) by running the special ``completion`` - command, e.g. for bash:: - - $ pip completion --bash >> ~/.profile - - And for zsh:: - - $ pip completion --zsh >> ~/.zprofile - - Alternatively, you can use the result of the ``completion`` command - directly with the eval function of your shell, e.g. by adding:: - - eval "`pip completion --bash`" - - to your startup file. - - Searching for packages - ---------------------- - - pip can search the `Python Package Index `_ (PyPI) - for packages using the ``pip search`` command. To search, run:: - - $ pip search "query" - - The query will be used to search the names and summaries of all packages - indexed. - - pip searches http://pypi.python.org/pypi by default but alternative indexes - can be searched by using the ``--index`` flag. 
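Under the hood, ``pip search`` of this era talked to PyPI's XML-RPC interface; a minimal sketch of the same query, assuming the historical ``search`` method and its hit dicts with ``name``, ``version`` and ``summary`` keys::

    import xmlrpclib

    # Query the names/summaries index roughly the way 'pip search' did.
    pypi = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
    for hit in pypi.search({'name': 'virtualenv'}):
        print '%(name)s %(version)s - %(summary)s' % hit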
- - Mirror support - -------------- - - The `PyPI mirroring infrastructure `_ as - described in `PEP 381 `_ can be - used by passing the ``--use-mirrors`` option to the install command. - Alternatively, you can use the other ways to configure pip, e.g.:: - - $ export PIP_USE_MIRRORS=true - - If enabled, pip will automatically query the DNS entry of the mirror index URL - to find the list of mirrors to use. In case you want to override this list, - please use the ``--mirrors`` option of the install command, or add to your pip - configuration file:: - - [install] - use-mirrors = true - mirrors = - http://d.pypi.python.org - http://b.pypi.python.org - -Keywords: easy_install distutils setuptools egg virtualenv -Platform: UNKNOWN -Classifier: Development Status :: 4 - Beta -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Topic :: Software Development :: Build Tools -Classifier: Programming Language :: Python :: 2.4 -Classifier: Programming Language :: Python :: 2.5 -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/SOURCES.txt b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/SOURCES.txt deleted file mode 100644 index 3a068547..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/SOURCES.txt +++ /dev/null @@ -1,57 +0,0 @@ -MANIFEST.in -setup.cfg -setup.py -docs/branches.txt -docs/ci-server-step-by-step.txt -docs/configuration.txt -docs/how-to-contribute.txt -docs/index.txt -docs/license.txt -docs/news.txt -docs/requirement-format.txt -docs/running-tests.txt -docs/_build/branches.html -docs/_build/ci-server-step-by-step.html -docs/_build/configuration.html -docs/_build/how-to-contribute.html -docs/_build/index.html -docs/_build/license.html -docs/_build/news.html -docs/_build/requirement-format.html -docs/_build/running-tests.html -docs/_build/search.html -pip/__init__.py -pip/_pkgutil.py -pip/backwardcompat.py -pip/basecommand.py -pip/baseparser.py -pip/download.py -pip/exceptions.py -pip/index.py -pip/locations.py -pip/log.py -pip/req.py -pip/runner.py -pip/util.py -pip/venv.py -pip.egg-info/PKG-INFO -pip.egg-info/SOURCES.txt -pip.egg-info/dependency_links.txt -pip.egg-info/entry_points.txt -pip.egg-info/not-zip-safe -pip.egg-info/top_level.txt -pip/commands/__init__.py -pip/commands/bundle.py -pip/commands/completion.py -pip/commands/freeze.py -pip/commands/help.py -pip/commands/install.py -pip/commands/search.py -pip/commands/uninstall.py -pip/commands/unzip.py -pip/commands/zip.py -pip/vcs/__init__.py -pip/vcs/bazaar.py -pip/vcs/git.py -pip/vcs/mercurial.py -pip/vcs/subversion.py \ No newline at end of file diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/dependency_links.txt b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/dependency_links.txt deleted file mode 100644 index 8b137891..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/entry_points.txt b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/entry_points.txt deleted file mode 100644 index 5f7b7cf9..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/entry_points.txt +++ /dev/null @@ -1,4 +0,0 @@ -[console_scripts] -pip = pip:main -pip-2.7 = pip:main - diff --git 
a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/not-zip-safe b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/not-zip-safe deleted file mode 100644 index 8b137891..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/not-zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/top_level.txt b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/top_level.txt deleted file mode 100644 index a1b589e3..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/__init__.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/__init__.py deleted file mode 100755 index c5de5c9a..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/__init__.py +++ /dev/null @@ -1,261 +0,0 @@ -#!/usr/bin/env python -import os -import optparse - -import subprocess -import sys -import re -import difflib - -from pip.basecommand import command_dict, load_command, load_all_commands, command_names -from pip.baseparser import parser -from pip.exceptions import InstallationError -from pip.log import logger -from pip.util import get_installed_distributions -from pip.backwardcompat import walk_packages - - -def autocomplete(): - """Command and option completion for the main option parser (and options) - and its subcommands (and options). - - Enable by sourcing one of the completion shell scripts (bash or zsh). - """ - # Don't complete if user hasn't sourced bash_completion file. - if 'PIP_AUTO_COMPLETE' not in os.environ: - return - cwords = os.environ['COMP_WORDS'].split()[1:] - cword = int(os.environ['COMP_CWORD']) - try: - current = cwords[cword-1] - except IndexError: - current = '' - load_all_commands() - subcommands = [cmd for cmd, cls in command_dict.items() if not cls.hidden] - options = [] - # subcommand - try: - subcommand_name = [w for w in cwords if w in subcommands][0] - except IndexError: - subcommand_name = None - # subcommand options - if subcommand_name: - # special case: 'help' subcommand has no options - if subcommand_name == 'help': - sys.exit(1) - # special case: list locally installed dists for uninstall command - if subcommand_name == 'uninstall' and not current.startswith('-'): - installed = [] - lc = current.lower() - for dist in get_installed_distributions(local_only=True): - if dist.key.startswith(lc) and dist.key not in cwords[1:]: - installed.append(dist.key) - # if there are no dists installed, fall back to option completion - if installed: - for dist in installed: - print dist - sys.exit(1) - subcommand = command_dict.get(subcommand_name) - options += [(opt.get_opt_string(), opt.nargs) - for opt in subcommand.parser.option_list - if opt.help != optparse.SUPPRESS_HELP] - # filter out previously specified options from available options - prev_opts = [x.split('=')[0] for x in cwords[1:cword-1]] - options = filter(lambda (x, v): x not in prev_opts, options) - # filter options by current input - options = [(k, v) for k, v in options if k.startswith(current)] - for option in options: - opt_label = option[0] - # append '=' to options which require args - if option[1]: - opt_label += '=' - print opt_label - else: - # show options of main parser only when necessary - if current.startswith('-') or current.startswith('--'): - subcommands += [opt.get_opt_string() - for opt in parser.option_list - if opt.help != optparse.SUPPRESS_HELP] - print ' '.join(filter(lambda x: 
x.startswith(current), subcommands)) - sys.exit(1) - - -def version_control(): - # Import all the version control support modules: - from pip import vcs - for importer, modname, ispkg in \ - walk_packages(path=vcs.__path__, prefix=vcs.__name__+'.'): - __import__(modname) - - -def main(initial_args=None): - if initial_args is None: - initial_args = sys.argv[1:] - autocomplete() - version_control() - options, args = parser.parse_args(initial_args) - if options.help and not args: - args = ['help'] - if not args: - parser.error('You must give a command (use "pip help" to see a list of commands)') - command = args[0].lower() - load_command(command) - if command not in command_dict: - close_commands = difflib.get_close_matches(command, command_names()) - if close_commands: - guess = close_commands[0] - if args[1:]: - guess = "%s %s" % (guess, " ".join(args[1:])) - else: - guess = 'install %s' % command - error_dict = {'arg': command, 'guess': guess, - 'script': os.path.basename(sys.argv[0])} - parser.error('No command by the name %(script)s %(arg)s\n ' - '(maybe you meant "%(script)s %(guess)s")' % error_dict) - command = command_dict[command] - return command.main(initial_args, args[1:], options) - - -############################################################ -## Writing freeze files - - -class FrozenRequirement(object): - - def __init__(self, name, req, editable, comments=()): - self.name = name - self.req = req - self.editable = editable - self.comments = comments - - _rev_re = re.compile(r'-r(\d+)$') - _date_re = re.compile(r'-(20\d\d\d\d\d\d)$') - - @classmethod - def from_dist(cls, dist, dependency_links, find_tags=False): - location = os.path.normcase(os.path.abspath(dist.location)) - comments = [] - from pip.vcs import vcs, get_src_requirement - if vcs.get_backend_name(location): - editable = True - req = get_src_requirement(dist, location, find_tags) - if req is None: - logger.warn('Could not determine repository location of %s' % location) - comments.append('## !! 
Could not determine repository location') - req = dist.as_requirement() - editable = False - else: - editable = False - req = dist.as_requirement() - specs = req.specs - assert len(specs) == 1 and specs[0][0] == '==' - version = specs[0][1] - ver_match = cls._rev_re.search(version) - date_match = cls._date_re.search(version) - if ver_match or date_match: - svn_backend = vcs.get_backend('svn') - if svn_backend: - svn_location = svn_backend( - ).get_location(dist, dependency_links) - if not svn_location: - logger.warn( - 'Warning: cannot find svn location for %s' % req) - comments.append('## FIXME: could not find svn URL in dependency_links for this package:') - else: - comments.append('# Installing as editable to satisfy requirement %s:' % req) - if ver_match: - rev = ver_match.group(1) - else: - rev = '{%s}' % date_match.group(1) - editable = True - req = '%s@%s#egg=%s' % (svn_location, rev, cls.egg_name(dist)) - return cls(dist.project_name, req, editable, comments) - - @staticmethod - def egg_name(dist): - name = dist.egg_name() - match = re.search(r'-py\d\.\d$', name) - if match: - name = name[:match.start()] - return name - - def __str__(self): - req = self.req - if self.editable: - req = '-e %s' % req - return '\n'.join(list(self.comments)+[str(req)])+'\n' - -############################################################ -## Requirement files - - -def call_subprocess(cmd, show_stdout=True, - filter_stdout=None, cwd=None, - raise_on_returncode=True, - command_level=logger.DEBUG, command_desc=None, - extra_environ=None): - if command_desc is None: - cmd_parts = [] - for part in cmd: - if ' ' in part or '\n' in part or '"' in part or "'" in part: - part = '"%s"' % part.replace('"', '\\"') - cmd_parts.append(part) - command_desc = ' '.join(cmd_parts) - if show_stdout: - stdout = None - else: - stdout = subprocess.PIPE - logger.log(command_level, "Running command %s" % command_desc) - env = os.environ.copy() - if extra_environ: - env.update(extra_environ) - try: - proc = subprocess.Popen( - cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout, - cwd=cwd, env=env) - except Exception, e: - logger.fatal( - "Error %s while executing command %s" % (e, command_desc)) - raise - all_output = [] - if stdout is not None: - stdout = proc.stdout - while 1: - line = stdout.readline() - if not line: - break - line = line.rstrip() - all_output.append(line + '\n') - if filter_stdout: - level = filter_stdout(line) - if isinstance(level, tuple): - level, line = level - logger.log(level, line) - if not logger.stdout_level_matches(level): - logger.show_progress() - else: - logger.info(line) - else: - returned_stdout, returned_stderr = proc.communicate() - all_output = [returned_stdout or ''] - proc.wait() - if proc.returncode: - if raise_on_returncode: - if all_output: - logger.notify('Complete output from command %s:' % command_desc) - logger.notify('\n'.join(all_output) + '\n----------------------------------------') - raise InstallationError( - "Command %s failed with error code %s" - % (command_desc, proc.returncode)) - else: - logger.warn( - "Command %s had error code %s" - % (command_desc, proc.returncode)) - if stdout is not None: - return ''.join(all_output) - - -if __name__ == '__main__': - exit = main() - if exit: - sys.exit(exit) diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/_pkgutil.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/_pkgutil.py deleted file mode 100755 index f8fb8aa6..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/_pkgutil.py +++ 
/dev/null @@ -1,589 +0,0 @@ -"""Utilities to support packages.""" - -# NOTE: This module must remain compatible with Python 2.3, as it is shared -# by setuptools for distribution with Python 2.3 and up. - -import os -import sys -import imp -import os.path -from types import ModuleType - -__all__ = [ - 'get_importer', 'iter_importers', 'get_loader', 'find_loader', - 'walk_packages', 'iter_modules', - 'ImpImporter', 'ImpLoader', 'read_code', 'extend_path', -] - - -def read_code(stream): - # This helper is needed in order for the PEP 302 emulation to - # correctly handle compiled files - import marshal - - magic = stream.read(4) - if magic != imp.get_magic(): - return None - - stream.read(4) # Skip timestamp - return marshal.load(stream) - - -def simplegeneric(func): - """Make a trivial single-dispatch generic function""" - registry = {} - - def wrapper(*args, **kw): - ob = args[0] - try: - cls = ob.__class__ - except AttributeError: - cls = type(ob) - try: - mro = cls.__mro__ - except AttributeError: - try: - - class cls(cls, object): - pass - - mro = cls.__mro__[1:] - except TypeError: - mro = object, # must be an ExtensionClass or some such :( - for t in mro: - if t in registry: - return registry[t](*args, **kw) - else: - return func(*args, **kw) - try: - wrapper.__name__ = func.__name__ - except (TypeError, AttributeError): - pass # Python 2.3 doesn't allow functions to be renamed - - def register(typ, func=None): - if func is None: - return lambda f: register(typ, f) - registry[typ] = func - return func - - wrapper.__dict__ = func.__dict__ - wrapper.__doc__ = func.__doc__ - wrapper.register = register - return wrapper - - -def walk_packages(path=None, prefix='', onerror=None): - """Yields (module_loader, name, ispkg) for all modules recursively - on path, or, if path is None, all accessible modules. - - 'path' should be either None or a list of paths to look for - modules in. - - 'prefix' is a string to output on the front of every module name - on output. - - Note that this function must import all *packages* (NOT all - modules!) on the given path, in order to access the __path__ - attribute to find submodules. - - 'onerror' is a function which gets called with one argument (the - name of the package which was being imported) if any exception - occurs while trying to import a package. If no onerror function is - supplied, ImportErrors are caught and ignored, while all other - exceptions are propagated, terminating the search. - - Examples: - - # list all modules python can access - walk_packages() - - # list all submodules of ctypes - walk_packages(ctypes.__path__, ctypes.__name__+'.') - """ - - def seen(p, m={}): - if p in m: - return True - m[p] = True - - for importer, name, ispkg in iter_modules(path, prefix): - yield importer, name, ispkg - - if ispkg: - try: - __import__(name) - except ImportError: - if onerror is not None: - onerror(name) - except Exception: - if onerror is not None: - onerror(name) - else: - raise - else: - path = getattr(sys.modules[name], '__path__', None) or [] - - # don't traverse path items we've seen before - path = [p for p in path if not seen(p)] - - for item in walk_packages(path, name+'.', onerror): - yield item - - -def iter_modules(path=None, prefix=''): - """Yields (module_loader, name, ispkg) for all submodules on path, - or, if path is None, all top-level modules on sys.path. - - 'path' should be either None or a list of paths to look for - modules in. - - 'prefix' is a string to output on the front of every module name - on output. 
- """ - - if path is None: - importers = iter_importers() - else: - importers = map(get_importer, path) - - yielded = {} - for i in importers: - for name, ispkg in iter_importer_modules(i, prefix): - if name not in yielded: - yielded[name] = 1 - yield i, name, ispkg - - -#@simplegeneric -def iter_importer_modules(importer, prefix=''): - if not hasattr(importer, 'iter_modules'): - return [] - return importer.iter_modules(prefix) - -iter_importer_modules = simplegeneric(iter_importer_modules) - - -class ImpImporter: - """PEP 302 Importer that wraps Python's "classic" import algorithm - - ImpImporter(dirname) produces a PEP 302 importer that searches that - directory. ImpImporter(None) produces a PEP 302 importer that searches - the current sys.path, plus any modules that are frozen or built-in. - - Note that ImpImporter does not currently support being used by placement - on sys.meta_path. - """ - - def __init__(self, path=None): - self.path = path - - def find_module(self, fullname, path=None): - # Note: we ignore 'path' argument since it is only used via meta_path - subname = fullname.split(".")[-1] - if subname != fullname and self.path is None: - return None - if self.path is None: - path = None - else: - path = [os.path.realpath(self.path)] - try: - file, filename, etc = imp.find_module(subname, path) - except ImportError: - return None - return ImpLoader(fullname, file, filename, etc) - - def iter_modules(self, prefix=''): - if self.path is None or not os.path.isdir(self.path): - return - - yielded = {} - import inspect - - filenames = os.listdir(self.path) - filenames.sort() # handle packages before same-named modules - - for fn in filenames: - modname = inspect.getmodulename(fn) - if modname=='__init__' or modname in yielded: - continue - - path = os.path.join(self.path, fn) - ispkg = False - - if not modname and os.path.isdir(path) and '.' not in fn: - modname = fn - for fn in os.listdir(path): - subname = inspect.getmodulename(fn) - if subname=='__init__': - ispkg = True - break - else: - continue # not a package - - if modname and '.' not in modname: - yielded[modname] = 1 - yield prefix + modname, ispkg - - -class ImpLoader: - """PEP 302 Loader that wraps Python's "classic" import algorithm - """ - code = source = None - - def __init__(self, fullname, file, filename, etc): - self.file = file - self.filename = filename - self.fullname = fullname - self.etc = etc - - def load_module(self, fullname): - self._reopen() - try: - mod = imp.load_module(fullname, self.file, self.filename, self.etc) - finally: - if self.file: - self.file.close() - # Note: we don't set __loader__ because we want the module to look - # normal; i.e. 
this is just a wrapper for standard import machinery - return mod - - def get_data(self, pathname): - return open(pathname, "rb").read() - - def _reopen(self): - if self.file and self.file.closed: - mod_type = self.etc[2] - if mod_type==imp.PY_SOURCE: - self.file = open(self.filename, 'rU') - elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION): - self.file = open(self.filename, 'rb') - - def _fix_name(self, fullname): - if fullname is None: - fullname = self.fullname - elif fullname != self.fullname: - raise ImportError("Loader for module %s cannot handle " - "module %s" % (self.fullname, fullname)) - return fullname - - def is_package(self, fullname): - fullname = self._fix_name(fullname) - return self.etc[2]==imp.PKG_DIRECTORY - - def get_code(self, fullname=None): - fullname = self._fix_name(fullname) - if self.code is None: - mod_type = self.etc[2] - if mod_type==imp.PY_SOURCE: - source = self.get_source(fullname) - self.code = compile(source, self.filename, 'exec') - elif mod_type==imp.PY_COMPILED: - self._reopen() - try: - self.code = read_code(self.file) - finally: - self.file.close() - elif mod_type==imp.PKG_DIRECTORY: - self.code = self._get_delegate().get_code() - return self.code - - def get_source(self, fullname=None): - fullname = self._fix_name(fullname) - if self.source is None: - mod_type = self.etc[2] - if mod_type==imp.PY_SOURCE: - self._reopen() - try: - self.source = self.file.read() - finally: - self.file.close() - elif mod_type==imp.PY_COMPILED: - if os.path.exists(self.filename[:-1]): - f = open(self.filename[:-1], 'rU') - self.source = f.read() - f.close() - elif mod_type==imp.PKG_DIRECTORY: - self.source = self._get_delegate().get_source() - return self.source - - def _get_delegate(self): - return ImpImporter(self.filename).find_module('__init__') - - def get_filename(self, fullname=None): - fullname = self._fix_name(fullname) - mod_type = self.etc[2] - if self.etc[2]==imp.PKG_DIRECTORY: - return self._get_delegate().get_filename() - elif self.etc[2] in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION): - return self.filename - return None - - -try: - import zipimport - from zipimport import zipimporter - - def iter_zipimport_modules(importer, prefix=''): - dirlist = zipimport._zip_directory_cache[importer.archive].keys() - dirlist.sort() - _prefix = importer.prefix - plen = len(_prefix) - yielded = {} - import inspect - for fn in dirlist: - if not fn.startswith(_prefix): - continue - - fn = fn[plen:].split(os.sep) - - if len(fn)==2 and fn[1].startswith('__init__.py'): - if fn[0] not in yielded: - yielded[fn[0]] = 1 - yield fn[0], True - - if len(fn)!=1: - continue - - modname = inspect.getmodulename(fn[0]) - if modname=='__init__': - continue - - if modname and '.' not in modname and modname not in yielded: - yielded[modname] = 1 - yield prefix + modname, False - - iter_importer_modules.register(zipimporter, iter_zipimport_modules) - -except ImportError: - pass - - -def get_importer(path_item): - """Retrieve a PEP 302 importer for the given path item - - The returned importer is cached in sys.path_importer_cache - if it was newly created by a path hook. - - If there is no importer, a wrapper around the basic import - machinery is returned. This wrapper is never inserted into - the importer cache (None is inserted instead). - - The cache (or part of it) can be cleared manually if a - rescan of sys.path_hooks is necessary. 
- """ - try: - importer = sys.path_importer_cache[path_item] - except KeyError: - for path_hook in sys.path_hooks: - try: - importer = path_hook(path_item) - break - except ImportError: - pass - else: - importer = None - sys.path_importer_cache.setdefault(path_item, importer) - - if importer is None: - try: - importer = ImpImporter(path_item) - except ImportError: - importer = None - return importer - - -def iter_importers(fullname=""): - """Yield PEP 302 importers for the given module name - - If fullname contains a '.', the importers will be for the package - containing fullname, otherwise they will be importers for sys.meta_path, - sys.path, and Python's "classic" import machinery, in that order. If - the named module is in a package, that package is imported as a side - effect of invoking this function. - - Non PEP 302 mechanisms (e.g. the Windows registry) used by the - standard import machinery to find files in alternative locations - are partially supported, but are searched AFTER sys.path. Normally, - these locations are searched BEFORE sys.path, preventing sys.path - entries from shadowing them. - - For this to cause a visible difference in behaviour, there must - be a module or package name that is accessible via both sys.path - and one of the non PEP 302 file system mechanisms. In this case, - the emulation will find the former version, while the builtin - import mechanism will find the latter. - - Items of the following types can be affected by this discrepancy: - imp.C_EXTENSION, imp.PY_SOURCE, imp.PY_COMPILED, imp.PKG_DIRECTORY - """ - if fullname.startswith('.'): - raise ImportError("Relative module names not supported") - if '.' in fullname: - # Get the containing package's __path__ - pkg = '.'.join(fullname.split('.')[:-1]) - if pkg not in sys.modules: - __import__(pkg) - path = getattr(sys.modules[pkg], '__path__', None) or [] - else: - for importer in sys.meta_path: - yield importer - path = sys.path - for item in path: - yield get_importer(item) - if '.' not in fullname: - yield ImpImporter() - - -def get_loader(module_or_name): - """Get a PEP 302 "loader" object for module_or_name - - If the module or package is accessible via the normal import - mechanism, a wrapper around the relevant part of that machinery - is returned. Returns None if the module cannot be found or imported. - If the named module is not already imported, its containing package - (if any) is imported, in order to establish the package __path__. - - This function uses iter_importers(), and is thus subject to the same - limitations regarding platform-specific special import locations such - as the Windows registry. - """ - if module_or_name in sys.modules: - module_or_name = sys.modules[module_or_name] - if isinstance(module_or_name, ModuleType): - module = module_or_name - loader = getattr(module, '__loader__', None) - if loader is not None: - return loader - fullname = module.__name__ - else: - fullname = module_or_name - return find_loader(fullname) - - -def find_loader(fullname): - """Find a PEP 302 "loader" object for fullname - - If fullname contains dots, path must be the containing package's __path__. - Returns None if the module cannot be found or imported. This function uses - iter_importers(), and is thus subject to the same limitations regarding - platform-specific special import locations such as the Windows registry. 
- """ - for importer in iter_importers(fullname): - loader = importer.find_module(fullname) - if loader is not None: - return loader - - return None - - -def extend_path(path, name): - """Extend a package's path. - - Intended use is to place the following code in a package's __init__.py: - - from pkgutil import extend_path - __path__ = extend_path(__path__, __name__) - - This will add to the package's __path__ all subdirectories of - directories on sys.path named after the package. This is useful - if one wants to distribute different parts of a single logical - package as multiple directories. - - It also looks for *.pkg files beginning where * matches the name - argument. This feature is similar to *.pth files (see site.py), - except that it doesn't special-case lines starting with 'import'. - A *.pkg file is trusted at face value: apart from checking for - duplicates, all entries found in a *.pkg file are added to the - path, regardless of whether they are exist the filesystem. (This - is a feature.) - - If the input path is not a list (as is the case for frozen - packages) it is returned unchanged. The input path is not - modified; an extended copy is returned. Items are only appended - to the copy at the end. - - It is assumed that sys.path is a sequence. Items of sys.path that - are not (unicode or 8-bit) strings referring to existing - directories are ignored. Unicode items of sys.path that cause - errors when used as filenames may cause this function to raise an - exception (in line with os.path.isdir() behavior). - """ - - if not isinstance(path, list): - # This could happen e.g. when this is called from inside a - # frozen package. Return the path unchanged in that case. - return path - - pname = os.path.join(*name.split('.')) # Reconstitute as relative path - # Just in case os.extsep != '.' - sname = os.extsep.join(name.split('.')) - sname_pkg = sname + os.extsep + "pkg" - init_py = "__init__" + os.extsep + "py" - - path = path[:] # Start with a copy of the existing path - - for dir in sys.path: - if not isinstance(dir, basestring) or not os.path.isdir(dir): - continue - subdir = os.path.join(dir, pname) - # XXX This may still add duplicate entries to path on - # case-insensitive filesystems - initfile = os.path.join(subdir, init_py) - if subdir not in path and os.path.isfile(initfile): - path.append(subdir) - # XXX Is this the right thing for subpackages like zope.app? - # It looks for a file named "zope.app.pkg" - pkgfile = os.path.join(dir, sname_pkg) - if os.path.isfile(pkgfile): - try: - f = open(pkgfile) - except IOError, msg: - sys.stderr.write("Can't open %s: %s\n" % - (pkgfile, msg)) - else: - for line in f: - line = line.rstrip('\n') - if not line or line.startswith('#'): - continue - path.append(line) # Don't check for existence! - f.close() - - return path - - -def get_data(package, resource): - """Get a resource from a package. - - This is a wrapper round the PEP 302 loader get_data API. The package - argument should be the name of a package, in standard module format - (foo.bar). The resource argument should be in the form of a relative - filename, using '/' as the path separator. The parent directory name '..' - is not allowed, and nor is a rooted name (starting with a '/'). - - The function returns a binary string, which is the contents of the - specified resource. 
- - For packages located in the filesystem, which have already been imported, - this is the rough equivalent of - - d = os.path.dirname(sys.modules[package].__file__) - data = open(os.path.join(d, resource), 'rb').read() - - If the package cannot be located or loaded, or it uses a PEP 302 loader - which does not support get_data(), then None is returned. - """ - - loader = get_loader(package) - if loader is None or not hasattr(loader, 'get_data'): - return None - mod = sys.modules.get(package) or loader.load_module(package) - if mod is None or not hasattr(mod, '__file__'): - return None - - # Modify the resource name to be compatible with the loader.get_data - # signature - an os.path format "filename" starting with the dirname of - # the package's __file__ - parts = resource.split('/') - parts.insert(0, os.path.dirname(mod.__file__)) - resource_name = os.path.join(*parts) - return loader.get_data(resource_name) diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/backwardcompat.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/backwardcompat.py deleted file mode 100755 index e7c11f1d..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/backwardcompat.py +++ /dev/null @@ -1,55 +0,0 @@ -"""Stuff that isn't in some old versions of Python""" - -import sys -import os -import shutil - -__all__ = ['any', 'WindowsError', 'md5', 'copytree'] - -try: - WindowsError = WindowsError -except NameError: - WindowsError = None -try: - from hashlib import md5 -except ImportError: - import md5 as md5_module - md5 = md5_module.new - -try: - from pkgutil import walk_packages -except ImportError: - # let's fall back as long as we can - from _pkgutil import walk_packages - -try: - any = any -except NameError: - - def any(seq): - for item in seq: - if item: - return True - return False - - -def copytree(src, dst): - if sys.version_info < (2, 5): - before_last_dir = os.path.dirname(dst) - if not os.path.exists(before_last_dir): - os.makedirs(before_last_dir) - shutil.copytree(src, dst) - shutil.copymode(src, dst) - else: - shutil.copytree(src, dst) - - -def product(*args, **kwds): - # product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy - # product(range(2), repeat=3) --> 000 001 010 011 100 101 110 111 - pools = map(tuple, args) * kwds.get('repeat', 1) - result = [[]] - for pool in pools: - result = [x+[y] for x in result for y in pool] - for prod in result: - yield tuple(prod) diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/basecommand.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/basecommand.py deleted file mode 100755 index f450e839..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/basecommand.py +++ /dev/null @@ -1,203 +0,0 @@ -"""Base Command class, and related routines""" - -from cStringIO import StringIO -import getpass -import os -import socket -import sys -import traceback -import time -import urllib -import urllib2 - -from pip import commands -from pip.log import logger -from pip.baseparser import parser, ConfigOptionParser, UpdatingDefaultsHelpFormatter -from pip.download import urlopen -from pip.exceptions import BadCommand, InstallationError, UninstallationError -from pip.venv import restart_in_venv -from pip.backwardcompat import walk_packages - -__all__ = ['command_dict', 'Command', 'load_all_commands', - 'load_command', 'command_names'] - -command_dict = {} - -# for backwards compatibility -get_proxy = urlopen.get_proxy - - -class Command(object): - name = None - usage = None - hidden = False - - def __init__(self): - 
-        assert self.name
-        self.parser = ConfigOptionParser(
-            usage=self.usage,
-            prog='%s %s' % (sys.argv[0], self.name),
-            version=parser.version,
-            formatter=UpdatingDefaultsHelpFormatter(),
-            name=self.name)
-        for option in parser.option_list:
-            if not option.dest or option.dest == 'help':
-                # -h, --version, etc
-                continue
-            self.parser.add_option(option)
-        command_dict[self.name] = self
-
-    def merge_options(self, initial_options, options):
-        # Make sure we have all global options carried over
-        for attr in ['log', 'venv', 'proxy', 'venv_base', 'require_venv',
-                     'respect_venv', 'log_explicit_levels', 'log_file',
-                     'timeout', 'default_vcs', 'skip_requirements_regex',
-                     'no_input']:
-            setattr(options, attr, getattr(initial_options, attr) or getattr(options, attr))
-        options.quiet += initial_options.quiet
-        options.verbose += initial_options.verbose
-
-    def setup_logging(self):
-        pass
-
-    def main(self, complete_args, args, initial_options):
-        options, args = self.parser.parse_args(args)
-        self.merge_options(initial_options, options)
-
-        level = 1  # Notify
-        level += options.verbose
-        level -= options.quiet
-        level = logger.level_for_integer(4 - level)
-        complete_log = []
-        logger.consumers.extend(
-            [(level, sys.stdout),
-             (logger.DEBUG, complete_log.append)])
-        if options.log_explicit_levels:
-            logger.explicit_levels = True
-
-        self.setup_logging()
-
-        if options.require_venv and not options.venv:
-            # If a venv is required, check whether it can really be found
-            if not os.environ.get('VIRTUAL_ENV'):
-                logger.fatal('Could not find an activated virtualenv (required).')
-                sys.exit(3)
-            # Automatically install in the currently activated venv if required
-            options.respect_venv = True
-
-        if args and args[-1] == '___VENV_RESTART___':
-            ## FIXME: We don't do anything with this value yet:
-            args = args[:-2]
-            options.venv = None
-        else:
-            # If given the option to respect the activated environment,
-            # check whether a venv was also given as a command line parameter
-            if options.respect_venv and os.environ.get('VIRTUAL_ENV'):
-                if options.venv and os.path.exists(options.venv):
-                    # Make sure the command line venv and the environment
-                    # variable point at the same virtualenv
-                    if (os.path.realpath(os.path.expanduser(options.venv)) !=
-                            os.path.realpath(os.environ.get('VIRTUAL_ENV'))):
-                        logger.fatal("Given virtualenv (%s) doesn't match "
-                                     "currently activated virtualenv (%s)."
-                                     % (options.venv, os.environ.get('VIRTUAL_ENV')))
-                        sys.exit(3)
-                else:
-                    options.venv = os.environ.get('VIRTUAL_ENV')
-                    logger.info('Using already activated environment %s' % options.venv)
-        if options.venv:
-            logger.info('Running in environment %s' % options.venv)
-            site_packages = False
-            if options.site_packages:
-                site_packages = True
-            restart_in_venv(options.venv, options.venv_base, site_packages,
-                            complete_args)
-            # restart_in_venv should actually never return, but for clarity...
-            return
-
-        ## FIXME: not sure if this should come before or after venv restart
-        if options.log:
-            log_fp = open_logfile(options.log, 'a')
-            logger.consumers.append((logger.DEBUG, log_fp))
-        else:
-            log_fp = None
-
-        socket.setdefaulttimeout(options.timeout or None)
-
-        urlopen.setup(proxystr=options.proxy, prompting=not options.no_input)
-
-        exit = 0
-        try:
-            self.run(options, args)
-        except (InstallationError, UninstallationError), e:
-            logger.fatal(str(e))
-            logger.info('Exception information:\n%s' % format_exc())
-            exit = 1
-        except BadCommand, e:
-            logger.fatal(str(e))
-            logger.info('Exception information:\n%s' % format_exc())
-            exit = 1
-        except:
-            logger.fatal('Exception:\n%s' % format_exc())
-            exit = 2
-
-        if log_fp is not None:
-            log_fp.close()
-        if exit:
-            log_fn = options.log_file
-            text = '\n'.join(complete_log)
-            logger.fatal('Storing complete log in %s' % log_fn)
-            log_fp = open_logfile(log_fn, 'w')
-            log_fp.write(text)
-            log_fp.close()
-        return exit
-
-
-def format_exc(exc_info=None):
-    if exc_info is None:
-        exc_info = sys.exc_info()
-    out = StringIO()
-    traceback.print_exception(*exc_info, **dict(file=out))
-    return out.getvalue()
-
-
-def open_logfile(filename, mode='a'):
-    """Open the named log file in append mode.
-
-    If the file already exists, a separator will also be printed to
-    the file to separate past activity from current activity.
-    """
-    filename = os.path.expanduser(filename)
-    filename = os.path.abspath(filename)
-    dirname = os.path.dirname(filename)
-    if not os.path.exists(dirname):
-        os.makedirs(dirname)
-    exists = os.path.exists(filename)
-
-    log_fp = open(filename, mode)
-    if exists:
-        print >> log_fp, '-'*60
-        print >> log_fp, '%s run on %s' % (sys.argv[0], time.strftime('%c'))
-    return log_fp
-
-
-def load_command(name):
-    full_name = 'pip.commands.%s' % name
-    if full_name in sys.modules:
-        return
-    try:
-        __import__(full_name)
-    except ImportError:
-        pass
-
-
-def load_all_commands():
-    for name in command_names():
-        load_command(name)
-
-
-def command_names():
-    names = set((pkg[1] for pkg in walk_packages(path=commands.__path__)))
-    return list(names)
-
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/baseparser.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/baseparser.py
deleted file mode 100755
index a8bd6ce4..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/baseparser.py
+++ /dev/null
@@ -1,231 +0,0 @@
-"""Base option parser setup"""
-
-import sys
-import optparse
-import pkg_resources
-import ConfigParser
-import os
-from distutils.util import strtobool
-from pip.locations import default_config_file, default_log_file
-
-
-class UpdatingDefaultsHelpFormatter(optparse.IndentedHelpFormatter):
-    """Custom help formatter for use in ConfigOptionParser that updates
-    the defaults before expanding them, allowing them to show up correctly
-    in the help listing"""
-
-    def expand_default(self, option):
-        if self.parser is not None:
-            self.parser.update_defaults(self.parser.defaults)
-        return optparse.IndentedHelpFormatter.expand_default(self, option)
-
-
-class ConfigOptionParser(optparse.OptionParser):
-    """Custom option parser which updates its defaults by checking the
-    configuration files and environment variables"""
-
-    def __init__(self, *args, **kwargs):
-        self.config = ConfigParser.RawConfigParser()
-        self.name = kwargs.pop('name')
-        self.files = self.get_config_files()
-        self.config.read(self.files)
-        assert self.name
-        optparse.OptionParser.__init__(self, *args, **kwargs)
-
-    def get_config_files(self):
-        config_file = os.environ.get('PIP_CONFIG_FILE', False)
-        if config_file and os.path.exists(config_file):
-            return [config_file]
-        return [default_config_file]
-
-    def update_defaults(self, defaults):
-        """Updates the given defaults with values from the config files and
-        the environ. Does a little special handling for certain types of
-        options (lists)."""
-        # Then go and look for the other sources of configuration:
-        config = {}
-        # 1. config files
-        for section in ('global', self.name):
-            config.update(dict(self.get_config_section(section)))
-        # 2. environment variables
-        config.update(dict(self.get_environ_vars()))
-        # Then set the options with those values
-        for key, val in config.iteritems():
-            key = key.replace('_', '-')
-            if not key.startswith('--'):
-                key = '--%s' % key  # only prefer long opts
-            option = self.get_option(key)
-            if option is not None:
-                # ignore empty values
-                if not val:
-                    continue
-                # handle multiline configs
-                if option.action == 'append':
-                    val = val.split()
-                else:
-                    option.nargs = 1
-                if option.action in ('store_true', 'store_false', 'count'):
-                    val = strtobool(val)
-                try:
-                    val = option.convert_value(key, val)
-                except optparse.OptionValueError, e:
-                    print ("An error occurred during configuration: %s" % e)
-                    sys.exit(3)
-                defaults[option.dest] = val
-        return defaults
-
-    def get_config_section(self, name):
-        """Get a section of a configuration"""
-        if self.config.has_section(name):
-            return self.config.items(name)
-        return []
-
-    def get_environ_vars(self, prefix='PIP_'):
-        """Returns a generator with all environment variables with prefix PIP_"""
-        for key, val in os.environ.iteritems():
-            if key.startswith(prefix):
-                yield (key.replace(prefix, '').lower(), val)
-
-    def get_default_values(self):
-        """Overriding to make updating the defaults after instantiation of
-        the option parser possible, update_defaults() does the dirty work."""
-        if not self.process_default_values:
-            # Old, pre-Optik 1.5 behaviour.
-            return optparse.Values(self.defaults)
-
-        defaults = self.update_defaults(self.defaults.copy())  # ours
-        for option in self._get_all_options():
-            default = defaults.get(option.dest)
-            if isinstance(default, basestring):
-                opt_str = option.get_opt_string()
-                defaults[option.dest] = option.check_value(opt_str, default)
-        return optparse.Values(defaults)
-
-try:
-    pip_dist = pkg_resources.get_distribution('pip')
-    version = '%s from %s (python %s)' % (
-        pip_dist, pip_dist.location, sys.version[:3])
-except pkg_resources.DistributionNotFound:
-    # when running pip.py without installing
-    version = None
-
-parser = ConfigOptionParser(
-    usage='%prog COMMAND [OPTIONS]',
-    version=version,
-    add_help_option=False,
-    formatter=UpdatingDefaultsHelpFormatter(),
-    name='global')
-
-parser.add_option(
-    '-h', '--help',
-    dest='help',
-    action='store_true',
-    help='Show help')
-parser.add_option(
-    '-E', '--environment',
-    dest='venv',
-    metavar='DIR',
-    help='virtualenv environment to run pip in (either give the '
-         'interpreter or the environment base directory)')
-parser.add_option(
-    '-s', '--enable-site-packages',
-    dest='site_packages',
-    action='store_true',
-    help='Include site-packages in virtualenv if one is to be '
         'created. Ignored if --environment is not used or '
         'the virtualenv already exists.')
-parser.add_option(
-    # Defines a default root directory for virtualenvs; relative
-    # virtualenv names/paths are considered relative to it.
-    '--virtualenv-base',
-    dest='venv_base',
-    type='str',
-    default='',
-    help=optparse.SUPPRESS_HELP)
-parser.add_option(
-    # Run only if inside a virtualenv, bail if not.
-    '--require-virtualenv', '--require-venv',
-    dest='require_venv',
-    action='store_true',
-    default=False,
-    help=optparse.SUPPRESS_HELP)
-parser.add_option(
-    # Automatically use an activated virtualenv instead of installing
-    # globally. -E will be ignored if used.
-    '--respect-virtualenv', '--respect-venv',
-    dest='respect_venv',
-    action='store_true',
-    default=False,
-    help=optparse.SUPPRESS_HELP)
-
-parser.add_option(
-    '-v', '--verbose',
-    dest='verbose',
-    action='count',
-    default=0,
-    help='Give more output')
-parser.add_option(
-    '-q', '--quiet',
-    dest='quiet',
-    action='count',
-    default=0,
-    help='Give less output')
-parser.add_option(
-    '--log',
-    dest='log',
-    metavar='FILENAME',
-    help='Log file where a complete (maximum verbosity) record will be kept')
-parser.add_option(
-    # Write the log levels explicitly to the log
-    '--log-explicit-levels',
-    dest='log_explicit_levels',
-    action='store_true',
-    default=False,
-    help=optparse.SUPPRESS_HELP)
-parser.add_option(
-    # The default log file
-    '--local-log', '--log-file',
-    dest='log_file',
-    metavar='FILENAME',
-    default=default_log_file,
-    help=optparse.SUPPRESS_HELP)
-parser.add_option(
-    # Don't ask for input
-    '--no-input',
-    dest='no_input',
-    action='store_true',
-    default=False,
-    help=optparse.SUPPRESS_HELP)
-
-parser.add_option(
-    '--proxy',
-    dest='proxy',
-    type='str',
-    default='',
-    help="Specify a proxy in the form user:passwd@proxy.server:port. "
         "Note that the user:password@ is optional and required only if you "
         "are behind an authenticated proxy. If you provide "
         "user@proxy.server:port then you will be prompted for a password.")
-parser.add_option(
-    '--timeout', '--default-timeout',
-    metavar='SECONDS',
-    dest='timeout',
-    type='float',
-    default=15,
-    help='Set the socket timeout (default %default seconds)')
-parser.add_option(
-    # The default version control system for editables, e.g. 'svn'
-    '--default-vcs',
-    dest='default_vcs',
-    type='str',
-    default='',
-    help=optparse.SUPPRESS_HELP)
-parser.add_option(
-    # A regex to be used to skip requirements
-    '--skip-requirements-regex',
-    dest='skip_requirements_regex',
-    type='str',
-    default='',
-    help=optparse.SUPPRESS_HELP)
-
-parser.disable_interspersed_args()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/__init__.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/__init__.py
deleted file mode 100755
index 792d6005..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-#
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/bundle.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/bundle.py
deleted file mode 100755
index fb0f7570..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/bundle.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from pip.locations import build_prefix, src_prefix
-from pip.util import display_path, backup_dir
-from pip.log import logger
-from pip.exceptions import InstallationError
-from pip.commands.install import InstallCommand
-
-
-class BundleCommand(InstallCommand):
-    name = 'bundle'
-    usage = '%prog [OPTIONS] BUNDLE_NAME.pybundle PACKAGE_NAMES...'
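
ConfigOptionParser above resolves option values from three layers, lowest precedence first: the option's built-in default, then the [global] and per-command sections of the config file, then PIP_*-prefixed environment variables (see update_defaults() and get_environ_vars()). A minimal standalone sketch of that precedence chain, with illustrative names rather than pip's internals:

    def effective_defaults(option_defaults, config_items, environ, prefix='PIP_'):
        # Lowest precedence first: built-in defaults, then config file
        # entries; PIP_* environment variables win outright.
        merged = dict(option_defaults)
        merged.update(config_items)
        for key, val in environ.items():
            if key.startswith(prefix):
                merged[key[len(prefix):].lower()] = val
        return merged

    # PIP_TIMEOUT=30 beats both the config file and the built-in default:
    print(effective_defaults({'timeout': 15}, {'timeout': '20'},
                             {'PIP_TIMEOUT': '30'}))
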
- summary = 'Create pybundles (archives containing multiple packages)' - bundle = True - - def __init__(self): - super(BundleCommand, self).__init__() - - def run(self, options, args): - if not args: - raise InstallationError('You must give a bundle filename') - if not options.build_dir: - options.build_dir = backup_dir(build_prefix, '-bundle') - if not options.src_dir: - options.src_dir = backup_dir(src_prefix, '-bundle') - # We have to get everything when creating a bundle: - options.ignore_installed = True - logger.notify('Putting temporary build files in %s and source/develop files in %s' - % (display_path(options.build_dir), display_path(options.src_dir))) - self.bundle_filename = args.pop(0) - requirement_set = super(BundleCommand, self).run(options, args) - return requirement_set - - -BundleCommand() diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/completion.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/completion.py deleted file mode 100755 index d003b9ae..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/completion.py +++ /dev/null @@ -1,60 +0,0 @@ -import sys -from pip.basecommand import Command - -BASE_COMPLETION = """ -# pip %(shell)s completion start%(script)s# pip %(shell)s completion end -""" - -COMPLETION_SCRIPTS = { - 'bash': """ -_pip_completion() -{ - COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\ - COMP_CWORD=$COMP_CWORD \\ - PIP_AUTO_COMPLETE=1 $1 ) ) -} -complete -o default -F _pip_completion pip -""", 'zsh': """ -function _pip_completion { - local words cword - read -Ac words - read -cn cword - reply=( $( COMP_WORDS="$words[*]" \\ - COMP_CWORD=$(( cword-1 )) \\ - PIP_AUTO_COMPLETE=1 $words[1] ) ) -} -compctl -K _pip_completion pip -"""} - - -class CompletionCommand(Command): - name = 'completion' - summary = 'A helper command to be used for command completion' - hidden = True - - def __init__(self): - super(CompletionCommand, self).__init__() - self.parser.add_option( - '--bash', '-b', - action='store_const', - const='bash', - dest='shell', - help='Emit completion code for bash') - self.parser.add_option( - '--zsh', '-z', - action='store_const', - const='zsh', - dest='shell', - help='Emit completion code for zsh') - - def run(self, options, args): - """Prints the completion code of the given shell""" - shells = COMPLETION_SCRIPTS.keys() - shell_options = ['--'+shell for shell in sorted(shells)] - if options.shell in shells: - script = COMPLETION_SCRIPTS.get(options.shell, '') - print BASE_COMPLETION % {'script': script, 'shell': options.shell} - else: - sys.stderr.write('ERROR: You must pass %s\n' % ' or '.join(shell_options)) - -CompletionCommand() diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/freeze.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/freeze.py deleted file mode 100755 index 01b5df93..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/freeze.py +++ /dev/null @@ -1,109 +0,0 @@ -import re -import sys -import pkg_resources -import pip -from pip.req import InstallRequirement -from pip.log import logger -from pip.basecommand import Command -from pip.util import get_installed_distributions - - -class FreezeCommand(Command): - name = 'freeze' - usage = '%prog [OPTIONS]' - summary = 'Output all currently installed packages (exact versions) to stdout' - - def __init__(self): - super(FreezeCommand, self).__init__() - self.parser.add_option( - '-r', '--requirement', - dest='requirement', - action='store', - default=None, - 
metavar='FILENAME', - help='Use the given requirements file as a hint about how to generate the new frozen requirements') - self.parser.add_option( - '-f', '--find-links', - dest='find_links', - action='append', - default=[], - metavar='URL', - help='URL for finding packages, which will be added to the frozen requirements file') - self.parser.add_option( - '-l', '--local', - dest='local', - action='store_true', - default=False, - help='If in a virtualenv, do not report globally-installed packages') - - def setup_logging(self): - logger.move_stdout_to_stderr() - - def run(self, options, args): - requirement = options.requirement - find_links = options.find_links or [] - local_only = options.local - ## FIXME: Obviously this should be settable: - find_tags = False - skip_match = None - - skip_regex = options.skip_requirements_regex - if skip_regex: - skip_match = re.compile(skip_regex) - - dependency_links = [] - - f = sys.stdout - - for dist in pkg_resources.working_set: - if dist.has_metadata('dependency_links.txt'): - dependency_links.extend(dist.get_metadata_lines('dependency_links.txt')) - for link in find_links: - if '#egg=' in link: - dependency_links.append(link) - for link in find_links: - f.write('-f %s\n' % link) - installations = {} - for dist in get_installed_distributions(local_only=local_only): - req = pip.FrozenRequirement.from_dist(dist, dependency_links, find_tags=find_tags) - installations[req.name] = req - if requirement: - req_f = open(requirement) - for line in req_f: - if not line.strip() or line.strip().startswith('#'): - f.write(line) - continue - if skip_match and skip_match.search(line): - f.write(line) - continue - elif line.startswith('-e') or line.startswith('--editable'): - if line.startswith('-e'): - line = line[2:].strip() - else: - line = line[len('--editable'):].strip().lstrip('=') - line_req = InstallRequirement.from_editable(line, default_vcs=options.default_vcs) - elif (line.startswith('-r') or line.startswith('--requirement') - or line.startswith('-Z') or line.startswith('--always-unzip') - or line.startswith('-f') or line.startswith('-i') - or line.startswith('--extra-index-url')): - f.write(line) - continue - else: - line_req = InstallRequirement.from_line(line) - if not line_req.name: - logger.notify("Skipping line because it's not clear what it would install: %s" - % line.strip()) - logger.notify(" (add #egg=PackageName to the URL to avoid this warning)") - continue - if line_req.name not in installations: - logger.warn("Requirement file contains %s, but that package is not installed" - % line.strip()) - continue - f.write(str(installations[line_req.name])) - del installations[line_req.name] - f.write('## The following requirements were added by pip --freeze:\n') - for installation in sorted(installations.values(), key=lambda x: x.name): - f.write(str(installation)) - - -FreezeCommand() diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/help.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/help.py deleted file mode 100755 index b0b36611..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/help.py +++ /dev/null @@ -1,32 +0,0 @@ -from pip.basecommand import Command, command_dict, load_all_commands -from pip.exceptions import InstallationError -from pip.baseparser import parser - - -class HelpCommand(Command): - name = 'help' - usage = '%prog' - summary = 'Show available commands' - - def run(self, options, args): - load_all_commands() - if args: - ## FIXME: handle errors better here - 
command = args[0] - if command not in command_dict: - raise InstallationError('No command with the name: %s' % command) - command = command_dict[command] - command.parser.print_help() - return - parser.print_help() - print - print 'Commands available:' - commands = list(set(command_dict.values())) - commands.sort(key=lambda x: x.name) - for command in commands: - if command.hidden: - continue - print ' %s: %s' % (command.name, command.summary) - - -HelpCommand() diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/install.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/install.py deleted file mode 100755 index 861c332b..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/install.py +++ /dev/null @@ -1,247 +0,0 @@ -import os, sys -from pip.req import InstallRequirement, RequirementSet -from pip.req import parse_requirements -from pip.log import logger -from pip.locations import build_prefix, src_prefix -from pip.basecommand import Command -from pip.index import PackageFinder -from pip.exceptions import InstallationError - - -class InstallCommand(Command): - name = 'install' - usage = '%prog [OPTIONS] PACKAGE_NAMES...' - summary = 'Install packages' - bundle = False - - def __init__(self): - super(InstallCommand, self).__init__() - self.parser.add_option( - '-e', '--editable', - dest='editables', - action='append', - default=[], - metavar='VCS+REPOS_URL[@REV]#egg=PACKAGE', - help='Install a package directly from a checkout. Source will be checked ' - 'out into src/PACKAGE (lower-case) and installed in-place (using ' - 'setup.py develop). You can run this on an existing directory/checkout (like ' - 'pip install -e src/mycheckout). This option may be provided multiple times. ' - 'Possible values for VCS are: svn, git, hg and bzr.') - self.parser.add_option( - '-r', '--requirement', - dest='requirements', - action='append', - default=[], - metavar='FILENAME', - help='Install all the packages listed in the given requirements file. 
' - 'This option can be used multiple times.') - self.parser.add_option( - '-f', '--find-links', - dest='find_links', - action='append', - default=[], - metavar='URL', - help='URL to look for packages at') - self.parser.add_option( - '-i', '--index-url', '--pypi-url', - dest='index_url', - metavar='URL', - default='http://pypi.python.org/simple/', - help='Base URL of Python Package Index (default %default)') - self.parser.add_option( - '--extra-index-url', - dest='extra_index_urls', - metavar='URL', - action='append', - default=[], - help='Extra URLs of package indexes to use in addition to --index-url') - self.parser.add_option( - '--no-index', - dest='no_index', - action='store_true', - default=False, - help='Ignore package index (only looking at --find-links URLs instead)') - self.parser.add_option( - '-M', '--use-mirrors', - dest='use_mirrors', - action='store_true', - default=False, - help='Use the PyPI mirrors as a fallback in case the main index is down.') - self.parser.add_option( - '--mirrors', - dest='mirrors', - metavar='URL', - action='append', - default=[], - help='Specific mirror URLs to query when --use-mirrors is used') - - self.parser.add_option( - '-b', '--build', '--build-dir', '--build-directory', - dest='build_dir', - metavar='DIR', - default=None, - help='Unpack packages into DIR (default %s) and build from there' % build_prefix) - self.parser.add_option( - '-d', '--download', '--download-dir', '--download-directory', - dest='download_dir', - metavar='DIR', - default=None, - help='Download packages into DIR instead of installing them') - self.parser.add_option( - '--download-cache', - dest='download_cache', - metavar='DIR', - default=None, - help='Cache downloaded packages in DIR') - self.parser.add_option( - '--src', '--source', '--source-dir', '--source-directory', - dest='src_dir', - metavar='DIR', - default=None, - help='Check out --editable packages into DIR (default %s)' % src_prefix) - - self.parser.add_option( - '-U', '--upgrade', - dest='upgrade', - action='store_true', - help='Upgrade all packages to the newest available version') - self.parser.add_option( - '-I', '--ignore-installed', - dest='ignore_installed', - action='store_true', - help='Ignore the installed packages (reinstalling instead)') - self.parser.add_option( - '--no-deps', '--no-dependencies', - dest='ignore_dependencies', - action='store_true', - default=False, - help='Ignore package dependencies') - self.parser.add_option( - '--no-install', - dest='no_install', - action='store_true', - help="Download and unpack all packages, but don't actually install them") - self.parser.add_option( - '--no-download', - dest='no_download', - action="store_true", - help="Don't download any packages, just install the ones already downloaded " - "(completes an install run with --no-install)") - - self.parser.add_option( - '--install-option', - dest='install_options', - action='append', - help="Extra arguments to be supplied to the setup.py install " - "command (use like --install-option=\"--install-scripts=/usr/local/bin\"). " - "Use multiple --install-option options to pass multiple options to setup.py install. 
" - "If you are using an option with a directory path, be sure to use absolute path.") - - self.parser.add_option( - '--global-option', - dest='global_options', - action='append', - help="Extra global options to be supplied to the setup.py" - "call before the install command") - - self.parser.add_option( - '--user', - dest='use_user_site', - action='store_true', - help='Install to user-site') - - def _build_package_finder(self, options, index_urls): - """ - Create a package finder appropriate to this install command. - This method is meant to be overridden by subclasses, not - called directly. - """ - return PackageFinder(find_links=options.find_links, - index_urls=index_urls, - use_mirrors=options.use_mirrors, - mirrors=options.mirrors) - - def run(self, options, args): - if not options.build_dir: - options.build_dir = build_prefix - if not options.src_dir: - options.src_dir = src_prefix - if options.download_dir: - options.no_install = True - options.ignore_installed = True - options.build_dir = os.path.abspath(options.build_dir) - options.src_dir = os.path.abspath(options.src_dir) - install_options = options.install_options or [] - if options.use_user_site: - install_options.append('--user') - global_options = options.global_options or [] - index_urls = [options.index_url] + options.extra_index_urls - if options.no_index: - logger.notify('Ignoring indexes: %s' % ','.join(index_urls)) - index_urls = [] - - finder = self._build_package_finder(options, index_urls) - - requirement_set = RequirementSet( - build_dir=options.build_dir, - src_dir=options.src_dir, - download_dir=options.download_dir, - download_cache=options.download_cache, - upgrade=options.upgrade, - ignore_installed=options.ignore_installed, - ignore_dependencies=options.ignore_dependencies) - for name in args: - requirement_set.add_requirement( - InstallRequirement.from_line(name, None)) - for name in options.editables: - requirement_set.add_requirement( - InstallRequirement.from_editable(name, default_vcs=options.default_vcs)) - for filename in options.requirements: - for req in parse_requirements(filename, finder=finder, options=options): - requirement_set.add_requirement(req) - - if not requirement_set.has_requirements: - if options.find_links: - raise InstallationError('You must give at least one ' - 'requirement to %s (maybe you meant "pip install %s"?)' - % (self.name, " ".join(options.find_links))) - raise InstallationError('You must give at least one requirement ' - 'to %(name)s (see "pip help %(name)s")' % dict(name=self.name)) - - if (options.use_user_site and - sys.version_info < (2, 6)): - raise InstallationError('--user is only supported in Python version 2.6 and newer') - - import setuptools - if (options.use_user_site and - requirement_set.has_editables and - not getattr(setuptools, '_distribute', False)): - - raise InstallationError('--user --editable not supported with setuptools, use distribute') - - if not options.no_download: - requirement_set.prepare_files(finder, force_root_egg_info=self.bundle, bundle=self.bundle) - else: - requirement_set.locate_files() - - if not options.no_install and not self.bundle: - requirement_set.install(install_options, global_options) - installed = ' '.join([req.name for req in - requirement_set.successfully_installed]) - if installed: - logger.notify('Successfully installed %s' % installed) - elif not self.bundle: - downloaded = ' '.join([req.name for req in - requirement_set.successfully_downloaded]) - if downloaded: - logger.notify('Successfully downloaded %s' % 
downloaded) - elif self.bundle: - requirement_set.create_bundle(self.bundle_filename) - logger.notify('Created bundle in %s' % self.bundle_filename) - # Clean up - if not options.no_install: - requirement_set.cleanup_files(bundle=self.bundle) - return requirement_set - - -InstallCommand() diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/search.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/search.py deleted file mode 100755 index 73da58ac..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/search.py +++ /dev/null @@ -1,116 +0,0 @@ -import sys -import xmlrpclib -import textwrap -import pkg_resources -import pip.download -from pip.basecommand import Command -from pip.util import get_terminal_size -from pip.log import logger -from distutils.version import StrictVersion, LooseVersion - - -class SearchCommand(Command): - name = 'search' - usage = '%prog QUERY' - summary = 'Search PyPI' - - def __init__(self): - super(SearchCommand, self).__init__() - self.parser.add_option( - '--index', - dest='index', - metavar='URL', - default='http://pypi.python.org/pypi', - help='Base URL of Python Package Index (default %default)') - - def run(self, options, args): - if not args: - logger.warn('ERROR: Missing required argument (search query).') - return - query = ' '.join(args) - index_url = options.index - - pypi_hits = self.search(query, index_url) - hits = transform_hits(pypi_hits) - - terminal_width = None - if sys.stdout.isatty(): - terminal_width = get_terminal_size()[0] - - print_results(hits, terminal_width=terminal_width) - - def search(self, query, index_url): - pypi = xmlrpclib.ServerProxy(index_url, pip.download.xmlrpclib_transport) - hits = pypi.search({'name': query, 'summary': query}, 'or') - return hits - - -def transform_hits(hits): - """ - The list from pypi is really a list of versions. We want a list of - packages with the list of versions stored inline. This converts the - list from pypi into one we can use. 
- """ - packages = {} - for hit in hits: - name = hit['name'] - summary = hit['summary'] - version = hit['version'] - score = hit['_pypi_ordering'] - - if name not in packages.keys(): - packages[name] = {'name': name, 'summary': summary, 'versions': [version], 'score': score} - else: - packages[name]['versions'].append(version) - - # if this is the highest version, replace summary and score - if version == highest_version(packages[name]['versions']): - packages[name]['summary'] = summary - packages[name]['score'] = score - - # each record has a unique name now, so we will convert the dict into a list sorted by score - package_list = sorted(packages.values(), lambda x, y: cmp(y['score'], x['score'])) - return package_list - - -def print_results(hits, name_column_width=25, terminal_width=None): - installed_packages = [p.project_name for p in pkg_resources.working_set] - for hit in hits: - name = hit['name'] - summary = hit['summary'] or '' - if terminal_width is not None: - # wrap and indent summary to fit terminal - summary = textwrap.wrap(summary, terminal_width - name_column_width - 5) - summary = ('\n' + ' ' * (name_column_width + 3)).join(summary) - line = '%s - %s' % (name.ljust(name_column_width), summary) - try: - logger.notify(line) - if name in installed_packages: - dist = pkg_resources.get_distribution(name) - logger.indent += 2 - try: - latest = highest_version(hit['versions']) - if dist.version == latest: - logger.notify('INSTALLED: %s (latest)' % dist.version) - else: - logger.notify('INSTALLED: %s' % dist.version) - logger.notify('LATEST: %s' % latest) - finally: - logger.indent -= 2 - except UnicodeEncodeError: - pass - - -def compare_versions(version1, version2): - try: - return cmp(StrictVersion(version1), StrictVersion(version2)) - # in case of abnormal version number, fall back to LooseVersion - except ValueError: - return cmp(LooseVersion(version1), LooseVersion(version2)) - - -def highest_version(versions): - return reduce((lambda v1, v2: compare_versions(v1, v2) == 1 and v1 or v2), versions) - - -SearchCommand() diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/uninstall.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/uninstall.py deleted file mode 100755 index 7effd844..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/uninstall.py +++ /dev/null @@ -1,42 +0,0 @@ -from pip.req import InstallRequirement, RequirementSet, parse_requirements -from pip.basecommand import Command -from pip.exceptions import InstallationError - -class UninstallCommand(Command): - name = 'uninstall' - usage = '%prog [OPTIONS] PACKAGE_NAMES ...' - summary = 'Uninstall packages' - - def __init__(self): - super(UninstallCommand, self).__init__() - self.parser.add_option( - '-r', '--requirement', - dest='requirements', - action='append', - default=[], - metavar='FILENAME', - help='Uninstall all the packages listed in the given requirements file. 
' - 'This option can be used multiple times.') - self.parser.add_option( - '-y', '--yes', - dest='yes', - action='store_true', - help="Don't ask for confirmation of uninstall deletions.") - - def run(self, options, args): - requirement_set = RequirementSet( - build_dir=None, - src_dir=None, - download_dir=None) - for name in args: - requirement_set.add_requirement( - InstallRequirement.from_line(name)) - for filename in options.requirements: - for req in parse_requirements(filename, options=options): - requirement_set.add_requirement(req) - if not requirement_set.has_requirements: - raise InstallationError('You must give at least one requirement ' - 'to %(name)s (see "pip help %(name)s")' % dict(name=self.name)) - requirement_set.uninstall(auto_confirm=options.yes) - -UninstallCommand() diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/unzip.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/unzip.py deleted file mode 100755 index f83e1820..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/unzip.py +++ /dev/null @@ -1,9 +0,0 @@ -from pip.commands.zip import ZipCommand - - -class UnzipCommand(ZipCommand): - name = 'unzip' - summary = 'Unzip individual packages' - - -UnzipCommand() diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/zip.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/zip.py deleted file mode 100755 index 346fc051..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/zip.py +++ /dev/null @@ -1,346 +0,0 @@ -import sys -import re -import fnmatch -import os -import shutil -import zipfile -from pip.util import display_path, backup_dir -from pip.log import logger -from pip.exceptions import InstallationError -from pip.basecommand import Command - - -class ZipCommand(Command): - name = 'zip' - usage = '%prog [OPTIONS] PACKAGE_NAMES...' 
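
The -r/--requirement options above are declared with optparse's action='append', which is what lets one invocation pass several requirements files. A quick self-contained demonstration of that accumulation (the option names mirror the ones above; everything else is illustrative):

    from optparse import OptionParser

    parser = OptionParser()
    # Every occurrence of -r appends to the same list, so e.g.
    # `pip uninstall -r base.txt -r extra.txt` sees both files.
    parser.add_option('-r', '--requirement', dest='requirements',
                      action='append', default=[], metavar='FILENAME')

    opts, args = parser.parse_args(['-r', 'base.txt', '-r', 'extra.txt', 'foo'])
    print(opts.requirements)  # ['base.txt', 'extra.txt']
    print(args)               # ['foo']
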
- summary = 'Zip individual packages' - - def __init__(self): - super(ZipCommand, self).__init__() - if self.name == 'zip': - self.parser.add_option( - '--unzip', - action='store_true', - dest='unzip', - help='Unzip (rather than zip) a package') - else: - self.parser.add_option( - '--zip', - action='store_false', - dest='unzip', - default=True, - help='Zip (rather than unzip) a package') - self.parser.add_option( - '--no-pyc', - action='store_true', - dest='no_pyc', - help='Do not include .pyc files in zip files (useful on Google App Engine)') - self.parser.add_option( - '-l', '--list', - action='store_true', - dest='list', - help='List the packages available, and their zip status') - self.parser.add_option( - '--sort-files', - action='store_true', - dest='sort_files', - help='With --list, sort packages according to how many files they contain') - self.parser.add_option( - '--path', - action='append', - dest='paths', - help='Restrict operations to the given paths (may include wildcards)') - self.parser.add_option( - '-n', '--simulate', - action='store_true', - help='Do not actually perform the zip/unzip operation') - - def paths(self): - """All the entries of sys.path, possibly restricted by --path""" - if not self.select_paths: - return sys.path - result = [] - match_any = set() - for path in sys.path: - path = os.path.normcase(os.path.abspath(path)) - for match in self.select_paths: - match = os.path.normcase(os.path.abspath(match)) - if '*' in match: - if re.search(fnmatch.translate(match+'*'), path): - result.append(path) - match_any.add(match) - break - else: - if path.startswith(match): - result.append(path) - match_any.add(match) - break - else: - logger.debug("Skipping path %s because it doesn't match %s" - % (path, ', '.join(self.select_paths))) - for match in self.select_paths: - if match not in match_any and '*' not in match: - result.append(match) - logger.debug("Adding path %s because it doesn't match anything already on sys.path" - % match) - return result - - def run(self, options, args): - self.select_paths = options.paths - self.simulate = options.simulate - if options.list: - return self.list(options, args) - if not args: - raise InstallationError( - 'You must give at least one package to zip or unzip') - packages = [] - for arg in args: - module_name, filename = self.find_package(arg) - if options.unzip and os.path.isdir(filename): - raise InstallationError( - 'The module %s (in %s) is not a zip file; cannot be unzipped' - % (module_name, filename)) - elif not options.unzip and not os.path.isdir(filename): - raise InstallationError( - 'The module %s (in %s) is not a directory; cannot be zipped' - % (module_name, filename)) - packages.append((module_name, filename)) - last_status = None - for module_name, filename in packages: - if options.unzip: - last_status = self.unzip_package(module_name, filename) - else: - last_status = self.zip_package(module_name, filename, options.no_pyc) - return last_status - - def unzip_package(self, module_name, filename): - zip_filename = os.path.dirname(filename) - if not os.path.isfile(zip_filename) and zipfile.is_zipfile(zip_filename): - raise InstallationError( - 'Module %s (in %s) isn\'t located in a zip file in %s' - % (module_name, filename, zip_filename)) - package_path = os.path.dirname(zip_filename) - if not package_path in self.paths(): - logger.warn( - 'Unpacking %s into %s, but %s is not on sys.path' - % (display_path(zip_filename), display_path(package_path), - display_path(package_path))) - logger.notify('Unzipping %s (in 
%s)' % (module_name, display_path(zip_filename))) - if self.simulate: - logger.notify('Skipping remaining operations because of --simulate') - return - logger.indent += 2 - try: - ## FIXME: this should be undoable: - zip = zipfile.ZipFile(zip_filename) - to_save = [] - for name in zip.namelist(): - if name.startswith(module_name + os.path.sep): - content = zip.read(name) - dest = os.path.join(package_path, name) - if not os.path.exists(os.path.dirname(dest)): - os.makedirs(os.path.dirname(dest)) - if not content and dest.endswith(os.path.sep): - if not os.path.exists(dest): - os.makedirs(dest) - else: - f = open(dest, 'wb') - f.write(content) - f.close() - else: - to_save.append((name, zip.read(name))) - zip.close() - if not to_save: - logger.info('Removing now-empty zip file %s' % display_path(zip_filename)) - os.unlink(zip_filename) - self.remove_filename_from_pth(zip_filename) - else: - logger.info('Removing entries in %s/ from zip file %s' % (module_name, display_path(zip_filename))) - zip = zipfile.ZipFile(zip_filename, 'w') - for name, content in to_save: - zip.writestr(name, content) - zip.close() - finally: - logger.indent -= 2 - - def zip_package(self, module_name, filename, no_pyc): - orig_filename = filename - logger.notify('Zip %s (in %s)' % (module_name, display_path(filename))) - logger.indent += 2 - if filename.endswith('.egg'): - dest_filename = filename - else: - dest_filename = filename + '.zip' - try: - ## FIXME: I think this needs to be undoable: - if filename == dest_filename: - filename = backup_dir(orig_filename) - logger.notify('Moving %s aside to %s' % (orig_filename, filename)) - if not self.simulate: - shutil.move(orig_filename, filename) - try: - logger.info('Creating zip file in %s' % display_path(dest_filename)) - if not self.simulate: - zip = zipfile.ZipFile(dest_filename, 'w') - zip.writestr(module_name + '/', '') - for dirpath, dirnames, filenames in os.walk(filename): - if no_pyc: - filenames = [f for f in filenames - if not f.lower().endswith('.pyc')] - for fns, is_dir in [(dirnames, True), (filenames, False)]: - for fn in fns: - full = os.path.join(dirpath, fn) - dest = os.path.join(module_name, dirpath[len(filename):].lstrip(os.path.sep), fn) - if is_dir: - zip.writestr(dest+'/', '') - else: - zip.write(full, dest) - zip.close() - logger.info('Removing old directory %s' % display_path(filename)) - if not self.simulate: - shutil.rmtree(filename) - except: - ## FIXME: need to do an undo here - raise - ## FIXME: should also be undone: - self.add_filename_to_pth(dest_filename) - finally: - logger.indent -= 2 - - def remove_filename_from_pth(self, filename): - for pth in self.pth_files(): - f = open(pth, 'r') - lines = f.readlines() - f.close() - new_lines = [ - l for l in lines if l.strip() != filename] - if lines != new_lines: - logger.info('Removing reference to %s from .pth file %s' - % (display_path(filename), display_path(pth))) - if not filter(None, new_lines): - logger.info('%s file would be empty: deleting' % display_path(pth)) - if not self.simulate: - os.unlink(pth) - else: - if not self.simulate: - f = open(pth, 'wb') - f.writelines(new_lines) - f.close() - return - logger.warn('Cannot find a reference to %s in any .pth file' % display_path(filename)) - - def add_filename_to_pth(self, filename): - path = os.path.dirname(filename) - dest = os.path.join(path, filename + '.pth') - if path not in self.paths(): - logger.warn('Adding .pth file %s, but it is not on sys.path' % display_path(dest)) - if not self.simulate: - if os.path.exists(dest): - f = 
open(dest) - lines = f.readlines() - f.close() - if lines and not lines[-1].endswith('\n'): - lines[-1] += '\n' - lines.append(filename+'\n') - else: - lines = [filename + '\n'] - f = open(dest, 'wb') - f.writelines(lines) - f.close() - - def pth_files(self): - for path in self.paths(): - if not os.path.exists(path) or not os.path.isdir(path): - continue - for filename in os.listdir(path): - if filename.endswith('.pth'): - yield os.path.join(path, filename) - - def find_package(self, package): - for path in self.paths(): - full = os.path.join(path, package) - if os.path.exists(full): - return package, full - if not os.path.isdir(path) and zipfile.is_zipfile(path): - zip = zipfile.ZipFile(path, 'r') - try: - zip.read(os.path.join(package, '__init__.py')) - except KeyError: - pass - else: - zip.close() - return package, full - zip.close() - ## FIXME: need special error for package.py case: - raise InstallationError( - 'No package with the name %s found' % package) - - def list(self, options, args): - if args: - raise InstallationError( - 'You cannot give an argument with --list') - for path in sorted(self.paths()): - if not os.path.exists(path): - continue - basename = os.path.basename(path.rstrip(os.path.sep)) - if os.path.isfile(path) and zipfile.is_zipfile(path): - if os.path.dirname(path) not in self.paths(): - logger.notify('Zipped egg: %s' % display_path(path)) - continue - if (basename != 'site-packages' and basename != 'dist-packages' - and not path.replace('\\', '/').endswith('lib/python')): - continue - logger.notify('In %s:' % display_path(path)) - logger.indent += 2 - zipped = [] - unzipped = [] - try: - for filename in sorted(os.listdir(path)): - ext = os.path.splitext(filename)[1].lower() - if ext in ('.pth', '.egg-info', '.egg-link'): - continue - if ext == '.py': - logger.info('Not displaying %s: not a package' % display_path(filename)) - continue - full = os.path.join(path, filename) - if os.path.isdir(full): - unzipped.append((filename, self.count_package(full))) - elif zipfile.is_zipfile(full): - zipped.append(filename) - else: - logger.info('Unknown file: %s' % display_path(filename)) - if zipped: - logger.notify('Zipped packages:') - logger.indent += 2 - try: - for filename in zipped: - logger.notify(filename) - finally: - logger.indent -= 2 - else: - logger.notify('No zipped packages.') - if unzipped: - if options.sort_files: - unzipped.sort(key=lambda x: -x[1]) - logger.notify('Unzipped packages:') - logger.indent += 2 - try: - for filename, count in unzipped: - logger.notify('%s (%i files)' % (filename, count)) - finally: - logger.indent -= 2 - else: - logger.notify('No unzipped packages.') - finally: - logger.indent -= 2 - - def count_package(self, path): - total = 0 - for dirpath, dirnames, filenames in os.walk(path): - filenames = [f for f in filenames - if not f.lower().endswith('.pyc')] - total += len(filenames) - return total - - -ZipCommand() diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/download.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/download.py deleted file mode 100755 index f1b63936..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/download.py +++ /dev/null @@ -1,470 +0,0 @@ -import xmlrpclib -import re -import getpass -import urllib -import urllib2 -import urlparse -import os -import mimetypes -import shutil -import tempfile -from pip.backwardcompat import md5, copytree -from pip.exceptions import InstallationError -from pip.util import (splitext, - format_size, display_path, backup_dir, ask, - 
-                      unpack_file, create_download_cache_folder, cache_download)
-from pip.vcs import vcs
-from pip.log import logger
-
-
-__all__ = ['xmlrpclib_transport', 'get_file_content', 'urlopen',
-           'is_url', 'url_to_path', 'path_to_url', 'path_to_url2',
-           'geturl', 'is_archive_file', 'unpack_vcs_link',
-           'unpack_file_url', 'is_vcs_url', 'is_file_url', 'unpack_http_url']
-
-
-xmlrpclib_transport = xmlrpclib.Transport()
-
-
-def get_file_content(url, comes_from=None):
-    """Gets the content of a file; it may be a filename, file: URL, or
-    http: URL.  Returns (location, content)"""
-    match = _scheme_re.search(url)
-    if match:
-        scheme = match.group(1).lower()
-        if (scheme == 'file' and comes_from
-            and comes_from.startswith('http')):
-            raise InstallationError(
-                'Requirements file %s references URL %s, which is local'
-                % (comes_from, url))
-        if scheme == 'file':
-            path = url.split(':', 1)[1]
-            path = path.replace('\\', '/')
-            match = _url_slash_drive_re.match(path)
-            if match:
-                path = match.group(1) + ':' + path.split('|', 1)[1]
-            path = urllib.unquote(path)
-            if path.startswith('/'):
-                path = '/' + path.lstrip('/')
-            url = path
-        else:
-            ## FIXME: catch some errors
-            resp = urlopen(url)
-            return geturl(resp), resp.read()
-    try:
-        f = open(url)
-        content = f.read()
-    except IOError, e:
-        raise InstallationError('Could not open requirements file: %s' % str(e))
-    else:
-        f.close()
-    return url, content
-
-
-_scheme_re = re.compile(r'^(http|https|file):', re.I)
-_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
-
-class URLOpener(object):
-    """
-    pip's own URL helper that adds HTTP auth and proxy support
-    """
-    def __init__(self):
-        self.passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
-
-    def __call__(self, url):
-        """
-        If the given url contains auth info or if a normal request gets a 401
-        response, an attempt is made to fetch the resource using basic HTTP
-        auth.
-        """
-        url, username, password = self.extract_credentials(url)
-        if username is None:
-            try:
-                response = urllib2.urlopen(self.get_request(url))
-            except urllib2.HTTPError, e:
-                if e.code != 401:
-                    raise
-                response = self.get_response(url)
-        else:
-            response = self.get_response(url, username, password)
-        return response
-
-    def get_request(self, url):
-        """
-        Wraps the URL to retrieve to protect against "creative"
-        interpretation of the RFC: http://bugs.python.org/issue8732
-        """
-        if isinstance(url, basestring):
-            url = urllib2.Request(url, headers={'Accept-encoding': 'identity'})
-        return url
-
-    def get_response(self, url, username=None, password=None):
-        """
-        Does the dirty work of actually getting the response object using
-        urllib2 and its HTTP auth builtins.
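
URLOpener.__call__ above tries an anonymous request first and only falls back to HTTP basic auth when the server answers 401; any other HTTP error propagates. A stripped-down sketch of that retry policy (fetch_plain and fetch_with_auth are hypothetical stand-ins, not pip functions):

    class HTTPError(Exception):
        def __init__(self, code):
            self.code = code

    def fetch(url, fetch_plain, fetch_with_auth):
        # Request without credentials first; only a 401 means
        # "authenticate and retry", everything else re-raises.
        try:
            return fetch_plain(url)
        except HTTPError as e:
            if e.code != 401:
                raise
            return fetch_with_auth(url)
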
- """ - scheme, netloc, path, query, frag = urlparse.urlsplit(url) - pass_url = urlparse.urlunsplit(('_none_', netloc, path, query, frag)).replace('_none_://', '', 1) - req = self.get_request(url) - - stored_username, stored_password = self.passman.find_user_password(None, netloc) - # see if we have a password stored - if stored_username is None: - if username is None and self.prompting: - username = urllib.quote(raw_input('User for %s: ' % netloc)) - password = urllib.quote(getpass.getpass('Password: ')) - if username and password: - self.passman.add_password(None, netloc, username, password) - stored_username, stored_password = self.passman.find_user_password(None, netloc) - authhandler = urllib2.HTTPBasicAuthHandler(self.passman) - opener = urllib2.build_opener(authhandler) - # FIXME: should catch a 401 and offer to let the user reenter credentials - return opener.open(req) - - def setup(self, proxystr='', prompting=True): - """ - Sets the proxy handler given the option passed on the command - line. If an empty string is passed it looks at the HTTP_PROXY - environment variable. - """ - self.prompting = prompting - proxy = self.get_proxy(proxystr) - if proxy: - proxy_support = urllib2.ProxyHandler({"http": proxy, "ftp": proxy}) - opener = urllib2.build_opener(proxy_support, urllib2.CacheFTPHandler) - urllib2.install_opener(opener) - - def parse_credentials(self, netloc): - if "@" in netloc: - userinfo = netloc.rsplit("@", 1)[0] - if ":" in userinfo: - return userinfo.split(":", 1) - return userinfo, None - return None, None - - def extract_credentials(self, url): - """ - Extracts user/password from a url. - - Returns a tuple: - (url-without-auth, username, password) - """ - if isinstance(url, urllib2.Request): - result = urlparse.urlsplit(url.get_full_url()) - else: - result = urlparse.urlsplit(url) - scheme, netloc, path, query, frag = result - - username, password = self.parse_credentials(netloc) - if username is None: - return url, None, None - elif password is None and self.prompting: - # remove the auth credentials from the url part - netloc = netloc.replace('%s@' % username, '', 1) - # prompt for the password - prompt = 'Password for %s@%s: ' % (username, netloc) - password = urllib.quote(getpass.getpass(prompt)) - else: - # remove the auth credentials from the url part - netloc = netloc.replace('%s:%s@' % (username, password), '', 1) - - target_url = urlparse.urlunsplit((scheme, netloc, path, query, frag)) - return target_url, username, password - - def get_proxy(self, proxystr=''): - """ - Get the proxy given the option passed on the command line. - If an empty string is passed it looks at the HTTP_PROXY - environment variable. - """ - if not proxystr: - proxystr = os.environ.get('HTTP_PROXY', '') - if proxystr: - if '@' in proxystr: - user_password, server_port = proxystr.split('@', 1) - if ':' in user_password: - user, password = user_password.split(':', 1) - else: - user = user_password - prompt = 'Password for %s@%s: ' % (user, server_port) - password = urllib.quote(getpass.getpass(prompt)) - return '%s:%s@%s' % (user, password, server_port) - else: - return proxystr - else: - return None - -urlopen = URLOpener() - - -def is_url(name): - """Returns true if the name looks like a URL""" - if ':' not in name: - return False - scheme = name.split(':', 1)[0].lower() - return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes - - -def url_to_path(url): - """ - Convert a file: URL to a path. 
- """ - assert url.startswith('file:'), ( - "You can only turn file: urls into filenames (not %r)" % url) - path = url[len('file:'):].lstrip('/') - path = urllib.unquote(path) - if _url_drive_re.match(path): - path = path[0] + ':' + path[2:] - else: - path = '/' + path - return path - - -_drive_re = re.compile('^([a-z]):', re.I) -_url_drive_re = re.compile('^([a-z])[:|]', re.I) - - -def path_to_url(path): - """ - Convert a path to a file: URL. The path will be made absolute. - """ - path = os.path.normcase(os.path.abspath(path)) - if _drive_re.match(path): - path = path[0] + '|' + path[2:] - url = urllib.quote(path) - url = url.replace(os.path.sep, '/') - url = url.lstrip('/') - return 'file:///' + url - - -def path_to_url2(path): - """ - Convert a path to a file: URL. The path will be made absolute and have - quoted path parts. - """ - path = os.path.normpath(os.path.abspath(path)) - drive, path = os.path.splitdrive(path) - filepath = path.split(os.path.sep) - url = '/'.join([urllib.quote(part) for part in filepath]) - if not drive: - url = url.lstrip('/') - return 'file:///' + drive + url - - -def geturl(urllib2_resp): - """ - Use instead of urllib.addinfourl.geturl(), which appears to have - some issues with dropping the double slash for certain schemes - (e.g. file://). This implementation is probably over-eager, as it - always restores '://' if it is missing, and it appears some url - schemata aren't always followed by '//' after the colon, but as - far as I know pip doesn't need any of those. - The URI RFC can be found at: http://tools.ietf.org/html/rfc1630 - - This function assumes that - scheme:/foo/bar - is the same as - scheme:///foo/bar - """ - url = urllib2_resp.geturl() - scheme, rest = url.split(':', 1) - if rest.startswith('//'): - return url - else: - # FIXME: write a good test to cover it - return '%s://%s' % (scheme, rest) - - -def is_archive_file(name): - """Return True if `name` is a considered as an archive file.""" - archives = ('.zip', '.tar.gz', '.tar.bz2', '.tgz', '.tar', '.pybundle') - ext = splitext(name)[1].lower() - if ext in archives: - return True - return False - - -def unpack_vcs_link(link, location, only_download=False): - vcs_backend = _get_used_vcs_backend(link) - if only_download: - vcs_backend.export(location) - else: - vcs_backend.unpack(location) - - -def unpack_file_url(link, location): - source = url_to_path(link.url) - content_type = mimetypes.guess_type(source)[0] - if os.path.isdir(source): - # delete the location since shutil will create it again :( - if os.path.isdir(location): - shutil.rmtree(location) - copytree(source, location) - else: - unpack_file(source, location, content_type, link) - - -def _get_used_vcs_backend(link): - for backend in vcs.backends: - if link.scheme in backend.schemes: - vcs_backend = backend(link.url) - return vcs_backend - - -def is_vcs_url(link): - return bool(_get_used_vcs_backend(link)) - - -def is_file_url(link): - return link.url.lower().startswith('file:') - - -def _check_md5(download_hash, link): - download_hash = download_hash.hexdigest() - if download_hash != link.md5_hash: - logger.fatal("MD5 hash of the package %s (%s) doesn't match the expected hash %s!" 
- % (link, download_hash, link.md5_hash)) - raise InstallationError('Bad MD5 hash for package %s' % link) - - -def _get_md5_from_file(target_file, link): - download_hash = md5() - fp = open(target_file, 'rb') - while 1: - chunk = fp.read(4096) - if not chunk: - break - download_hash.update(chunk) - fp.close() - return download_hash - - -def _download_url(resp, link, temp_location): - fp = open(temp_location, 'wb') - download_hash = None - if link.md5_hash: - download_hash = md5() - try: - total_length = int(resp.info()['content-length']) - except (ValueError, KeyError): - total_length = 0 - downloaded = 0 - show_progress = total_length > 40*1000 or not total_length - show_url = link.show_url - try: - if show_progress: - ## FIXME: the URL can get really long in this message: - if total_length: - logger.start_progress('Downloading %s (%s): ' % (show_url, format_size(total_length))) - else: - logger.start_progress('Downloading %s (unknown size): ' % show_url) - else: - logger.notify('Downloading %s' % show_url) - logger.debug('Downloading from URL %s' % link) - - while 1: - chunk = resp.read(4096) - if not chunk: - break - downloaded += len(chunk) - if show_progress: - if not total_length: - logger.show_progress('%s' % format_size(downloaded)) - else: - logger.show_progress('%3i%% %s' % (100*downloaded/total_length, format_size(downloaded))) - if link.md5_hash: - download_hash.update(chunk) - fp.write(chunk) - fp.close() - finally: - if show_progress: - logger.end_progress('%s downloaded' % format_size(downloaded)) - return download_hash - - -def _copy_file(filename, location, content_type, link): - copy = True - download_location = os.path.join(location, link.filename) - if os.path.exists(download_location): - response = ask('The file %s exists. (i)gnore, (w)ipe, (b)ackup ' - % display_path(download_location), ('i', 'w', 'b')) - if response == 'i': - copy = False - elif response == 'w': - logger.warn('Deleting %s' % display_path(download_location)) - os.remove(download_location) - elif response == 'b': - dest_file = backup_dir(download_location) - logger.warn('Backing up %s to %s' - % (display_path(download_location), display_path(dest_file))) - shutil.move(download_location, dest_file) - if copy: - shutil.copy(filename, download_location) - logger.indent -= 2 - logger.notify('Saved %s' % display_path(download_location)) - - -def unpack_http_url(link, location, download_cache, only_download): - temp_dir = tempfile.mkdtemp('-unpack', 'pip-') - target_url = link.url.split('#', 1)[0] - target_file = None - download_hash = None - if download_cache: - target_file = os.path.join(download_cache, - urllib.quote(target_url, '')) - if not os.path.isdir(download_cache): - create_download_cache_folder(download_cache) - if (target_file - and os.path.exists(target_file) - and os.path.exists(target_file+'.content-type')): - fp = open(target_file+'.content-type') - content_type = fp.read().strip() - fp.close() - if link.md5_hash: - download_hash = _get_md5_from_file(target_file, link) - temp_location = target_file - logger.notify('Using download cache from %s' % target_file) - else: - resp = _get_response_from_url(target_url, link) - content_type = resp.info()['content-type'] - filename = link.filename - ext = splitext(filename)[1] - if not ext: - ext = mimetypes.guess_extension(content_type) - if ext: - filename += ext - if not ext and link.url != geturl(resp): - ext = os.path.splitext(geturl(resp))[1] - if ext: - filename += ext - temp_location = os.path.join(temp_dir, filename) - download_hash = 
_download_url(resp, link, temp_location) - if link.md5_hash: - _check_md5(download_hash, link) - if only_download: - _copy_file(temp_location, location, content_type, link) - else: - unpack_file(temp_location, location, content_type, link) - if target_file and target_file != temp_location: - cache_download(target_file, temp_location, content_type) - if target_file is None: - os.unlink(temp_location) - os.rmdir(temp_dir) - - -def _get_response_from_url(target_url, link): - try: - resp = urlopen(target_url) - except urllib2.HTTPError, e: - logger.fatal("HTTP error %s while getting %s" % (e.code, link)) - raise - except IOError, e: - # Typically an FTP error - logger.fatal("Error %s while getting %s" % (e, link)) - raise - return resp - -class Urllib2HeadRequest(urllib2.Request): - def get_method(self): - return "HEAD" diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/exceptions.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/exceptions.py deleted file mode 100755 index 1ad1a616..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/exceptions.py +++ /dev/null @@ -1,17 +0,0 @@ -"""Exceptions used throughout package""" - - -class InstallationError(Exception): - """General exception during installation""" - - -class UninstallationError(Exception): - """General exception during uninstallation""" - - -class DistributionNotFound(InstallationError): - """Raised when a distribution cannot be found to satisfy a requirement""" - - -class BadCommand(Exception): - """Raised when virtualenv or a command is not found""" diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/index.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/index.py deleted file mode 100755 index e42d8c86..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/index.py +++ /dev/null @@ -1,686 +0,0 @@ -"""Routines related to PyPI, indexes""" - -import sys -import os -import re -import mimetypes -import threading -import posixpath -import pkg_resources -import urllib -import urllib2 -import urlparse -import httplib -import random -import socket -import string -from Queue import Queue -from Queue import Empty as QueueEmpty -from pip.log import logger -from pip.util import Inf -from pip.util import normalize_name, splitext -from pip.exceptions import DistributionNotFound -from pip.backwardcompat import WindowsError, product -from pip.download import urlopen, path_to_url2, url_to_path, geturl, Urllib2HeadRequest - -__all__ = ['PackageFinder'] - - -DEFAULT_MIRROR_URL = "last.pypi.python.org" - - -class PackageFinder(object): - """This finds packages. 
- - This is meant to match easy_install's technique for looking for - packages, by reading pages and looking for appropriate links - """ - - def __init__(self, find_links, index_urls, - use_mirrors=False, mirrors=None, main_mirror_url=None): - self.find_links = find_links - self.index_urls = index_urls - self.dependency_links = [] - self.cache = PageCache() - # These are boring links that have already been logged somehow: - self.logged_links = set() - if use_mirrors: - self.mirror_urls = self._get_mirror_urls(mirrors, main_mirror_url) - logger.info('Using PyPI mirrors: %s' % ', '.join(self.mirror_urls)) - else: - self.mirror_urls = [] - - def add_dependency_links(self, links): - ## FIXME: this shouldn't be global list this, it should only - ## apply to requirements of the package that specifies the - ## dependency_links value - ## FIXME: also, we should track comes_from (i.e., use Link) - self.dependency_links.extend(links) - - @staticmethod - def _sort_locations(locations): - """ - Sort locations into "files" (archives) and "urls", and return - a pair of lists (files,urls) - """ - files = [] - urls = [] - - # puts the url for the given file path into the appropriate - # list - def sort_path(path): - url = path_to_url2(path) - if mimetypes.guess_type(url, strict=False)[0] == 'text/html': - urls.append(url) - else: - files.append(url) - - for url in locations: - if url.startswith('file:'): - path = url_to_path(url) - if os.path.isdir(path): - path = os.path.realpath(path) - for item in os.listdir(path): - sort_path(os.path.join(path, item)) - elif os.path.isfile(path): - sort_path(path) - else: - urls.append(url) - return files, urls - - def find_requirement(self, req, upgrade): - url_name = req.url_name - # Only check main index if index URL is given: - main_index_url = None - if self.index_urls: - # Check that we have the url_name correctly spelled: - main_index_url = Link(posixpath.join(self.index_urls[0], url_name)) - # This will also cache the page, so it's okay that we get it again later: - page = self._get_page(main_index_url, req) - if page is None: - url_name = self._find_url_name(Link(self.index_urls[0]), url_name, req) or req.url_name - - # Combine index URLs with mirror URLs here to allow - # adding more index URLs from requirements files - all_index_urls = self.index_urls + self.mirror_urls - - def mkurl_pypi_url(url): - loc = posixpath.join(url, url_name) - # For maximum compatibility with easy_install, ensure the path - # ends in a trailing slash. Although this isn't in the spec - # (and PyPI can handle it without the slash) some other index - # implementations might break if they relied on easy_install's behavior. 
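# A minimal standalone sketch of the join-plus-trailing-slash logic used just
# below (the function name is illustrative, not part of pip):

import posixpath

def project_index_url(index_url, url_name):
    # Join the index root and the project name, easy_install-style,
    # and make sure the result ends in '/'.
    loc = posixpath.join(index_url, url_name)
    if not loc.endswith('/'):
        loc += '/'
    return loc

# project_index_url('http://pypi.python.org/simple', 'INITools')
# -> 'http://pypi.python.org/simple/INITools/'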
- if not loc.endswith('/'): - loc = loc + '/' - return loc - if url_name is not None: - locations = [ - mkurl_pypi_url(url) - for url in all_index_urls] + self.find_links - else: - locations = list(self.find_links) - locations.extend(self.dependency_links) - for version in req.absolute_versions: - if url_name is not None and main_index_url is not None: - locations = [ - posixpath.join(main_index_url.url, version)] + locations - - file_locations, url_locations = self._sort_locations(locations) - - locations = [Link(url) for url in url_locations] - logger.debug('URLs to search for versions for %s:' % req) - for location in locations: - logger.debug('* %s' % location) - found_versions = [] - found_versions.extend( - self._package_versions( - [Link(url, '-f') for url in self.find_links], req.name.lower())) - page_versions = [] - for page in self._get_pages(locations, req): - logger.debug('Analyzing links from page %s' % page.url) - logger.indent += 2 - try: - page_versions.extend(self._package_versions(page.links, req.name.lower())) - finally: - logger.indent -= 2 - dependency_versions = list(self._package_versions( - [Link(url) for url in self.dependency_links], req.name.lower())) - if dependency_versions: - logger.info('dependency_links found: %s' % ', '.join([link.url for parsed, link, version in dependency_versions])) - file_versions = list(self._package_versions( - [Link(url) for url in file_locations], req.name.lower())) - if not found_versions and not page_versions and not dependency_versions and not file_versions: - logger.fatal('Could not find any downloads that satisfy the requirement %s' % req) - raise DistributionNotFound('No distributions at all found for %s' % req) - if req.satisfied_by is not None: - found_versions.append((req.satisfied_by.parsed_version, Inf, req.satisfied_by.version)) - if file_versions: - file_versions.sort(reverse=True) - logger.info('Local files found: %s' % ', '.join([url_to_path(link.url) for parsed, link, version in file_versions])) - found_versions = file_versions + found_versions - all_versions = found_versions + page_versions + dependency_versions - applicable_versions = [] - for (parsed_version, link, version) in all_versions: - if version not in req.req: - logger.info("Ignoring link %s, version %s doesn't match %s" - % (link, version, ','.join([''.join(s) for s in req.req.specs]))) - continue - applicable_versions.append((link, version)) - applicable_versions = sorted(applicable_versions, key=lambda v: pkg_resources.parse_version(v[1]), reverse=True) - existing_applicable = bool([link for link, version in applicable_versions if link is Inf]) - if not upgrade and existing_applicable: - if applicable_versions[0][1] is Inf: - logger.info('Existing installed version (%s) is most up-to-date and satisfies requirement' - % req.satisfied_by.version) - else: - logger.info('Existing installed version (%s) satisfies requirement (most up-to-date version is %s)' - % (req.satisfied_by.version, applicable_versions[0][1])) - return None - if not applicable_versions: - logger.fatal('Could not find a version that satisfies the requirement %s (from versions: %s)' - % (req, ', '.join([version for parsed_version, link, version in found_versions]))) - raise DistributionNotFound('No distributions matching the version for %s' % req) - if applicable_versions[0][0] is Inf: - # We have an existing version, and its the best version - logger.info('Installed version (%s) is most up-to-date (past versions: %s)' - % (req.satisfied_by.version, ', '.join([version for link, version 
in applicable_versions[1:]]) or 'none')) - return None - if len(applicable_versions) > 1: - logger.info('Using version %s (newest of versions: %s)' % - (applicable_versions[0][1], ', '.join([version for link, version in applicable_versions]))) - return applicable_versions[0][0] - - def _find_url_name(self, index_url, url_name, req): - """Finds the true URL name of a package, when the given name isn't quite correct. - This is usually used to implement case-insensitivity.""" - if not index_url.url.endswith('/'): - # Vaguely part of the PyPI API... weird but true. - ## FIXME: bad to modify this? - index_url.url += '/' - page = self._get_page(index_url, req) - if page is None: - logger.fatal('Cannot fetch index base URL %s' % index_url) - return - norm_name = normalize_name(req.url_name) - for link in page.links: - base = posixpath.basename(link.path.rstrip('/')) - if norm_name == normalize_name(base): - logger.notify('Real name of requirement %s is %s' % (url_name, base)) - return base - return None - - def _get_pages(self, locations, req): - """Yields (page, page_url) from the given locations, skipping - locations that have errors, and adding download/homepage links""" - pending_queue = Queue() - for location in locations: - pending_queue.put(location) - done = [] - seen = set() - threads = [] - for i in range(min(10, len(locations))): - t = threading.Thread(target=self._get_queued_page, args=(req, pending_queue, done, seen)) - t.setDaemon(True) - threads.append(t) - t.start() - for t in threads: - t.join() - return done - - _log_lock = threading.Lock() - - def _get_queued_page(self, req, pending_queue, done, seen): - while 1: - try: - location = pending_queue.get(False) - except QueueEmpty: - return - if location in seen: - continue - seen.add(location) - page = self._get_page(location, req) - if page is None: - continue - done.append(page) - for link in page.rel_links(): - pending_queue.put(link) - - _egg_fragment_re = re.compile(r'#egg=([^&]*)') - _egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.-]+)', re.I) - _py_version_re = re.compile(r'-py([123]\.[0-9])$') - - def _sort_links(self, links): - "Returns elements of links in order, non-egg links first, egg links second, while eliminating duplicates" - eggs, no_eggs = [], [] - seen = set() - for link in links: - if link not in seen: - seen.add(link) - if link.egg_fragment: - eggs.append(link) - else: - no_eggs.append(link) - return no_eggs + eggs - - def _package_versions(self, links, search_name): - for link in self._sort_links(links): - for v in self._link_package_versions(link, search_name): - yield v - - def _link_package_versions(self, link, search_name): - """ - Return an iterable of triples (pkg_resources_version_key, - link, python_version) that can be extracted from the given - link. - - Meant to be overridden by subclasses, not called by clients. 
- """ - if link.egg_fragment: - egg_info = link.egg_fragment - else: - egg_info, ext = link.splitext() - if not ext: - if link not in self.logged_links: - logger.debug('Skipping link %s; not a file' % link) - self.logged_links.add(link) - return [] - if egg_info.endswith('.tar'): - # Special double-extension case: - egg_info = egg_info[:-4] - ext = '.tar' + ext - if ext not in ('.tar.gz', '.tar.bz2', '.tar', '.tgz', '.zip'): - if link not in self.logged_links: - logger.debug('Skipping link %s; unknown archive format: %s' % (link, ext)) - self.logged_links.add(link) - return [] - version = self._egg_info_matches(egg_info, search_name, link) - if version is None: - logger.debug('Skipping link %s; wrong project name (not %s)' % (link, search_name)) - return [] - match = self._py_version_re.search(version) - if match: - version = version[:match.start()] - py_version = match.group(1) - if py_version != sys.version[:3]: - logger.debug('Skipping %s because Python version is incorrect' % link) - return [] - logger.debug('Found link %s, version: %s' % (link, version)) - return [(pkg_resources.parse_version(version), - link, - version)] - - def _egg_info_matches(self, egg_info, search_name, link): - match = self._egg_info_re.search(egg_info) - if not match: - logger.debug('Could not parse version from link: %s' % link) - return None - name = match.group(0).lower() - # To match the "safe" name that pkg_resources creates: - name = name.replace('_', '-') - if name.startswith(search_name.lower()): - return match.group(0)[len(search_name):].lstrip('-') - else: - return None - - def _get_page(self, link, req): - return HTMLPage.get_page(link, req, cache=self.cache) - - def _get_mirror_urls(self, mirrors=None, main_mirror_url=None): - """Retrieves a list of URLs from the main mirror DNS entry - unless a list of mirror URLs are passed. - """ - if not mirrors: - mirrors = get_mirrors(main_mirror_url) - # Should this be made "less random"? E.g. netselect like? 
-        random.shuffle(mirrors)
-
-        mirror_urls = set()
-        for mirror_url in mirrors:
-            # Make sure we have a valid URL
-            if not ("http://" or "https://" or "file://") in mirror_url:
-                mirror_url = "http://%s" % mirror_url
-            if not mirror_url.endswith("/simple"):
-                mirror_url = "%s/simple/" % mirror_url
-            mirror_urls.add(mirror_url)
-
-        return list(mirror_urls)
-
-
-class PageCache(object):
-    """Cache of HTML pages"""
-
-    failure_limit = 3
-
-    def __init__(self):
-        self._failures = {}
-        self._pages = {}
-        self._archives = {}
-
-    def too_many_failures(self, url):
-        return self._failures.get(url, 0) >= self.failure_limit
-
-    def get_page(self, url):
-        return self._pages.get(url)
-
-    def is_archive(self, url):
-        return self._archives.get(url, False)
-
-    def set_is_archive(self, url, value=True):
-        self._archives[url] = value
-
-    def add_page_failure(self, url, level):
-        self._failures[url] = self._failures.get(url, 0)+level
-
-    def add_page(self, urls, page):
-        for url in urls:
-            self._pages[url] = page
-
-
-class HTMLPage(object):
-    """Represents one page, along with its URL"""
-
-    ## FIXME: these regexes are horrible hacks:
-    _homepage_re = re.compile(r'<th>\s*home\s*page', re.I)
-    _download_re = re.compile(r'<th>\s*download\s+url', re.I)
-    ## These aren't so aweful:
-    _rel_re = re.compile("""<[^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*>""", re.I)
-    _href_re = re.compile('href=(?:"([^"]*)"|\'([^\']*)\'|([^>\\s\\n]*))', re.I|re.S)
-    _base_re = re.compile(r"""<base\s+href\s*=\s*['"]?([^'">]+)""", re.I)
-
-    def __init__(self, content, url, headers=None):
-        self.content = content
-        self.url = url
-        self.headers = headers
-
-    def __str__(self):
-        return self.url
-
-    @classmethod
-    def get_page(cls, link, req, cache=None, skip_archives=True):
-        url = link.url
-        url = url.split('#', 1)[0]
-        if cache.too_many_failures(url):
-            return None
-
-        # Check for VCS schemes that do not support lookup as web pages.
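# The check below skips URLs such as 'svn+http://...' or 'git://...', which
# cannot be fetched as HTML index pages. A self-contained sketch of the same
# test, assuming the usual backend scheme names:

def looks_like_vcs_url(url, schemes=('svn', 'git', 'hg', 'bzr')):
    # Mirrors the test below: a scheme name followed by '+' (e.g. svn+http)
    # or ':' (e.g. git://...).
    url = url.lower()
    return any(url.startswith(s) and len(url) > len(s) and url[len(s)] in '+:'
               for s in schemes)

# looks_like_vcs_url('svn+http://svn.example.com/repo')  -> True
# looks_like_vcs_url('http://pypi.python.org/simple/')   -> False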
- from pip.vcs import VcsSupport - for scheme in VcsSupport.schemes: - if url.lower().startswith(scheme) and url[len(scheme)] in '+:': - logger.debug('Cannot look at %(scheme)s URL %(link)s' % locals()) - return None - - if cache is not None: - inst = cache.get_page(url) - if inst is not None: - return inst - try: - if skip_archives: - if cache is not None: - if cache.is_archive(url): - return None - filename = link.filename - for bad_ext in ['.tar', '.tar.gz', '.tar.bz2', '.tgz', '.zip']: - if filename.endswith(bad_ext): - content_type = cls._get_content_type(url) - if content_type.lower().startswith('text/html'): - break - else: - logger.debug('Skipping page %s because of Content-Type: %s' % (link, content_type)) - if cache is not None: - cache.set_is_archive(url) - return None - logger.debug('Getting page %s' % url) - - # Tack index.html onto file:// URLs that point to directories - (scheme, netloc, path, params, query, fragment) = urlparse.urlparse(url) - if scheme == 'file' and os.path.isdir(urllib.url2pathname(path)): - # add trailing slash if not present so urljoin doesn't trim final segment - if not url.endswith('/'): - url += '/' - url = urlparse.urljoin(url, 'index.html') - logger.debug(' file: URL is directory, getting %s' % url) - - resp = urlopen(url) - - real_url = geturl(resp) - headers = resp.info() - inst = cls(resp.read(), real_url, headers) - except (urllib2.HTTPError, urllib2.URLError, socket.timeout, socket.error, OSError, WindowsError), e: - desc = str(e) - if isinstance(e, socket.timeout): - log_meth = logger.info - level =1 - desc = 'timed out' - elif isinstance(e, urllib2.URLError): - log_meth = logger.info - if hasattr(e, 'reason') and isinstance(e.reason, socket.timeout): - desc = 'timed out' - level = 1 - else: - level = 2 - elif isinstance(e, urllib2.HTTPError) and e.code == 404: - ## FIXME: notify? - log_meth = logger.info - level = 2 - else: - log_meth = logger.info - level = 1 - log_meth('Could not fetch URL %s: %s' % (link, desc)) - log_meth('Will skip URL %s when looking for download links for %s' % (link.url, req)) - if cache is not None: - cache.add_page_failure(url, level) - return None - if cache is not None: - cache.add_page([url, real_url], inst) - return inst - - @staticmethod - def _get_content_type(url): - """Get the Content-Type of the given url, using a HEAD request""" - scheme, netloc, path, query, fragment = urlparse.urlsplit(url) - if not scheme in ('http', 'https', 'ftp', 'ftps'): - ## FIXME: some warning or something? - ## assertion error? 
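# The HEAD request built just below uses the Urllib2HeadRequest helper from
# pip.download (deleted earlier in this patch); a self-contained equivalent:

import urllib2

class HeadRequest(urllib2.Request):
    # urllib2 infers GET/POST from whether data is present; overriding
    # get_method() is the standard way to force a HEAD request.
    def get_method(self):
        return "HEAD"

# resp = urllib2.urlopen(HeadRequest('http://example.com/'))
# content_type = resp.info().get('content-type', '')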
- return '' - req = Urllib2HeadRequest(url, headers={'Host': netloc}) - resp = urlopen(req) - try: - if hasattr(resp, 'code') and resp.code != 200 and scheme not in ('ftp', 'ftps'): - ## FIXME: doesn't handle redirects - return '' - return resp.info().get('content-type', '') - finally: - resp.close() - - @property - def base_url(self): - if not hasattr(self, "_base_url"): - match = self._base_re.search(self.content) - if match: - self._base_url = match.group(1) - else: - self._base_url = self.url - return self._base_url - - @property - def links(self): - """Yields all links in the page""" - for match in self._href_re.finditer(self.content): - url = match.group(1) or match.group(2) or match.group(3) - url = self.clean_link(urlparse.urljoin(self.base_url, url)) - yield Link(url, self) - - def rel_links(self): - for url in self.explicit_rel_links(): - yield url - for url in self.scraped_rel_links(): - yield url - - def explicit_rel_links(self, rels=('homepage', 'download')): - """Yields all links with the given relations""" - for match in self._rel_re.finditer(self.content): - found_rels = match.group(1).lower().split() - for rel in rels: - if rel in found_rels: - break - else: - continue - match = self._href_re.search(match.group(0)) - if not match: - continue - url = match.group(1) or match.group(2) or match.group(3) - url = self.clean_link(urlparse.urljoin(self.base_url, url)) - yield Link(url, self) - - def scraped_rel_links(self): - for regex in (self._homepage_re, self._download_re): - match = regex.search(self.content) - if not match: - continue - href_match = self._href_re.search(self.content, pos=match.end()) - if not href_match: - continue - url = match.group(1) or match.group(2) or match.group(3) - if not url: - continue - url = self.clean_link(urlparse.urljoin(self.base_url, url)) - yield Link(url, self) - - _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) - - def clean_link(self, url): - """Makes sure a link is fully encoded. 
That is, if a ' ' shows up in
-        the link, it will be rewritten to %20 (while not over-quoting
-        % or other characters)."""
-        return self._clean_re.sub(
-            lambda match: '%%%2x' % ord(match.group(0)), url)
-
-
-class Link(object):
-
-    def __init__(self, url, comes_from=None):
-        self.url = url
-        self.comes_from = comes_from
-
-    def __str__(self):
-        if self.comes_from:
-            return '%s (from %s)' % (self.url, self.comes_from)
-        else:
-            return self.url
-
-    def __repr__(self):
-        return '<Link %s>' % self
-
-    def __eq__(self, other):
-        return self.url == other.url
-
-    def __hash__(self):
-        return hash(self.url)
-
-    @property
-    def filename(self):
-        url = self.url
-        url = url.split('#', 1)[0]
-        url = url.split('?', 1)[0]
-        url = url.rstrip('/')
-        name = posixpath.basename(url)
-        assert name, (
-            'URL %r produced no filename' % url)
-        return name
-
-    @property
-    def scheme(self):
-        return urlparse.urlsplit(self.url)[0]
-
-    @property
-    def path(self):
-        return urlparse.urlsplit(self.url)[2]
-
-    def splitext(self):
-        return splitext(posixpath.basename(self.path.rstrip('/')))
-
-    _egg_fragment_re = re.compile(r'#egg=([^&]*)')
-
-    @property
-    def egg_fragment(self):
-        match = self._egg_fragment_re.search(self.url)
-        if not match:
-            return None
-        return match.group(1)
-
-    _md5_re = re.compile(r'md5=([a-f0-9]+)')
-
-    @property
-    def md5_hash(self):
-        match = self._md5_re.search(self.url)
-        if match:
-            return match.group(1)
-        return None
-
-    @property
-    def show_url(self):
-        return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])
-
-
-def get_requirement_from_url(url):
-    """Get a requirement from the URL, if possible. This looks for #egg
-    in the URL"""
-    link = Link(url)
-    egg_info = link.egg_fragment
-    if not egg_info:
-        egg_info = splitext(link.filename)[0]
-    return package_to_requirement(egg_info)
-
-
-def package_to_requirement(package_name):
-    """Translate a name like Foo-1.2 to Foo==1.3"""
-    match = re.search(r'^(.*?)(-dev|-\d.*)', package_name)
-    if match:
-        name = match.group(1)
-        version = match.group(2)
-    else:
-        name = package_name
-        version = ''
-    if version:
-        return '%s==%s' % (name, version)
-    else:
-        return name
-
-
-def get_mirrors(hostname=None):
-    """Return the list of mirrors from the last record found on the DNS
-    entry::
-
-        >>> from pip.index import get_mirrors
-        >>> get_mirrors()
-        ['a.pypi.python.org', 'b.pypi.python.org', 'c.pypi.python.org',
-         'd.pypi.python.org']
-
-    Originally written for the distutils2 project by Alexis Metaireau.
-    """
-    if hostname is None:
-        hostname = DEFAULT_MIRROR_URL
-
-    # return the last mirror registered on PyPI.
-    try:
-        hostname = socket.gethostbyname_ex(hostname)[0]
-    except socket.gaierror:
-        return []
-    end_letter = hostname.split(".", 1)
-
-    # determine the list from the last one.
-    return ["%s.%s" % (s, end_letter[1]) for s in string_range(end_letter[0])]
-
-
-def string_range(last):
-    """Compute the range of string between "a" and last.
-
-    This works for simple "a to z" lists, but also for "a to zz" lists.
- """ - for k in range(len(last)): - for x in product(string.ascii_lowercase, repeat=k+1): - result = ''.join(x) - yield result - if result == last: - return - diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/locations.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/locations.py deleted file mode 100755 index 4254ef2f..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/locations.py +++ /dev/null @@ -1,45 +0,0 @@ -"""Locations where we look for configs, install stuff, etc""" - -import sys -import os -from distutils import sysconfig - - -def running_under_virtualenv(): - """ - Return True if we're running inside a virtualenv, False otherwise. - - """ - return hasattr(sys, 'real_prefix') - - -if running_under_virtualenv(): - ## FIXME: is build/ a good name? - build_prefix = os.path.join(sys.prefix, 'build') - src_prefix = os.path.join(sys.prefix, 'src') -else: - ## FIXME: this isn't a very good default - build_prefix = os.path.join(os.getcwd(), 'build') - src_prefix = os.path.join(os.getcwd(), 'src') - -# FIXME doesn't account for venv linked to global site-packages - -site_packages = sysconfig.get_python_lib() -user_dir = os.path.expanduser('~') -if sys.platform == 'win32': - bin_py = os.path.join(sys.prefix, 'Scripts') - # buildout uses 'bin' on Windows too? - if not os.path.exists(bin_py): - bin_py = os.path.join(sys.prefix, 'bin') - user_dir = os.environ.get('APPDATA', user_dir) # Use %APPDATA% for roaming - default_storage_dir = os.path.join(user_dir, 'pip') - default_config_file = os.path.join(default_storage_dir, 'pip.ini') - default_log_file = os.path.join(default_storage_dir, 'pip.log') -else: - bin_py = os.path.join(sys.prefix, 'bin') - default_storage_dir = os.path.join(user_dir, '.pip') - default_config_file = os.path.join(default_storage_dir, 'pip.conf') - default_log_file = os.path.join(default_storage_dir, 'pip.log') - # Forcing to use /usr/local/bin for standard Mac OS X framework installs - if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/': - bin_py = '/usr/local/bin' diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/log.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/log.py deleted file mode 100755 index 0218ab1a..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/log.py +++ /dev/null @@ -1,181 +0,0 @@ -"""Logging -""" - -import sys -import logging - - -class Logger(object): - - """ - Logging object for use in command-line script. Allows ranges of - levels, to avoid some redundancy of displayed information. 
- """ - - VERBOSE_DEBUG = logging.DEBUG-1 - DEBUG = logging.DEBUG - INFO = logging.INFO - NOTIFY = (logging.INFO+logging.WARN)/2 - WARN = WARNING = logging.WARN - ERROR = logging.ERROR - FATAL = logging.FATAL - - LEVELS = [VERBOSE_DEBUG, DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL] - - def __init__(self): - self.consumers = [] - self.indent = 0 - self.explicit_levels = False - self.in_progress = None - self.in_progress_hanging = False - - def debug(self, msg, *args, **kw): - self.log(self.DEBUG, msg, *args, **kw) - - def info(self, msg, *args, **kw): - self.log(self.INFO, msg, *args, **kw) - - def notify(self, msg, *args, **kw): - self.log(self.NOTIFY, msg, *args, **kw) - - def warn(self, msg, *args, **kw): - self.log(self.WARN, msg, *args, **kw) - - def error(self, msg, *args, **kw): - self.log(self.WARN, msg, *args, **kw) - - def fatal(self, msg, *args, **kw): - self.log(self.FATAL, msg, *args, **kw) - - def log(self, level, msg, *args, **kw): - if args: - if kw: - raise TypeError( - "You may give positional or keyword arguments, not both") - args = args or kw - rendered = None - for consumer_level, consumer in self.consumers: - if self.level_matches(level, consumer_level): - if (self.in_progress_hanging - and consumer in (sys.stdout, sys.stderr)): - self.in_progress_hanging = False - sys.stdout.write('\n') - sys.stdout.flush() - if rendered is None: - if args: - rendered = msg % args - else: - rendered = msg - rendered = ' '*self.indent + rendered - if self.explicit_levels: - ## FIXME: should this be a name, not a level number? - rendered = '%02i %s' % (level, rendered) - if hasattr(consumer, 'write'): - consumer.write(rendered+'\n') - else: - consumer(rendered) - - def start_progress(self, msg): - assert not self.in_progress, ( - "Tried to start_progress(%r) while in_progress %r" - % (msg, self.in_progress)) - if self.level_matches(self.NOTIFY, self._stdout_level()): - sys.stdout.write(' '*self.indent + msg) - sys.stdout.flush() - self.in_progress_hanging = True - else: - self.in_progress_hanging = False - self.in_progress = msg - self.last_message = None - - def end_progress(self, msg='done.'): - assert self.in_progress, ( - "Tried to end_progress without start_progress") - if self.stdout_level_matches(self.NOTIFY): - if not self.in_progress_hanging: - # Some message has been printed out since start_progress - sys.stdout.write('...' 
+ self.in_progress + msg + '\n')
-                sys.stdout.flush()
-            else:
-                # These erase any messages shown with show_progress (besides .'s)
-                logger.show_progress('')
-                logger.show_progress('')
-                sys.stdout.write(msg + '\n')
-                sys.stdout.flush()
-        self.in_progress = None
-        self.in_progress_hanging = False
-
-    def show_progress(self, message=None):
-        """If we are in a progress scope, and no log messages have been
-        shown, write out another '.'"""
-        if self.in_progress_hanging:
-            if message is None:
-                sys.stdout.write('.')
-                sys.stdout.flush()
-            else:
-                if self.last_message:
-                    padding = ' ' * max(0, len(self.last_message)-len(message))
-                else:
-                    padding = ''
-                sys.stdout.write('\r%s%s%s%s' % (' '*self.indent, self.in_progress, message, padding))
-                sys.stdout.flush()
-                self.last_message = message
-
-    def stdout_level_matches(self, level):
-        """Returns true if a message at this level will go to stdout"""
-        return self.level_matches(level, self._stdout_level())
-
-    def _stdout_level(self):
-        """Returns the level that stdout runs at"""
-        for level, consumer in self.consumers:
-            if consumer is sys.stdout:
-                return level
-        return self.FATAL
-
-    def level_matches(self, level, consumer_level):
-        """
-        >>> l = Logger()
-        >>> l.level_matches(3, 4)
-        False
-        >>> l.level_matches(3, 2)
-        True
-        >>> l.level_matches(slice(None, 3), 3)
-        False
-        >>> l.level_matches(slice(None, 3), 2)
-        True
-        >>> l.level_matches(slice(1, 3), 1)
-        True
-        >>> l.level_matches(slice(2, 3), 1)
-        False
-        """
-        if isinstance(level, slice):
-            start, stop = level.start, level.stop
-            if start is not None and start > consumer_level:
-                return False
-            if stop is not None and stop <= consumer_level:
-                return False
-            return True
-        else:
-            return level >= consumer_level
-
-    @classmethod
-    def level_for_integer(cls, level):
-        levels = cls.LEVELS
-        if level < 0:
-            return levels[0]
-        if level >= len(levels):
-            return levels[-1]
-        return levels[level]
-
-    def move_stdout_to_stderr(self):
-        to_remove = []
-        to_add = []
-        for consumer_level, consumer in self.consumers:
-            if consumer == sys.stdout:
-                to_remove.append((consumer_level, consumer))
-                to_add.append((consumer_level, sys.stderr))
-        for item in to_remove:
-            self.consumers.remove(item)
-        self.consumers.extend(to_add)
-
-logger = Logger()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/req.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/req.py
deleted file mode 100755
index 444e7252..00000000
--- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/req.py
+++ /dev/null
@@ -1,1432 +0,0 @@
-import sys
-import os
-import shutil
-import re
-import zipfile
-import pkg_resources
-import tempfile
-import urlparse
-import urllib2
-import urllib
-import ConfigParser
-from distutils.sysconfig import get_python_version
-from email.FeedParser import FeedParser
-from pip.locations import bin_py, running_under_virtualenv
-from pip.exceptions import InstallationError, UninstallationError
-from pip.vcs import vcs
-from pip.log import logger
-from pip.util import display_path, rmtree
-from pip.util import ask, backup_dir
-from pip.util import is_installable_dir, is_local, dist_is_local
-from pip.util import renames, normalize_path, egg_link_path
-from pip.util import make_path_relative
-from pip import call_subprocess
-from pip.backwardcompat import any, copytree
-from pip.index import Link
-from pip.locations import build_prefix
-from pip.download import (get_file_content, is_url, url_to_path,
-                          path_to_url, is_archive_file,
-                          unpack_vcs_link, is_vcs_url, is_file_url,
-                          unpack_file_url,
unpack_http_url) - - -PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt' - - -class InstallRequirement(object): - - def __init__(self, req, comes_from, source_dir=None, editable=False, - url=None, update=True): - if isinstance(req, basestring): - req = pkg_resources.Requirement.parse(req) - self.req = req - self.comes_from = comes_from - self.source_dir = source_dir - self.editable = editable - self.url = url - self._egg_info_path = None - # This holds the pkg_resources.Distribution object if this requirement - # is already available: - self.satisfied_by = None - # This hold the pkg_resources.Distribution object if this requirement - # conflicts with another installed distribution: - self.conflicts_with = None - self._temp_build_dir = None - self._is_bundle = None - # True if the editable should be updated: - self.update = update - # Set to True after successful installation - self.install_succeeded = None - # UninstallPathSet of uninstalled distribution (for possible rollback) - self.uninstalled = None - - @classmethod - def from_editable(cls, editable_req, comes_from=None, default_vcs=None): - name, url = parse_editable(editable_req, default_vcs) - if url.startswith('file:'): - source_dir = url_to_path(url) - else: - source_dir = None - return cls(name, comes_from, source_dir=source_dir, editable=True, url=url) - - @classmethod - def from_line(cls, name, comes_from=None): - """Creates an InstallRequirement from a name, which might be a - requirement, directory containing 'setup.py', filename, or URL. - """ - url = None - name = name.strip() - req = name - path = os.path.normpath(os.path.abspath(name)) - - if is_url(name): - url = name - ## FIXME: I think getting the requirement here is a bad idea: - #req = get_requirement_from_url(url) - req = None - elif os.path.isdir(path) and (os.path.sep in name or name.startswith('.')): - if not is_installable_dir(path): - raise InstallationError("Directory %r is not installable. File 'setup.py' not found." - % name) - url = path_to_url(name) - #req = get_requirement_from_url(url) - req = None - elif is_archive_file(path): - if not os.path.isfile(path): - logger.warn('Requirement %r looks like a filename, but the file does not exist' - % name) - url = path_to_url(name) - #req = get_requirement_from_url(url) - req = None - return cls(req, comes_from, url=url) - - def __str__(self): - if self.req: - s = str(self.req) - if self.url: - s += ' from %s' % self.url - else: - s = self.url - if self.satisfied_by is not None: - s += ' in %s' % display_path(self.satisfied_by.location) - if self.comes_from: - if isinstance(self.comes_from, basestring): - comes_from = self.comes_from - else: - comes_from = self.comes_from.from_path() - if comes_from: - s += ' (from %s)' % comes_from - return s - - def from_path(self): - if self.req is None: - return None - s = str(self.req) - if self.comes_from: - if isinstance(self.comes_from, basestring): - comes_from = self.comes_from - else: - comes_from = self.comes_from.from_path() - if comes_from: - s += '->' + comes_from - return s - - def build_location(self, build_dir, unpack=True): - if self._temp_build_dir is not None: - return self._temp_build_dir - if self.req is None: - self._temp_build_dir = tempfile.mkdtemp('-build', 'pip-') - self._ideal_build_dir = build_dir - return self._temp_build_dir - if self.editable: - name = self.name.lower() - else: - name = self.name - # FIXME: Is there a better place to create the build_dir? 
(hg and bzr need this) - if not os.path.exists(build_dir): - _make_build_dir(build_dir) - return os.path.join(build_dir, name) - - def correct_build_location(self): - """If the build location was a temporary directory, this will move it - to a new more permanent location""" - if self.source_dir is not None: - return - assert self.req is not None - assert self._temp_build_dir - old_location = self._temp_build_dir - new_build_dir = self._ideal_build_dir - del self._ideal_build_dir - if self.editable: - name = self.name.lower() - else: - name = self.name - new_location = os.path.join(new_build_dir, name) - if not os.path.exists(new_build_dir): - logger.debug('Creating directory %s' % new_build_dir) - _make_build_dir(new_build_dir) - if os.path.exists(new_location): - raise InstallationError( - 'A package already exists in %s; please remove it to continue' - % display_path(new_location)) - logger.debug('Moving package %s from %s to new location %s' - % (self, display_path(old_location), display_path(new_location))) - shutil.move(old_location, new_location) - self._temp_build_dir = new_location - self.source_dir = new_location - self._egg_info_path = None - - @property - def name(self): - if self.req is None: - return None - return self.req.project_name - - @property - def url_name(self): - if self.req is None: - return None - return urllib.quote(self.req.unsafe_name) - - @property - def setup_py(self): - return os.path.join(self.source_dir, 'setup.py') - - def run_egg_info(self, force_root_egg_info=False): - assert self.source_dir - if self.name: - logger.notify('Running setup.py egg_info for package %s' % self.name) - else: - logger.notify('Running setup.py egg_info for package from %s' % self.url) - logger.indent += 2 - try: - script = self._run_setup_py - script = script.replace('__SETUP_PY__', repr(self.setup_py)) - script = script.replace('__PKG_NAME__', repr(self.name)) - # We can't put the .egg-info files at the root, because then the source code will be mistaken - # for an installed egg, causing problems - if self.editable or force_root_egg_info: - egg_base_option = [] - else: - egg_info_dir = os.path.join(self.source_dir, 'pip-egg-info') - if not os.path.exists(egg_info_dir): - os.makedirs(egg_info_dir) - egg_base_option = ['--egg-base', 'pip-egg-info'] - call_subprocess( - [sys.executable, '-c', script, 'egg_info'] + egg_base_option, - cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False, - command_level=logger.VERBOSE_DEBUG, - command_desc='python setup.py egg_info') - finally: - logger.indent -= 2 - if not self.req: - self.req = pkg_resources.Requirement.parse(self.pkg_info()['Name']) - self.correct_build_location() - - ## FIXME: This is a lame hack, entirely for PasteScript which has - ## a self-provided entry point that causes this awkwardness - _run_setup_py = """ -__file__ = __SETUP_PY__ -from setuptools.command import egg_info -def replacement_run(self): - self.mkpath(self.egg_info) - installer = self.distribution.fetch_build_egg - for ep in egg_info.iter_entry_points('egg_info.writers'): - # require=False is the change we're making: - writer = ep.load(require=False) - if writer: - writer(self, ep.name, egg_info.os.path.join(self.egg_info,ep.name)) - self.find_sources() -egg_info.egg_info.run = replacement_run -execfile(__file__) -""" - - def egg_info_data(self, filename): - if self.satisfied_by is not None: - if not self.satisfied_by.has_metadata(filename): - return None - return self.satisfied_by.get_metadata(filename) - assert self.source_dir - 
filename = self.egg_info_path(filename) - if not os.path.exists(filename): - return None - fp = open(filename, 'r') - data = fp.read() - fp.close() - return data - - def egg_info_path(self, filename): - if self._egg_info_path is None: - if self.editable: - base = self.source_dir - else: - base = os.path.join(self.source_dir, 'pip-egg-info') - filenames = os.listdir(base) - if self.editable: - filenames = [] - for root, dirs, files in os.walk(base): - for dir in vcs.dirnames: - if dir in dirs: - dirs.remove(dir) - for dir in dirs: - # Don't search in anything that looks like a virtualenv environment - if (os.path.exists(os.path.join(root, dir, 'bin', 'python')) - or os.path.exists(os.path.join(root, dir, 'Scripts', 'Python.exe'))): - dirs.remove(dir) - # Also don't search through tests - if dir == 'test' or dir == 'tests': - dirs.remove(dir) - filenames.extend([os.path.join(root, dir) - for dir in dirs]) - filenames = [f for f in filenames if f.endswith('.egg-info')] - - if not filenames: - raise InstallationError('No files/directores in %s (from %s)' % (base, filename)) - assert filenames, "No files/directories in %s (from %s)" % (base, filename) - - # if we have more than one match, we pick the toplevel one. This can - # easily be the case if there is a dist folder which contains an - # extracted tarball for testing purposes. - if len(filenames) > 1: - filenames.sort(key=lambda x: x.count(os.path.sep) + - (os.path.altsep and - x.count(os.path.altsep) or 0)) - self._egg_info_path = os.path.join(base, filenames[0]) - return os.path.join(self._egg_info_path, filename) - - def egg_info_lines(self, filename): - data = self.egg_info_data(filename) - if not data: - return [] - result = [] - for line in data.splitlines(): - line = line.strip() - if not line or line.startswith('#'): - continue - result.append(line) - return result - - def pkg_info(self): - p = FeedParser() - data = self.egg_info_data('PKG-INFO') - if not data: - logger.warn('No PKG-INFO file found in %s' % display_path(self.egg_info_path('PKG-INFO'))) - p.feed(data or '') - return p.close() - - @property - def dependency_links(self): - return self.egg_info_lines('dependency_links.txt') - - _requirements_section_re = re.compile(r'\[(.*?)\]') - - def requirements(self, extras=()): - in_extra = None - for line in self.egg_info_lines('requires.txt'): - match = self._requirements_section_re.match(line) - if match: - in_extra = match.group(1) - continue - if in_extra and in_extra not in extras: - # Skip requirement for an extra we aren't requiring - continue - yield line - - @property - def absolute_versions(self): - for qualifier, version in self.req.specs: - if qualifier == '==': - yield version - - @property - def installed_version(self): - return self.pkg_info()['version'] - - def assert_source_matches_version(self): - assert self.source_dir - if self.comes_from is None: - # We don't check the versions of things explicitly installed. 
- # This makes, e.g., "pip Package==dev" possible - return - version = self.installed_version - if version not in self.req: - logger.fatal( - 'Source in %s has the version %s, which does not match the requirement %s' - % (display_path(self.source_dir), version, self)) - raise InstallationError( - 'Source in %s has version %s that conflicts with %s' - % (display_path(self.source_dir), version, self)) - else: - logger.debug('Source in %s has version %s, which satisfies requirement %s' - % (display_path(self.source_dir), version, self)) - - def update_editable(self, obtain=True): - if not self.url: - logger.info("Cannot update repository at %s; repository location is unknown" % self.source_dir) - return - assert self.editable - assert self.source_dir - if self.url.startswith('file:'): - # Static paths don't get updated - return - assert '+' in self.url, "bad url: %r" % self.url - if not self.update: - return - vc_type, url = self.url.split('+', 1) - backend = vcs.get_backend(vc_type) - if backend: - vcs_backend = backend(self.url) - if obtain: - vcs_backend.obtain(self.source_dir) - else: - vcs_backend.export(self.source_dir) - else: - assert 0, ( - 'Unexpected version control type (in %s): %s' - % (self.url, vc_type)) - - def uninstall(self, auto_confirm=False): - """ - Uninstall the distribution currently satisfying this requirement. - - Prompts before removing or modifying files unless - ``auto_confirm`` is True. - - Refuses to delete or modify files outside of ``sys.prefix`` - - thus uninstallation within a virtual environment can only - modify that virtual environment, even if the virtualenv is - linked to global site-packages. - - """ - if not self.check_if_exists(): - raise UninstallationError("Cannot uninstall requirement %s, not installed" % (self.name,)) - dist = self.satisfied_by or self.conflicts_with - - paths_to_remove = UninstallPathSet(dist) - - pip_egg_info_path = os.path.join(dist.location, - dist.egg_name()) + '.egg-info' - easy_install_egg = dist.egg_name() + '.egg' - develop_egg_link = egg_link_path(dist) - if os.path.exists(pip_egg_info_path): - # package installed by pip - paths_to_remove.add(pip_egg_info_path) - if dist.has_metadata('installed-files.txt'): - for installed_file in dist.get_metadata('installed-files.txt').splitlines(): - path = os.path.normpath(os.path.join(pip_egg_info_path, installed_file)) - paths_to_remove.add(path) - if dist.has_metadata('top_level.txt'): - if dist.has_metadata('namespace_packages.txt'): - namespaces = dist.get_metadata('namespace_packages.txt') - else: - namespaces = [] - for top_level_pkg in [p for p - in dist.get_metadata('top_level.txt').splitlines() - if p and p not in namespaces]: - path = os.path.join(dist.location, top_level_pkg) - paths_to_remove.add(path) - paths_to_remove.add(path + '.py') - paths_to_remove.add(path + '.pyc') - - elif dist.location.endswith(easy_install_egg): - # package installed by easy_install - paths_to_remove.add(dist.location) - easy_install_pth = os.path.join(os.path.dirname(dist.location), - 'easy-install.pth') - paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg) - - elif os.path.isfile(develop_egg_link): - # develop egg - fh = open(develop_egg_link, 'r') - link_pointer = os.path.normcase(fh.readline().strip()) - fh.close() - assert (link_pointer == dist.location), 'Egg-link %s does not match installed location of %s (at %s)' % (link_pointer, self.name, dist.location) - paths_to_remove.add(develop_egg_link) - easy_install_pth = os.path.join(os.path.dirname(develop_egg_link), - 
'easy-install.pth') - paths_to_remove.add_pth(easy_install_pth, dist.location) - - # find distutils scripts= scripts - if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'): - for script in dist.metadata_listdir('scripts'): - paths_to_remove.add(os.path.join(bin_py, script)) - if sys.platform == 'win32': - paths_to_remove.add(os.path.join(bin_py, script) + '.bat') - - # find console_scripts - if dist.has_metadata('entry_points.txt'): - config = ConfigParser.SafeConfigParser() - config.readfp(FakeFile(dist.get_metadata_lines('entry_points.txt'))) - if config.has_section('console_scripts'): - for name, value in config.items('console_scripts'): - paths_to_remove.add(os.path.join(bin_py, name)) - if sys.platform == 'win32': - paths_to_remove.add(os.path.join(bin_py, name) + '.exe') - paths_to_remove.add(os.path.join(bin_py, name) + '.exe.manifest') - paths_to_remove.add(os.path.join(bin_py, name) + '-script.py') - - paths_to_remove.remove(auto_confirm) - self.uninstalled = paths_to_remove - - def rollback_uninstall(self): - if self.uninstalled: - self.uninstalled.rollback() - else: - logger.error("Can't rollback %s, nothing uninstalled." - % (self.project_name,)) - - def commit_uninstall(self): - if self.uninstalled: - self.uninstalled.commit() - else: - logger.error("Can't commit %s, nothing uninstalled." - % (self.project_name,)) - - def archive(self, build_dir): - assert self.source_dir - create_archive = True - archive_name = '%s-%s.zip' % (self.name, self.installed_version) - archive_path = os.path.join(build_dir, archive_name) - if os.path.exists(archive_path): - response = ask('The file %s exists. (i)gnore, (w)ipe, (b)ackup ' - % display_path(archive_path), ('i', 'w', 'b')) - if response == 'i': - create_archive = False - elif response == 'w': - logger.warn('Deleting %s' % display_path(archive_path)) - os.remove(archive_path) - elif response == 'b': - dest_file = backup_dir(archive_path) - logger.warn('Backing up %s to %s' - % (display_path(archive_path), display_path(dest_file))) - shutil.move(archive_path, dest_file) - if create_archive: - zip = zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED) - dir = os.path.normcase(os.path.abspath(self.source_dir)) - for dirpath, dirnames, filenames in os.walk(dir): - if 'pip-egg-info' in dirnames: - dirnames.remove('pip-egg-info') - for dirname in dirnames: - dirname = os.path.join(dirpath, dirname) - name = self._clean_zip_name(dirname, dir) - zipdir = zipfile.ZipInfo(self.name + '/' + name + '/') - zipdir.external_attr = 0755 << 16L - zip.writestr(zipdir, '') - for filename in filenames: - if filename == PIP_DELETE_MARKER_FILENAME: - continue - filename = os.path.join(dirpath, filename) - name = self._clean_zip_name(filename, dir) - zip.write(filename, self.name + '/' + name) - zip.close() - logger.indent -= 2 - logger.notify('Saved %s' % display_path(archive_path)) - - def _clean_zip_name(self, name, prefix): - assert name.startswith(prefix+os.path.sep), ( - "name %r doesn't start with prefix %r" % (name, prefix)) - name = name[len(prefix)+1:] - name = name.replace(os.path.sep, '/') - return name - - def install(self, install_options, global_options=()): - if self.editable: - self.install_editable(install_options, global_options) - return - temp_location = tempfile.mkdtemp('-record', 'pip-') - record_filename = os.path.join(temp_location, 'install-record.txt') - try: - - install_args = [ - sys.executable, '-c', - "import setuptools;__file__=%r;"\ - "execfile(__file__)" % self.setup_py] +\ - list(global_options) + [ - 
'install', - '--single-version-externally-managed', - '--record', record_filename] - - if running_under_virtualenv(): - ## FIXME: I'm not sure if this is a reasonable location; probably not - ## but we can't put it in the default location, as that is a virtualenv symlink that isn't writable - install_args += ['--install-headers', - os.path.join(sys.prefix, 'include', 'site', - 'python' + get_python_version())] - logger.notify('Running setup.py install for %s' % self.name) - logger.indent += 2 - try: - call_subprocess(install_args + install_options, - cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False) - finally: - logger.indent -= 2 - if not os.path.exists(record_filename): - logger.notify('Record file %s not found' % record_filename) - return - self.install_succeeded = True - f = open(record_filename) - for line in f: - line = line.strip() - if line.endswith('.egg-info'): - egg_info_dir = line - break - else: - logger.warn('Could not find .egg-info directory in install record for %s' % self) - ## FIXME: put the record somewhere - ## FIXME: should this be an error? - return - f.close() - new_lines = [] - f = open(record_filename) - for line in f: - filename = line.strip() - if os.path.isdir(filename): - filename += os.path.sep - new_lines.append(make_path_relative(filename, egg_info_dir)) - f.close() - f = open(os.path.join(egg_info_dir, 'installed-files.txt'), 'w') - f.write('\n'.join(new_lines)+'\n') - f.close() - finally: - if os.path.exists(record_filename): - os.remove(record_filename) - os.rmdir(temp_location) - - def remove_temporary_source(self): - """Remove the source files from this requirement, if they are marked - for deletion""" - if self.is_bundle or os.path.exists(self.delete_marker_filename): - logger.info('Removing source in %s' % self.source_dir) - if self.source_dir: - rmtree(self.source_dir) - self.source_dir = None - if self._temp_build_dir and os.path.exists(self._temp_build_dir): - rmtree(self._temp_build_dir) - self._temp_build_dir = None - - def install_editable(self, install_options, global_options=()): - logger.notify('Running setup.py develop for %s' % self.name) - logger.indent += 2 - try: - ## FIXME: should we do --install-headers here too? 
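# The develop invocation below follows the same shape as the install call
# above: setup.py is executed via `python -c` so that __file__ can be pinned
# first, then the distutils/setuptools arguments are appended. Roughly (the
# path is illustrative):

import sys

setup_py = '/path/to/build/Foo/setup.py'  # illustrative
cmd = [sys.executable, '-c',
       "import setuptools; __file__=%r; execfile(__file__)" % setup_py,
       'develop', '--no-deps']
# cmd is then handed to call_subprocess(..., cwd=self.source_dir)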
- call_subprocess( - [sys.executable, '-c', - "import setuptools; __file__=%r; execfile(%r)" % (self.setup_py, self.setup_py)] - + list(global_options) + ['develop', '--no-deps'] + list(install_options), - - cwd=self.source_dir, filter_stdout=self._filter_install, - show_stdout=False) - finally: - logger.indent -= 2 - self.install_succeeded = True - - def _filter_install(self, line): - level = logger.NOTIFY - for regex in [r'^running .*', r'^writing .*', '^creating .*', '^[Cc]opying .*', - r'^reading .*', r"^removing .*\.egg-info' \(and everything under it\)$", - r'^byte-compiling ', - # Not sure what this warning is, but it seems harmless: - r"^warning: manifest_maker: standard file '-c' not found$"]: - if re.search(regex, line.strip()): - level = logger.INFO - break - return (level, line) - - def check_if_exists(self): - """Find an installed distribution that satisfies or conflicts - with this requirement, and set self.satisfied_by or - self.conflicts_with appropriately.""" - if self.req is None: - return False - try: - self.satisfied_by = pkg_resources.get_distribution(self.req) - except pkg_resources.DistributionNotFound: - return False - except pkg_resources.VersionConflict: - self.conflicts_with = pkg_resources.get_distribution(self.req.project_name) - return True - - @property - def is_bundle(self): - if self._is_bundle is not None: - return self._is_bundle - base = self._temp_build_dir - if not base: - ## FIXME: this doesn't seem right: - return False - self._is_bundle = (os.path.exists(os.path.join(base, 'pip-manifest.txt')) - or os.path.exists(os.path.join(base, 'pyinstall-manifest.txt'))) - return self._is_bundle - - def bundle_requirements(self): - for dest_dir in self._bundle_editable_dirs: - package = os.path.basename(dest_dir) - ## FIXME: svnism: - for vcs_backend in vcs.backends: - url = rev = None - vcs_bundle_file = os.path.join( - dest_dir, vcs_backend.bundle_file) - if os.path.exists(vcs_bundle_file): - vc_type = vcs_backend.name - fp = open(vcs_bundle_file) - content = fp.read() - fp.close() - url, rev = vcs_backend().parse_vcs_bundle_file(content) - break - if url: - url = '%s+%s@%s' % (vc_type, url, rev) - else: - url = None - yield InstallRequirement( - package, self, editable=True, url=url, - update=False, source_dir=dest_dir) - for dest_dir in self._bundle_build_dirs: - package = os.path.basename(dest_dir) - yield InstallRequirement( - package, self, - source_dir=dest_dir) - - def move_bundle_files(self, dest_build_dir, dest_src_dir): - base = self._temp_build_dir - assert base - src_dir = os.path.join(base, 'src') - build_dir = os.path.join(base, 'build') - bundle_build_dirs = [] - bundle_editable_dirs = [] - for source_dir, dest_dir, dir_collection in [ - (src_dir, dest_src_dir, bundle_editable_dirs), - (build_dir, dest_build_dir, bundle_build_dirs)]: - if os.path.exists(source_dir): - for dirname in os.listdir(source_dir): - dest = os.path.join(dest_dir, dirname) - dir_collection.append(dest) - if os.path.exists(dest): - logger.warn('The directory %s (containing package %s) already exists; cannot move source from bundle %s' - % (dest, dirname, self)) - continue - if not os.path.exists(dest_dir): - logger.info('Creating directory %s' % dest_dir) - os.makedirs(dest_dir) - shutil.move(os.path.join(source_dir, dirname), dest) - if not os.listdir(source_dir): - os.rmdir(source_dir) - self._temp_build_dir = None - self._bundle_build_dirs = bundle_build_dirs - self._bundle_editable_dirs = bundle_editable_dirs - - @property - def delete_marker_filename(self): - assert 
self.source_dir - return os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME) - - -DELETE_MARKER_MESSAGE = '''\ -This file is placed here by pip to indicate the source was put -here by pip. - -Once this package is successfully installed this source code will be -deleted (unless you remove this file). -''' - - -class RequirementSet(object): - - def __init__(self, build_dir, src_dir, download_dir, download_cache=None, - upgrade=False, ignore_installed=False, - ignore_dependencies=False): - self.build_dir = build_dir - self.src_dir = src_dir - self.download_dir = download_dir - self.download_cache = download_cache - self.upgrade = upgrade - self.ignore_installed = ignore_installed - self.requirements = {} - # Mapping of alias: real_name - self.requirement_aliases = {} - self.unnamed_requirements = [] - self.ignore_dependencies = ignore_dependencies - self.successfully_downloaded = [] - self.successfully_installed = [] - self.reqs_to_cleanup = [] - - def __str__(self): - reqs = [req for req in self.requirements.values() - if not req.comes_from] - reqs.sort(key=lambda req: req.name.lower()) - return ' '.join([str(req.req) for req in reqs]) - - def add_requirement(self, install_req): - name = install_req.name - if not name: - self.unnamed_requirements.append(install_req) - else: - if self.has_requirement(name): - raise InstallationError( - 'Double requirement given: %s (aready in %s, name=%r)' - % (install_req, self.get_requirement(name), name)) - self.requirements[name] = install_req - ## FIXME: what about other normalizations? E.g., _ vs. -? - if name.lower() != name: - self.requirement_aliases[name.lower()] = name - - def has_requirement(self, project_name): - for name in project_name, project_name.lower(): - if name in self.requirements or name in self.requirement_aliases: - return True - return False - - @property - def has_requirements(self): - return self.requirements.values() or self.unnamed_requirements - - @property - def has_editables(self): - if any(req.editable for req in self.requirements.values()): - return True - if any(req.editable for req in self.unnamed_requirements): - return True - return False - - @property - def is_download(self): - if self.download_dir: - self.download_dir = os.path.expanduser(self.download_dir) - if os.path.exists(self.download_dir): - return True - else: - logger.fatal('Could not find download directory') - raise InstallationError( - "Could not find or access download directory '%s'" - % display_path(self.download_dir)) - return False - - def get_requirement(self, project_name): - for name in project_name, project_name.lower(): - if name in self.requirements: - return self.requirements[name] - if name in self.requirement_aliases: - return self.requirements[self.requirement_aliases[name]] - raise KeyError("No project with the name %r" % project_name) - - def uninstall(self, auto_confirm=False): - for req in self.requirements.values(): - req.uninstall(auto_confirm=auto_confirm) - req.commit_uninstall() - - def locate_files(self): - ## FIXME: duplicates code from install_files; relevant code should - ## probably be factored out into a separate method - unnamed = list(self.unnamed_requirements) - reqs = self.requirements.values() - while reqs or unnamed: - if unnamed: - req_to_install = unnamed.pop(0) - else: - req_to_install = reqs.pop(0) - install_needed = True - if not self.ignore_installed and not req_to_install.editable: - req_to_install.check_if_exists() - if req_to_install.satisfied_by: - if self.upgrade: - req_to_install.conflicts_with = 
req_to_install.satisfied_by - req_to_install.satisfied_by = None - else: - install_needed = False - if req_to_install.satisfied_by: - logger.notify('Requirement already satisfied ' - '(use --upgrade to upgrade): %s' - % req_to_install) - - if req_to_install.editable: - if req_to_install.source_dir is None: - req_to_install.source_dir = req_to_install.build_location(self.src_dir) - elif install_needed: - req_to_install.source_dir = req_to_install.build_location(self.build_dir, not self.is_download) - - if req_to_install.source_dir is not None and not os.path.isdir(req_to_install.source_dir): - raise InstallationError('Could not install requirement %s ' - 'because source folder %s does not exist ' - '(perhaps --no-download was used without first running ' - 'an equivalent install with --no-install?)' - % (req_to_install, req_to_install.source_dir)) - - def prepare_files(self, finder, force_root_egg_info=False, bundle=False): - """Prepare process. Create temp directories, download and/or unpack files.""" - unnamed = list(self.unnamed_requirements) - reqs = self.requirements.values() - while reqs or unnamed: - if unnamed: - req_to_install = unnamed.pop(0) - else: - req_to_install = reqs.pop(0) - install = True - if not self.ignore_installed and not req_to_install.editable: - req_to_install.check_if_exists() - if req_to_install.satisfied_by: - if self.upgrade: - req_to_install.conflicts_with = req_to_install.satisfied_by - req_to_install.satisfied_by = None - else: - install = False - if req_to_install.satisfied_by: - logger.notify('Requirement already satisfied ' - '(use --upgrade to upgrade): %s' - % req_to_install) - if req_to_install.editable: - logger.notify('Obtaining %s' % req_to_install) - elif install: - if req_to_install.url and req_to_install.url.lower().startswith('file:'): - logger.notify('Unpacking %s' % display_path(url_to_path(req_to_install.url))) - else: - logger.notify('Downloading/unpacking %s' % req_to_install) - logger.indent += 2 - try: - is_bundle = False - if req_to_install.editable: - if req_to_install.source_dir is None: - location = req_to_install.build_location(self.src_dir) - req_to_install.source_dir = location - else: - location = req_to_install.source_dir - if not os.path.exists(self.build_dir): - _make_build_dir(self.build_dir) - req_to_install.update_editable(not self.is_download) - if self.is_download: - req_to_install.run_egg_info() - req_to_install.archive(self.download_dir) - else: - req_to_install.run_egg_info() - elif install: - ##@@ if filesystem packages are not marked - ##editable in a req, a non deterministic error - ##occurs when the script attempts to unpack the - ##build directory - - location = req_to_install.build_location(self.build_dir, not self.is_download) - ## FIXME: is the existance of the checkout good enough to use it? I don't think so. - unpack = True - if not os.path.exists(os.path.join(location, 'setup.py')): - ## FIXME: this won't upgrade when there's an existing package unpacked in `location` - if req_to_install.url is None: - url = finder.find_requirement(req_to_install, upgrade=self.upgrade) - else: - ## FIXME: should req_to_install.url already be a link? 
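- # req_to_install.url is a plain string here; wrapping it in a Link lets unpack_url() below dispatch on the link type (VCS checkout, local file: URL, or HTTP download).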
- url = Link(req_to_install.url) - assert url - if url: - try: - self.unpack_url(url, location, self.is_download) - except urllib2.HTTPError, e: - logger.fatal('Could not install requirement %s because of error %s' - % (req_to_install, e)) - raise InstallationError( - 'Could not install requirement %s because of HTTP error %s for URL %s' - % (req_to_install, e, url)) - else: - unpack = False - if unpack: - is_bundle = req_to_install.is_bundle - url = None - if is_bundle: - req_to_install.move_bundle_files(self.build_dir, self.src_dir) - for subreq in req_to_install.bundle_requirements(): - reqs.append(subreq) - self.add_requirement(subreq) - elif self.is_download: - req_to_install.source_dir = location - if url and url.scheme in vcs.all_schemes: - req_to_install.run_egg_info() - req_to_install.archive(self.download_dir) - else: - req_to_install.source_dir = location - req_to_install.run_egg_info() - if force_root_egg_info: - # We need to run this to make sure that the .egg-info/ - # directory is created for packing in the bundle - req_to_install.run_egg_info(force_root_egg_info=True) - req_to_install.assert_source_matches_version() - #@@ sketchy way of identifying packages not grabbed from an index - if bundle and req_to_install.url: - self.copy_to_build_dir(req_to_install) - if not is_bundle and not self.is_download: - ## FIXME: shouldn't be globally added: - finder.add_dependency_links(req_to_install.dependency_links) - ## FIXME: add extras in here: - if not self.ignore_dependencies: - for req in req_to_install.requirements(): - try: - name = pkg_resources.Requirement.parse(req).project_name - except ValueError, e: - ## FIXME: proper warning - logger.error('Invalid requirement: %r (%s) in requirement %s' % (req, e, req_to_install)) - continue - if self.has_requirement(name): - ## FIXME: check for conflict - continue - subreq = InstallRequirement(req, req_to_install) - reqs.append(subreq) - self.add_requirement(subreq) - if req_to_install.name not in self.requirements: - self.requirements[req_to_install.name] = req_to_install - else: - self.reqs_to_cleanup.append(req_to_install) - if install: - self.successfully_downloaded.append(req_to_install) - if bundle and (req_to_install.url and req_to_install.url.startswith('file:///')): - self.copy_to_build_dir(req_to_install) - finally: - logger.indent -= 2 - - def cleanup_files(self, bundle=False): - """Clean up files, remove builds.""" - logger.notify('Cleaning up...') - logger.indent += 2 - for req in self.reqs_to_cleanup: - req.remove_temporary_source() - - remove_dir = [] - if self._pip_has_created_build_dir(): - remove_dir.append(self.build_dir) - - # The source dir of a bundle can always be removed. - if bundle: - remove_dir.append(self.src_dir) - - for dir in remove_dir: - if os.path.exists(dir): - logger.info('Removing temporary dir %s...' 
% dir) - rmtree(dir) - - logger.indent -= 2 - - def _pip_has_created_build_dir(self): - return (self.build_dir == build_prefix and - os.path.exists(os.path.join(self.build_dir, PIP_DELETE_MARKER_FILENAME))) - - def copy_to_build_dir(self, req_to_install): - target_dir = req_to_install.editable and self.src_dir or self.build_dir - logger.info("Copying %s to %s" %(req_to_install.name, target_dir)) - dest = os.path.join(target_dir, req_to_install.name) - copytree(req_to_install.source_dir, dest) - call_subprocess(["python", "%s/setup.py"%dest, "clean"]) - - def unpack_url(self, link, location, only_download=False): - if only_download: - location = self.download_dir - if is_vcs_url(link): - return unpack_vcs_link(link, location, only_download) - elif is_file_url(link): - return unpack_file_url(link, location) - else: - if self.download_cache: - self.download_cache = os.path.expanduser(self.download_cache) - return unpack_http_url(link, location, self.download_cache, only_download) - - def install(self, install_options, global_options=()): - """Install everything in this set (after having downloaded and unpacked the packages)""" - to_install = sorted([r for r in self.requirements.values() - if self.upgrade or not r.satisfied_by], - key=lambda p: p.name.lower()) - if to_install: - logger.notify('Installing collected packages: %s' % (', '.join([req.name for req in to_install]))) - logger.indent += 2 - try: - for requirement in to_install: - if requirement.conflicts_with: - logger.notify('Found existing installation: %s' - % requirement.conflicts_with) - logger.indent += 2 - try: - requirement.uninstall(auto_confirm=True) - finally: - logger.indent -= 2 - try: - requirement.install(install_options, global_options) - except: - # if install did not succeed, rollback previous uninstall - if requirement.conflicts_with and not requirement.install_succeeded: - requirement.rollback_uninstall() - raise - else: - if requirement.conflicts_with and requirement.install_succeeded: - requirement.commit_uninstall() - requirement.remove_temporary_source() - finally: - logger.indent -= 2 - self.successfully_installed = to_install - - def create_bundle(self, bundle_filename): - ## FIXME: can't decide which is better; zip is easier to read - ## random files from, but tar.bz2 is smaller and not as lame a - ## format. - - ## FIXME: this file should really include a manifest of the - ## packages, maybe some other metadata files. It would make - ## it easier to detect as well. 
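- # The layout written below is what is_bundle() later detects: a zip whose top level holds build/ and src/ trees plus a pip-manifest.txt naming the bundled requirements.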
- zip = zipfile.ZipFile(bundle_filename, 'w', zipfile.ZIP_DEFLATED) - vcs_dirs = [] - for dir, basename in (self.build_dir, 'build'), (self.src_dir, 'src'): - dir = os.path.normcase(os.path.abspath(dir)) - for dirpath, dirnames, filenames in os.walk(dir): - for backend in vcs.backends: - vcs_backend = backend() - vcs_url = vcs_rev = None - if vcs_backend.dirname in dirnames: - for vcs_dir in vcs_dirs: - if dirpath.startswith(vcs_dir): - # vcs bundle file already in parent directory - break - else: - vcs_url, vcs_rev = vcs_backend.get_info( - os.path.join(dir, dirpath)) - vcs_dirs.append(dirpath) - vcs_bundle_file = vcs_backend.bundle_file - vcs_guide = vcs_backend.guide % {'url': vcs_url, - 'rev': vcs_rev} - dirnames.remove(vcs_backend.dirname) - break - if 'pip-egg-info' in dirnames: - dirnames.remove('pip-egg-info') - for dirname in dirnames: - dirname = os.path.join(dirpath, dirname) - name = self._clean_zip_name(dirname, dir) - zip.writestr(basename + '/' + name + '/', '') - for filename in filenames: - if filename == PIP_DELETE_MARKER_FILENAME: - continue - filename = os.path.join(dirpath, filename) - name = self._clean_zip_name(filename, dir) - zip.write(filename, basename + '/' + name) - if vcs_url: - name = os.path.join(dirpath, vcs_bundle_file) - name = self._clean_zip_name(name, dir) - zip.writestr(basename + '/' + name, vcs_guide) - - zip.writestr('pip-manifest.txt', self.bundle_requirements()) - zip.close() - - BUNDLE_HEADER = '''\ -# This is a pip bundle file, that contains many source packages -# that can be installed as a group. You can install this like: -# pip this_file.zip -# The rest of the file contains a list of all the packages included: -''' - - def bundle_requirements(self): - parts = [self.BUNDLE_HEADER] - for req in sorted( - [req for req in self.requirements.values() - if not req.comes_from], - key=lambda x: x.name): - parts.append('%s==%s\n' % (req.name, req.installed_version)) - parts.append('# These packages were installed to satisfy the above requirements:\n') - for req in sorted( - [req for req in self.requirements.values() - if req.comes_from], - key=lambda x: x.name): - parts.append('%s==%s\n' % (req.name, req.installed_version)) - ## FIXME: should we do something with self.unnamed_requirements? 
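- # The manifest produced here is itself a valid requirements file: the BUNDLE_HEADER comment block followed by one 'name==version' line per requirement.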
- return ''.join(parts) - - def _clean_zip_name(self, name, prefix): - assert name.startswith(prefix+os.path.sep), ( - "name %r doesn't start with prefix %r" % (name, prefix)) - name = name[len(prefix)+1:] - name = name.replace(os.path.sep, '/') - return name - - -def _make_build_dir(build_dir): - os.makedirs(build_dir) - _write_delete_marker_message(os.path.join(build_dir, PIP_DELETE_MARKER_FILENAME)) - - -def _write_delete_marker_message(filepath): - marker_fp = open(filepath, 'w') - marker_fp.write(DELETE_MARKER_MESSAGE) - marker_fp.close() - - -_scheme_re = re.compile(r'^(http|https|file):', re.I) - - -def parse_requirements(filename, finder=None, comes_from=None, options=None): - skip_match = None - skip_regex = options.skip_requirements_regex - if skip_regex: - skip_match = re.compile(skip_regex) - filename, content = get_file_content(filename, comes_from=comes_from) - for line_number, line in enumerate(content.splitlines()): - line_number += 1 - line = line.strip() - if not line or line.startswith('#'): - continue - if skip_match and skip_match.search(line): - continue - if line.startswith('-r') or line.startswith('--requirement'): - if line.startswith('-r'): - req_url = line[2:].strip() - else: - req_url = line[len('--requirement'):].strip().strip('=') - if _scheme_re.search(filename): - # Relative to a URL - req_url = urlparse.urljoin(req_url, filename) - elif not _scheme_re.search(req_url): - req_url = os.path.join(os.path.dirname(filename), req_url) - for item in parse_requirements(req_url, finder, comes_from=filename, options=options): - yield item - elif line.startswith('-Z') or line.startswith('--always-unzip'): - # No longer used, but previously these were used in - # requirement files, so we'll ignore. - pass - elif line.startswith('-f') or line.startswith('--find-links'): - if line.startswith('-f'): - line = line[2:].strip() - else: - line = line[len('--find-links'):].strip().lstrip('=') - ## FIXME: it would be nice to keep track of the source of - ## the find_links: - if finder: - finder.find_links.append(line) - elif line.startswith('-i') or line.startswith('--index-url'): - if line.startswith('-i'): - line = line[2:].strip() - else: - line = line[len('--index-url'):].strip().lstrip('=') - if finder: - finder.index_urls = [line] - elif line.startswith('--extra-index-url'): - line = line[len('--extra-index-url'):].strip().lstrip('=') - if finder: - finder.index_urls.append(line) - else: - comes_from = '-r %s (line %s)' % (filename, line_number) - if line.startswith('-e') or line.startswith('--editable'): - if line.startswith('-e'): - line = line[2:].strip() - else: - line = line[len('--editable'):].strip() - req = InstallRequirement.from_editable( - line, comes_from=comes_from, default_vcs=options.default_vcs) - else: - req = InstallRequirement.from_line(line, comes_from) - yield req - - -def parse_editable(editable_req, default_vcs=None): - """Parses svn+http://blahblah@rev#egg=Foobar into a requirement - (Foobar) and a URL""" - url = editable_req - if os.path.isdir(url) and os.path.exists(os.path.join(url, 'setup.py')): - # Treating it as code that has already been checked out - url = path_to_url(url) - if url.lower().startswith('file:'): - return None, url - for version_control in vcs: - if url.lower().startswith('%s:' % version_control): - url = '%s+%s' % (version_control, url) - if '+' not in url: - if default_vcs: - url = default_vcs + '+' + url - else: - raise InstallationError( - '--editable=%s should be formatted with svn+URL, git+URL, hg+URL or bzr+URL' % 
editable_req) - vc_type = url.split('+', 1)[0].lower() - if not vcs.get_backend(vc_type): - raise InstallationError( - 'For --editable=%s only svn (svn+URL), Git (git+URL), Mercurial (hg+URL) and Bazaar (bzr+URL) is currently supported' % editable_req) - match = re.search(r'(?:#|#.*?&)egg=([^&]*)', editable_req) - if (not match or not match.group(1)) and vcs.get_backend(vc_type): - parts = [p for p in editable_req.split('#', 1)[0].split('/') if p] - if parts[-2] in ('tags', 'branches', 'tag', 'branch'): - req = parts[-3] - elif parts[-1] == 'trunk': - req = parts[-2] - else: - raise InstallationError( - '--editable=%s is not the right format; it must have #egg=Package' - % editable_req) - else: - req = match.group(1) - ## FIXME: use package_to_requirement? - match = re.search(r'^(.*?)(?:-dev|-\d.*)', req) - if match: - # Strip off -dev, -0.2, etc. - req = match.group(1) - return req, url - - -class UninstallPathSet(object): - """A set of file paths to be removed in the uninstallation of a - requirement.""" - def __init__(self, dist): - self.paths = set() - self._refuse = set() - self.pth = {} - self.dist = dist - self.save_dir = None - self._moved_paths = [] - - def _permitted(self, path): - """ - Return True if the given path is one we are permitted to - remove/modify, False otherwise. - - """ - return is_local(path) - - def _can_uninstall(self): - if not dist_is_local(self.dist): - logger.notify("Not uninstalling %s at %s, outside environment %s" - % (self.dist.project_name, normalize_path(self.dist.location), sys.prefix)) - return False - return True - - def add(self, path): - path = normalize_path(path) - if not os.path.exists(path): - return - if self._permitted(path): - self.paths.add(path) - else: - self._refuse.add(path) - - def add_pth(self, pth_file, entry): - pth_file = normalize_path(pth_file) - if self._permitted(pth_file): - if pth_file not in self.pth: - self.pth[pth_file] = UninstallPthEntries(pth_file) - self.pth[pth_file].add(entry) - else: - self._refuse.add(pth_file) - - def compact(self, paths): - """Compact a path set to contain the minimal number of paths - necessary to contain all paths in the set. If /a/path/ and - /a/path/to/a/file.txt are both in the set, leave only the - shorter path.""" - short_paths = set() - for path in sorted(paths, key=len): - if not any([(path.startswith(shortpath) and - path[len(shortpath.rstrip(os.path.sep))] == os.path.sep) - for shortpath in short_paths]): - short_paths.add(path) - return short_paths - - def _stash(self, path): - return os.path.join( - self.save_dir, os.path.splitdrive(path)[1].lstrip(os.path.sep)) - - def remove(self, auto_confirm=False): - """Remove paths in ``self.paths`` with confirmation (unless - ``auto_confirm`` is True).""" - if not self._can_uninstall(): - return - logger.notify('Uninstalling %s:' % self.dist.project_name) - logger.indent += 2 - paths = sorted(self.compact(self.paths)) - try: - if auto_confirm: - response = 'y' - else: - for path in paths: - logger.notify(path) - response = ask('Proceed (y/n)? 
', ('y', 'n')) - if self._refuse: - logger.notify('Not removing or modifying (outside of prefix):') - for path in self.compact(self._refuse): - logger.notify(path) - if response == 'y': - self.save_dir = tempfile.mkdtemp(suffix='-uninstall', - prefix='pip-') - for path in paths: - new_path = self._stash(path) - logger.info('Removing file or directory %s' % path) - self._moved_paths.append(path) - renames(path, new_path) - for pth in self.pth.values(): - pth.remove() - logger.notify('Successfully uninstalled %s' % self.dist.project_name) - - finally: - logger.indent -= 2 - - def rollback(self): - """Rollback the changes previously made by remove().""" - if self.save_dir is None: - logger.error("Can't roll back %s; was not uninstalled" % self.dist.project_name) - return False - logger.notify('Rolling back uninstall of %s' % self.dist.project_name) - for path in self._moved_paths: - tmp_path = self._stash(path) - logger.info('Replacing %s' % path) - renames(tmp_path, path) - for pth in self.pth: - pth.rollback() - - def commit(self): - """Remove temporary save dir: rollback will no longer be possible.""" - if self.save_dir is not None: - shutil.rmtree(self.save_dir) - self.save_dir = None - self._moved_paths = [] - - -class UninstallPthEntries(object): - def __init__(self, pth_file): - if not os.path.isfile(pth_file): - raise UninstallationError("Cannot remove entries from nonexistent file %s" % pth_file) - self.file = pth_file - self.entries = set() - self._saved_lines = None - - def add(self, entry): - entry = os.path.normcase(entry) - # On Windows, os.path.normcase converts the entry to use - # backslashes. This is correct for entries that describe absolute - # paths outside of site-packages, but all the others use forward - # slashes. - if sys.platform == 'win32' and not os.path.splitdrive(entry)[0]: - entry = entry.replace('\\', '/') - self.entries.add(entry) - - def remove(self): - logger.info('Removing pth entries from %s:' % self.file) - fh = open(self.file, 'r') - lines = fh.readlines() - self._saved_lines = lines - fh.close() - try: - for entry in self.entries: - logger.info('Removing entry: %s' % entry) - try: - lines.remove(entry + '\n') - except ValueError: - pass - finally: - pass - fh = open(self.file, 'wb') - fh.writelines(lines) - fh.close() - - def rollback(self): - if self._saved_lines is None: - logger.error('Cannot roll back changes to %s, none were made' % self.file) - return False - logger.info('Rolling %s back to previous state' % self.file) - fh = open(self.file, 'wb') - fh.writelines(self._saved_lines) - fh.close() - return True - - -class FakeFile(object): - """Wrap a list of lines in an object with readline() to make - ConfigParser happy.""" - def __init__(self, lines): - self._gen = (l for l in lines) - - def readline(self): - try: - return self._gen.next() - except StopIteration: - return '' diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/runner.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/runner.py deleted file mode 100755 index be830ad9..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/runner.py +++ /dev/null @@ -1,18 +0,0 @@ -import sys -import os - - -def run(): - base = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) - ## FIXME: this is kind of crude; if we could create a fake pip - ## module, then exec into it and update pip.__path__ properly, we - ## wouldn't have to update sys.path: - sys.path.insert(0, base) - import pip - return pip.main() - - -if __name__ == '__main__': - exit = run() - if 
exit: - sys.exit(exit) diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/util.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/util.py deleted file mode 100755 index 1eab34c0..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/util.py +++ /dev/null @@ -1,479 +0,0 @@ -import sys -import shutil -import os -import stat -import re -import posixpath -import pkg_resources -import zipfile -import tarfile -from pip.exceptions import InstallationError -from pip.backwardcompat import WindowsError -from pip.locations import site_packages, running_under_virtualenv -from pip.log import logger - -__all__ = ['rmtree', 'display_path', 'backup_dir', - 'find_command', 'ask', 'Inf', - 'normalize_name', 'splitext', - 'format_size', 'is_installable_dir', - 'is_svn_page', 'file_contents', - 'split_leading_dir', 'has_leading_dir', - 'make_path_relative', 'normalize_path', - 'renames', 'get_terminal_size', - 'unzip_file', 'untar_file', 'create_download_cache_folder', - 'cache_download', 'unpack_file'] - - -def rmtree(dir): - shutil.rmtree(dir, ignore_errors=True, - onerror=rmtree_errorhandler) - - -def rmtree_errorhandler(func, path, exc_info): - """On Windows, the files in .svn are read-only, so when rmtree() tries to - remove them, an exception is thrown. We catch that here, remove the - read-only attribute, and hopefully continue without problems.""" - exctype, value = exc_info[:2] - # lookin for a windows error - if exctype is not WindowsError or 'Access is denied' not in str(value): - raise - # file type should currently be read only - if ((os.stat(path).st_mode & stat.S_IREAD) != stat.S_IREAD): - raise - # convert to read/write - os.chmod(path, stat.S_IWRITE) - # use the original function to repeat the operation - func(path) - - -def display_path(path): - """Gives the display value for a given path, making it relative to cwd - if possible.""" - path = os.path.normcase(os.path.abspath(path)) - if path.startswith(os.getcwd() + os.path.sep): - path = '.' + path[len(os.getcwd()):] - return path - - -def backup_dir(dir, ext='.bak'): - """Figure out the name of a directory to back up the given dir to - (adding .bak, .bak2, etc)""" - n = 1 - extension = ext - while os.path.exists(dir + extension): - n += 1 - extension = ext + str(n) - return dir + extension - - -def find_command(cmd, paths=None, pathext=None): - """Searches the PATH for the given command and returns its path""" - if paths is None: - paths = os.environ.get('PATH', []).split(os.pathsep) - if isinstance(paths, basestring): - paths = [paths] - # check if there are funny path extensions for executables, e.g. 
Windows - if pathext is None: - pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD') - pathext = [ext for ext in pathext.lower().split(os.pathsep)] - # don't use extensions if the command ends with one of them - if os.path.splitext(cmd)[1].lower() in pathext: - pathext = [''] - # check if we find the command on PATH - for path in paths: - # try without extension first - cmd_path = os.path.join(path, cmd) - for ext in pathext: - # then including the extension - cmd_path_ext = cmd_path + ext - if os.path.exists(cmd_path_ext): - return cmd_path_ext - if os.path.exists(cmd_path): - return cmd_path - return None - - -def ask(message, options): - """Ask the message interactively, with the given possible responses""" - while 1: - if os.environ.get('PIP_NO_INPUT'): - raise Exception('No input was expected ($PIP_NO_INPUT set); question: %s' % message) - response = raw_input(message) - response = response.strip().lower() - if response not in options: - print 'Your response (%r) was not one of the expected responses: %s' % ( - response, ', '.join(options)) - else: - return response - - -class _Inf(object): - """I am bigger than everything!""" - def __cmp__(self, a): - if self is a: - return 0 - return 1 - - def __repr__(self): - return 'Inf' - -Inf = _Inf() -del _Inf - - -_normalize_re = re.compile(r'[^a-z]', re.I) - - -def normalize_name(name): - return _normalize_re.sub('-', name.lower()) - - -def format_size(bytes): - if bytes > 1000*1000: - return '%.1fMb' % (bytes/1000.0/1000) - elif bytes > 10*1000: - return '%iKb' % (bytes/1000) - elif bytes > 1000: - return '%.1fKb' % (bytes/1000.0) - else: - return '%ibytes' % bytes - - -def is_installable_dir(path): - """Return True if `path` is a directory containing a setup.py file.""" - if not os.path.isdir(path): - return False - setup_py = os.path.join(path, 'setup.py') - if os.path.isfile(setup_py): - return True - return False - - -def is_svn_page(html): - """Returns true if the page appears to be the index page of an svn repository""" - return (re.search(r'[^<]*Revision \d+:', html) - and re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I)) - - -def file_contents(filename): - fp = open(filename, 'rb') - try: - return fp.read() - finally: - fp.close() - - -def split_leading_dir(path): - path = str(path) - path = path.lstrip('/').lstrip('\\') - if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) - or '\\' not in path): - return path.split('/', 1) - elif '\\' in path: - return path.split('\\', 1) - else: - return path, '' - - -def has_leading_dir(paths): - """Returns true if all the paths have the same leading path name - (i.e., everything is in one subdirectory in an archive)""" - common_prefix = None - for path in paths: - prefix, rest = split_leading_dir(path) - if not prefix: - return False - elif common_prefix is None: - common_prefix = prefix - elif prefix != common_prefix: - return False - return True - - -def make_path_relative(path, rel_to): - """ - Make a filename relative, where the filename path, and it is - relative to rel_to - - >>> make_relative_path('/usr/share/something/a-file.pth', - ... '/usr/share/another-place/src/Directory') - '../../../something/a-file.pth' - >>> make_relative_path('/usr/share/something/a-file.pth', - ... 
'/home/user/src/Directory') - '../../../usr/share/something/a-file.pth' - >>> make_relative_path('/usr/share/a-file.pth', '/usr/share/') - 'a-file.pth' - """ - path_filename = os.path.basename(path) - path = os.path.dirname(path) - path = os.path.normpath(os.path.abspath(path)) - rel_to = os.path.normpath(os.path.abspath(rel_to)) - path_parts = path.strip(os.path.sep).split(os.path.sep) - rel_to_parts = rel_to.strip(os.path.sep).split(os.path.sep) - while path_parts and rel_to_parts and path_parts[0] == rel_to_parts[0]: - path_parts.pop(0) - rel_to_parts.pop(0) - full_parts = ['..']*len(rel_to_parts) + path_parts + [path_filename] - if full_parts == ['']: - return '.' + os.path.sep - return os.path.sep.join(full_parts) - - -def normalize_path(path): - """ - Convert a path to its canonical, case-normalized, absolute version. - - """ - return os.path.normcase(os.path.realpath(path)) - - -def splitext(path): - """Like os.path.splitext, but take off .tar too""" - base, ext = posixpath.splitext(path) - if base.lower().endswith('.tar'): - ext = base[-4:] + ext - base = base[:-4] - return base, ext - - -def renames(old, new): - """Like os.renames(), but handles renaming across devices.""" - # Implementation borrowed from os.renames(). - head, tail = os.path.split(new) - if head and tail and not os.path.exists(head): - os.makedirs(head) - - shutil.move(old, new) - - head, tail = os.path.split(old) - if head and tail: - try: - os.removedirs(head) - except OSError: - pass - - -def is_local(path): - """ - Return True if path is within sys.prefix, if we're running in a virtualenv. - - If we're not in a virtualenv, all paths are considered "local." - - """ - if not running_under_virtualenv(): - return True - return normalize_path(path).startswith(normalize_path(sys.prefix)) - - -def dist_is_local(dist): - """ - Return True if given Distribution object is installed locally - (i.e. within current virtualenv). - - Always True if we're not in a virtualenv. - - """ - return is_local(dist_location(dist)) - - -def get_installed_distributions(local_only=True, skip=('setuptools', 'pip', 'python')): - """ - Return a list of installed Distribution objects. - - If ``local_only`` is True (default), only return installations - local to the current virtualenv, if in a virtualenv. - - ``skip`` argument is an iterable of lower-case project names to - ignore; defaults to ('setuptools', 'pip', 'python'). [FIXME also - skip virtualenv?] - - """ - if local_only: - local_test = dist_is_local - else: - local_test = lambda d: True - return [d for d in pkg_resources.working_set if local_test(d) and d.key not in skip] - - -def egg_link_path(dist): - """ - Return the path where we'd expect to find a .egg-link file for - this distribution. (There doesn't seem to be any metadata in the - Distribution object for a develop egg that points back to its - .egg-link and easy-install.pth files). - - This won't find a globally-installed develop egg if we're in a - virtualenv. - - """ - return os.path.join(site_packages, dist.project_name) + '.egg-link' - - -def dist_location(dist): - """ - Get the site-packages location of this distribution. Generally - this is dist.location, except in the case of develop-installed - packages, where dist.location is the source code location, and we - want to know where the egg-link file is. 
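- (If no .egg-link file exists, this falls back to dist.location.)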
- - """ - egg_link = egg_link_path(dist) - if os.path.exists(egg_link): - return egg_link - return dist.location - - -def get_terminal_size(): - """Returns a tuple (x, y) representing the width(x) and the height(x) - in characters of the terminal window.""" - def ioctl_GWINSZ(fd): - try: - import fcntl - import termios - import struct - cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, - '1234')) - except: - return None - if cr == (0, 0): - return None - if cr == (0, 0): - return None - return cr - cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2) - if not cr: - try: - fd = os.open(os.ctermid(), os.O_RDONLY) - cr = ioctl_GWINSZ(fd) - os.close(fd) - except: - pass - if not cr: - cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80)) - return int(cr[1]), int(cr[0]) - - -def unzip_file(filename, location, flatten=True): - """Unzip the file (zip file located at filename) to the destination - location""" - if not os.path.exists(location): - os.makedirs(location) - zipfp = open(filename, 'rb') - try: - zip = zipfile.ZipFile(zipfp) - leading = has_leading_dir(zip.namelist()) and flatten - for name in zip.namelist(): - data = zip.read(name) - fn = name - if leading: - fn = split_leading_dir(name)[1] - fn = os.path.join(location, fn) - dir = os.path.dirname(fn) - if not os.path.exists(dir): - os.makedirs(dir) - if fn.endswith('/') or fn.endswith('\\'): - # A directory - if not os.path.exists(fn): - os.makedirs(fn) - else: - fp = open(fn, 'wb') - try: - fp.write(data) - finally: - fp.close() - finally: - zipfp.close() - - -def untar_file(filename, location): - """Untar the file (tar file located at filename) to the destination location""" - if not os.path.exists(location): - os.makedirs(location) - if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'): - mode = 'r:gz' - elif filename.lower().endswith('.bz2') or filename.lower().endswith('.tbz'): - mode = 'r:bz2' - elif filename.lower().endswith('.tar'): - mode = 'r' - else: - logger.warn('Cannot determine compression type for file %s' % filename) - mode = 'r:*' - tar = tarfile.open(filename, mode) - try: - # note: python<=2.5 doesnt seem to know about pax headers, filter them - leading = has_leading_dir([ - member.name for member in tar.getmembers() - if member.name != 'pax_global_header' - ]) - for member in tar.getmembers(): - fn = member.name - if fn == 'pax_global_header': - continue - if leading: - fn = split_leading_dir(fn)[1] - path = os.path.join(location, fn) - if member.isdir(): - if not os.path.exists(path): - os.makedirs(path) - else: - try: - fp = tar.extractfile(member) - except (KeyError, AttributeError), e: - # Some corrupt tar files seem to produce this - # (specifically bad symlinks) - logger.warn( - 'In the tar file %s the member %s is invalid: %s' - % (filename, member.name, e)) - continue - if not os.path.exists(os.path.dirname(path)): - os.makedirs(os.path.dirname(path)) - destfp = open(path, 'wb') - try: - shutil.copyfileobj(fp, destfp) - finally: - destfp.close() - fp.close() - finally: - tar.close() - - -def create_download_cache_folder(folder): - logger.indent -= 2 - logger.notify('Creating supposed download cache at %s' % folder) - logger.indent += 2 - os.makedirs(folder) - - -def cache_download(target_file, temp_location, content_type): - logger.notify('Storing download in cache at %s' % display_path(target_file)) - shutil.copyfile(temp_location, target_file) - fp = open(target_file+'.content-type', 'w') - fp.write(content_type) - fp.close() - os.unlink(temp_location) - - 
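- 
-# A minimal, hypothetical sketch (not part of pip) of the archive helpers
-# defined above; the member names here are invented. has_leading_dir()
-# decides whether unzip_file()/untar_file() should strip the common
-# top-level directory, and split_leading_dir() performs the split.
-members = ['pkg-1.0/setup.py', 'pkg-1.0/pkg/__init__.py']
-assert has_leading_dir(members)
-assert split_leading_dir(members[0]) == ['pkg-1.0', 'setup.py']
-assert splitext('dist/pkg-1.0.tar.gz') == ('dist/pkg-1.0', '.tar.gz')
- 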
-def unpack_file(filename, location, content_type, link): - if (content_type == 'application/zip' - or filename.endswith('.zip') - or filename.endswith('.pybundle') - or zipfile.is_zipfile(filename)): - unzip_file(filename, location, flatten=not filename.endswith('.pybundle')) - elif (content_type == 'application/x-gzip' - or tarfile.is_tarfile(filename) - or splitext(filename)[1].lower() in ('.tar', '.tar.gz', '.tar.bz2', '.tgz', '.tbz')): - untar_file(filename, location) - elif (content_type and content_type.startswith('text/html') - and is_svn_page(file_contents(filename))): - # We don't really care about this - from pip.vcs.subversion import Subversion - Subversion('svn+' + link.url).unpack(location) - else: - ## FIXME: handle? - ## FIXME: magic signatures? - logger.fatal('Cannot unpack file %s (downloaded from %s, content-type: %s); cannot detect archive format' - % (filename, location, content_type)) - raise InstallationError('Cannot determine archive format of %s' % location) - - - diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/__init__.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/__init__.py deleted file mode 100755 index e110440c..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/__init__.py +++ /dev/null @@ -1,238 +0,0 @@ -"""Handles all VCS (version control) support""" - -import os -import shutil -import urlparse -import urllib - -from pip.exceptions import BadCommand -from pip.log import logger -from pip.util import display_path, backup_dir, find_command, ask - - -__all__ = ['vcs', 'get_src_requirement', 'import_vcs_support'] - - -class VcsSupport(object): - _registry = {} - schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp'] - - def __init__(self): - # Register more schemes with urlparse for various version control systems - urlparse.uses_netloc.extend(self.schemes) - urlparse.uses_fragment.extend(self.schemes) - super(VcsSupport, self).__init__() - - def __iter__(self): - return self._registry.__iter__() - - @property - def backends(self): - return self._registry.values() - - @property - def dirnames(self): - return [backend.dirname for backend in self.backends] - - @property - def all_schemes(self): - schemes = [] - for backend in self.backends: - schemes.extend(backend.schemes) - return schemes - - def register(self, cls): - if not hasattr(cls, 'name'): - logger.warn('Cannot register VCS %s' % cls.__name__) - return - if cls.name not in self._registry: - self._registry[cls.name] = cls - - def unregister(self, cls=None, name=None): - if name in self._registry: - del self._registry[name] - elif cls in self._registry.values(): - del self._registry[cls.name] - else: - logger.warn('Cannot unregister because no class or name given') - - def get_backend_name(self, location): - """ - Return the name of the version control backend if found at given - location, e.g. 
vcs.get_backend_name('/path/to/vcs/checkout') - """ - for vc_type in self._registry.values(): - path = os.path.join(location, vc_type.dirname) - if os.path.exists(path): - return vc_type.name - return None - - def get_backend(self, name): - name = name.lower() - if name in self._registry: - return self._registry[name] - - def get_backend_from_location(self, location): - vc_type = self.get_backend_name(location) - if vc_type: - return self.get_backend(vc_type) - return None - - -vcs = VcsSupport() - - -class VersionControl(object): - name = '' - dirname = '' - - def __init__(self, url=None, *args, **kwargs): - self.url = url - self._cmd = None - super(VersionControl, self).__init__(*args, **kwargs) - - def _filter(self, line): - return (logger.INFO, line) - - def _is_local_repository(self, repo): - """ - posix absolute paths start with os.path.sep, - win32 ones ones start with drive (like c:\\folder) - """ - drive, tail = os.path.splitdrive(repo) - return repo.startswith(os.path.sep) or drive - - @property - def cmd(self): - if self._cmd is not None: - return self._cmd - command = find_command(self.name) - if command is None: - raise BadCommand('Cannot find command %r' % self.name) - logger.info('Found command %r at %r' % (self.name, command)) - self._cmd = command - return command - - def get_url_rev(self): - """ - Returns the correct repository URL and revision by parsing the given - repository URL - """ - url = self.url.split('+', 1)[1] - scheme, netloc, path, query, frag = urlparse.urlsplit(url) - rev = None - if '@' in path: - path, rev = path.rsplit('@', 1) - url = urlparse.urlunsplit((scheme, netloc, path, query, '')) - return url, rev - - def get_info(self, location): - """ - Returns (url, revision), where both are strings - """ - assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location - return self.get_url(location), self.get_revision(location) - - def normalize_url(self, url): - """ - Normalize a URL for comparison by unquoting it and removing any trailing slash. - """ - return urllib.unquote(url).rstrip('/') - - def compare_urls(self, url1, url2): - """ - Compare two repo URLs for identity, ignoring incidental differences. - """ - return (self.normalize_url(url1) == self.normalize_url(url2)) - - def parse_vcs_bundle_file(self, content): - """ - Takes the contents of the bundled text file that explains how to revert - the stripped off version control data of the given package and returns - the URL and revision of it. - """ - raise NotImplementedError - - def obtain(self, dest): - """ - Called when installing or updating an editable package, takes the - source path of the checkout. - """ - raise NotImplementedError - - def switch(self, dest, url, rev_options): - """ - Switch the repo at ``dest`` to point to ``URL``. - """ - raise NotImplemented - - def update(self, dest, rev_options): - """ - Update an already-existing repo to the given ``rev_options``. - """ - raise NotImplementedError - - def check_destination(self, dest, url, rev_options, rev_display): - """ - Prepare a location to receive a checkout/clone. - - Return True if the location is ready for (and requires) a - checkout/clone, False otherwise. 
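- (When the destination already exists, this may prompt the user to (s)witch, (i)gnore, (w)ipe, or (b)ackup the existing checkout.)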
- """ - checkout = True - prompt = False - if os.path.exists(dest): - checkout = False - if os.path.exists(os.path.join(dest, self.dirname)): - existing_url = self.get_url(dest) - if self.compare_urls(existing_url, url): - logger.info('%s in %s exists, and has correct URL (%s)' - % (self.repo_name.title(), display_path(dest), url)) - logger.notify('Updating %s %s%s' - % (display_path(dest), self.repo_name, rev_display)) - self.update(dest, rev_options) - else: - logger.warn('%s %s in %s exists with URL %s' - % (self.name, self.repo_name, display_path(dest), existing_url)) - prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', ('s', 'i', 'w', 'b')) - else: - logger.warn('Directory %s already exists, and is not a %s %s.' - % (dest, self.name, self.repo_name)) - prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b')) - if prompt: - logger.warn('The plan is to install the %s repository %s' - % (self.name, url)) - response = ask('What to do? %s' % prompt[0], prompt[1]) - - if response == 's': - logger.notify('Switching %s %s to %s%s' - % (self.repo_name, display_path(dest), url, rev_display)) - self.switch(dest, url, rev_options) - elif response == 'i': - # do nothing - pass - elif response == 'w': - logger.warn('Deleting %s' % display_path(dest)) - shutil.rmtree(dest) - checkout = True - elif response == 'b': - dest_dir = backup_dir(dest) - logger.warn('Backing up %s to %s' - % (display_path(dest), dest_dir)) - shutil.move(dest, dest_dir) - checkout = True - return checkout - - def unpack(self, location): - raise NotImplementedError - - def get_src_requirement(self, dist, location, find_tags=False): - raise NotImplementedError - - -def get_src_requirement(dist, location, find_tags): - version_control = vcs.get_backend_from_location(location) - if version_control: - return version_control().get_src_requirement(dist, location, find_tags) - logger.warn('cannot determine version of editable source in %s (is not SVN checkout, Git clone, Mercurial clone or Bazaar branch)' % location) - return dist.as_requirement() diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/bazaar.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/bazaar.py deleted file mode 100755 index 3b6ea8f0..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/bazaar.py +++ /dev/null @@ -1,138 +0,0 @@ -import os -import shutil -import tempfile -import re -from pip import call_subprocess -from pip.log import logger -from pip.util import rmtree, display_path -from pip.vcs import vcs, VersionControl -from pip.download import path_to_url2 - - -class Bazaar(VersionControl): - name = 'bzr' - dirname = '.bzr' - repo_name = 'branch' - bundle_file = 'bzr-branch.txt' - schemes = ('bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp') - guide = ('# This was a Bazaar branch; to make it a branch again run:\n' - 'bzr branch -r %(rev)s %(url)s .\n') - - def parse_vcs_bundle_file(self, content): - url = rev = None - for line in content.splitlines(): - if not line.strip() or line.strip().startswith('#'): - continue - match = re.search(r'^bzr\s*branch\s*-r\s*(\d*)', line) - if match: - rev = match.group(1).strip() - url = line[match.end():].strip().split(None, 1)[0] - if url and rev: - return url, rev - return None, None - - def unpack(self, location): - """Get the bzr branch at the url to the destination location""" - url, rev = self.get_url_rev() - logger.notify('Checking out bzr repository %s to %s' % (url, location)) - logger.indent += 2 - try: - if os.path.exists(location): - 
os.rmdir(location) - call_subprocess( - [self.cmd, 'branch', url, location], - filter_stdout=self._filter, show_stdout=False) - finally: - logger.indent -= 2 - - def export(self, location): - """Export the Bazaar repository at the url to the destination location""" - temp_dir = tempfile.mkdtemp('-export', 'pip-') - self.unpack(temp_dir) - if os.path.exists(location): - # Remove the location to make sure Bazaar can export it correctly - rmtree(location) - try: - call_subprocess([self.cmd, 'export', location], cwd=temp_dir, - filter_stdout=self._filter, show_stdout=False) - finally: - shutil.rmtree(temp_dir) - - def switch(self, dest, url, rev_options): - call_subprocess([self.cmd, 'switch', url], cwd=dest) - - def update(self, dest, rev_options): - call_subprocess( - [self.cmd, 'pull', '-q'] + rev_options, cwd=dest) - - def obtain(self, dest): - url, rev = self.get_url_rev() - if rev: - rev_options = ['-r', rev] - rev_display = ' (to revision %s)' % rev - else: - rev_options = [] - rev_display = '' - if self.check_destination(dest, url, rev_options, rev_display): - logger.notify('Checking out %s%s to %s' - % (url, rev_display, display_path(dest))) - call_subprocess( - [self.cmd, 'branch', '-q'] + rev_options + [url, dest]) - - def get_url_rev(self): - # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it - url, rev = super(Bazaar, self).get_url_rev() - if url.startswith('ssh://'): - url = 'bzr+' + url - return url, rev - - def get_url(self, location): - urls = call_subprocess( - [self.cmd, 'info'], show_stdout=False, cwd=location) - for line in urls.splitlines(): - line = line.strip() - for x in ('checkout of branch: ', - 'parent branch: '): - if line.startswith(x): - repo = line.split(x)[1] - if self._is_local_repository(repo): - return path_to_url2(repo) - return repo - return None - - def get_revision(self, location): - revision = call_subprocess( - [self.cmd, 'revno'], show_stdout=False, cwd=location) - return revision.splitlines()[-1] - - def get_tag_revs(self, location): - tags = call_subprocess( - [self.cmd, 'tags'], show_stdout=False, cwd=location) - tag_revs = [] - for line in tags.splitlines(): - tags_match = re.search(r'([.\w-]+)\s*(.*)$', line) - if tags_match: - tag = tags_match.group(1) - rev = tags_match.group(2) - tag_revs.append((rev.strip(), tag.strip())) - return dict(tag_revs) - - def get_src_requirement(self, dist, location, find_tags): - repo = self.get_url(location) - if not repo.lower().startswith('bzr:'): - repo = 'bzr+' + repo - egg_project_name = dist.egg_name().split('-', 1)[0] - if not repo: - return None - current_rev = self.get_revision(location) - tag_revs = self.get_tag_revs(location) - - if current_rev in tag_revs: - # It's a tag - full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev]) - else: - full_egg_name = '%s-dev_r%s' % (dist.egg_name(), current_rev) - return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name) - - -vcs.register(Bazaar) diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/git.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/git.py deleted file mode 100755 index 0701e49e..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/git.py +++ /dev/null @@ -1,204 +0,0 @@ -import os -import shutil -import tempfile -import re -from pip import call_subprocess -from pip.util import display_path -from pip.vcs import vcs, VersionControl -from pip.log import logger -from urllib import url2pathname -from urlparse import urlsplit, urlunsplit - - -class Git(VersionControl): - name = 
'git' - dirname = '.git' - repo_name = 'clone' - schemes = ('git', 'git+http', 'git+ssh', 'git+git', 'git+file') - bundle_file = 'git-clone.txt' - guide = ('# This was a Git repo; to make it a repo again run:\n' - 'git init\ngit remote add origin %(url)s -f\ngit checkout %(rev)s\n') - - def __init__(self, url=None, *args, **kwargs): - - # Works around an apparent Git bug - # (see http://article.gmane.org/gmane.comp.version-control.git/146500) - if url: - scheme, netloc, path, query, fragment = urlsplit(url) - if scheme.endswith('file'): - initial_slashes = path[:-len(path.lstrip('/'))] - newpath = initial_slashes + url2pathname(path).replace('\\', '/').lstrip('/') - url = urlunsplit((scheme, netloc, newpath, query, fragment)) - after_plus = scheme.find('+')+1 - url = scheme[:after_plus]+ urlunsplit((scheme[after_plus:], netloc, newpath, query, fragment)) - - super(Git, self).__init__(url, *args, **kwargs) - - def parse_vcs_bundle_file(self, content): - url = rev = None - for line in content.splitlines(): - if not line.strip() or line.strip().startswith('#'): - continue - url_match = re.search(r'git\s*remote\s*add\s*origin(.*)\s*-f', line) - if url_match: - url = url_match.group(1).strip() - rev_match = re.search(r'^git\s*checkout\s*-q\s*(.*)\s*', line) - if rev_match: - rev = rev_match.group(1).strip() - if url and rev: - return url, rev - return None, None - - def unpack(self, location): - """Clone the Git repository at the url to the destination location""" - url, rev = self.get_url_rev() - logger.notify('Cloning Git repository %s to %s' % (url, location)) - logger.indent += 2 - try: - if os.path.exists(location): - os.rmdir(location) - call_subprocess( - [self.cmd, 'clone', url, location], - filter_stdout=self._filter, show_stdout=False) - finally: - logger.indent -= 2 - - def export(self, location): - """Export the Git repository at the url to the destination location""" - temp_dir = tempfile.mkdtemp('-export', 'pip-') - self.unpack(temp_dir) - try: - if not location.endswith('/'): - location = location + '/' - call_subprocess( - [self.cmd, 'checkout-index', '-a', '-f', '--prefix', location], - filter_stdout=self._filter, show_stdout=False, cwd=temp_dir) - finally: - shutil.rmtree(temp_dir) - - def check_rev_options(self, rev, dest, rev_options): - """Check the revision options before checkout to compensate that tags - and branches may need origin/ as a prefix. - Returns the SHA1 of the branch or tag if found. - """ - revisions = self.get_tag_revs(dest) - revisions.update(self.get_branch_revs(dest)) - inverse_revisions = dict((v, k) for k, v in revisions.iteritems()) - # Check if rev is a branch name - origin_rev = 'origin/%s' % rev - if origin_rev in inverse_revisions: - return [inverse_revisions[origin_rev]] - elif rev in inverse_revisions: - return [inverse_revisions[rev]] - else: - logger.warn("Could not find a tag or branch '%s', assuming commit." 
% rev) - return rev_options - - def switch(self, dest, url, rev_options): - call_subprocess( - [self.cmd, 'config', 'remote.origin.url', url], cwd=dest) - call_subprocess( - [self.cmd, 'checkout', '-q'] + rev_options, cwd=dest) - - def update(self, dest, rev_options): - call_subprocess([self.cmd, 'pull', '-q'], cwd=dest) - call_subprocess( - [self.cmd, 'checkout', '-q', '-f'] + rev_options, cwd=dest) - - def obtain(self, dest): - url, rev = self.get_url_rev() - if rev: - rev_options = [rev] - rev_display = ' (to %s)' % rev - else: - rev_options = ['master'] - rev_display = '' - if self.check_destination(dest, url, rev_options, rev_display): - logger.notify('Cloning %s%s to %s' % (url, rev_display, display_path(dest))) - call_subprocess([self.cmd, 'clone', '-q', url, dest]) - if rev: - rev_options = self.check_rev_options(rev, dest, rev_options) - # Only do a checkout if rev_options differs from HEAD - if not self.get_revision(dest).startswith(rev_options[0]): - call_subprocess([self.cmd, 'checkout', '-q'] + rev_options, cwd=dest) - - def get_url(self, location): - url = call_subprocess( - [self.cmd, 'config', 'remote.origin.url'], - show_stdout=False, cwd=location) - return url.strip() - - def get_revision(self, location): - current_rev = call_subprocess( - [self.cmd, 'rev-parse', 'HEAD'], show_stdout=False, cwd=location) - return current_rev.strip() - - def get_tag_revs(self, location): - tags = call_subprocess( - [self.cmd, 'tag', '-l'], - show_stdout=False, raise_on_returncode=False, cwd=location) - tag_revs = [] - for line in tags.splitlines(): - tag = line.strip() - rev = call_subprocess( - [self.cmd, 'rev-parse', tag], show_stdout=False, cwd=location) - tag_revs.append((rev.strip(), tag)) - tag_revs = dict(tag_revs) - return tag_revs - - def get_branch_revs(self, location): - branches = call_subprocess( - [self.cmd, 'branch', '-r'], show_stdout=False, cwd=location) - branch_revs = [] - for line in branches.splitlines(): - line = line.split('->')[0].strip() - branch = "".join([b for b in line.split() if b != '*']) - rev = call_subprocess( - [self.cmd, 'rev-parse', branch], show_stdout=False, cwd=location) - branch_revs.append((rev.strip(), branch)) - branch_revs = dict(branch_revs) - return branch_revs - - def get_src_requirement(self, dist, location, find_tags): - repo = self.get_url(location) - if not repo.lower().startswith('git:'): - repo = 'git+' + repo - egg_project_name = dist.egg_name().split('-', 1)[0] - if not repo: - return None - current_rev = self.get_revision(location) - tag_revs = self.get_tag_revs(location) - branch_revs = self.get_branch_revs(location) - - if current_rev in tag_revs: - # It's a tag - full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev]) - elif (current_rev in branch_revs and - branch_revs[current_rev] != 'origin/master'): - # It's the head of a branch - full_egg_name = '%s-%s' % (dist.egg_name(), - branch_revs[current_rev].replace('origin/', '')) - else: - full_egg_name = '%s-dev' % dist.egg_name() - - return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name) - - def get_url_rev(self): - """ - Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'. - That's required because although they use SSH they sometimes doesn't - work with a ssh:// scheme (e.g. Github). But we need a scheme for - parsing. Hence we remove it again afterwards and return it as a stub. 
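- For example, 'git+user@hostname:user/repo.git' is temporarily rewritten to 'git+ssh://user@hostname:user/repo.git' for parsing, and the 'ssh://' prefix is then stripped from the URL that is returned.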
- """ - if not '://' in self.url: - assert not 'file:' in self.url - self.url = self.url.replace('git+', 'git+ssh://') - url, rev = super(Git, self).get_url_rev() - url = url.replace('ssh://', '') - else: - url, rev = super(Git, self).get_url_rev() - - return url, rev - - -vcs.register(Git) diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/mercurial.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/mercurial.py deleted file mode 100755 index 70c8c833..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/mercurial.py +++ /dev/null @@ -1,162 +0,0 @@ -import os -import shutil -import tempfile -import re -import ConfigParser -from pip import call_subprocess -from pip.util import display_path -from pip.log import logger -from pip.vcs import vcs, VersionControl -from pip.download import path_to_url2 - - -class Mercurial(VersionControl): - name = 'hg' - dirname = '.hg' - repo_name = 'clone' - schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http') - bundle_file = 'hg-clone.txt' - guide = ('# This was a Mercurial repo; to make it a repo again run:\n' - 'hg init\nhg pull %(url)s\nhg update -r %(rev)s\n') - - def parse_vcs_bundle_file(self, content): - url = rev = None - for line in content.splitlines(): - if not line.strip() or line.strip().startswith('#'): - continue - url_match = re.search(r'hg\s*pull\s*(.*)\s*', line) - if url_match: - url = url_match.group(1).strip() - rev_match = re.search(r'^hg\s*update\s*-r\s*(.*)\s*', line) - if rev_match: - rev = rev_match.group(1).strip() - if url and rev: - return url, rev - return None, None - - def unpack(self, location): - """Clone the Hg repository at the url to the destination location""" - url, rev = self.get_url_rev() - logger.notify('Cloning Mercurial repository %s to %s' % (url, location)) - logger.indent += 2 - try: - if os.path.exists(location): - os.rmdir(location) - call_subprocess( - [self.cmd, 'clone', url, location], - filter_stdout=self._filter, show_stdout=False) - finally: - logger.indent -= 2 - - def export(self, location): - """Export the Hg repository at the url to the destination location""" - temp_dir = tempfile.mkdtemp('-export', 'pip-') - self.unpack(temp_dir) - try: - call_subprocess( - [self.cmd, 'archive', location], - filter_stdout=self._filter, show_stdout=False, cwd=temp_dir) - finally: - shutil.rmtree(temp_dir) - - def switch(self, dest, url, rev_options): - repo_config = os.path.join(dest, self.dirname, 'hgrc') - config = ConfigParser.SafeConfigParser() - try: - config.read(repo_config) - config.set('paths', 'default', url) - config_file = open(repo_config, 'w') - config.write(config_file) - config_file.close() - except (OSError, ConfigParser.NoSectionError), e: - logger.warn( - 'Could not switch Mercurial repository to %s: %s' - % (url, e)) - else: - call_subprocess([self.cmd, 'update', '-q'] + rev_options, cwd=dest) - - def update(self, dest, rev_options): - call_subprocess([self.cmd, 'pull', '-q'], cwd=dest) - call_subprocess( - [self.cmd, 'update', '-q'] + rev_options, cwd=dest) - - def obtain(self, dest): - url, rev = self.get_url_rev() - if rev: - rev_options = [rev] - rev_display = ' (to revision %s)' % rev - else: - rev_options = [] - rev_display = '' - if self.check_destination(dest, url, rev_options, rev_display): - logger.notify('Cloning hg %s%s to %s' - % (url, rev_display, display_path(dest))) - call_subprocess([self.cmd, 'clone', '--noupdate', '-q', url, dest]) - call_subprocess([self.cmd, 'update', '-q'] + rev_options, cwd=dest) - - def 
get_url(self, location): - url = call_subprocess( - [self.cmd, 'showconfig', 'paths.default'], - show_stdout=False, cwd=location).strip() - if self._is_local_repository(url): - url = path_to_url2(url) - return url.strip() - - def get_tag_revs(self, location): - tags = call_subprocess( - [self.cmd, 'tags'], show_stdout=False, cwd=location) - tag_revs = [] - for line in tags.splitlines(): - tags_match = re.search(r'([\w\d\.-]+)\s*([\d]+):.*$', line) - if tags_match: - tag = tags_match.group(1) - rev = tags_match.group(2) - tag_revs.append((rev.strip(), tag.strip())) - return dict(tag_revs) - - def get_branch_revs(self, location): - branches = call_subprocess( - [self.cmd, 'branches'], show_stdout=False, cwd=location) - branch_revs = [] - for line in branches.splitlines(): - branches_match = re.search(r'([\w\d\.-]+)\s*([\d]+):.*$', line) - if branches_match: - branch = branches_match.group(1) - rev = branches_match.group(2) - branch_revs.append((rev.strip(), branch.strip())) - return dict(branch_revs) - - def get_revision(self, location): - current_revision = call_subprocess( - [self.cmd, 'parents', '--template={rev}'], - show_stdout=False, cwd=location).strip() - return current_revision - - def get_revision_hash(self, location): - current_rev_hash = call_subprocess( - [self.cmd, 'parents', '--template={node}'], - show_stdout=False, cwd=location).strip() - return current_rev_hash - - def get_src_requirement(self, dist, location, find_tags): - repo = self.get_url(location) - if not repo.lower().startswith('hg:'): - repo = 'hg+' + repo - egg_project_name = dist.egg_name().split('-', 1)[0] - if not repo: - return None - current_rev = self.get_revision(location) - current_rev_hash = self.get_revision_hash(location) - tag_revs = self.get_tag_revs(location) - branch_revs = self.get_branch_revs(location) - if current_rev in tag_revs: - # It's a tag - full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev]) - elif current_rev in branch_revs: - # It's the tip of a branch - full_egg_name = '%s-%s' % (dist.egg_name(), branch_revs[current_rev]) - else: - full_egg_name = '%s-dev' % dist.egg_name() - return '%s@%s#egg=%s' % (repo, current_rev_hash, full_egg_name) - -vcs.register(Mercurial) diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/subversion.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/subversion.py deleted file mode 100755 index 85715d97..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/subversion.py +++ /dev/null @@ -1,260 +0,0 @@ -import os -import re -from pip import call_subprocess -from pip.index import Link -from pip.util import rmtree, display_path -from pip.log import logger -from pip.vcs import vcs, VersionControl - -_svn_xml_url_re = re.compile('url="([^"]+)"') -_svn_rev_re = re.compile('committed-rev="(\d+)"') -_svn_url_re = re.compile(r'URL: (.+)') -_svn_revision_re = re.compile(r'Revision: (.+)') - - -class Subversion(VersionControl): - name = 'svn' - dirname = '.svn' - repo_name = 'checkout' - schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https') - bundle_file = 'svn-checkout.txt' - guide = ('# This was an svn checkout; to make it a checkout again run:\n' - 'svn checkout --force -r %(rev)s %(url)s .\n') - - def get_info(self, location): - """Returns (url, revision), where both are strings""" - assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location - output = call_subprocess( - [self.cmd, 'info', location], show_stdout=False, extra_environ={'LANG': 'C'}) - match = 
_svn_url_re.search(output) - if not match: - logger.warn('Cannot determine URL of svn checkout %s' % display_path(location)) - logger.info('Output that cannot be parsed: \n%s' % output) - return None, None - url = match.group(1).strip() - match = _svn_revision_re.search(output) - if not match: - logger.warn('Cannot determine revision of svn checkout %s' % display_path(location)) - logger.info('Output that cannot be parsed: \n%s' % output) - return url, None - return url, match.group(1) - - def parse_vcs_bundle_file(self, content): - for line in content.splitlines(): - if not line.strip() or line.strip().startswith('#'): - continue - match = re.search(r'^-r\s*([^ ])?', line) - if not match: - return None, None - rev = match.group(1) - rest = line[match.end():].strip().split(None, 1)[0] - return rest, rev - return None, None - - def unpack(self, location): - """Check out the svn repository at the url to the destination location""" - url, rev = self.get_url_rev() - logger.notify('Checking out svn repository %s to %s' % (url, location)) - logger.indent += 2 - try: - if os.path.exists(location): - # Subversion doesn't like to check out over an existing directory - # --force fixes this, but was only added in svn 1.5 - rmtree(location) - call_subprocess( - [self.cmd, 'checkout', url, location], - filter_stdout=self._filter, show_stdout=False) - finally: - logger.indent -= 2 - - def export(self, location): - """Export the svn repository at the url to the destination location""" - url, rev = self.get_url_rev() - logger.notify('Exporting svn repository %s to %s' % (url, location)) - logger.indent += 2 - try: - if os.path.exists(location): - # Subversion doesn't like to check out over an existing directory - # --force fixes this, but was only added in svn 1.5 - rmtree(location) - call_subprocess( - [self.cmd, 'export', url, location], - filter_stdout=self._filter, show_stdout=False) - finally: - logger.indent -= 2 - - def switch(self, dest, url, rev_options): - call_subprocess( - [self.cmd, 'switch'] + rev_options + [url, dest]) - - def update(self, dest, rev_options): - call_subprocess( - [self.cmd, 'update'] + rev_options + [dest]) - - def obtain(self, dest): - url, rev = self.get_url_rev() - if rev: - rev_options = ['-r', rev] - rev_display = ' (to revision %s)' % rev - else: - rev_options = [] - rev_display = '' - if self.check_destination(dest, url, rev_options, rev_display): - logger.notify('Checking out %s%s to %s' - % (url, rev_display, display_path(dest))) - call_subprocess( - [self.cmd, 'checkout', '-q'] + rev_options + [url, dest]) - - def get_location(self, dist, dependency_links): - for url in dependency_links: - egg_fragment = Link(url).egg_fragment - if not egg_fragment: - continue - if '-' in egg_fragment: - ## FIXME: will this work when a package has - in the name? - key = '-'.join(egg_fragment.split('-')[:-1]).lower() - else: - key = egg_fragment - if key == dist.key: - return url.split('#', 1)[0] - return None - - def get_revision(self, location): - """ - Return the maximum revision for all files under a given location - """ - # Note: taken from setuptools.command.egg_info - revision = 0 - - for base, dirs, files in os.walk(location): - if self.dirname not in dirs: - dirs[:] = [] - continue # no sense walking uncontrolled subdirs - dirs.remove(self.dirname) - entries_fn = os.path.join(base, self.dirname, 'entries') - if not os.path.exists(entries_fn): - ## FIXME: should we warn? 
- continue - f = open(entries_fn) - data = f.read() - f.close() - - if data.startswith('8') or data.startswith('9') or data.startswith('10'): - data = map(str.splitlines, data.split('\n\x0c\n')) - del data[0][0] # get rid of the '8' - dirurl = data[0][3] - revs = [int(d[9]) for d in data if len(d)>9 and d[9]]+[0] - if revs: - localrev = max(revs) - else: - localrev = 0 - elif data.startswith('<?xml'): - dirurl = _svn_xml_url_re.search(data).group(1) # get repository URL - revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)]+[0] - if revs: - localrev = max(revs) - else: - localrev = 0 - else: - logger.warn("Unrecognized .svn/entries format; skipping %s", base) - dirs[:] = [] - continue - if base == location: - base_url = dirurl+'/' # save the root url - elif not dirurl.startswith(base_url): - dirs[:] = [] - continue # not part of the same svn tree, skip it - revision = max(revision, localrev) - return revision - - def get_url_rev(self): - # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it - url, rev = super(Subversion, self).get_url_rev() - if url.startswith('ssh://'): - url = 'svn+' + url - return url, rev - - def get_url(self, location): - # In cases where the source is in a subdirectory, not alongside setup.py - # we have to look up in the location until we find a real setup.py - orig_location = location - while not os.path.exists(os.path.join(location, 'setup.py')): - last_location = location - location = os.path.dirname(location) - if location == last_location: - # We've traversed up to the root of the filesystem without finding setup.py - logger.warn("Could not find setup.py for directory %s (tried all parent directories)" - % orig_location) - return None - f = open(os.path.join(location, self.dirname, 'entries')) - data = f.read() - f.close() - if data.startswith('8') or data.startswith('9') or data.startswith('10'): - data = map(str.splitlines, data.split('\n\x0c\n')) - del data[0][0] # get rid of the '8' - return data[0][3] - elif data.startswith('<?xml'): - match = _svn_xml_url_re.search(data) - if not match: - raise ValueError('Badly formatted data: %r' % data) - return match.group(1) # get repository URL - else: - logger.warn("Unrecognized .svn/entries format in %s" % location) - # Or raise exception? - return None - - def get_tag_revs(self, svn_tag_url): - stdout = call_subprocess( - [self.cmd, 'ls', '-v', svn_tag_url], show_stdout=False) - results = [] - for line in stdout.splitlines(): - parts = line.split() - rev = int(parts[0]) - tag = parts[-1].strip('/') - results.append((tag, rev)) - return results - - def find_tag_match(self, rev, tag_revs): - best_match_rev = None - best_tag = None - for tag, tag_rev in tag_revs: - if (tag_rev > rev and - (best_match_rev is None or best_match_rev > tag_rev)): - # FIXME: Is best_match > tag_rev really possible? - # or is it a sign something is wacky? - best_match_rev = tag_rev - best_tag = tag - return best_tag - - def get_src_requirement(self, dist, location, find_tags=False): - repo = self.get_url(location) - if repo is None: - return None - parts = repo.split('/') - ## FIXME: why not project name? - egg_project_name = dist.egg_name().split('-', 1)[0] - rev = self.get_revision(location) - if parts[-2] in ('tags', 'tag'): - # It's a tag, perfect! 
- full_egg_name = '%s-%s' % (egg_project_name, parts[-1]) - elif parts[-2] in ('branches', 'branch'): - # It's a branch :( - full_egg_name = '%s-%s-r%s' % (dist.egg_name(), parts[-1], rev) - elif parts[-1] == 'trunk': - # Trunk :-/ - full_egg_name = '%s-dev_r%s' % (dist.egg_name(), rev) - if find_tags: - tag_url = '/'.join(parts[:-1]) + '/tags' - tag_revs = self.get_tag_revs(tag_url) - match = self.find_tag_match(rev, tag_revs) - if match: - logger.notify('trunk checkout %s seems to be equivalent to tag %s' % match) - repo = '%s/%s' % (tag_url, match) - full_egg_name = '%s-%s' % (egg_project_name, match) - else: - # Don't know what it is - logger.warn('svn URL does not fit normal structure (tags/branches/trunk): %s' % repo) - full_egg_name = '%s-dev_r%s' % (egg_project_name, rev) - return 'svn+%s@%s#egg=%s' % (repo, rev, full_egg_name) - -vcs.register(Subversion) diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/venv.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/venv.py deleted file mode 100755 index 708abb05..00000000 --- a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/venv.py +++ /dev/null @@ -1,53 +0,0 @@ -"""Tools for working with virtualenv environments""" - -import os -import sys -import subprocess -from pip.exceptions import BadCommand -from pip.log import logger - - -def restart_in_venv(venv, base, site_packages, args): - """ - Restart this script using the interpreter in the given virtual environment - """ - if base and not os.path.isabs(venv) and not venv.startswith('~'): - base = os.path.expanduser(base) - # ensure we have an abs basepath at this point: - # a relative one makes no sense (or does it?) - if os.path.isabs(base): - venv = os.path.join(base, venv) - - if venv.startswith('~'): - venv = os.path.expanduser(venv) - - if not os.path.exists(venv): - try: - import virtualenv - except ImportError: - print 'The virtual environment does not exist: %s' % venv - print 'and virtualenv is not installed, so a new environment cannot be created' - sys.exit(3) - print 'Creating new virtualenv environment in %s' % venv - virtualenv.logger = logger - logger.indent += 2 - virtualenv.create_environment(venv, site_packages=site_packages) - if sys.platform == 'win32': - python = os.path.join(venv, 'Scripts', 'python.exe') - # check for bin directory which is used in buildouts - if not os.path.exists(python): - python = os.path.join(venv, 'bin', 'python.exe') - else: - python = os.path.join(venv, 'bin', 'python') - if not os.path.exists(python): - python = venv - if not os.path.exists(python): - raise BadCommand('Cannot find virtual environment interpreter at %s' % python) - base = os.path.dirname(os.path.dirname(python)) - file = os.path.join(os.path.dirname(__file__), 'runner.py') - if file.endswith('.pyc'): - file = file[:-1] - proc = subprocess.Popen( - [python, file] + args + [base, '___VENV_RESTART___']) - proc.wait() - sys.exit(proc.returncode) diff --git a/lib/python2.7/site-packages/setuptools-0.6c11-py2.7.egg-info b/lib/python2.7/site-packages/setuptools-0.6c11-py2.7.egg-info deleted file mode 100644 index 0e358148..00000000 --- a/lib/python2.7/site-packages/setuptools-0.6c11-py2.7.egg-info +++ /dev/null @@ -1,9 +0,0 @@ -Metadata-Version: 1.0 -Name: setuptools -Version: 0.6c11 -Summary: xxxx -Home-page: xxx -Author: xxx -Author-email: xxx -License: xxx -Description: xxx diff --git a/lib/python2.7/site-packages/setuptools.pth b/lib/python2.7/site-packages/setuptools.pth deleted file mode 100644 index 0f744252..00000000 --- 
a/lib/python2.7/site-packages/setuptools.pth +++ /dev/null @@ -1 +0,0 @@ -./distribute-0.6.14-py2.7.egg diff --git a/lib/python2.7/site.py b/lib/python2.7/site.py deleted file mode 100644 index a49cfc34..00000000 --- a/lib/python2.7/site.py +++ /dev/null @@ -1,713 +0,0 @@ -"""Append module search paths for third-party packages to sys.path. - -**************************************************************** -* This module is automatically imported during initialization. * -**************************************************************** - -In earlier versions of Python (up to 1.5a3), scripts or modules that -needed to use site-specific modules would place ``import site'' -somewhere near the top of their code. Because of the automatic -import, this is no longer necessary (but code that does it still -works). - -This will append site-specific paths to the module search path. On -Unix, it starts with sys.prefix and sys.exec_prefix (if different) and -appends lib/python<version>/site-packages as well as lib/site-python. -It also supports the Debian convention of -lib/python<version>/dist-packages. On other platforms (mainly Mac and -Windows), it uses just sys.prefix (and sys.exec_prefix, if different, -but this is unlikely). The resulting directories, if they exist, are -appended to sys.path, and also inspected for path configuration files. - -FOR DEBIAN, this sys.path is augmented with directories in /usr/local. -Local addons go into /usr/local/lib/python<version>/site-packages -(resp. /usr/local/lib/site-python), Debian addons install into -/usr/{lib,share}/python<version>/dist-packages. - -A path configuration file is a file whose name has the form -<package>.pth; its contents are additional directories (one per line) -to be added to sys.path. Non-existing directories (or -non-directories) are never added to sys.path; no directory is added to -sys.path more than once. Blank lines and lines beginning with -'#' are skipped. Lines starting with 'import' are executed. - -For example, suppose sys.prefix and sys.exec_prefix are set to -/usr/local and there is a directory /usr/local/lib/python2.X/site-packages -with three subdirectories, foo, bar and spam, and two path -configuration files, foo.pth and bar.pth. Assume foo.pth contains the -following: - - # foo package configuration - foo - bar - bletch - -and bar.pth contains: - - # bar package configuration - bar - -Then the following directories are added to sys.path, in this order: - - /usr/local/lib/python2.X/site-packages/bar - /usr/local/lib/python2.X/site-packages/foo - -Note that bletch is omitted because it doesn't exist; bar precedes foo -because bar.pth comes alphabetically before foo.pth; and spam is -omitted because it is not mentioned in either path configuration file. - -After these path manipulations, an attempt is made to import a module -named sitecustomize, which can perform arbitrary additional -site-specific customizations. If this import fails with an -ImportError exception, it is silently ignored. 
- -""" - -import sys -import os -import __builtin__ -try: - set -except NameError: - from sets import Set as set - -# Prefixes for site-packages; add additional prefixes like /usr/local here -PREFIXES = [sys.prefix, sys.exec_prefix] -# Enable per user site-packages directory -# set it to False to disable the feature or True to force the feature -ENABLE_USER_SITE = None -# for distutils.commands.install -USER_SITE = None -USER_BASE = None - -_is_pypy = hasattr(sys, 'pypy_version_info') -_is_jython = sys.platform[:4] == 'java' -if _is_jython: - ModuleType = type(os) - -def makepath(*paths): - dir = os.path.join(*paths) - if _is_jython and (dir == '__classpath__' or - dir.startswith('__pyclasspath__')): - return dir, dir - dir = os.path.abspath(dir) - return dir, os.path.normcase(dir) - -def abs__file__(): - """Set all module' __file__ attribute to an absolute path""" - for m in sys.modules.values(): - if ((_is_jython and not isinstance(m, ModuleType)) or - hasattr(m, '__loader__')): - # only modules need the abspath in Jython. and don't mess - # with a PEP 302-supplied __file__ - continue - f = getattr(m, '__file__', None) - if f is None: - continue - m.__file__ = os.path.abspath(f) - -def removeduppaths(): - """ Remove duplicate entries from sys.path along with making them - absolute""" - # This ensures that the initial path provided by the interpreter contains - # only absolute pathnames, even if we're running from the build directory. - L = [] - known_paths = set() - for dir in sys.path: - # Filter out duplicate paths (on case-insensitive file systems also - # if they only differ in case); turn relative paths into absolute - # paths. - dir, dircase = makepath(dir) - if not dircase in known_paths: - L.append(dir) - known_paths.add(dircase) - sys.path[:] = L - return known_paths - -# XXX This should not be part of site.py, since it is needed even when -# using the -S option for Python. 
See http://www.python.org/sf/586680 -def addbuilddir(): - """Append ./build/lib.<platform> in case we're running in the build dir - (especially for Guido :-)""" - from distutils.util import get_platform - s = "build/lib.%s-%.3s" % (get_platform(), sys.version) - if hasattr(sys, 'gettotalrefcount'): - s += '-pydebug' - s = os.path.join(os.path.dirname(sys.path[-1]), s) - sys.path.append(s) - -def _init_pathinfo(): - """Return a set containing all existing directory entries from sys.path""" - d = set() - for dir in sys.path: - try: - if os.path.isdir(dir): - dir, dircase = makepath(dir) - d.add(dircase) - except TypeError: - continue - return d - -def addpackage(sitedir, name, known_paths): - """Add a new path to known_paths by combining sitedir and 'name' or execute - sitedir if it starts with 'import'""" - if known_paths is None: - _init_pathinfo() - reset = 1 - else: - reset = 0 - fullname = os.path.join(sitedir, name) - try: - f = open(fullname, "rU") - except IOError: - return - try: - for line in f: - if line.startswith("#"): - continue - if line.startswith("import"): - exec line - continue - line = line.rstrip() - dir, dircase = makepath(sitedir, line) - if not dircase in known_paths and os.path.exists(dir): - sys.path.append(dir) - known_paths.add(dircase) - finally: - f.close() - if reset: - known_paths = None - return known_paths - -def addsitedir(sitedir, known_paths=None): - """Add 'sitedir' argument to sys.path if missing and handle .pth files in - 'sitedir'""" - if known_paths is None: - known_paths = _init_pathinfo() - reset = 1 - else: - reset = 0 - sitedir, sitedircase = makepath(sitedir) - if not sitedircase in known_paths: - sys.path.append(sitedir) # Add path component - try: - names = os.listdir(sitedir) - except os.error: - return - names.sort() - for name in names: - if name.endswith(os.extsep + "pth"): - addpackage(sitedir, name, known_paths) - if reset: - known_paths = None - return known_paths - -def addsitepackages(known_paths, sys_prefix=sys.prefix, exec_prefix=sys.exec_prefix): - """Add site-packages (and possibly site-python) to sys.path""" - prefixes = [os.path.join(sys_prefix, "local"), sys_prefix] - if exec_prefix != sys_prefix: - prefixes.append(os.path.join(exec_prefix, "local")) - - for prefix in prefixes: - if prefix: - if sys.platform in ('os2emx', 'riscos') or _is_jython: - sitedirs = [os.path.join(prefix, "Lib", "site-packages")] - elif _is_pypy: - sitedirs = [os.path.join(prefix, 'site-packages')] - elif sys.platform == 'darwin' and prefix == sys_prefix: - - if prefix.startswith("/System/Library/Frameworks/"): # Apple's Python - - sitedirs = [os.path.join("/Library/Python", sys.version[:3], "site-packages"), - os.path.join(prefix, "Extras", "lib", "python")] - - else: # any other Python distros on OSX work this way - sitedirs = [os.path.join(prefix, "lib", - "python" + sys.version[:3], "site-packages")] - - elif os.sep == '/': - sitedirs = [os.path.join(prefix, - "lib", - "python" + sys.version[:3], - "site-packages"), - os.path.join(prefix, "lib", "site-python"), - os.path.join(prefix, "python" + sys.version[:3], "lib-dynload")] - lib64_dir = os.path.join(prefix, "lib64", "python" + sys.version[:3], "site-packages") - if (os.path.exists(lib64_dir) and - os.path.realpath(lib64_dir) not in [os.path.realpath(p) for p in sitedirs]): - sitedirs.append(lib64_dir) - try: - # sys.getobjects only available in --with-pydebug build - sys.getobjects - sitedirs.insert(0, os.path.join(sitedirs[0], 'debug')) - except AttributeError: - pass - # Debian-specific 
dist-packages directories: - sitedirs.append(os.path.join(prefix, "lib", - "python" + sys.version[:3], - "dist-packages")) - sitedirs.append(os.path.join(prefix, "local/lib", - "python" + sys.version[:3], - "dist-packages")) - sitedirs.append(os.path.join(prefix, "lib", "dist-python")) - else: - sitedirs = [prefix, os.path.join(prefix, "lib", "site-packages")] - if sys.platform == 'darwin': - # for framework builds *only* we add the standard Apple - # locations. Currently only per-user, but /Library and - # /Network/Library could be added too - if 'Python.framework' in prefix: - home = os.environ.get('HOME') - if home: - sitedirs.append( - os.path.join(home, - 'Library', - 'Python', - sys.version[:3], - 'site-packages')) - for sitedir in sitedirs: - if os.path.isdir(sitedir): - addsitedir(sitedir, known_paths) - return None - -def check_enableusersite(): - """Check if user site directory is safe for inclusion - - The function tests for the command line flag (including environment var), - process uid/gid equal to effective uid/gid. - - None: Disabled for security reasons - False: Disabled by user (command line option) - True: Safe and enabled - """ - if hasattr(sys, 'flags') and getattr(sys.flags, 'no_user_site', False): - return False - - if hasattr(os, "getuid") and hasattr(os, "geteuid"): - # check process uid == effective uid - if os.geteuid() != os.getuid(): - return None - if hasattr(os, "getgid") and hasattr(os, "getegid"): - # check process gid == effective gid - if os.getegid() != os.getgid(): - return None - - return True - -def addusersitepackages(known_paths): - """Add a per user site-package to sys.path - - Each user has its own python directory with site-packages in the - home directory. - - USER_BASE is the root directory for all Python versions - - USER_SITE is the user specific site-packages directory - - USER_SITE/.. can be used for data. - """ - global USER_BASE, USER_SITE, ENABLE_USER_SITE - env_base = os.environ.get("PYTHONUSERBASE", None) - - def joinuser(*args): - return os.path.expanduser(os.path.join(*args)) - - #if sys.platform in ('os2emx', 'riscos'): - # # Don't know what to put here - # USER_BASE = '' - # USER_SITE = '' - if os.name == "nt": - base = os.environ.get("APPDATA") or "~" - if env_base: - USER_BASE = env_base - else: - USER_BASE = joinuser(base, "Python") - USER_SITE = os.path.join(USER_BASE, - "Python" + sys.version[0] + sys.version[2], - "site-packages") - else: - if env_base: - USER_BASE = env_base - else: - USER_BASE = joinuser("~", ".local") - USER_SITE = os.path.join(USER_BASE, "lib", - "python" + sys.version[:3], - "site-packages") - - if ENABLE_USER_SITE and os.path.isdir(USER_SITE): - addsitedir(USER_SITE, known_paths) - if ENABLE_USER_SITE: - for dist_libdir in ("lib", "local/lib"): - user_site = os.path.join(USER_BASE, dist_libdir, - "python" + sys.version[:3], - "dist-packages") - if os.path.isdir(user_site): - addsitedir(user_site, known_paths) - return known_paths - - - -def setBEGINLIBPATH(): - """The OS/2 EMX port has optional extension modules that do double duty - as DLLs (and must use the .DLL file extension) for other extensions. - The library search path needs to be amended so these will be found - during module import. Use BEGINLIBPATH so that these are at the start - of the library search path. 
- - """ - dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload") - libpath = os.environ['BEGINLIBPATH'].split(';') - if libpath[-1]: - libpath.append(dllpath) - else: - libpath[-1] = dllpath - os.environ['BEGINLIBPATH'] = ';'.join(libpath) - - -def setquit(): - """Define new built-ins 'quit' and 'exit'. - These are simply strings that display a hint on how to exit. - - """ - if os.sep == ':': - eof = 'Cmd-Q' - elif os.sep == '\\': - eof = 'Ctrl-Z plus Return' - else: - eof = 'Ctrl-D (i.e. EOF)' - - class Quitter(object): - def __init__(self, name): - self.name = name - def __repr__(self): - return 'Use %s() or %s to exit' % (self.name, eof) - def __call__(self, code=None): - # Shells like IDLE catch the SystemExit, but listen when their - # stdin wrapper is closed. - try: - sys.stdin.close() - except: - pass - raise SystemExit(code) - __builtin__.quit = Quitter('quit') - __builtin__.exit = Quitter('exit') - - -class _Printer(object): - """interactive prompt objects for printing the license text, a list of - contributors and the copyright notice.""" - - MAXLINES = 23 - - def __init__(self, name, data, files=(), dirs=()): - self.__name = name - self.__data = data - self.__files = files - self.__dirs = dirs - self.__lines = None - - def __setup(self): - if self.__lines: - return - data = None - for dir in self.__dirs: - for filename in self.__files: - filename = os.path.join(dir, filename) - try: - fp = file(filename, "rU") - data = fp.read() - fp.close() - break - except IOError: - pass - if data: - break - if not data: - data = self.__data - self.__lines = data.split('\n') - self.__linecnt = len(self.__lines) - - def __repr__(self): - self.__setup() - if len(self.__lines) <= self.MAXLINES: - return "\n".join(self.__lines) - else: - return "Type %s() to see the full %s text" % ((self.__name,)*2) - - def __call__(self): - self.__setup() - prompt = 'Hit Return for more, or q (and Return) to quit: ' - lineno = 0 - while 1: - try: - for i in range(lineno, lineno + self.MAXLINES): - print self.__lines[i] - except IndexError: - break - else: - lineno += self.MAXLINES - key = None - while key is None: - key = raw_input(prompt) - if key not in ('', 'q'): - key = None - if key == 'q': - break - -def setcopyright(): - """Set 'copyright' and 'credits' in __builtin__""" - __builtin__.copyright = _Printer("copyright", sys.copyright) - if _is_jython: - __builtin__.credits = _Printer( - "credits", - "Jython is maintained by the Jython developers (www.jython.org).") - elif _is_pypy: - __builtin__.credits = _Printer( - "credits", - "PyPy is maintained by the PyPy developers: http://codespeak.net/pypy") - else: - __builtin__.credits = _Printer("credits", """\ - Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands - for supporting Python development. See www.python.org for more information.""") - here = os.path.dirname(os.__file__) - __builtin__.license = _Printer( - "license", "See http://www.python.org/%.3s/license.html" % sys.version, - ["LICENSE.txt", "LICENSE"], - [os.path.join(here, os.pardir), here, os.curdir]) - - -class _Helper(object): - """Define the built-in 'help'. - This is a wrapper around pydoc.help (with a twist). - - """ - - def __repr__(self): - return "Type help() for interactive help, " \ - "or help(object) for help about object." 
- def __call__(self, *args, **kwds): - import pydoc - return pydoc.help(*args, **kwds) - -def sethelper(): - __builtin__.help = _Helper() - -def aliasmbcs(): - """On Windows, some default encodings are not provided by Python, - while they are always available as "mbcs" in each locale. Make - them usable by aliasing to "mbcs" in such a case.""" - if sys.platform == 'win32': - import locale, codecs - enc = locale.getdefaultlocale()[1] - if enc.startswith('cp'): # "cp***" ? - try: - codecs.lookup(enc) - except LookupError: - import encodings - encodings._cache[enc] = encodings._unknown - encodings.aliases.aliases[enc] = 'mbcs' - -def setencoding(): - """Set the string encoding used by the Unicode implementation. The - default is 'ascii', but if you're willing to experiment, you can - change this.""" - encoding = "ascii" # Default value set by _PyUnicode_Init() - if 0: - # Enable to support locale aware default string encodings. - import locale - loc = locale.getdefaultlocale() - if loc[1]: - encoding = loc[1] - if 0: - # Enable to switch off string to Unicode coercion and implicit - # Unicode to string conversion. - encoding = "undefined" - if encoding != "ascii": - # On Non-Unicode builds this will raise an AttributeError... - sys.setdefaultencoding(encoding) # Needs Python Unicode build ! - - -def execsitecustomize(): - """Run custom site specific code, if available.""" - try: - import sitecustomize - except ImportError: - pass - -def virtual_install_main_packages(): - f = open(os.path.join(os.path.dirname(__file__), 'orig-prefix.txt')) - sys.real_prefix = f.read().strip() - f.close() - pos = 2 - if sys.path[0] == '': - pos += 1 - if sys.platform == 'win32': - paths = [os.path.join(sys.real_prefix, 'Lib'), os.path.join(sys.real_prefix, 'DLLs')] - elif _is_jython: - paths = [os.path.join(sys.real_prefix, 'Lib')] - elif _is_pypy: - cpyver = '%d.%d.%d' % sys.version_info[:3] - paths = [os.path.join(sys.real_prefix, 'lib_pypy'), - os.path.join(sys.real_prefix, 'lib-python', 'modified-%s' % cpyver), - os.path.join(sys.real_prefix, 'lib-python', cpyver)] - else: - paths = [os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3])] - lib64_path = os.path.join(sys.real_prefix, 'lib64', 'python'+sys.version[:3]) - if os.path.exists(lib64_path): - paths.append(lib64_path) - # This is hardcoded in the Python executable, but relative to sys.prefix: - plat_path = os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3], - 'plat-%s' % sys.platform) - if os.path.exists(plat_path): - paths.append(plat_path) - # This is hardcoded in the Python executable, but - # relative to sys.prefix, so we have to fix up: - for path in list(paths): - tk_dir = os.path.join(path, 'lib-tk') - if os.path.exists(tk_dir): - paths.append(tk_dir) - - # These are hardcoded in the Apple's Python executable, - # but relative to sys.prefix, so we have to fix them up: - if sys.platform == 'darwin': - hardcoded_paths = [os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3], module) - for module in ('plat-darwin', 'plat-mac', 'plat-mac/lib-scriptpackages')] - - for path in hardcoded_paths: - if os.path.exists(path): - paths.append(path) - - sys.path.extend(paths) - -def force_global_eggs_after_local_site_packages(): - """ - Force easy_installed eggs in the global environment to get placed - in sys.path after all packages inside the virtualenv. This - maintains the "least surprise" result that packages in the - virtualenv always mask global packages, never the other way - around. 
- - """ - egginsert = getattr(sys, '__egginsert', 0) - for i, path in enumerate(sys.path): - if i > egginsert and path.startswith(sys.prefix): - egginsert = i - sys.__egginsert = egginsert + 1 - -def virtual_addsitepackages(known_paths): - force_global_eggs_after_local_site_packages() - return addsitepackages(known_paths, sys_prefix=sys.real_prefix) - -def fixclasspath(): - """Adjust the special classpath sys.path entries for Jython. These - entries should follow the base virtualenv lib directories. - """ - paths = [] - classpaths = [] - for path in sys.path: - if path == '__classpath__' or path.startswith('__pyclasspath__'): - classpaths.append(path) - else: - paths.append(path) - sys.path = paths - sys.path.extend(classpaths) - -def execusercustomize(): - """Run custom user specific code, if available.""" - try: - import usercustomize - except ImportError: - pass - - -def main(): - global ENABLE_USER_SITE - virtual_install_main_packages() - abs__file__() - paths_in_sys = removeduppaths() - if (os.name == "posix" and sys.path and - os.path.basename(sys.path[-1]) == "Modules"): - addbuilddir() - if _is_jython: - fixclasspath() - GLOBAL_SITE_PACKAGES = not os.path.exists(os.path.join(os.path.dirname(__file__), 'no-global-site-packages.txt')) - if not GLOBAL_SITE_PACKAGES: - ENABLE_USER_SITE = False - if ENABLE_USER_SITE is None: - ENABLE_USER_SITE = check_enableusersite() - paths_in_sys = addsitepackages(paths_in_sys) - paths_in_sys = addusersitepackages(paths_in_sys) - if GLOBAL_SITE_PACKAGES: - paths_in_sys = virtual_addsitepackages(paths_in_sys) - if sys.platform == 'os2emx': - setBEGINLIBPATH() - setquit() - setcopyright() - sethelper() - aliasmbcs() - setencoding() - execsitecustomize() - if ENABLE_USER_SITE: - execusercustomize() - # Remove sys.setdefaultencoding() so that users cannot change the - # encoding after initialization. The test for presence is needed when - # this module is run as a script, because this code is executed twice. - if hasattr(sys, "setdefaultencoding"): - del sys.setdefaultencoding - -main() - -def _script(): - help = """\ - %s [--user-base] [--user-site] - - Without arguments print some useful information - With arguments print the value of USER_BASE and/or USER_SITE separated - by '%s'. 
- - Exit codes with --user-base or --user-site: - 0 - user site directory is enabled - 1 - user site directory is disabled by user - 2 - uses site directory is disabled by super user - or for security reasons - >2 - unknown error - """ - args = sys.argv[1:] - if not args: - print "sys.path = [" - for dir in sys.path: - print " %r," % (dir,) - print "]" - def exists(path): - if os.path.isdir(path): - return "exists" - else: - return "doesn't exist" - print "USER_BASE: %r (%s)" % (USER_BASE, exists(USER_BASE)) - print "USER_SITE: %r (%s)" % (USER_SITE, exists(USER_BASE)) - print "ENABLE_USER_SITE: %r" % ENABLE_USER_SITE - sys.exit(0) - - buffer = [] - if '--user-base' in args: - buffer.append(USER_BASE) - if '--user-site' in args: - buffer.append(USER_SITE) - - if buffer: - print os.pathsep.join(buffer) - if ENABLE_USER_SITE: - sys.exit(0) - elif ENABLE_USER_SITE is False: - sys.exit(1) - elif ENABLE_USER_SITE is None: - sys.exit(2) - else: - sys.exit(3) - else: - import textwrap - print textwrap.dedent(help % (sys.argv[0], os.pathsep)) - sys.exit(10) - -if __name__ == '__main__': - _script() diff --git a/lib/python2.7/sre.py b/lib/python2.7/sre.py deleted file mode 120000 index 27f81b93..00000000 --- a/lib/python2.7/sre.py +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/sre.py \ No newline at end of file diff --git a/lib/python2.7/sre_compile.py b/lib/python2.7/sre_compile.py deleted file mode 120000 index dce5da4c..00000000 --- a/lib/python2.7/sre_compile.py +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/sre_compile.py \ No newline at end of file diff --git a/lib/python2.7/sre_constants.py b/lib/python2.7/sre_constants.py deleted file mode 120000 index b9c9797e..00000000 --- a/lib/python2.7/sre_constants.py +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/sre_constants.py \ No newline at end of file diff --git a/lib/python2.7/sre_parse.py b/lib/python2.7/sre_parse.py deleted file mode 120000 index f33a572f..00000000 --- a/lib/python2.7/sre_parse.py +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/sre_parse.py \ No newline at end of file diff --git a/lib/python2.7/stat.py b/lib/python2.7/stat.py deleted file mode 120000 index c1d654c1..00000000 --- a/lib/python2.7/stat.py +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/stat.py \ No newline at end of file diff --git a/lib/python2.7/types.py b/lib/python2.7/types.py deleted file mode 120000 index 55464783..00000000 --- a/lib/python2.7/types.py +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/types.py \ No newline at end of file diff --git a/lib/python2.7/warnings.py b/lib/python2.7/warnings.py deleted file mode 120000 index a9c47309..00000000 --- a/lib/python2.7/warnings.py +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/python2.7/warnings.py \ No newline at end of file -- cgit v1.2.3-54-g00ecf From ac34e358103ee718369692b9ba5afe6830a1df92 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Wed, 30 Nov 2011 12:25:54 -0600 Subject: reporead: fix filesonly needs update checks This was broken after the select for update changes. We really should split the whole filesonly update into another method instead of the current shotgun approach with conditionals everywhere. 
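The check being installed ahead of the select-for-update call reduces to
the following standalone helper (a sketch for illustration only; the
patch keeps the logic inline in reporead.py and defines no such
function):

    def files_need_update(dbpkg):
        # If either timestamp is missing we cannot prove the file list
        # is current, so err on the side of re-importing it.
        if not dbpkg.files_last_update or not dbpkg.last_update:
            return True
        # Files were imported after the last package update, so there
        # is nothing to do.
        if dbpkg.files_last_update > dbpkg.last_update:
            return False
        return True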
Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/management/commands/reporead.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py index cf101d97..f8cc2034 100644 --- a/devel/management/commands/reporead.py +++ b/devel/management/commands/reporead.py @@ -382,11 +382,13 @@ def db_update(archname, reponame, pkgs, options): # files/depends/all related items to be double-imported. if filesonly: with transaction.commit_on_success(): + if not dbpkg.files_last_update or not dbpkg.last_update: + pass + elif dbpkg.files_last_update > dbpkg.last_update: + logger.debug("Files for %s are up to date", pkg.name) + continue # TODO Django 1.4 select_for_update() will work once released dbpkg = select_pkg_for_update(dbpkg) - if pkg_same_version(pkg, dbpkg): - logger.debug("Package %s was already updated", pkg.name) - continue logger.debug("Checking files for package %s", pkg.name) populate_files(dbpkg, pkg, force=force) else: -- cgit v1.2.3-54-g00ecf From 9279d9a04cc99691ab6c7fbafe7db03a757d23e8 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Wed, 30 Nov 2011 12:27:57 -0600 Subject: Fix minor validation error Signed-off-by: Dan McGee <dan@archlinux.org> --- templates/devel/profile.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/templates/devel/profile.html b/templates/devel/profile.html index 2f16801b..b6580ab8 100644 --- a/templates/devel/profile.html +++ b/templates/devel/profile.html @@ -6,7 +6,7 @@ <h2>Developer Profile</h2> - <form id="edit-profile-form" enctype="multipart/form-data" method="post">{% csrf_token %} + <form id="edit-profile-form" enctype="multipart/form-data" method="post" action="">{% csrf_token %} <p><em>Note:</em> This is the public information shown on the developer and/or TU profiles page, so please be appropriate with the information you provide here.</p> -- cgit v1.2.3-54-g00ecf From b05eae96a4be8990a1de81795013587ded7ed3aa Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Wed, 30 Nov 2011 12:28:11 -0600 Subject: Add more precise sort for last update signoff column This allows sorting by the exact time recorded in the database, even if we don't show it directly to the user, which makes finding the most recent updates a lot easier. 
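The template tags each cell with a machine-readable key via
class="epoch-{{ group.last_update|date:'U' }}", where the 'U' format
specifier renders seconds since the Unix epoch; the new tablesorter
parser then strips the 'epoch-' prefix and sorts the remainder
numerically. A rough Python equivalent of the value that ends up in the
class attribute (assuming a naive UTC datetime, purely for
illustration):

    import calendar
    from datetime import datetime

    def epoch_class(last_update):
        # Same value the template's date:'U' filter would emit.
        return 'epoch-%d' % calendar.timegm(last_update.utctimetuple())

    print epoch_class(datetime(2011, 11, 30, 12, 28, 11))
    # epoch-1322656091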
Signed-off-by: Dan McGee <dan@archlinux.org> --- media/archweb.js | 13 +++++++++++++ templates/packages/signoffs.html | 4 ++-- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/media/archweb.js b/media/archweb.js index 4f098c7d..151d0f81 100644 --- a/media/archweb.js +++ b/media/archweb.js @@ -54,6 +54,19 @@ if (typeof $.tablesorter !== 'undefined') { }, type: 'numeric' }); + $.tablesorter.addParser({ + id: 'epochdate', + is: function(s) { return false; }, + format: function(s, t, c) { + /* TODO: this assumes our magic class is the only one */ + var epoch = $(c).attr('class'); + if (!epoch.indexOf('epoch-') == 0) { + return 0; + } + return epoch.slice(6); + }, + type: 'numeric' + }); $.tablesorter.addParser({ id: 'longDateTime', re: /^(\d{4})-(\d{2})-(\d{2}) ([012]\d):([0-5]\d)(:([0-5]\d))?( (\w+))?$/, diff --git a/templates/packages/signoffs.html b/templates/packages/signoffs.html index bd84289c..b032e656 100644 --- a/templates/packages/signoffs.html +++ b/templates/packages/signoffs.html @@ -56,7 +56,7 @@ <h3>Filter Displayed Signoffs</h3> <td>{{ group.target_repo }}</td> <td>{{ group.packager|default:"Unknown" }}</td> <td>{{ group.packages|length }}</td> - <td>{{ group.last_update|date }}</td> + <td class="epoch-{{ group.last_update|date:'U' }}">{{ group.last_update|date }}</td> {% if group.specification.known_bad %} <td class="approval signoff-bad">Bad</td> {% else %} @@ -85,7 +85,7 @@ <h3>Filter Displayed Signoffs</h3> $(document).ready(function() { $('a.signoff-link').click(signoff_package); $(".results").tablesorter({widgets: ['zebra'], sortList: [[0,0]], - headers: { 7: { sorter: false }, 8: {sorter: false } } }); + headers: { 5: { sorter: 'epochdate' }, 7: { sorter: false }, 8: {sorter: false } } }); $('#signoffs_filter input').change(filter_signoffs); $('#criteria_reset').click(filter_signoffs_reset); // fire function on page load to ensure the current form selections take effect -- cgit v1.2.3-54-g00ecf From dca9fd2ea687b8d20fd5c39c1449846ddef1e3c2 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Wed, 30 Nov 2011 13:04:15 -0600 Subject: reporead: split out filesonly update method This removes a bunch of the conditional logic at a slight cost of some code duplication. However, the methods and madness is now much easier to follow. Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/management/commands/reporead.py | 170 +++++++++++++++++++--------------- 1 file changed, 95 insertions(+), 75 deletions(-) diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py index f8cc2034..e4ba8580 100644 --- a/devel/management/commands/reporead.py +++ b/devel/management/commands/reporead.py @@ -282,40 +282,20 @@ def select_pkg_for_update(dbpkg): return list(new_pkg)[0] -def db_update(archname, reponame, pkgs, options): - """ - Parses a list and updates the Arch dev database accordingly. - - Arguments: - pkgs -- A list of Pkg objects. 
- - """ - logger.info('Updating Arch: %s', archname) - force = options.get('force', False) - filesonly = options.get('filesonly', False) - +def update_common(archname, reponame, pkgs, sanity_check=True): with transaction.commit_manually(): repository = Repo.objects.get(name__iexact=reponame) architecture = Arch.objects.get(name__iexact=archname) # no-arg order_by() removes even the default ordering; we don't need it dbpkgs = Package.objects.filter( arch=architecture, repo=repository).order_by() - # This makes our inner loop where we find packages by name *way* more - # efficient by not having to go to the database for each package to - # SELECT them by name. - dbdict = dict((dbpkg.pkgname, dbpkg) for dbpkg in dbpkgs) - - logger.debug("Creating sets") - dbset = set(dbdict.keys()) - syncset = set([pkg.name for pkg in pkgs]) - logger.info("%d packages in current web DB", len(dbset)) - logger.info("%d packages in new updating db", len(syncset)) - in_sync_not_db = syncset - dbset - logger.info("%d packages in sync not db", len(in_sync_not_db)) + + logger.info("%d packages in current web DB", len(dbpkgs)) + logger.info("%d packages in new updating DB", len(pkgs)) # Try to catch those random package deletions that make Eric so unhappy. - if len(dbset): - dbpercent = 100.0 * len(syncset) / len(dbset) + if len(dbpkgs): + dbpercent = 100.0 * len(pkgs) / len(dbpkgs) else: dbpercent = 0.0 logger.info("DB package ratio: %.1f%%", dbpercent) @@ -324,11 +304,13 @@ def db_update(archname, reponame, pkgs, options): # means we expect the repo to fluctuate a lot. msg = "Package database has %.1f%% the number of packages in the " \ "web database" % dbpercent - if len(dbset) == 0 and len(syncset) == 0: + if not sanity_check: + pass + elif repository.testing or repository.staging: pass - elif not filesonly and \ - len(dbset) > 20 and dbpercent < 50.0 and \ - not repository.testing and not repository.staging: + elif len(dbpkgs) == 0 and len(pkgs) == 0: + pass + elif len(dbpkgs) > 20 and dbpercent < 50.0: logger.error(msg) raise Exception(msg) elif dbpercent < 75.0: @@ -339,27 +321,45 @@ def db_update(archname, reponame, pkgs, options): # to guard against simultaneous updates transaction.commit() - if not filesonly: - # packages in syncdb and not in database (add to database) - for pkg in (pkg for pkg in pkgs if pkg.name in in_sync_not_db): - logger.info("Adding package %s", pkg.name) - dbpkg = Package(pkgname=pkg.name, arch=architecture, repo=repository) - try: - with transaction.commit_on_success(): - populate_pkg(dbpkg, pkg, timestamp=datetime.utcnow()) - except IntegrityError: - logger.warning("Could not add package %s; " - "not fatal if another thread beat us to it.", - pkg.name, exc_info=True) - - # packages in database and not in syncdb (remove from database) - for pkgname in (dbset - syncset): - logger.info("Removing package %s", pkgname) - dbpkg = dbdict[pkgname] + return dbpkgs + +def db_update(archname, reponame, pkgs, force=False): + """ + Parses a list of packages and updates the packages database accordingly. + """ + logger.info('Updating %s (%s)', reponame, archname) + dbpkgs = update_common(archname, reponame, pkgs, True) + + # This makes our inner loop where we find packages by name *way* more + # efficient by not having to go to the database for each package to + # SELECT them by name. 
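+    # Illustration of the set arithmetic used below (comment only; the
+    # package names and values here are hypothetical):
+    #   dbset   = {'pacman', 'linux'}    # currently in the web database
+    #   syncset = {'pacman', 'systemd'}  # currently in the sync database
+    #   syncset - dbset  -> {'systemd'}  # new packages to add
+    #   dbset - syncset  -> {'linux'}    # stale packages to delete
+    #   syncset & dbset  -> {'pacman'}   # packages to compare and update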
+ dbdict = dict((dbpkg.pkgname, dbpkg) for dbpkg in dbpkgs) + + dbset = set(dbdict.keys()) + syncset = set([pkg.name for pkg in pkgs]) + + in_sync_not_db = syncset - dbset + logger.info("%d packages in sync not db", len(in_sync_not_db)) + # packages in syncdb and not in database (add to database) + for pkg in (pkg for pkg in pkgs if pkg.name in in_sync_not_db): + logger.info("Adding package %s", pkg.name) + dbpkg = Package(pkgname=pkg.name, arch=architecture, repo=repository) + try: with transaction.commit_on_success(): - # no race condition here as long as simultaneous threads both - # issue deletes; second delete will be a no-op - dbpkg.delete() + populate_pkg(dbpkg, pkg, timestamp=datetime.utcnow()) + except IntegrityError: + logger.warning("Could not add package %s; " + "not fatal if another thread beat us to it.", + pkg.name, exc_info=True) + + # packages in database and not in syncdb (remove from database) + for pkgname in (dbset - syncset): + logger.info("Removing package %s", pkgname) + dbpkg = dbdict[pkgname] + with transaction.commit_on_success(): + # no race condition here as long as simultaneous threads both + # issue deletes; second delete will be a no-op + dbpkg.delete() # packages in both database and in syncdb (update in database) pkg_in_both = syncset & dbset @@ -369,9 +369,7 @@ def db_update(archname, reponame, pkgs, options): timestamp = None # for a force, we don't want to update the timestamp. # for a non-force, we don't want to do anything at all. - if filesonly: - pass - elif pkg_same_version(pkg, dbpkg): + if pkg_same_version(pkg, dbpkg): if not force: continue else: @@ -380,28 +378,45 @@ def db_update(archname, reponame, pkgs, options): # The odd select_for_update song and dance here are to ensure # simultaneous updates don't happen on a package, causing # files/depends/all related items to be double-imported. - if filesonly: - with transaction.commit_on_success(): - if not dbpkg.files_last_update or not dbpkg.last_update: - pass - elif dbpkg.files_last_update > dbpkg.last_update: - logger.debug("Files for %s are up to date", pkg.name) - continue - # TODO Django 1.4 select_for_update() will work once released - dbpkg = select_pkg_for_update(dbpkg) - logger.debug("Checking files for package %s", pkg.name) - populate_files(dbpkg, pkg, force=force) - else: - with transaction.commit_on_success(): - # TODO Django 1.4 select_for_update() will work once released - dbpkg = select_pkg_for_update(dbpkg) - if pkg_same_version(pkg, dbpkg): - logger.debug("Package %s was already updated", pkg.name) - continue - logger.info("Updating package %s", pkg.name) - populate_pkg(dbpkg, pkg, force=force, timestamp=timestamp) + with transaction.commit_on_success(): + # TODO Django 1.4 select_for_update() will work once released + dbpkg = select_pkg_for_update(dbpkg) + if pkg_same_version(pkg, dbpkg): + logger.debug("Package %s was already updated", pkg.name) + continue + logger.info("Updating package %s", pkg.name) + populate_pkg(dbpkg, pkg, force=force, timestamp=timestamp) + + logger.info('Finished updating arch: %s', archname) + + +def filesonly_update(archname, reponame, pkgs, force=False): + """ + Parses a list of packages and updates the packages database accordingly. 
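+
+    Unlike db_update(), this only refreshes the file lists of packages
+    already present in the web database; it never adds or removes
+    package rows.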
+ """ + logger.info('Updating files for %s (%s)', reponame, archname) + dbpkgs = update_common(archname, reponame, pkgs, False) + dbdict = dict((dbpkg.pkgname, dbpkg) for dbpkg in dbpkgs) + dbset = set(dbdict.keys()) + + for pkg in (pkg for pkg in pkgs if pkg.name in dbset): + dbpkg = dbdict[pkg.name] + + # The odd select_for_update song and dance here are to ensure + # simultaneous updates don't happen on a package, causing + # files to be double-imported. + with transaction.commit_on_success(): + if not dbpkg.files_last_update or not dbpkg.last_update: + pass + elif dbpkg.files_last_update > dbpkg.last_update: + logger.debug("Files for %s are up to date", pkg.name) + continue + # TODO Django 1.4 select_for_update() will work once released + dbpkg = select_pkg_for_update(dbpkg) + logger.debug("Checking files for package %s", pkg.name) + populate_files(dbpkg, pkg, force=force) - logger.info('Finished updating Arch: %s', archname) + logger.info('Finished updating arch: %s', archname) def parse_info(iofile): @@ -490,6 +505,8 @@ def read_repo(primary_arch, repo_file, options): """ # always returns an Arch object, regardless of what is passed in primary_arch = locate_arch(primary_arch) + force = options.get('force', False) + filesonly = options.get('filesonly', False) repo, packages = parse_repo(repo_file) @@ -509,7 +526,10 @@ def read_repo(primary_arch, repo_file, options): logger.info('Starting database updates for %s.', repo_file) for arch in sorted(packages_arches.keys()): - db_update(arch, repo, packages_arches[arch], options) + if filesonly: + filesonly_update(arch, repo, packages_arches[arch], force) + else: + db_update(arch, repo, packages_arches[arch], force) logger.info('Finished database updates for %s.', repo_file) return 0 -- cgit v1.2.3-54-g00ecf From 04e830a1dad665da0b05c3c8349eff21667d805d Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Wed, 30 Nov 2011 13:11:14 -0600 Subject: Add initial migration for devel app Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/migrations/0001_initial.py | 18 ++++++++++++++++++ devel/migrations/__init__.py | 0 2 files changed, 18 insertions(+) create mode 100644 devel/migrations/0001_initial.py create mode 100644 devel/migrations/__init__.py diff --git a/devel/migrations/0001_initial.py b/devel/migrations/0001_initial.py new file mode 100644 index 00000000..c28fc20f --- /dev/null +++ b/devel/migrations/0001_initial.py @@ -0,0 +1,18 @@ +# encoding: utf-8 +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +class Migration(SchemaMigration): + + def forwards(self, orm): + pass + + + def backwards(self, orm): + pass + + + models = {} + + complete_apps = ['devel'] diff --git a/devel/migrations/__init__.py b/devel/migrations/__init__.py new file mode 100644 index 00000000..e69de29b -- cgit v1.2.3-54-g00ecf From 0afedf606f071d23df9ed613abc873b0b25d700d Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Wed, 30 Nov 2011 13:24:39 -0600 Subject: Move main fields to separate module Signed-off-by: Dan McGee <dan@archlinux.org> --- main/fields.py | 42 ++++++++++++++++++++++++++++++++++++++++++ main/models.py | 36 ++---------------------------------- 2 files changed, 44 insertions(+), 34 deletions(-) create mode 100644 main/fields.py diff --git a/main/fields.py b/main/fields.py new file mode 100644 index 00000000..948cb5d9 --- /dev/null +++ b/main/fields.py @@ -0,0 +1,42 @@ +from django.db import models +from django.core.validators import RegexValidator + + +class 
PositiveBigIntegerField(models.BigIntegerField): + _south_introspects = True + + def get_internal_type(self): + return "BigIntegerField" + + def formfield(self, **kwargs): + defaults = { 'min_value': 0 } + defaults.update(kwargs) + return super(PositiveBigIntegerField, self).formfield(**defaults) + +class PGPKeyField(models.CharField): + _south_introspects = True + + def __init__(self, *args, **kwargs): + super(PGPKeyField, self).__init__(*args, **kwargs) + self.validators.append(RegexValidator(r'^[0-9A-F]{40}$', + "Ensure this value consists of 40 hex characters.", 'hex_char')) + + def to_python(self, value): + if value == '' or value is None: + return None + value = super(PGPKeyField, self).to_python(value) + # remove all spaces + value = value.replace(' ', '') + # prune prefixes, either 0x or 2048R/ type + if value.startswith('0x'): + value = value[2:] + value = value.split('/')[-1] + # make all (hex letters) uppercase + return value.upper() + + def formfield(self, **kwargs): + # override so we don't set max_length form field attribute + return models.Field.formfield(self, **kwargs) + + +# vim: set ts=4 sw=4 et: diff --git a/main/models.py b/main/models.py index d7780b91..990cc8ca 100644 --- a/main/models.py +++ b/main/models.py @@ -1,47 +1,17 @@ from django.db import models from django.db.models.signals import pre_save -from django.core.validators import RegexValidator from django.contrib.auth.models import User from django.contrib.sites.models import Site from django.forms import ValidationError -from main.utils import cache_function, make_choice, set_created_field +from .fields import PositiveBigIntegerField, PGPKeyField +from .utils import cache_function, make_choice, set_created_field from packages.models import PackageRelation from datetime import datetime from itertools import groupby import pytz -class PositiveBigIntegerField(models.BigIntegerField): - _south_introspects = True - - def get_internal_type(self): - return "BigIntegerField" - - def formfield(self, **kwargs): - defaults = {'min_value': 0} - defaults.update(kwargs) - return super(PositiveBigIntegerField, self).formfield(**defaults) - -class PGPKeyField(models.CharField): - _south_introspects = True - - def to_python(self, value): - if value == '' or value is None: - return None - value = super(PGPKeyField, self).to_python(value) - # remove all spaces - value = value.replace(' ', '') - # prune prefixes, either 0x or 2048R/ type - if value.startswith('0x'): - value = value[2:] - value = value.split('/')[-1] - # make all (hex letters) uppercase - return value.upper() - - def formfield(self, **kwargs): - # override so we don't set max_length form field attribute - return models.Field.formfield(self, **kwargs) class UserProfile(models.Model): notify = models.BooleanField( @@ -62,8 +32,6 @@ class UserProfile(models.Model): other_contact = models.CharField(max_length=100, null=True, blank=True) pgp_key = PGPKeyField(max_length=40, null=True, blank=True, verbose_name="PGP key fingerprint", - validators=[RegexValidator(r'^[0-9A-F]{40}$', - "Ensure this value consists of 40 hex characters.", 'hex_char')], help_text="consists of 40 hex digits; use `gpg --fingerprint`") website = models.CharField(max_length=200, null=True, blank=True) yob = models.IntegerField("Year of birth", null=True, blank=True) -- cgit v1.2.3-54-g00ecf From da61fed4be6a28c870580f448c7c486f46d7b088 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Wed, 30 Nov 2011 13:31:51 -0600 Subject: Add master key model and admin integration --- 
devel/admin.py | 12 +++++ devel/migrations/0002_auto__add_masterkey.py | 76 ++++++++++++++++++++++++++++ devel/models.py | 17 +++++++ 3 files changed, 105 insertions(+) create mode 100644 devel/admin.py create mode 100644 devel/migrations/0002_auto__add_masterkey.py diff --git a/devel/admin.py b/devel/admin.py new file mode 100644 index 00000000..84082fb8 --- /dev/null +++ b/devel/admin.py @@ -0,0 +1,12 @@ +from django.contrib import admin + +from .models import MasterKey + + +class MasterKeyAdmin(admin.ModelAdmin): + list_display = ('pgp_key', 'owner', 'created', 'revoker', 'revoked') + search_fields = ('pgp_key', 'owner', 'revoker') + +admin.site.register(MasterKey, MasterKeyAdmin) + +# vim: set ts=4 sw=4 et: diff --git a/devel/migrations/0002_auto__add_masterkey.py b/devel/migrations/0002_auto__add_masterkey.py new file mode 100644 index 00000000..ac1f745a --- /dev/null +++ b/devel/migrations/0002_auto__add_masterkey.py @@ -0,0 +1,76 @@ +# encoding: utf-8 +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + +class Migration(SchemaMigration): + + depends_on = ( + ("main", "0051_auto__chg_field_userprofile_pgp_key"), + ) + + def forwards(self, orm): + # Adding model 'MasterKey' + db.create_table('devel_masterkey', ( + ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('owner', self.gf('django.db.models.fields.related.ForeignKey')(related_name='masterkey_owner', to=orm['auth.User'])), + ('revoker', self.gf('django.db.models.fields.related.ForeignKey')(related_name='masterkey_revoker', to=orm['auth.User'])), + ('pgp_key', self.gf('main.fields.PGPKeyField')(max_length=40)), + ('created', self.gf('django.db.models.fields.DateTimeField')()), + ('revoked', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)), + )) + db.send_create_signal('devel', ['MasterKey']) + + def backwards(self, orm): + db.delete_table('devel_masterkey') + + + models = { + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': 
('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'devel.masterkey': { + 'Meta': {'object_name': 'MasterKey'}, + 'created': ('django.db.models.fields.DateTimeField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'masterkey_owner'", 'to': "orm['auth.User']"}), + 'pgp_key': ('main.fields.PGPKeyField', [], {'max_length': '40'}), + 'revoked': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + 'revoker': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'masterkey_revoker'", 'to': "orm['auth.User']"}) + } + } + + complete_apps = ['devel'] diff --git a/devel/models.py b/devel/models.py index e69de29b..ea0f4d55 100644 --- a/devel/models.py +++ b/devel/models.py @@ -0,0 +1,17 @@ +from django.db import models +from django.contrib.auth.models import User + +from main.fields import PGPKeyField + + +class MasterKey(models.Model): + owner = models.ForeignKey(User, related_name='masterkey_owner', + help_text="The developer holding this master key") + revoker = models.ForeignKey(User, related_name='masterkey_revoker', + help_text="The developer holding the revocation certificate") + pgp_key = PGPKeyField(max_length=40, verbose_name="PGP key fingerprint", + help_text="consists of 40 hex digits; use `gpg --fingerprint`") + created = models.DateTimeField() + revoked = models.DateTimeField(null=True, blank=True) + +# vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From 6b8ef446bcd6a1cbc794d0846968e806034d3aad Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Wed, 30 Nov 2011 13:55:36 -0600 Subject: Add master key overview page And a bunch of text that may suck, but is better than nothing. 
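The page leans on the pgp_fingerprint template filter added below, which
escapes the stored 40-digit value and passes it through the existing
format_key() helper for display. The grouping that produces is roughly
the following (a sketch only, with a made-up fingerprint; see
main/templatetags/pgp.py for the real implementation):

    def format_fingerprint(fpr):
        # Break the 40 hex digits into ten 4-character groups.
        groups = [fpr[i:i + 4] for i in range(0, len(fpr), 4)]
        return ' '.join(groups)

    print format_fingerprint('0123456789ABCDEF0123456789ABCDEF01234567')
    # 0123 4567 89AB CDEF 0123 4567 89AB CDEF 0123 4567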
Signed-off-by: Dan McGee <dan@archlinux.org> --- main/models.py | 12 ++++++++++ main/templatetags/pgp.py | 13 +++++++++++ public/views.py | 18 ++++++++++----- templates/public/keys.html | 57 ++++++++++++++++++++++++++++++++++++++++++++++ urls.py | 1 + 5 files changed, 95 insertions(+), 6 deletions(-) create mode 100644 templates/public/keys.html diff --git a/main/models.py b/main/models.py index 990cc8ca..9156fb51 100644 --- a/main/models.py +++ b/main/models.py @@ -53,6 +53,18 @@ class Meta: verbose_name = 'Additional Profile Data' verbose_name_plural = 'Additional Profile Data' + def get_absolute_url(self): + # TODO: this is disgusting. find a way to consolidate this logic with + # public.views.userlist among other places, and make some constants or + # something so we aren't using copies of string names everywhere. + group_names = self.user.groups.values_list('name', flat=True) + if "Developers" in group_names: + prefix = "developers" + elif "Trusted Users" in group_names: + prefix = "trustedusers" + else: + prefix = "fellows" + return '/%s/#%s' % (prefix, self.user.username) class TodolistManager(models.Manager): def incomplete(self): diff --git a/main/templatetags/pgp.py b/main/templatetags/pgp.py index 67f5e08d..d69e2918 100644 --- a/main/templatetags/pgp.py +++ b/main/templatetags/pgp.py @@ -1,5 +1,7 @@ from django import template from django.conf import settings +from django.utils.html import conditional_escape +from django.utils.safestring import mark_safe register = template.Library() @@ -26,4 +28,15 @@ def pgp_key_link(key_id): values = (url, format_key(key_id), key_id[-8:]) return '<a href="%s" title="PGP key search for %s">0x%s</a>' % values +@register.filter +def pgp_fingerprint(key_id, autoescape=True): + if not key_id: + return u'' + if autoescape: + esc = conditional_escape + else: + esc = lambda x: x + return mark_safe(format_key(esc(key_id))) +pgp_fingerprint.needs_autoescape = True + # vim: set ts=4 sw=4 et: diff --git a/public/views.py b/public/views.py index c28fd303..95b590fc 100644 --- a/public/views.py +++ b/public/views.py @@ -1,17 +1,17 @@ -from main.models import Arch, Repo, Donor -from mirrors.models import MirrorUrl -from news.models import News -from . 
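# Note on the hunk below: the imports are regrouped (django core first,
# then project apps), ``from . import utils`` becomes a direct
# ``from utils import get_recent_updates``, and the MasterKey import is
# added for the new page -- no behavior change beyond that is intended.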
import utils - from django.conf import settings from django.contrib.auth.models import User from django.http import Http404 from django.views.generic import list_detail from django.views.generic.simple import direct_to_template +from devel.models import MasterKey +from main.models import Arch, Repo, Donor +from mirrors.models import MirrorUrl +from news.models import News +from utils import get_recent_updates def index(request): - pkgs = utils.get_recent_updates() + pkgs = get_recent_updates() context = { 'news_updates': News.objects.order_by('-postdate', '-id')[:15], 'pkg_updates': pkgs, @@ -77,4 +77,10 @@ def feeds(request): } return direct_to_template(request, 'public/feeds.html', context) +def keys(request): + context = { + 'keys': MasterKey.objects.select_related('owner', 'revoker').all(), + } + return direct_to_template(request, 'public/keys.html', context) + # vim: set ts=4 sw=4 et: diff --git a/templates/public/keys.html b/templates/public/keys.html new file mode 100644 index 00000000..2e7fcebe --- /dev/null +++ b/templates/public/keys.html @@ -0,0 +1,57 @@ +{% extends "base.html" %} +{% load pgp %} + +{% block title %}Arch Linux - Master Signing Keys{% endblock %} + +{% block content %} +<div id="signing-keys" class="box"> + <h2>Master Signing Keys</h2> + + <p>This page lists the Arch Linux Master Keys. This is a distributed set of + keys that are seen as "official" signing keys of the distribution. Each key + is held by a different developer, and a revocation certificate for the key + is held by a different developer. Thus, no one developer has absolute hold + on any sort of absolute, root trust.</p> + <p>The {{ keys|length }} key{{ keys|pluralize }} listed below should be + regarded as the current set of master keys. They are available on public + keyservers and should be signed by the owner of the key.</p> + <p>All official Arch Linux developers and trusted users should have their + key signed by at least three of these master keys. This is in accordance + with the PGP <em>web of trust</em> concept. If a user is willing to + marginally trust all of the master keys, three signatures from different + master keys will consider a given developer's key as valid. 
For more + information on trust, please consult the + <a href="http://www.gnupg.org/gph/en/manual.html">GNU Privacy Handbook</a> + and <a href="http://www.gnupg.org/gph/en/manual.html#AEN385">Using trust to + validate keys</a>.</p> + + <table class="pretty2"> + <thead> + <tr> + <th>Master Key</th> + <th>Full Fingerprint</th> + <th>Owner</th> + <th>Owner's Signing Key</th> + <th>Revoker</th> + <th>Revoker's Signing Key</th> + </tr> + </thead> + <tbody> + {% for key in keys %} + <tr> + <td>{% pgp_key_link key.pgp_key %}</td> + <td>{{ key.pgp_key|pgp_fingerprint }}</td> + {% with key.owner.userprofile as owner_profile %} + <td><a href="{{ owner_profile.get_absolute_url }}">{{ key.owner.get_full_name }}</a></td> + <td>{% pgp_key_link owner_profile.pgp_key %}</td> + {% endwith %} + {% with key.revoker.userprofile as revoker_profile %} + <td><a href="{{ revoker_profile.get_absolute_url }}">{{ key.revoker.get_full_name }}</a></td> + <td>{% pgp_key_link revoker_profile.pgp_key %}</td> + {% endwith %} + </tr> + {% endfor %} + </tbody> + </table> +</div> +{% endblock %} diff --git a/urls.py b/urls.py index 1d06f0f2..b01d2ee3 100644 --- a/urls.py +++ b/urls.py @@ -67,6 +67,7 @@ (r'^fellows/$', 'userlist', { 'user_type':'fellows' }, 'page-fellows'), (r'^donate/$', 'donate', {}, 'page-donate'), (r'^download/$', 'download', {}, 'page-download'), + (r'^master-keys/$', 'keys', {}, 'page-keys'), ) # Includes and other remaining stuff -- cgit v1.2.3-54-g00ecf From 4590196d79273c49172e2da74e7a7b31e59d7a27 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Wed, 30 Nov 2011 14:07:35 -0600 Subject: Integrate master key into rest of site Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/management/commands/generate_keyring.py | 4 ++++ sitemaps.py | 22 ++++++++++++++++++---- templates/public/index.html | 2 ++ 3 files changed, 24 insertions(+), 4 deletions(-) diff --git a/devel/management/commands/generate_keyring.py b/devel/management/commands/generate_keyring.py index 35ab8874..a3a764b4 100644 --- a/devel/management/commands/generate_keyring.py +++ b/devel/management/commands/generate_keyring.py @@ -13,6 +13,7 @@ import subprocess import sys +from devel.models import MasterKey from main.models import UserProfile logging.basicConfig( @@ -48,11 +49,14 @@ def generate_keyring(keyserver, keyring): pgp_key__isnull=False).extra(where=["pgp_key != ''"]).values_list( "pgp_key", flat=True) logger.info("%d keys fetched from user profiles", len(key_ids)) + master_key_ids = MasterKey.objects.values_list("pgp_key", flat=True) + logger.info("%d keys fetched from master keys", len(master_key_ids)) gpg_cmd = ["gpg", "--no-default-keyring", "--keyring", keyring, "--keyserver", keyserver, "--recv-keys"] logger.info("running command: %r", gpg_cmd) gpg_cmd.extend(key_ids) + gpg_cmd.extend(master_key_ids) subprocess.check_call(gpg_cmd) logger.info("keyring at %s successfully updated", keyring) diff --git a/sitemaps.py b/sitemaps.py index 7718002d..958d1f44 100644 --- a/sitemaps.py +++ b/sitemaps.py @@ -71,10 +71,24 @@ class BaseSitemap(Sitemap): base_viewnames = ( ('index', 1.0, 'hourly'), ('packages-search', 0.8, 'hourly'), - 'page-about', 'page-art', 'page-svn', 'page-devs', 'page-tus', - 'page-fellows', 'page-donate', 'page-download', 'news-list', - 'feeds-list', 'groups-list', 'mirror-list', 'mirror-status', - 'mirrorlist', 'packages-differences', 'releng-test-overview', + ('page-keys', 0.8, 'weekly'), + ('news-list', 0.7, 'weekly'), + ('groups-list', 0.5, 'weekly'), + ('mirror-status', 0.4, 'hourly'), + 
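# the bare string entries that follow presumably fall back to the
# sitemap's default priority/changefreq, unlike the tuples above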
'page-about', + 'page-art', + 'page-svn', + 'page-devs', + 'page-tus', + 'page-fellows', + 'page-donate', + 'page-download', + 'feeds-list', + 'mirror-list', + 'mirrorlist', + 'packages-differences', + 'releng-test-overview', + 'visualize-index', ) def items(self): diff --git a/templates/public/index.html b/templates/public/index.html index 854bd447..4bd26f6b 100644 --- a/templates/public/index.html +++ b/templates/public/index.html @@ -170,6 +170,8 @@ <h4>Tools</h4> <h4>Development</h4> <ul> + <li><a href="{% url page-keys %}" + title="Package/Database signing master keys">Master Keys</a></li> <li><a href="/packages/" title="View/search the package repository database">Packages</a></li> <li><a href="/groups/" -- cgit v1.2.3-54-g00ecf From 7c84bea7dabdfbc307d373620b00214777d91a97 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Wed, 30 Nov 2011 14:25:51 -0600 Subject: Allow generation of an ownertrust file Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/management/commands/generate_keyring.py | 29 ++++++++++++++++++++++++--- 1 file changed, 26 insertions(+), 3 deletions(-) diff --git a/devel/management/commands/generate_keyring.py b/devel/management/commands/generate_keyring.py index a3a764b4..062c738b 100644 --- a/devel/management/commands/generate_keyring.py +++ b/devel/management/commands/generate_keyring.py @@ -24,7 +24,7 @@ logger = logging.getLogger() class Command(BaseCommand): - args = "<keyserver> <keyring_path>" + args = "<keyserver> <keyring_path> [ownertrust_path]" help = "Assemble a GPG keyring with all known developer keys." def handle(self, *args, **options): @@ -36,10 +36,14 @@ def handle(self, *args, **options): elif v == 2: logger.level = logging.DEBUG - if len(args) != 2: + if len(args) < 2: raise CommandError("keyserver and keyring_path must be provided") - return generate_keyring(args[0], args[1]) + generate_keyring(args[0], args[1]) + + if len(args) > 2: + generate_ownertrust(args[2]) + def generate_keyring(keyserver, keyring): logger.info("getting all known key IDs") @@ -60,4 +64,23 @@ def generate_keyring(keyserver, keyring): subprocess.check_call(gpg_cmd) logger.info("keyring at %s successfully updated", keyring) + +TRUST_LEVELS = { + 'unknown': 0, + 'expired': 1, + 'undefined': 2, + 'never': 3, + 'marginal': 4, + 'fully': 5, + 'ultimate': 6, +} + + +def generate_ownertrust(trust_path): + master_key_ids = MasterKey.objects.values_list("pgp_key", flat=True) + with open(trust_path, "w") as trustfile: + for key_id in master_key_ids: + trustfile.write("%s:%d:\n" % (key_id, TRUST_LEVELS['marginal'])) + logger.info("trust file at %s created or overwritten", trust_path) + # vim: set ts=4 sw=4 et: -- cgit v1.2.3-54-g00ecf From 025042ef0d23378a5f10fe1cff8f660f51638615 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Wed, 30 Nov 2011 14:45:05 -0600 Subject: Master keys: reduce query count, add default ordering Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/models.py | 3 +++ public/views.py | 3 ++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/devel/models.py b/devel/models.py index ea0f4d55..f31b8fbb 100644 --- a/devel/models.py +++ b/devel/models.py @@ -14,4 +14,7 @@ class MasterKey(models.Model): created = models.DateTimeField() revoked = models.DateTimeField(null=True, blank=True) + class Meta: + ordering = ('created',) + # vim: set ts=4 sw=4 et: diff --git a/public/views.py b/public/views.py index 95b590fc..40512614 100644 --- a/public/views.py +++ b/public/views.py @@ -79,7 +79,8 @@ def feeds(request): 
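# The one-line change below widens select_related() across the FK chain
# to the user profiles, so the owner_profile/revoker_profile lookups in
# keys.html no longer issue an extra query per master key displayed.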
def keys(request): context = { - 'keys': MasterKey.objects.select_related('owner', 'revoker').all(), + 'keys': MasterKey.objects.select_related('owner', 'revoker', + 'owner__userprofile', 'revoker__userprofile').all(), } return direct_to_template(request, 'public/keys.html', context) -- cgit v1.2.3-54-g00ecf From 474290795424774700867205f32dec0f2618b614 Mon Sep 17 00:00:00 2001 From: Luke Shumaker <LukeShu@sbcglobal.net> Date: Thu, 1 Dec 2011 00:14:18 -0500 Subject: move media/silhouette.png to media/devs/silhouette.png --- media/devs/silhouette.png | Bin 0 -> 33090 bytes media/silhouette.png | Bin 33090 -> 0 bytes 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 media/devs/silhouette.png delete mode 100644 media/silhouette.png diff --git a/media/devs/silhouette.png b/media/devs/silhouette.png new file mode 100644 index 00000000..afa87cd1 Binary files /dev/null and b/media/devs/silhouette.png differ diff --git a/media/silhouette.png b/media/silhouette.png deleted file mode 100644 index afa87cd1..00000000 Binary files a/media/silhouette.png and /dev/null differ -- cgit v1.2.3-54-g00ecf From ad54af54f3af11bad7b020527c11d18cf94c2ab3 Mon Sep 17 00:00:00 2001 From: Luke Shumaker <LukeShu@sbcglobal.net> Date: Thu, 1 Dec 2011 00:53:12 -0500 Subject: Update localsettings.py.example, README, etc for parabolaweb. --- README | 85 +++++++++++++++++++++++++++++++---------------- local_settings.py.example | 29 ++++++++++------ media/admin_media | 2 +- requirements.txt | 1 + 4 files changed, 78 insertions(+), 39 deletions(-) diff --git a/README b/README index 0d3ee082..0e9d19af 100644 --- a/README +++ b/README @@ -1,4 +1,4 @@ -# Archweb README +# Parabolaweb README To get a pretty version of this document, run @@ -19,55 +19,84 @@ See AUTHORS file. # Python dependencies -More detail in `requirements.txt` and `requirements_prod.txt`; it is best to -use virtualenv and pip to handle these. But if you insist on (Arch Linux) -packages, you will probably want the following: - -- mysql-python or python-pysqlite -- django -- python-markdown -- python-south -- python-memcached - +We're going to use pip to handle python dependencies, m'kay? +Worry about that in step 3. + # Testing Installation +Throughout this, we assume that parabolaweb is installed in a +directory called `parabolaweb`. This is not nescessarily true. On the +main server, it's in `/srv/http/web'. Wherever you see this in a +command, know that you should just replace it with the correct path +for your install. + 1. Run `virtualenv2`. - $ cd /path/to/archweb && virtualenv2 ../archweb-env + $ cd /path/to/parabolaweb && virtualenv2 `pwd`-env + + Here I just had you use `pwd` to choose the environment + directory. You can use anything, but it is recomended that it not + be the same directory as the install. 2. Activate the virtualenv. - $ source ../archweb-env/bin/activate + $ source `pwd`-env/bin/activate + +3. Fix symlink to the environment + + (parabolaweb-env) $ ln -sf ../../parabolaweb-env/lib/python2.7/site-packages/django/contrib/admin/media media/admin_meda -2. Install dependencies through `pip`. + Of ourse change `../../parabolaweb-env` to the relative path to + your environment. Keep in mind that the path is relative from + inside the `media/` directory, not the current directory. - (archweb-env) $ pip install -r requirements.txt +4. Install dependencies through `pip`. -3. Copy `local_settings.py.example` to `local_settings.py` and modify. 
+ To install base dependencies, run + (parabolweb-env) $ pip install -r requirements.txt + + After that you will need to install a database engine for python. + This means `MySQL-python==1.2.3`, `pysqlite` or whatever the + package is for PostgreSQL. Eg: + (parabolweb-env) $ pip install MySQL-python==1.2.3 + + You may also want to install memcached + (parabolweb-env) $ pip install python-memcached==1.47 + + Alternately, to have MySQL and memcached installed automatically, run + (parabolweb-env) $ pip install -r requirements_prod.txt + +5. Copy `local_settings.py.example` to `local_settings.py` and modify. Make sure to uncomment the appropriate db section (either sqlite or mysql). -4. Sync the database to create it. +6. Sync the database to create it. - (archweb-env) $ ./manage.py syncdb + (parabolaweb-env) $ ./manage.py syncdb -5. Migrate changes. +7. Migrate changes. - (archweb-env) $ ./manage.py migrate + (parabolaweb-env) $ ./manage.py migrate -6. Load the fixtures to prepopulate some data. If you don't want some of the +8. Load the fixtures to prepopulate some data. If you don't want some of the provided data, adjust the file glob accordingly. - (archweb-env) $ ./manage.py loaddata */fixtures/*.json + (parabolaweb-env) $ for file in */fixtures/*.json; do \ + ./manage.py loaddata $file; \ + done + + The reason we use a loop instead of just calling them with a glob + is that groups.json is expected to fail, and this way it won't + prevent others from loading. -7. Use the following commands to start a service instance +9. Use the following commands to start a service instance - (archweb-env) $ ./manage.py runserver + (parabolaweb-env) $ ./manage.py runserver -8. To optionally populate the database with real data: +10. To optionally populate the database with real data: - (archweb-env) $ wget ftp://ftp.archlinux.org/core/os/i686/core.db.tar.gz - (archweb-env) $ ./manage.py reporead i686 core.db.tar.gz - (archweb-env) $ ./manage.py syncisos + $ wget http://repo.parabolagnulinux.org/core/os/i686/core.db.tar.gz + $ ./manage.py reporead i686 core.db.tar.gz + $ ./manage.py syncisos Alter architecture and repo to get x86\_64 and packages from other repos if needed. diff --git a/local_settings.py.example b/local_settings.py.example index bad03921..4ad87a36 100644 --- a/local_settings.py.example +++ b/local_settings.py.example @@ -1,9 +1,9 @@ -### Django settings for archlinux project. +### Django settings for Parabola GNU/Linux-libre project. 
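## Note: archweb-derived settings.py files conventionally end with
## ``from local_settings import *``, so values set in this file override
## the shipped defaults (an assumption here; that import itself is not
## shown anywhere in this series).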
## Debug settings -DEBUG = False -TEMPLATE_DEBUG = True -DEBUG_TOOLBAR = True +DEBUG = True # As far as I can tell, this must be True for /media/* URLs to work +TEMPLATE_DEBUG = True # More helpful this way +DEBUG_TOOLBAR = False # Must install package django-debug-toolbar to use ## For django debug toolbar INTERNAL_IPS = ('127.0.0.1',) @@ -17,15 +17,24 @@ ADMINS = ( DATABASES = { 'default': { 'ENGINE' : 'django.db.backends.mysql', - 'NAME' : 'archlinux', - 'USER' : 'archlinux', - 'PASSWORD': 'archlinux', + 'NAME' : 'parabola', + 'USER' : 'parabola', + 'PASSWORD': 'parabola', 'HOST' : '', 'PORT' : '', - 'OPTIONS' : {'init_command': 'SET storage_engine=InnoDB'}, + # InnoDB WILL NOT work + 'OPTIONS' : {'init_command': 'SET storage_engine=MyISAM'}, }, } +## sqlite3 Database settings +#DATABASES = { +# 'default': { +# 'ENGINE' : 'sqlite3', +# 'NAME' : '/srv/http/web/db.sqlite', +# }, +#} + ## Define cache settings CACHES = { 'default': { @@ -43,10 +52,10 @@ CACHE_MIDDLEWARE_SECONDS = 300 SESSION_COOKIE_SECURE = False ## location for saving dev pictures -MEDIA_ROOT = '/srv/example.com/img/' +MEDIA_ROOT = '/srv/http/media/devs/' ## web url for serving image files -MEDIA_URL = 'http://example.com/img/' +MEDIA_URL = '/media/' ## Make this unique, and don't share it with anybody. SECRET_KEY = '00000000000000000000000000000000000000000000000' diff --git a/media/admin_media b/media/admin_media index 585cf837..3d6781eb 120000 --- a/media/admin_media +++ b/media/admin_media @@ -1 +1 @@ -/usr/lib/python2.7/site-packages/django/contrib/admin/media \ No newline at end of file +../../web-env/lib/python2.7/site-packages/django/contrib/admin/media/ \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index fd58616e..5704435e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,5 @@ Django==1.3.1 Markdown==2.0.3 South==0.7.3 +pyinotify==0.9.2 pytz>=2011n -- cgit v1.2.3-54-g00ecf From 4d02cd5b5d4437dd1543e2d45044db72da1989f4 Mon Sep 17 00:00:00 2001 From: Dan McGee <dan@archlinux.org> Date: Wed, 30 Nov 2011 23:56:12 -0600 Subject: reporead: fix not defined variable Way to fail at refactoring, Dan. 
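(Concretely: after the refactor, db_update() still referenced
``repository`` and ``architecture`` further down but no longer defined
them, so a run would die with something along the lines of

    NameError: global name 'repository' is not defined

The two lookups added below simply restore the missing bindings.)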
Signed-off-by: Dan McGee <dan@archlinux.org> --- devel/management/commands/reporead.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py index e4ba8580..c444538b 100644 --- a/devel/management/commands/reporead.py +++ b/devel/management/commands/reporead.py @@ -329,6 +329,8 @@ def db_update(archname, reponame, pkgs, force=False): """ logger.info('Updating %s (%s)', reponame, archname) dbpkgs = update_common(archname, reponame, pkgs, True) + repository = Repo.objects.get(name__iexact=reponame) + architecture = Arch.objects.get(name__iexact=archname) # This makes our inner loop where we find packages by name *way* more # efficient by not having to go to the database for each package to -- cgit v1.2.3-54-g00ecf From 56f60fcfbe1cd2951dd1867360ae364213686b5f Mon Sep 17 00:00:00 2001 From: Luke Shumaker <LukeShu@sbcglobal.net> Date: Thu, 1 Dec 2011 01:12:40 -0500 Subject: this probably should have happened during the merge (small stuff) --- packages/templatetags/package_extras.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/packages/templatetags/package_extras.py b/packages/templatetags/package_extras.py index fc2201e5..827f6438 100644 --- a/packages/templatetags/package_extras.py +++ b/packages/templatetags/package_extras.py @@ -37,7 +37,7 @@ def render(self, context): qs['sort'] = ['-' + self.sortfield] else: qs['sort'] = [self.sortfield] - return urlencode(qs, True) + return urlencode(qs, True).replace('&', '&') @register.tag(name='buildsortqs') def do_buildsortqs(parser, token): @@ -73,12 +73,6 @@ def userpkgs(user): return '' -def svn_link(package, svnpath): - '''Helper function for the two real SVN link methods.''' - parts = (package.repo.svn_root, package.pkgbase, svnpath) - linkbase = "http://projects.archlinux.org/svntogit/%s.git/tree/%s/%s/" - return linkbase % tuple(urlquote(part) for part in parts) - @register.simple_tag def svn_arch(package): repo = package.repo.name.lower() -- cgit v1.2.3-54-g00ecf From 3eb75b1670fd5f5b581ac9c0d2b1343d205118a8 Mon Sep 17 00:00:00 2001 From: Luke Shumaker <LukeShu@sbcglobal.net> Date: Thu, 1 Dec 2011 01:13:31 -0500 Subject: add get_wiki_link() to packages/templatetags/package_extras.py (fix the 500 error) --- packages/templatetags/package_extras.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/packages/templatetags/package_extras.py b/packages/templatetags/package_extras.py index 827f6438..01d9afd6 100644 --- a/packages/templatetags/package_extras.py +++ b/packages/templatetags/package_extras.py @@ -72,6 +72,14 @@ def userpkgs(user): ) return '' +@register.simple_tag +def get_wiki_link(package): + url = "https://wiki.parabolagnulinux.org/index.php" + data = { + 'title': "Special:Search", + 'search': package.pkgname, + } + return link_encode(url, data) @register.simple_tag def svn_arch(package): -- cgit v1.2.3-54-g00ecf From 623e0453cee5e3f663a0b18d68db0396cc812983 Mon Sep 17 00:00:00 2001 From: Luke Shumaker <LukeShu@sbcglobal.net> Date: Thu, 1 Dec 2011 23:35:41 -0500 Subject: I think this fixes all the broken links (that point to parabolagnulinux.org anyway) --- devel/views.py | 2 +- settings.py | 2 +- templates/base.html | 2 +- templates/packages/flaghelp.html | 2 +- templates/public/art.html | 2 +- templates/public/download.html | 2 +- templates/public/feeds.html | 2 +- templates/public/index.html | 4 ++-- todolists/views.py | 2 +- 9 files changed, 10 insertions(+), 10 deletions(-) diff --git a/devel/views.py b/devel/views.py 
index 7cc45419..b9bd7cce 100644 --- a/devel/views.py +++ b/devel/views.py @@ -292,7 +292,7 @@ def save(self, commit=True): send_mail("Your new parabolaweb account", template.render(ctx), - 'Parabola <dev@list.parabolagnulinux.org>', + 'Parabola <dev@lists.parabolagnulinux.org>', [user.email], fail_silently=False) diff --git a/settings.py b/settings.py index cb17c96c..e1fc45d5 100644 --- a/settings.py +++ b/settings.py @@ -13,7 +13,7 @@ MANAGERS = ADMINS # Package out-of-date emails for orphans -NOTIFICATIONS = ['packages@list.parabolagnulinux.org'] +NOTIFICATIONS = ['dev@lists.parabolagnulinux.org'] # Full path to the data directory DEPLOY_PATH = os.path.dirname(os.path.realpath(__file__)) diff --git a/templates/base.html b/templates/base.html index ca492281..b537c603 100644 --- a/templates/base.html +++ b/templates/base.html @@ -39,7 +39,7 @@ <li><a href="/packages/differences/" title="Package architecture differences">Architecture Differences</a></li> <li><a - href="http://list.parabolagnulinux.org/pipermail/dev-parabolagnulinux.org/" + href="http://lists.parabolagnulinux.org/pipermail/dev/" title="dev mailing list archives">Archives</a></li> <li><a href="/devel/clock/" title="Developer world clocks">Dev Clocks</a></li> {% if user.is_staff %} diff --git a/templates/packages/flaghelp.html b/templates/packages/flaghelp.html index e33ba0f5..d60018fa 100644 --- a/templates/packages/flaghelp.html +++ b/templates/packages/flaghelp.html @@ -25,7 +25,7 @@ <h3>Flagging Packages</h3> <p>The message box portion of the flag utility is optional, and meant for short messages only. If you need more than 200 characters for your message, then file a bug report, email the maintainer directly, or send - an email to the <a target="_blank" href="http://list.parabolagnulinux.org/listinfo.cgi/dev-parabolagnulinux.org" + an email to the <a target="_blank" href="https://lists.parabolagnulinux.org/mailman/listinfo/dev" title="Visit the parabola dev mailing list">parabola mailing list</a> with your additional text.</p> diff --git a/templates/public/art.html b/templates/public/art.html index 68179f23..3a92b8b4 100644 --- a/templates/public/art.html +++ b/templates/public/art.html @@ -10,7 +10,7 @@ <h2>Parabola Logos and Artwork</h2> <p>You can help by creating artwork for Parabola GNU/Linux-libre.</p> - <p>Send your designs to web@list.parabolagnulinux.org and state they are CC-by-sa + <p>Send your designs to dev@lists.parabolagnulinux.org and state they are CC-by-sa or another free culture friendly license.</p> diff --git a/templates/public/download.html b/templates/public/download.html index 7a1cd855..207414dc 100644 --- a/templates/public/download.html +++ b/templates/public/download.html @@ -23,7 +23,7 @@ <h3>Release Info</h3> <li><strong>Resources:</strong> <ul> <li><a - href="http://list.parabolagnulinux.org/listinfo.cgi/dev-parabolagnulinux.org" + href="https://lists.parabolagnulinux.org/mailman/listinfo/dev" title="Parabola Hackers Discussion List">Mailing List</a></li> </ul> </li> diff --git a/templates/public/feeds.html b/templates/public/feeds.html index 79e8a1aa..da70e6ee 100644 --- a/templates/public/feeds.html +++ b/templates/public/feeds.html @@ -17,7 +17,7 @@ <h3>News and Activity Feeds</h3> the Parabola staff.</p> <p>The <a - href="http://wiki.parabolagnulinux.org/feed.php" + href="https://wiki.parabolagnulinux.org/index.php?title=Special:RecentChanges&feed=atom" title="ParabolaWiki Recent Changes feed" class="rss">Parabola Wiki: Recent changes feed</a> is also available to track document changes 
from the <a href="http://wiki.parabolagnulinux.org/" title="Parabola Wiki community diff --git a/templates/public/index.html b/templates/public/index.html index 3805b997..044b586f 100644 --- a/templates/public/index.html +++ b/templates/public/index.html @@ -123,9 +123,9 @@ <h4>Documentation</h4> <h4>Community</h4> <ul> - <li><a href="http://list.parabolagnulinux.org/listinfo.cgi" + <li><a href="http://lists.parabolagnulinux.org/" title="Community and developer mailing lists">Mailing Lists</a></li> - <li><a href="http://list.parabolagnulinux.org/pipermail/dev-parabolagnulinux.org/" + <li><a href="https://lists.parabolagnulinux.org/pipermail/dev/" title="dev mailing list archives">Dev Archives</a></li> <li><a href="http://wiki.parabolagnulinux.org/IRC_Channels" title="Official and regional IRC communities">IRC Channels</a></li> diff --git a/todolists/views.py b/todolists/views.py index 43763545..233102cf 100644 --- a/todolists/views.py +++ b/todolists/views.py @@ -161,7 +161,7 @@ def send_todolist_emails(todo_list, new_packages): template = loader.get_template('todolists/email_notification.txt') send_mail('Packages added to todo list \'%s\'' % todo_list.name, template.render(ctx), - 'Parabola <packages@list.parabolagnulinux.org>', + 'Parabola <dev@lists.parabolagnulinux.org>', [maint], fail_silently=True) -- cgit v1.2.3-54-g00ecf From a10b6d884f4f102f5216f4b341f35bb341cee176 Mon Sep 17 00:00:00 2001 From: Luke Shumaker <LukeShu@sbcglobal.net> Date: Thu, 1 Dec 2011 23:55:27 -0500 Subject: Normalize the URI scheme used for inbound links. Don't use one when possible, else use https --- README | 2 +- packages/templatetags/package_extras.py | 8 ++++---- public/views.py | 2 +- settings.py | 2 +- templates/base.html | 12 ++++++------ templates/packages/flag.html | 2 +- templates/packages/flaghelp.html | 4 ++-- templates/packages/search.html | 2 +- templates/public/about.html | 2 +- templates/public/donate.html | 2 +- templates/public/download.html | 16 ++++++++-------- templates/public/feeds.html | 4 ++-- templates/public/https.html | 5 ++--- templates/public/index.html | 18 +++++++++--------- templates/public/svn.html | 2 +- 15 files changed, 41 insertions(+), 42 deletions(-) diff --git a/README b/README index 0e9d19af..acd8b8f9 100644 --- a/README +++ b/README @@ -94,7 +94,7 @@ for your install. 10. To optionally populate the database with real data: - $ wget http://repo.parabolagnulinux.org/core/os/i686/core.db.tar.gz + $ wget https://repo.parabolagnulinux.org/core/os/i686/core.db.tar.gz $ ./manage.py reporead i686 core.db.tar.gz $ ./manage.py syncisos diff --git a/packages/templatetags/package_extras.py b/packages/templatetags/package_extras.py index 01d9afd6..c8c88ca7 100644 --- a/packages/templatetags/package_extras.py +++ b/packages/templatetags/package_extras.py @@ -74,7 +74,7 @@ def userpkgs(user): @register.simple_tag def get_wiki_link(package): - url = "https://wiki.parabolagnulinux.org/index.php" + url = "//wiki.parabolagnulinux.org/index.php" data = { 'title': "Special:Search", 'search': package.pkgname, @@ -92,7 +92,7 @@ def svn_trunk(package): @register.simple_tag def bugs_list(package): - url = "https://bugs.parabolagnulinux.org/bugs/issue?" + url = "//bugs.parabolagnulinux.org/bugs/issue?" data = { '@action': 'search', 'title': package.pkgname, @@ -101,7 +101,7 @@ def bugs_list(package): @register.simple_tag def bug_report(package): - url = "https://bugs.parabolagnulinux.org/bugs/issue?" + url = "//bugs.parabolagnulinux.org/bugs/issue?" 
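# scheme-relative URL: the browser reuses whatever scheme (http or
# https) the referring page itself was loaded over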
data = { '@template': 'item', 'keyword': 'packages', @@ -118,5 +118,5 @@ def flag_unfree(package): 'priority': 'critical', 'title': '[%s] Please put your reasons here (register first if you haven\'t)' % package.pkgname, } - return "https://bugs.parabolagnulinux.org/bugs/issue?%s" % urlencode(data) + return "//bugs.parabolagnulinux.org/bugs/issue?%s" % urlencode(data) # vim: set ts=4 sw=4 et: diff --git a/public/views.py b/public/views.py index bb9cd454..43b46b12 100644 --- a/public/views.py +++ b/public/views.py @@ -52,7 +52,7 @@ def donate(request): return direct_to_template(request, 'public/donate.html', context) def download(request): - return redirect('http://wiki.parabolagnulinux.org/get', permanent=True) + return redirect('//wiki.parabolagnulinux.org/get', permanent=True) def feeds(request): context = { diff --git a/settings.py b/settings.py index e1fc45d5..ed12bbec 100644 --- a/settings.py +++ b/settings.py @@ -132,6 +132,6 @@ INSTALLED_APPS = list(INSTALLED_APPS) + [ 'debug_toolbar' ] # URL to fetch a current list of available ISOs -ISO_LIST_URL = 'http://repo.parabolagnulinux.org/isos/' +ISO_LIST_URL = 'https://repo.parabolagnulinux.org/isos/' # vim: set ts=4 sw=4 et: diff --git a/templates/base.html b/templates/base.html index b537c603..746c6cf4 100644 --- a/templates/base.html +++ b/templates/base.html @@ -19,10 +19,10 @@ <ul id="archnavbarlist"> <li id="anb-home"><a href="/" title="Parabola news, packages, projects and more">Home</a></li> <li id="anb-packages"><a href="/packages/" title="Package Database">Packages</a></li> - <li id="anb-wiki"><a href="http://wiki.parabolagnulinux.org" title="Community documentation">Wiki</a></li> - <li id="anb-bugs"><a href="https://bugs.parabolagnulinux.org" title="Issue Tracker">Bugs</a></li> - <li id="anb-projects"><a href="https://projects.parabolagnulinux.org" title="Our Code">Projects</a></li> - <li id="anb-download"><a href="http://wiki.parabolagnulinux.org/Download" title="Get Parabola">Download</a></li> + <li id="anb-wiki"><a href="//wiki.parabolagnulinux.org" title="Community documentation">Wiki</a></li> + <li id="anb-bugs"><a href="//bugs.parabolagnulinux.org" title="Issue Tracker">Bugs</a></li> + <li id="anb-projects"><a href="//projects.parabolagnulinux.org" title="Our Code">Projects</a></li> + <li id="anb-download"><a href="//wiki.parabolagnulinux.org/Download" title="Get Parabola">Download</a></li> </ul> </div> </div><!-- #archnavbar --> @@ -32,14 +32,14 @@ {% if user.is_authenticated %} <ul> <li><a href="/devel/" title="Developer Dashboard">Dashboard</a></li> - <li><a href="https://projects.parabolagnulinux.org/" title="Git Projects">Projects</a></li> + <li><a href="//projects.parabolagnulinux.org/" title="Git Projects">Projects</a></li> <li><a href="{% url news-list as newsl %}{{ newsl }}" title="Manage news articles">News</a></li> <li><a href="/packages/signoffs/" title="Package signoffs">Signoffs</a></li> <li><a href="/todo/" title="Developer todo lists">Todos</a></li> <li><a href="/packages/differences/" title="Package architecture differences">Architecture Differences</a></li> <li><a - href="http://lists.parabolagnulinux.org/pipermail/dev/" + href="//lists.parabolagnulinux.org/pipermail/dev/" title="dev mailing list archives">Archives</a></li> <li><a href="/devel/clock/" title="Developer world clocks">Dev Clocks</a></li> {% if user.is_staff %} diff --git a/templates/packages/flag.html b/templates/packages/flag.html index 4bb23b85..ea199618 100644 --- a/templates/packages/flag.html +++ b/templates/packages/flag.html @@ -29,7 
+29,7 @@ <h2>Flag Package: {{ package.pkgname }} {{ package.full_version }} ({{ package.a <p><strong>Note:</strong> Do <em>not</em> use this facility if the package is broken! The package will be unflagged and the report will be ignored! - <a href="https://bugs.parabolagnulinux.org/" title="Parabola Bugtracker">Use the + <a href="//bugs.parabolagnulinux.org/" title="Parabola Bugtracker">Use the bugtracker to file a bug</a> instead.</p> <p>Please confirm your flag request for {{package.pkgname}}:</p> diff --git a/templates/packages/flaghelp.html b/templates/packages/flaghelp.html index d60018fa..eac13f83 100644 --- a/templates/packages/flaghelp.html +++ b/templates/packages/flaghelp.html @@ -25,12 +25,12 @@ <h3>Flagging Packages</h3> <p>The message box portion of the flag utility is optional, and meant for short messages only. If you need more than 200 characters for your message, then file a bug report, email the maintainer directly, or send - an email to the <a target="_blank" href="https://lists.parabolagnulinux.org/mailman/listinfo/dev" + an email to the <a target="_blank" href="//lists.parabolagnulinux.org/mailman/listinfo/dev" title="Visit the parabola dev mailing list">parabola mailing list</a> with your additional text.</p> <p><strong>Note:</strong> Please do <em>not</em> use this facility if the - package is broken! Use the <a target="_blank" href="https://bugs.parabolagnulinux.org" + package is broken! Use the <a target="_blank" href="//bugs.parabolagnulinux.org" title="Parabola Bugtracker">bugtracker</a> instead.</p> </body> diff --git a/templates/packages/search.html b/templates/packages/search.html index 4a61298e..bb5c1c8b 100644 --- a/templates/packages/search.html +++ b/templates/packages/search.html @@ -165,7 +165,7 @@ <h3>Package Search</h3> <p>You are browsing the Parabola package database. From here you can find detailed information about packages located in the official supported repositories. If you need the sourceball from where a package is built, you can look at our <a - href='http://repo.parabolagnulinux.org/sources/packages' + href='//repo.parabolagnulinux.org/sources/packages' title='Sourceballed packages'>sources repo</a>.</p> </div> <script type="text/javascript" src="/jsi18n/"></script> diff --git a/templates/public/about.html b/templates/public/about.html index a01a33fe..099d5513 100644 --- a/templates/public/about.html +++ b/templates/public/about.html @@ -53,7 +53,7 @@ <h3>Participate</h3> <li>Host repositories. Mirrors are not abundant.</li> - <li>Take a look at our <a href="http://wiki.parabolagnulinux.org/TODO" title="TODO">TODO list</a></li> + <li>Take a look at our <a href="//wiki.parabolagnulinux.org/TODO" title="TODO">TODO list</a></li> </ul> </div> diff --git a/templates/public/donate.html b/templates/public/donate.html index ef7f252d..c6e055f5 100644 --- a/templates/public/donate.html +++ b/templates/public/donate.html @@ -18,7 +18,7 @@ <h3>We don't accept any money donations</h3> anything, because we are a really small community of hackers.</p> <p>If you want, we have a pretty nice <a - href='http://wiki.parabolagnulinux.org/TODO' title='The TODO + href='//wiki.parabolagnulinux.org/TODO' title='The TODO list!'>TODO list</a> you can check to help us by donating some of your time. 
That will be very much appreciated by us :)</p> diff --git a/templates/public/download.html b/templates/public/download.html index 207414dc..2e1024b0 100644 --- a/templates/public/download.html +++ b/templates/public/download.html @@ -23,14 +23,14 @@ <h3>Release Info</h3> <li><strong>Resources:</strong> <ul> <li><a - href="https://lists.parabolagnulinux.org/mailman/listinfo/dev" + href="//lists.parabolagnulinux.org/mailman/listinfo/dev" title="Parabola Hackers Discussion List">Mailing List</a></li> </ul> </li> <li><strong>Instructions:</strong> <ul> <li><a - href="http://wiki.parabolagnulinux.org/installation_guide" + href="//wiki.parabolagnulinux.org/installation_guide" title="Official Installation Guide">Parabola Install Guide</a>. </li> </ul> @@ -42,7 +42,7 @@ <h3>Existing Arch Users</h3> <p>If you are an Arch user, there is no need to download the ISO to update your existing system to Parabola. You can just follow the instructions in our wiki to convert your existing Arch system into a free - as in freedom one. <a href='http://wiki.parabolagnulinux.org/migration' + as in freedom one. <a href='//wiki.parabolagnulinux.org/migration' title='Migration Guide'>More here.</a></p> <h3>BitTorrent Download (recommended)</h3> @@ -68,12 +68,12 @@ <h3>BitTorrent Download (recommended)</h3> </td> <td class="cpu-arch"> - <a href="http://repo.parabolagnulinux.org/isos/i686/parabola-{{version}}-netinstall-i686.iso.torrent" + <a href="//repo.parabolagnulinux.org/isos/i686/parabola-{{version}}-netinstall-i686.iso.torrent" title="Download for i686 architecture">Download</a> </td> <td class="cpu-arch"> - <a href="http://repo.parabolagnulinux.org/isos/x86_64/parabola-{{version}}-netinstall-x86_64.iso.torrent" + <a href="//repo.parabolagnulinux.org/isos/x86_64/parabola-{{version}}-netinstall-x86_64.iso.torrent" title="Download for x86-64 architecture">Download</a> </td> @@ -94,10 +94,10 @@ <h3>BitTorrent Download (recommended)</h3> <td> Core Image </td><td class="cpu-arch"> - <a href="http://repo.parabolagnulinux.org/isos/i686/parabola-{{version}}-core-i686.iso.torrent" + <a href="//repo.parabolagnulinux.org/isos/i686/parabola-{{version}}-core-i686.iso.torrent" title="Download for i686 architecture">Download</a> </td><td class="cpu-arch"> - <a href="http://repo.parabolagnulinux.org/isos/x86_64/parabola-{{version}}-core-x86_64.iso.torrent" + <a href="//repo.parabolagnulinux.org/isos/x86_64/parabola-{{version}}-core-x86_64.iso.torrent" title="Download for x86-64 architecture">Download</a> </td><td class="magnet-link"> <a href="magnet:?xt=urn:btih:d9bb9f9641a222d2d302988da95225f570bcdb6d&dn=parabola-2010.12.29-core-i686.iso&tr=http%3A%2F%2Ftracker.publicbt.com%2Fannounce" @@ -119,7 +119,7 @@ <h3>HTTP/FTP Direct Downloads</h3> image matches the checksum from the MD5SUMS or SHA256SUMS file in the same directory as the image.</p> - <p><a href='http://repo.parabolagnulinux.org/isos/' title='Parabola ISOs + <p><a href='//repo.parabolagnulinux.org/isos/' title='Parabola ISOs directory'>Go to the Parabola ISOs directory.</a></p> {%endwith%} diff --git a/templates/public/feeds.html b/templates/public/feeds.html index da70e6ee..69789150 100644 --- a/templates/public/feeds.html +++ b/templates/public/feeds.html @@ -17,10 +17,10 @@ <h3>News and Activity Feeds</h3> the Parabola staff.</p> <p>The <a - href="https://wiki.parabolagnulinux.org/index.php?title=Special:RecentChanges&feed=atom" + href="//wiki.parabolagnulinux.org/index.php?title=Special:RecentChanges&feed=atom" title="ParabolaWiki Recent Changes feed" 
class="rss">Parabola Wiki: Recent changes feed</a> is also available to track document changes from the - <a href="http://wiki.parabolagnulinux.org/" title="Parabola Wiki community + <a href="//wiki.parabolagnulinux.org/" title="Parabola Wiki community documentation">Parabola Wiki</a>.</p> <h3>Package Feeds</h3> diff --git a/templates/public/https.html b/templates/public/https.html index 7cfe44e9..e53dc8e9 100644 --- a/templates/public/https.html +++ b/templates/public/https.html @@ -19,9 +19,8 @@ <h2 class="title">Parabola GNU/Linux-libre</h2> <h3>I just want to get the ISOs</h3> <p>You can proceed to our <a - href="http://wiki.parabolagnulinux.org/get">ISOs download page</a> on <a - href="http://wiki.parabolagnulinux.org/">our wiki</a> (which isn't secured - yet, sadly).</p> + href="https://wiki.parabolagnulinux.org/get">ISOs download page</a> on <a + href="https://wiki.parabolagnulinux.org/">our wiki</a>.</p> <h3>I want to do this the right way</h3> diff --git a/templates/public/index.html b/templates/public/index.html index 044b586f..188c572f 100644 --- a/templates/public/index.html +++ b/templates/public/index.html @@ -112,22 +112,22 @@ <h3>Recent Updates <span class="more">(<a href="/packages/?sort=-last_update" <h4>Documentation</h4> <ul> - <li><a href="http://wiki.parabolagnulinux.org/" + <li><a href="//wiki.parabolagnulinux.org/" title="Community documentation">Wiki</a></li> - <li><a href="http://wiki.parabolagnulinux.org/installation_guide" + <li><a href="//wiki.parabolagnulinux.org/installation_guide" title="Parabola Installation Guide">Official Parabola Installation Guide</a></li> - <li><a href="http://wiki.parabolagnulinux.org/Migration" + <li><a href="//wiki.parabolagnulinux.org/Migration" title="Free your Arch instalation">Migration from Archlinux</a></li> </ul> <h4>Community</h4> <ul> - <li><a href="http://lists.parabolagnulinux.org/" + <li><a href="//lists.parabolagnulinux.org/" title="Community and developer mailing lists">Mailing Lists</a></li> - <li><a href="https://lists.parabolagnulinux.org/pipermail/dev/" + <li><a href="//lists.parabolagnulinux.org/pipermail/dev/" title="dev mailing list archives">Dev Archives</a></li> - <li><a href="http://wiki.parabolagnulinux.org/IRC_Channels" + <li><a href="//wiki.parabolagnulinux.org/IRC_Channels" title="Official and regional IRC communities">IRC Channels</a></li> <li><a href="http://identi.ca/group/parabola" title="Parabola at identi.ca">Identi.ca group</a></li> </ul> @@ -152,9 +152,9 @@ <h4>Development</h4> title="View/search the package repository database">Packages</a></li> <li><a href="/groups/" title="View the available package groups">Package Groups</a></li> - <li><a href="https://projects.parabolagnulinux.org" + <li><a href="//projects.parabolagnulinux.org" title="Official Parabola projects (git)">Projects in Git</a></li> - <li><a href="https://bugs.parabolagnulinux.org/" + <li><a href="//bugs.parabolagnulinux.org/" title="Parabola's Issue Tracker">Issue Tracker</a></li> <li><a href="/todolists/" title="Hacker Todo Lists">Todo Lists</a></li> @@ -166,7 +166,7 @@ <h4>About</h4> <li><a href="{% url page-about %}" title="Learn more about Parabola">About Parabola</a></li> <li><a href="/download/" title="Get Parabola">Download Parabola</a></li> - <li><a href="http://wiki.parabolagnulinux.org/Media" + <li><a href="//wiki.parabolagnulinux.org/Media" title="Parabola in the media">Media Appearances</a></li> <li><a href="{% url page-art %}" title="Parabola logos and other artwork for promotional use">Logos & Artwork</a></li> <li><a 
href="{% url news-list %}" title="News Archives">News Archives</a></li> diff --git a/templates/public/svn.html b/templates/public/svn.html index 80367eaf..4321f7c2 100644 --- a/templates/public/svn.html +++ b/templates/public/svn.html @@ -4,7 +4,7 @@ <div class="box"> <h2 class="title">SVN Repositories</h2> <p>Parabola doesn't use any SVN repositories. But you can find our <a - href="https://projects.parabolagnulinux.org" title="Projects + href="//projects.parabolagnulinux.org" title="Projects page">Projects</a> on git! </p> -- cgit v1.2.3-54-g00ecf From d265eb8b328fc58879618d20177967c51fde383e Mon Sep 17 00:00:00 2001 From: Luke Shumaker <LukeShu@sbcglobal.net> Date: Fri, 2 Dec 2011 00:13:47 -0500 Subject: These for references to arch slipped through. --- packages/management/commands/signoff_report.py | 2 +- packages/views/flag.py | 2 +- releng/views.py | 2 +- templates/packages/flag.html | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/management/commands/signoff_report.py b/packages/management/commands/signoff_report.py index 3b67f518..f822c8ad 100644 --- a/packages/management/commands/signoff_report.py +++ b/packages/management/commands/signoff_report.py @@ -119,7 +119,7 @@ def generate_report(email, repo_name): 'old_days': old_days, 'leaders': leaders, }) - from_addr = 'Arch Website Notification <nobody@archlinux.org>' + from_addr = 'Parabola Website Notification <nobody@parabolagnulinux.org>' send_mail(subject, t.render(c), from_addr, [email]) # vim: set ts=4 sw=4 et: diff --git a/packages/views/flag.py b/packages/views/flag.py index 7e9d87c7..5db2ea69 100644 --- a/packages/views/flag.py +++ b/packages/views/flag.py @@ -71,7 +71,7 @@ def flag(request, name, repo, arch): }) send_mail(subject, tmpl.render(ctx), - 'Arch Website Notification <nobody@archlinux.org>', + 'Parabola Website Notification <nobody@parabolagnulinux.org>', toemail, fail_silently=True) diff --git a/releng/views.py b/releng/views.py index 2b3d0936..e17a6e9c 100644 --- a/releng/views.py +++ b/releng/views.py @@ -42,7 +42,7 @@ class TestForm(forms.ModelForm): success = forms.BooleanField( help_text="Only check this if everything went fine. " \ "If you ran into problems please create a ticket on <a " \ - "href=\"https://bugs.archlinux.org/index.php?project=6\">the " \ + "href=\"//bugs.parabolagnulinux.org/\">the " \ "bugtracker</a> (or check that one already exists) and link to " \ "it in the comments.", required=False) diff --git a/templates/packages/flag.html b/templates/packages/flag.html index ea199618..bb6b274f 100644 --- a/templates/packages/flag.html +++ b/templates/packages/flag.html @@ -23,8 +23,8 @@ <h2>Flag Package: {{ package.pkgname }} {{ package.full_version }} ({{ package.a <p>The message box portion of the flag utility is optional, and meant for short messages only. 
If you need more than 200 characters for your message, then file a bug report, email the maintainer directly, or send - an email to the <a href="http://mailman.archlinux.org/mailman/listinfo/arch-general" - title="Visit the arch-general mailing list">arch-general mailing list</a> + an email to the <a href="//lists.parabolagnulinux.org/mailman/listinfo/dev" + title="Visit the dev mailing list">Parabola Development mailing list</a> with your additional text.</p> <p><strong>Note:</strong> Do <em>not</em> use this facility if the -- cgit v1.2.3-54-g00ecf From 3dc0acff1a1de5688c0ae2bebe72541f81bdf374 Mon Sep 17 00:00:00 2001 From: Johannes Krampf <johannes.krampf@gmail.com> Date: Fri, 2 Dec 2011 19:15:58 +0100 Subject: Spelling in Readme --- README | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README b/README index 0e9d19af..497f4fe9 100644 --- a/README +++ b/README @@ -25,7 +25,7 @@ Worry about that in step 3. # Testing Installation Throughout this, we assume that parabolaweb is installed in a -directory called `parabolaweb`. This is not nescessarily true. On the +directory called `parabolaweb`. This is not necessarily true. On the main server, it's in `/srv/http/web'. Wherever you see this in a command, know that you should just replace it with the correct path for your install. @@ -35,7 +35,7 @@ for your install. $ cd /path/to/parabolaweb && virtualenv2 `pwd`-env Here I just had you use `pwd` to choose the environment - directory. You can use anything, but it is recomended that it not + directory. You can use anything, but it is recommended that it not be the same directory as the install. 2. Activate the virtualenv. @@ -44,9 +44,9 @@ for your install. 3. Fix symlink to the environment - (parabolaweb-env) $ ln -sf ../../parabolaweb-env/lib/python2.7/site-packages/django/contrib/admin/media media/admin_meda + (parabolaweb-env) $ ln -sf ../../parabolaweb-env/lib/python2.7/site-packages/django/contrib/admin/media media/admin_media - Of ourse change `../../parabolaweb-env` to the relative path to + Of course change `../../parabolaweb-env` to the relative path to your environment. Keep in mind that the path is relative from inside the `media/` directory, not the current directory. -- cgit v1.2.3-54-g00ecf From 0dc6c41899def8127b8176f4ea2d863757b19560 Mon Sep 17 00:00:00 2001 From: Johannes Krampf <johannes.krampf@gmail.com> Date: Fri, 2 Dec 2011 19:16:20 +0100 Subject: Fix bug tracker URLs --- packages/templatetags/package_extras.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/templatetags/package_extras.py b/packages/templatetags/package_extras.py index 01d9afd6..bcbda210 100644 --- a/packages/templatetags/package_extras.py +++ b/packages/templatetags/package_extras.py @@ -92,7 +92,7 @@ def svn_trunk(package): @register.simple_tag def bugs_list(package): - url = "https://bugs.parabolagnulinux.org/bugs/issue?" + url = "https://bugs.parabolagnulinux.org/bugs/issue" data = { '@action': 'search', 'title': package.pkgname, @@ -101,7 +101,7 @@ def bugs_list(package): @register.simple_tag def bug_report(package): - url = "https://bugs.parabolagnulinux.org/bugs/issue?" + url = "https://bugs.parabolagnulinux.org/bugs/issue" data = { '@template': 'item', 'keyword': 'packages', -- cgit v1.2.3-54-g00ecf From 8ceb83e52897c1c0bccc6322cffb0e864664afd1 Mon Sep 17 00:00:00 2001 From: Johannes Krampf <johannes.krampf@gmail.com> Date: Fri, 2 Dec 2011 19:46:36 +0100 Subject: Fix download link. Solution looks a bit unclean to me. 
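(The "unclean" part is the hard-coded fallback in the helper below:
packages built for the "any" architecture are typically present in every
per-arch repo directory, so the template tag just picks i686 instead of
negotiating a real one. For illustration, with a hypothetical core/x86_64
package ``foo-1.0-1``:

    >>> get_download_link(pkg)
    'https://repo.parabolagnulinux.org/core/os/x86_64/foo-1.0-1-x86_64.pkg.tar.xz'
)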
--- packages/templatetags/package_extras.py | 12 ++++++++++++ templates/packages/details.html | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/templatetags/package_extras.py b/packages/templatetags/package_extras.py index bcbda210..c31990d7 100644 --- a/packages/templatetags/package_extras.py +++ b/packages/templatetags/package_extras.py @@ -72,6 +72,18 @@ def userpkgs(user): ) return '' +@register.simple_tag +def get_download_link(package): + parts = { + "repo": package.repo.name.lower(), + "arch": package.arch.name, + "pkgfile": package.filename + } + if parts["arch"] == "any": + parts["arch"] = "i686" + linkbase = "https://repo.parabolagnulinux.org/%(repo)s/os/%(arch)s/%(pkgfile)s" + return linkbase % parts + @register.simple_tag def get_wiki_link(package): url = "https://wiki.parabolagnulinux.org/index.php" diff --git a/templates/packages/details.html b/templates/packages/details.html index ef501c83..4570627f 100644 --- a/templates/packages/details.html +++ b/templates/packages/details.html @@ -39,7 +39,7 @@ <h4>Package Actions</h4> onclick="return !window.open('/packages/flaghelp/','FlagHelp', 'height=350,width=450,location=no,scrollbars=yes,menubars=no,toolbars=no,resizable=no');">(?)</a></li> {% endif %} - <li><a href="download/" rel="nofollow" title="Download {{ pkg.pkgname }} from mirror">Download From Mirror</a></li> + <li><a href="{% get_download_link pkg %}" rel="nofollow" title="Download {{ pkg.pkgname }} from mirror">Download From Mirror</a></li> </ul> {% if perms.main.change_package %} -- cgit v1.2.3-54-g00ecf From 4b819c267d6973e634e06f599b5d64cec26cfd95 Mon Sep 17 00:00:00 2001 From: Johannes Krampf <johannes.krampf@gmail.com> Date: Fri, 2 Dec 2011 20:11:03 +0100 Subject: Add database settings for PostgreSQL --- README | 4 ++-- local_settings.py.example | 13 +++++++++++++ 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/README b/README index 497f4fe9..6f9075ac 100644 --- a/README +++ b/README @@ -56,8 +56,8 @@ for your install. (parabolweb-env) $ pip install -r requirements.txt After that you will need to install a database engine for python. - This means `MySQL-python==1.2.3`, `pysqlite` or whatever the - package is for PostgreSQL. Eg: + This means `MySQL-python==1.2.3`, `pysqlite` or `psycopg2` for PostgreSQL. 
+ Eg: (parabolweb-env) $ pip install MySQL-python==1.2.3 You may also want to install memcached diff --git a/local_settings.py.example b/local_settings.py.example index 4ad87a36..b5883720 100644 --- a/local_settings.py.example +++ b/local_settings.py.example @@ -27,6 +27,19 @@ DATABASES = { }, } +## PostgreSQL Database settings +#DATABASES = { +# 'default': { +# 'ENGINE' : 'django.db.backends.postgresql_psycopg2', +# 'NAME' : 'parabola', +# 'USER' : 'parabola', +# 'PASSWORD': 'parabola', +# 'HOST' : '', +# 'PORT' : '', +# 'OPTIONS' : {}, +# }, +#} + ## sqlite3 Database settings #DATABASES = { # 'default': { -- cgit v1.2.3-54-g00ecf From 04e356c5a826ab5838e280f4cafd2b4306749cea Mon Sep 17 00:00:00 2001 From: Johannes Krampf <johannes.krampf@gmail.com> Date: Sat, 3 Dec 2011 12:16:53 +0100 Subject: Add mips64el to arches fixture --- main/fixtures/arches.json | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/main/fixtures/arches.json b/main/fixtures/arches.json index 6334c2d3..1e9dbc91 100644 --- a/main/fixtures/arches.json +++ b/main/fixtures/arches.json @@ -22,5 +22,13 @@ "agnostic": false, "name": "x86_64" } + }, + { + "pk": 4, + "model": "main.arch", + "fields": { + "agnostic": false, + "name": "mips64el" + } } ] -- cgit v1.2.3-54-g00ecf From e81d8848650b88af2ed08c4e4fa9e6d98757aebc Mon Sep 17 00:00:00 2001 From: Johannes Krampf <johannes.krampf@gmail.com> Date: Sat, 3 Dec 2011 13:05:44 +0100 Subject: Re-enable fixture import by deleting the offending permissions for now --- main/fixtures/groups.json | 22 +--------------------- 1 file changed, 1 insertion(+), 21 deletions(-) diff --git a/main/fixtures/groups.json b/main/fixtures/groups.json index 8a6b2287..aa826b83 100644 --- a/main/fixtures/groups.json +++ b/main/fixtures/groups.json @@ -10,11 +10,6 @@ "main", "package" ], - [ - "add_signoff", - "main", - "signoff" - ], [ "add_todolist", "main", @@ -39,16 +34,6 @@ "delete_todolistpkg", "main", "todolistpkg" - ], - [ - "add_news", - "news", - "news" - ], - [ - "change_news", - "news", - "news" ] ] } @@ -341,11 +326,6 @@ "main", "package" ], - [ - "add_signoff", - "main", - "signoff" - ], [ "add_todolist", "main", @@ -403,4 +383,4 @@ ] } } -] \ No newline at end of file +] -- cgit v1.2.3-54-g00ecf From f71f3e23eda8e1ff447340812025ab724aff14cb Mon Sep 17 00:00:00 2001 From: Johannes Krampf <johannes.krampf@gmail.com> Date: Sat, 3 Dec 2011 14:18:07 +0100 Subject: Fix differences query if there are more than 2 architectures --- packages/utils.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/packages/utils.py b/packages/utils.py index f8e1f2a1..82d47bc7 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -106,7 +106,11 @@ def get_differences_info(arch_a, arch_b): AND p.arch_id != q.arch_id AND p.id != q.id ) - WHERE p.arch_id IN (%s, %s) + WHERE p.arch_id in (%s, %s) + AND ( + q.arch_id in (%s, %s) + OR q.id IS NULL + ) AND ( q.id IS NULL OR p.pkgver != q.pkgver @@ -115,7 +119,7 @@ def get_differences_info(arch_a, arch_b): ) """ cursor = connection.cursor() - cursor.execute(sql, [arch_a.id, arch_b.id]) + cursor.execute(sql, [arch_a.id, arch_b.id, arch_a.id, arch_b.id]) results = cursor.fetchall() # column A will always have a value, column B might be NULL to_fetch = [row[0] for row in results] -- cgit v1.2.3-54-g00ecf From 183c4d9cefa95f46c3fa3a6936f837542426eac2 Mon Sep 17 00:00:00 2001 From: Johannes Krampf <johannes.krampf@gmail.com> Date: Sat, 3 Dec 2011 14:44:42 +0100 Subject: Allow architecure selection for difference view --- 
packages/views/__init__.py | 5 +++-- templates/packages/differences.html | 29 +++++++++++++++++++++++++++++ 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/packages/views/__init__.py b/packages/views/__init__.py index e3264161..bbfe7c9f 100644 --- a/packages/views/__init__.py +++ b/packages/views/__init__.py @@ -226,13 +226,14 @@ def download(request, name, repo, arch): def arch_differences(request): # TODO: we have some hardcoded magic here with respect to the arches. - arch_a = Arch.objects.get(name='i686') - arch_b = Arch.objects.get(name='x86_64') + arch_a = Arch.objects.get(name=request.GET.get('arch_a', 'i686')) + arch_b = Arch.objects.get(name=request.GET.get('arch_b', 'x86_64')) differences = get_differences_info(arch_a, arch_b) context = { 'arch_a': arch_a, 'arch_b': arch_b, 'differences': differences, + 'arches': Arch.objects.filter(agnostic=False) } return direct_to_template(request, 'packages/differences.html', context) diff --git a/templates/packages/differences.html b/templates/packages/differences.html index d9b5f088..0412f8c2 100644 --- a/templates/packages/differences.html +++ b/templates/packages/differences.html @@ -6,6 +6,35 @@ {% if differences %} <div id="differences-filter" class="box filter-criteria"> <h2>Package Differences by Architecture</h2> + <h3>Select architectures</h3> + <form id="arch_selector" method="get" action="."> + <fieldset> + <legend>Select arches</legend> + <div><label for="arch_a" title="Architecture A">Architecture A</label> + <select name="arch_a" id="arch_a"> + {% for arch in arches %} + <option + {% if arch == arch_a %} + selected="selected" + {% endif %} + >{{ arch }}</option> + {% endfor %} + </select> + </div> + <div><label for="arch_b" title="Architecture B">Architecture B</label> + <select name="arch_b" id="arch_b"> + {% for arch in arches %} + <option + {% if arch == arch_b %} + selected="selected" + {% endif %} + >{{ arch }}</option> + {% endfor %} + </select> + </div> + <div><label> </label><input type="submit" title="Show difference between selected architectures"></div> + </fieldset> + </form> <h3>Filter Differences View</h3> <form id="diff_filter" method="post" action="."> <fieldset> -- cgit v1.2.3-54-g00ecf
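One caveat with the patch above: ``Arch.objects.get()`` raises
Arch.DoesNotExist for an unknown ``?arch_a=``/``?arch_b=`` value, which
surfaces as a 500 error rather than a 404. A minimal hardening sketch,
not part of the series:

    from django.shortcuts import get_object_or_404

    def arch_differences(request):
        # unknown architecture names now answer with a 404 instead of a 500
        arch_a = get_object_or_404(Arch, name=request.GET.get('arch_a', 'i686'))
        arch_b = get_object_or_404(Arch, name=request.GET.get('arch_b', 'x86_64'))
        # ... remainder unchanged from the patch above ...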