-rw-r--r-- | .gitignore | 1
-rw-r--r-- | devel/management/commands/reporead.py | 3
-rw-r--r-- | devel/urls.py | 10
-rw-r--r-- | feeds.py | 2
-rw-r--r-- | main/models.py | 68
-rw-r--r-- | mirrors/management/commands/mirrorcheck.py | 36
-rw-r--r-- | mirrors/models.py | 2
-rw-r--r-- | mirrors/utils.py | 2
-rw-r--r-- | mirrors/views.py | 39
-rw-r--r-- | news/urls.py | 14
-rw-r--r-- | packages/urls.py | 37
-rw-r--r-- | packages/views.py | 43
-rw-r--r-- | public/utils.py | 4
-rw-r--r-- | requirements.txt | 4
-rw-r--r-- | requirements_prod.txt | 6
-rw-r--r-- | templates/devel/index.html | 8
-rw-r--r-- | templates/mirrors/status.html | 8
-rw-r--r-- | templates/packages/search.html | 3
-rw-r--r-- | templates/public/index.html | 4
-rw-r--r-- | templates/public/svn.html | 34
-rw-r--r-- | templates/todolists/public_list.html | 92
-rw-r--r-- | todolists/urls.py | 13
-rw-r--r-- | urls.py | 129
23 files changed, 358 insertions, 204 deletions
diff --git a/.gitignore b/.gitignore
--- a/.gitignore
+++ b/.gitignore
@@ -3,4 +3,5 @@
 *.swo
 local_settings.py
 archweb.db
+archweb.db-*
 testing/
diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py
index 6f7fd958..b8b075a6 100644
--- a/devel/management/commands/reporead.py
+++ b/devel/management/commands/reporead.py
@@ -139,7 +139,6 @@ def find_user(userstring):
     for token in name.split():
         # ignore quoted parts; e.g. nicknames in strings
         if re.match(r'^[\'"].*[\'"]$', token):
-            print "token match:", token
             continue
         name_q &= (Q(first_name__icontains=token) |
                 Q(last_name__icontains=token))
@@ -233,6 +232,7 @@ def populate_files(dbpkg, repopkg, force=False):
     dbpkg.files_last_update = datetime.now()
     dbpkg.save()

+@transaction.commit_on_success
 def db_update(archname, reponame, pkgs, options):
     """
     Parses a list and updates the Arch dev database accordingly.
@@ -399,7 +399,6 @@ def validate_arch(arch):
     available_arches = [x.name for x in Arch.objects.all()]
     return arch in available_arches

-@transaction.commit_on_success
 def read_repo(primary_arch, repo_file, options):
     """
     Parses repo.db.tar.gz file and returns exit status.
diff --git a/devel/urls.py b/devel/urls.py
new file mode 100644
index 00000000..23dd2d9f
--- /dev/null
+++ b/devel/urls.py
@@ -0,0 +1,10 @@
+from django.conf.urls.defaults import patterns
+
+urlpatterns = patterns('devel.views',
+    (r'^$', 'index'),
+    (r'^notify/$', 'change_notify'),
+    (r'^profile/$', 'change_profile'),
+    (r'^newuser/$', 'new_user_form'),
+)
+
+# vim: set ts=4 sw=4 et:
diff --git a/feeds.py b/feeds.py
--- a/feeds.py
+++ b/feeds.py
@@ -71,7 +71,7 @@ class PackageFeed(Feed):
             obj['arch'] = a
         if repo != '':
             # feed for a single arch AND repo
-            r = Repo.objects.get(name=repo)
+            r = Repo.objects.get(name__iexact=repo)
             qs = qs.filter(repo=r)
             obj['repo'] = r
         obj['qs'] = qs[:50]
diff --git a/main/models.py b/main/models.py
index 6fdb862f..8858b17b 100644
--- a/main/models.py
+++ b/main/models.py
@@ -5,6 +5,9 @@ from django.contrib.sites.models import Site
 from main.utils import cache_function
 from packages.models import PackageRelation

+from itertools import groupby
+from operator import attrgetter
+
 class UserProfile(models.Model):
     id = models.AutoField(primary_key=True) # not technically needed
     notify = models.BooleanField(
@@ -67,6 +70,9 @@ class Arch(models.Model):
     def __unicode__(self):
         return self.name

+    def __lt__(self, other):
+        return self.name < other.name
+
     class Meta:
         db_table = 'arches'
         ordering = ['name']
@@ -85,6 +91,9 @@ class Repo(models.Model):
     def __unicode__(self):
         return self.name

+    def __lt__(self, other):
+        return self.name < other.name
+
     class Meta:
         db_table = 'repos'
         ordering = ['name']
@@ -151,34 +160,61 @@ class Package(models.Model):
         return len(self.signoffs) >= 2

     @cache_function(300)
+    def applicable_arches(self):
+        '''The list of (this arch) + (available agnostic arches).'''
+        arches = set(Arch.objects.filter(agnostic=True))
+        arches.add(self.arch)
+        return list(arches)
+
+    @cache_function(300)
     def get_requiredby(self):
         """
-        Returns a list of package objects.
+        Returns a list of package objects. An attempt will be made to keep this
+        list slim by including the corresponding package in the same testing
+        category as this package if that check makes sense.
         """
-        arches = list(Arch.objects.filter(agnostic=True))
-        arches.append(self.arch)
         requiredby = Package.objects.select_related('arch', 'repo').filter(
                 packagedepend__depname=self.pkgname,
-                arch__in=arches).distinct()
-        return requiredby.order_by('pkgname')
+                arch__in=self.applicable_arches()
+                ).distinct().order_by('pkgname')
+
+        # find another package by this name in the opposite testing setup
+        if not Package.objects.filter(pkgname=self.pkgname,
+                arch=self.arch).exclude(id=self.id,
+                repo__testing=self.repo.testing).exists():
+            # there isn't one? short circuit, all required by entries are fine
+            return requiredby
+
+        trimmed = []
+        # for each unique package name, try to screen our package list down to
+        # those packages in the same testing category (yes or no) iff there is
+        # a package in the same testing category.
+        for name, pkgs in groupby(requiredby, attrgetter('pkgname')):
+            pkgs = list(pkgs)
+            pkg = pkgs[0]
+            if len(pkgs) > 1:
+                pkgs = [p for p in pkgs if p.repo.testing == self.repo.testing]
+                if len(pkgs) > 0:
+                    pkg = pkgs[0]
+            trimmed.append(pkg)
+        return trimmed

     @cache_function(300)
     def get_depends(self):
         """
-        Returns a list of dicts. Each dict contains ('pkg' and 'dep').
-        If it represents a found package both vars will be available;
-        else pkg will be None if it is a 'virtual' dependency.
+        Returns a list of dicts. Each dict contains ('pkg' and 'dep'). If it
+        represents a found package both vars will be available; else pkg will
+        be None if it is a 'virtual' dependency. Packages will match the
+        testing status of this package if possible.
         """
         deps = []
-        arches = list(Arch.objects.filter(agnostic=True))
-        arches.append(self.arch)
         # TODO: we can use list comprehension and an 'in' query to make this more effective
         for dep in self.packagedepend_set.order_by('depname'):
             pkgs = Package.objects.select_related('arch', 'repo').filter(
                     pkgname=dep.depname)
             if not self.arch.agnostic:
                 # make sure we match architectures if possible
-                pkgs = pkgs.filter(arch__in=arches)
+                pkgs = pkgs.filter(arch__in=self.applicable_arches())
             if len(pkgs) == 0:
                 # couldn't find a package in the DB
                 # it should be a virtual depend (or a removed package)
@@ -189,10 +225,8 @@
                 # more than one package, see if we can't shrink it down
                 # grab the first though in case we fail
                 pkg = pkgs[0]
-                if self.repo.testing:
-                    pkgs = pkgs.filter(repo__testing=True)
-                else:
-                    pkgs = pkgs.filter(repo__testing=False)
+                # prevents yet more DB queries, these lists should be short
+                pkgs = [p for p in pkgs if p.repo.testing == self.repo.testing]
                 if len(pkgs) > 0:
                     pkg = pkgs[0]
             deps.append({'dep': dep, 'pkg': pkg})
@@ -225,11 +259,11 @@ class Package(models.Model):
         repo.testing flag. For any non-split packages, the return value will be
         an empty list.
         """
-        return Package.objects.filter(arch=self.arch,
+        return Package.objects.filter(arch__in=self.applicable_arches,
                 repo__testing=self.repo.testing,
                 pkgbase=self.pkgbase).exclude(id=self.id)

     def get_svn_link(self, svnpath):
-        linkbase = "http://repos.archlinux.org/wsvn/%s/%s/%s/"
+        linkbase = "http://projects.archlinux.org/svntogit/%s.git/tree/%s/%s/"
        return linkbase % (self.repo.svn_root, self.pkgbase, svnpath)

     def get_arch_svn_link(self):
""" - return Package.objects.filter(arch=self.arch, + return Package.objects.filter(arch__in=self.applicable_arches, repo__testing=self.repo.testing, pkgbase=self.pkgbase).exclude(id=self.id) def get_svn_link(self, svnpath): - linkbase = "http://repos.archlinux.org/wsvn/%s/%s/%s/" + linkbase = "http://projects.archlinux.org/svntogit/%s.git/tree/%s/%s/" return linkbase % (self.repo.svn_root, self.pkgbase, svnpath) def get_arch_svn_link(self): diff --git a/mirrors/management/commands/mirrorcheck.py b/mirrors/management/commands/mirrorcheck.py index bab79219..d7aad4e2 100644 --- a/mirrors/management/commands/mirrorcheck.py +++ b/mirrors/management/commands/mirrorcheck.py @@ -10,7 +10,6 @@ Usage: ./manage.py mirrorcheck """ from django.core.management.base import NoArgsCommand -from django.db.models import Q from datetime import datetime, timedelta import logging @@ -51,10 +50,10 @@ class Command(NoArgsCommand): return check_current_mirrors() -def parse_rfc3339_datetime(time): +def parse_rfc3339_datetime(time_string): # '2010-09-02 11:05:06+02:00' m = re.match('^(\d{4})-(\d{2})-(\d{2}) ' - '(\d{2}):(\d{2}):(\d{2})([-+])(\d{2}):(\d{2})', time) + '(\d{2}):(\d{2}):(\d{2})([-+])(\d{2}):(\d{2})', time_string) if m: vals = m.groups() parsed = datetime(int(vals[0]), int(vals[1]), int(vals[2]), @@ -123,46 +122,55 @@ def check_mirror_url(mirror_url): log.error = "Connection timed out." logger.debug("failed: %s, %s" % (url, log.error)) - log.save() return log -def mirror_url_worker(queue): +def mirror_url_worker(work, output): while True: try: - item = queue.get(block=False) + item = work.get(block=False) try: - check_mirror_url(item) + log = check_mirror_url(item) + output.put(log) finally: - queue.task_done() + work.task_done() except Empty: return 0 class MirrorCheckPool(object): def __init__(self, work, num_threads=10): self.tasks = Queue() - for i in work: + self.logs = Queue() + for i in list(work): self.tasks.put(i) self.threads = [] for i in range(num_threads): - thread = Thread(target=mirror_url_worker, args=(self.tasks,)) + thread = Thread(target=mirror_url_worker, + args=(self.tasks, self.logs)) thread.daemon = True self.threads.append(thread) - def run_and_join(self): + def run(self): logger.debug("starting threads") for t in self.threads: t.start() logger.debug("joining on all threads") self.tasks.join() + logger.debug("processing log entries") + try: + while True: + log = self.logs.get(block=False) + log.save() + self.logs.task_done() + except Empty: + logger.debug("all log items saved to database") def check_current_mirrors(): urls = MirrorUrl.objects.filter( - Q(protocol__protocol__iexact='HTTP') | - Q(protocol__protocol__iexact='FTP'), + protocol__is_download=True, mirror__active=True, mirror__public=True) pool = MirrorCheckPool(urls) - pool.run_and_join() + pool.run() return 0 # For lack of a better place to put it, here is a query to get latest check diff --git a/mirrors/models.py b/mirrors/models.py index f1286026..e070b1cd 100644 --- a/mirrors/models.py +++ b/mirrors/models.py @@ -80,7 +80,7 @@ class MirrorLog(models.Model): error = models.CharField(max_length=255, blank=True, default='') def __unicode__(self): - return "Check of %s at %s" % (url.url, check_time) + return "Check of %s at %s" % (self.url.url, self.check_time) class Meta: verbose_name = 'Mirror Check Log' diff --git a/mirrors/utils.py b/mirrors/utils.py index c64d53c5..124b66e6 100644 --- a/mirrors/utils.py +++ b/mirrors/utils.py @@ -51,7 +51,7 @@ def get_mirror_statuses(cutoff=default_cutoff): check_frequency = None 
diff --git a/mirrors/models.py b/mirrors/models.py
index f1286026..e070b1cd 100644
--- a/mirrors/models.py
+++ b/mirrors/models.py
@@ -80,7 +80,7 @@ class MirrorLog(models.Model):
     error = models.CharField(max_length=255, blank=True, default='')

     def __unicode__(self):
-        return "Check of %s at %s" % (url.url, check_time)
+        return "Check of %s at %s" % (self.url.url, self.check_time)

     class Meta:
         verbose_name = 'Mirror Check Log'
diff --git a/mirrors/utils.py b/mirrors/utils.py
index c64d53c5..124b66e6 100644
--- a/mirrors/utils.py
+++ b/mirrors/utils.py
@@ -51,7 +51,7 @@ def get_mirror_statuses(cutoff=default_cutoff):
         check_frequency = None

     for url in urls:
-        url.completion_pct = float(url.success_count) / num_checks
+        url.completion_pct = float(url.success_count) / url.check_count
         if url.id in delays:
             url_delays = delays[url.id]
             d = sum(url_delays, datetime.timedelta()) / len(url_delays)
diff --git a/mirrors/views.py b/mirrors/views.py
index fb7d3361..a2b94de8 100644
--- a/mirrors/views.py
+++ b/mirrors/views.py
@@ -1,10 +1,12 @@
 from django import forms
+from django.core.serializers.json import DjangoJSONEncoder
 from django.db.models import Avg, Count, Max, Min, StdDev
 from django.db.models import Q
-from django.http import Http404
+from django.http import Http404, HttpResponse
 from django.shortcuts import get_object_or_404
 from django.views.decorators.csrf import csrf_exempt
 from django.views.generic.simple import direct_to_template
+from django.utils import simplejson

 from main.utils import make_choice
 from .models import Mirror, MirrorUrl, MirrorProtocol
@@ -34,7 +36,7 @@ class MirrorlistForm(forms.Form):

 @csrf_exempt
 def generate_mirrorlist(request):
-    if request.REQUEST.get('country', ''):
+    if request.method == 'POST' or len(request.GET) > 0:
         form = MirrorlistForm(data=request.REQUEST)
         if form.is_valid():
             countries = form.cleaned_data['country']
@@ -129,4 +131,37 @@ def status(request):
     })
     return direct_to_template(request, 'mirrors/status.html', context)

+class MirrorStatusJSONEncoder(DjangoJSONEncoder):
+    '''Base JSONEncoder extended to handle datetime.timedelta and MirrorUrl
+    serialization. The base class takes care of datetime.datetime types.'''
+    url_attributes = ['url', 'protocol', 'last_sync', 'completion_pct',
+            'delay', 'duration_avg', 'duration_stddev', 'score']
+
+    def default(self, obj):
+        if isinstance(obj, datetime.timedelta):
+            # always returned as integer seconds
+            return obj.days * 24 * 3600 + obj.seconds
+        if hasattr(obj, '__iter__'):
+            # mainly for queryset serialization
+            return list(obj)
+        if isinstance(obj, MirrorUrl):
+            data = {}
+            for attr in self.url_attributes:
+                data[attr] = getattr(obj, attr)
+            # separate because it isn't on the URL directly
+            data['country'] = obj.mirror.country
+            return data
+        if isinstance(obj, MirrorProtocol):
+            return unicode(obj)
+        return super(MirrorStatusJSONEncoder, self).default(obj)
+
+def status_json(request):
+    status_info = get_mirror_statuses()
+    data = status_info.copy()
+    data['version'] = 1
+    to_json = simplejson.dumps(data, ensure_ascii=False,
+            cls=MirrorStatusJSONEncoder)
+    response = HttpResponse(to_json, mimetype='application/json')
+    return response
+
 # vim: set ts=4 sw=4 et:
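MirrorStatusJSONEncoder above uses the standard JSONEncoder extension hook: default() is only consulted for types the base class cannot serialize. A stripped-down sketch of the same technique with only the stdlib json module (no Django), converting timedelta values to the integer-seconds convention the status page documents:

    import json
    from datetime import timedelta

    class TimedeltaEncoder(json.JSONEncoder):
        def default(self, obj):
            if isinstance(obj, timedelta):
                # integer seconds, same convention as MirrorStatusJSONEncoder
                return obj.days * 24 * 3600 + obj.seconds
            return super(TimedeltaEncoder, self).default(obj)

    print(json.dumps({'delay': timedelta(minutes=5, seconds=30)},
                     cls=TimedeltaEncoder))
    # {"delay": 330}

The view itself is exposed at /mirrors/status/json/ by the urls.py changes later in this diff.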
diff --git a/news/urls.py b/news/urls.py
new file mode 100644
index 00000000..548d80ea
--- /dev/null
+++ b/news/urls.py
@@ -0,0 +1,14 @@
+from django.conf.urls.defaults import patterns
+
+urlpatterns = patterns('news.views',
+    (r'^$', 'news_list', {}, 'news-list'),
+    (r'^add/$', 'add'),
+    (r'^preview/$', 'preview'),
+    (r'^(?P<slug>[-\w]+)/$', 'view'),
+    (r'^(?P<slug>[-\w]+)/edit/$', 'edit'),
+    (r'^(?P<slug>[-\w]+)/delete/$', 'delete'),
+    # old news URLs, permanent redirect view so we don't break all links
+    (r'^(?P<object_id>\d+)/$', 'view_redirect'),
+)
+
+# vim: set ts=4 sw=4 et:
diff --git a/packages/urls.py b/packages/urls.py
new file mode 100644
index 00000000..da24ee03
--- /dev/null
+++ b/packages/urls.py
@@ -0,0 +1,37 @@
+from django.conf.urls.defaults import include, patterns
+
+package_patterns = patterns('packages.views',
+    (r'^$', 'details'),
+    (r'^files/$', 'files'),
+    (r'^maintainer/$', 'getmaintainer'),
+    (r'^flag/$', 'flag'),
+    (r'^unflag/$', 'unflag'),
+    (r'^download/$', 'download'),
+)
+
+urlpatterns = patterns('packages.views',
+    (r'^flaghelp/$', 'flaghelp'),
+    (r'^signoffs/$', 'signoffs'),
+    (r'^signoff_package/(?P<arch>[A-z0-9]+)/(?P<pkgname>[A-z0-9\-+.]+)/$',
+        'signoff_package'),
+    (r'^update/$', 'update'),
+
+    # Preference is for the non-search url below, but search is kept
+    # because other projects link to it
+    (r'^search/$', 'search'),
+    (r'^search/(?P<page>\d+)/$', 'search'),
+    (r'^$', 'search'),
+    (r'^(?P<page>\d+)/$', 'search'),
+
+    (r'^differences/$', 'arch_differences'),
+
+    (r'^(?P<name>[A-z0-9\-+.]+)/$',
+        'details'),
+    (r'^(?P<repo>[A-z0-9\-]+)/(?P<name>[A-z0-9\-+.]+)/$',
+        'details'),
+    # canonical package url. subviews defined above
+    (r'^(?P<repo>[A-z0-9\-]+)/(?P<arch>[A-z0-9]+)/(?P<name>[A-z0-9\-+.]+)/',
+        include(package_patterns)),
+)
+
+# vim: set ts=4 sw=4 et:
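The new packages/urls.py is a two-level include(): the project urls.py (changed at the end of this diff) mounts it under ^packages/, and the canonical repo/arch/name pattern hands the remainder of the path to package_patterns. A hand-rolled illustration of how the prefixes compose; this is not Django's actual resolver, and the sample path is made up:

    import re

    # regexes copied from the patterns above
    project = r'^packages/'
    canonical = r'^(?P<repo>[A-z0-9\-]+)/(?P<arch>[A-z0-9]+)/(?P<name>[A-z0-9\-+.]+)/'
    subviews = {r'^$': 'details', r'^files/$': 'files', r'^flag/$': 'flag'}

    def resolve(path):
        # include() matches a prefix, then hands the rest of the path
        # to the included patterns; mimic that by hand.
        kwargs = {}
        for prefix in (project, canonical):
            m = re.match(prefix, path)
            if m is None:
                return None
            kwargs.update(m.groupdict())
            path = path[m.end():]
        for pattern, view in subviews.items():
            if re.match(pattern, path):
                return view, kwargs

    print(resolve('packages/extra/x86_64/pacman/files/'))
    # ('files', {'repo': 'extra', 'arch': 'x86_64', 'name': 'pacman'})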
diff --git a/packages/views.py b/packages/views.py
index 40e8a08e..77fa3a07 100644
--- a/packages/views.py
+++ b/packages/views.py
@@ -37,38 +37,43 @@ def opensearch(request):
 @permission_required('main.change_package')
 def update(request):
     ids = request.POST.getlist('pkgid')
-    mode = None
-    if request.POST.has_key('adopt'):
-        mode = 'adopt'
-    if request.POST.has_key('disown'):
-        mode = 'disown'
+    count = 0

-    if mode:
+    if request.POST.has_key('adopt'):
         repos = request.user.userprofile.allowed_repos.all()
         pkgs = Package.objects.filter(id__in=ids, repo__in=repos)
         disallowed_pkgs = Package.objects.filter(id__in=ids).exclude(
                 repo__in=repos)
-        count = 0
+
+        if disallowed_pkgs:
+            messages.warning(request,
+                    "You do not have permission to adopt: %s." % (
+                        ' '.join([p.pkgname for p in disallowed_pkgs])
+                        ))
+
         for pkg in pkgs:
-            maints = pkg.maintainers
-            if mode == 'adopt' and request.user not in maints:
+            if request.user not in pkg.maintainers:
                 prel = PackageRelation(pkgbase=pkg.pkgbase,
                         user=request.user,
                         type=PackageRelation.MAINTAINER)
                 count += 1
                 prel.save()
-            elif mode == 'disown' and request.user in maints:
+
+        messages.info(request, "%d base packages adopted." % count)
+
+    elif request.POST.has_key('disown'):
+        # allow disowning regardless of allowed repos, helps things like
+        # [community] -> [extra] moves
+        for pkg in Package.objects.filter(id__in=ids):
+            if request.user in pkg.maintainers:
                 rels = PackageRelation.objects.filter(pkgbase=pkg.pkgbase,
-                        user=request.user)
+                        user=request.user,
+                        type=PackageRelation.MAINTAINER)
                 count += rels.count()
                 rels.delete()

-        messages.info(request, "%d base packages %sed." % (count, mode))
-        if disallowed_pkgs:
-            messages.warning(request,
-                    "You do not have permission to %s: %s" % (
-                    mode, ' '.join([p.pkgname for p in disallowed_pkgs])
-                    ))
+        messages.info(request, "%d base packages disowned." % count)
+
     else:
         messages.error(request, "Are you trying to adopt or disown?")

     return redirect('/packages/')
@@ -199,7 +204,7 @@ def search(request, page=None):
     if packages.count() == 1:
         return redirect(packages[0])

-    allowed_sort = ["arch", "repo", "pkgname", "last_update"]
+    allowed_sort = ["arch", "repo", "pkgname", "last_update", "flag_date"]
     allowed_sort += ["-" + s for s in allowed_sort]
     sort = request.GET.get('sort', None)
     # TODO: sorting by multiple fields makes using a DB index much harder
@@ -208,7 +213,7 @@
                 request.GET['sort'], 'repo', 'arch', 'pkgname')
         page_dict['sort'] = sort
     else:
-        packages = packages.order_by('repo', 'arch', '-last_update', 'pkgname')
+        packages = packages.order_by('pkgname')

     return list_detail.object_list(request, packages,
             template_name="packages/search.html",
diff --git a/public/utils.py b/public/utils.py
index 2801c939..81f589f7 100644
--- a/public/utils.py
+++ b/public/utils.py
@@ -13,7 +13,7 @@ def get_recent_updates():
         # grab a few extra so we can hopefully catch everything we need
         pkgs += list(Package.objects.select_related(
             'arch', 'repo').filter(arch=arch).order_by('-last_update')[:50])
-    pkgs.sort(key=lambda q: q.last_update)
+    pkgs.sort(key=attrgetter('last_update'))
     updates = []
     ctr = 0
     while ctr < 15 and len(pkgs) > 0:
@@ -22,7 +22,7 @@ def get_recent_updates():
         is_same = lambda q: p.is_same_version(q) and p.repo == q.repo
         samepkgs = filter(is_same, pkgs)
         samepkgs.append(p)
-        samepkgs.sort(key=attrgetter('arch.name'))
+        samepkgs.sort(key=attrgetter('arch'))
         updates.append(samepkgs)
         for q in samepkgs:
             if p != q: pkgs.remove(q)
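The samepkgs.sort(key=attrgetter('arch')) change in public/utils.py only works because this same commit gives Arch (and Repo) a __lt__ comparing names, so model instances order naturally. A standalone sketch with throwaway stand-in classes:

    from operator import attrgetter

    class FakeArch(object):          # stand-in for main.models.Arch
        def __init__(self, name):
            self.name = name
        def __lt__(self, other):     # same idea as the new Arch.__lt__
            return self.name < other.name
        def __repr__(self):
            return self.name

    class FakePkg(object):           # stand-in for main.models.Package
        def __init__(self, arch):
            self.arch = arch

    pkgs = [FakePkg(FakeArch('x86_64')), FakePkg(FakeArch('any')),
            FakePkg(FakeArch('i686'))]
    pkgs.sort(key=attrgetter('arch'))
    print([p.arch for p in pkgs])    # [any, i686, x86_64]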
diff --git a/requirements.txt b/requirements.txt
index 6d858a11..f4d80eeb 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,3 +1,3 @@
-Django==1.2.3
+Django==1.2.4
 Markdown==2.0.3
-South==0.7.2
+South==0.7.3
diff --git a/requirements_prod.txt b/requirements_prod.txt
index 7f5abb55..49ca44c4 100644
--- a/requirements_prod.txt
+++ b/requirements_prod.txt
@@ -1,5 +1,5 @@
-Django==1.2.3
+Django==1.2.4
 Markdown==2.0.3
 MySQL-python==1.2.3c1
-South==0.7.2
-python-memcached==1.45
+South==0.7.3
+python-memcached==1.47
diff --git a/templates/devel/index.html b/templates/devel/index.html
index e1fe8deb..54bb317d 100644
--- a/templates/devel/index.html
+++ b/templates/devel/index.html
@@ -11,7 +11,7 @@
 <table id="dash-myflagged" class="results dash-stats">
     <thead>
         <tr>
-            <th class="key">Name</th>
+            <th>Name</th>
             <th>Repo</th>
             <th>Version</th>
             <th>Arch</th>
@@ -42,7 +42,7 @@
     <thead>
         <tr>
             <th>Todo List</th>
-            <th class="key">Name</th>
+            <th>Name</th>
             <th>Repo</th>
             <th>Arch</th>
             <th>Maintainer(s)</th>
@@ -78,10 +78,10 @@
     <tbody>
         {% for todo in todos %}
         <tr class="{% cycle 'odd' 'even' %}">
-            <td class="key"><a href="{{ todo.get_absolute_url }}"
+            <td><a href="{{ todo.get_absolute_url }}"
                 title="View todo list: {{ todo.name }}">{{ todo.name }}</a></td>
             <td>{{ todo.date_added }}</td>
-            <td>{{ todo.description|safe }}</td>
+            <td class="wrap">{{ todo.description|safe }}</td>
         </tr>
         {% empty %}
         <tr class="empty"><td colspan="3"><em>No package todo lists to display</em></td></tr>
diff --git a/templates/mirrors/status.html b/templates/mirrors/status.html
index 7d022799..cd56f8f9 100644
--- a/templates/mirrors/status.html
+++ b/templates/mirrors/status.html
@@ -50,8 +50,12 @@
     </ul>

     <p>The last mirror check ran at {{ last_check|date:'Y-m-d H:i' }} UTC.
-    Checks have ran {{ num_checks }} times in the last {{ cutoff|hours }} at an average
-    interval of {{ check_frequency|duration }} (hh:mm).</p>
+    Checks have ran {{ num_checks }} times in the last {{ cutoff|hours }} at
+    an average interval of {{ check_frequency|duration }} (hh:mm).</p>
+
+    <p>This data is also available in <a href="json/">JSON format</a>. The
+    units of all time/duration values are in seconds; date/time values are
+    UTC.</p>

     <a name="outofsync" id="outofsync"></a>
     <h3>Out of Sync Mirrors</h3>
diff --git a/templates/packages/search.html b/templates/packages/search.html
index 6e430487..f23c93b8 100644
--- a/templates/packages/search.html
+++ b/templates/packages/search.html
@@ -84,6 +84,8 @@
             <th>Description</th>
             <th><a href="/packages/{% buildsortqs "-last_update" %}"
                 title="Sort packages by last update">Last Updated</a></th>
+            <th><a href="/packages/{% buildsortqs "-flag_date" %}"
+                title="Sort packages by when marked-out of-date">Flag Date</a></th>
         </tr>
     </thead>
     <tbody>
@@ -103,6 +105,7 @@
         {% endif %}
         <td class="wrap">{{ pkg.pkgdesc }}</td>
         <td>{{ pkg.last_update|date }}</td>
+        <td>{{ pkg.flag_date|date }}</td>
     </tr>
     {% endfor %}
     </tbody>
diff --git a/templates/public/index.html b/templates/public/index.html
index 708c5347..6555debc 100644
--- a/templates/public/index.html
+++ b/templates/public/index.html
@@ -75,10 +75,10 @@
     {% for update in pkg_updates %}
     {% with update|first as fpkg %}
     <tr>
-        <td class="pkg-name"><span class="{{ fpkg.repo.name|lower }}">{{ fpkg.pkgname }} {{ fpkg.pkgver }}-{{ fpkg.pkgrel }}</span></td>
+        <td class="pkg-name"><span class="{{ fpkg.repo|lower }}">{{ fpkg.pkgname }} {{ fpkg.pkgver }}-{{ fpkg.pkgrel }}</span></td>
         <td class="pkg-arch">
             {% for pkg in update %}<a href="{{ pkg.get_absolute_url }}"
-                title="View package details for {{ pkg.pkgname }}">{{ pkg.arch }}</a>{% if not forloop.last %}/{% endif %}{% endfor %}
+                title="Details for {{ pkg.pkgname }} [{{ pkg.repo|lower }}]">{{ pkg.arch }}</a>{% if not forloop.last %}/{% endif %}{% endfor %}
         </td>
     </tr>
     {% endwith %}
diff --git a/templates/public/svn.html b/templates/public/svn.html
index 7b084379..10a58ba6 100644
--- a/templates/public/svn.html
+++ b/templates/public/svn.html
@@ -7,27 +7,29 @@
     The PKGBUILD files can be fetched via the ABS utility. To learn more about
     ABS, see <a href="https://wiki.archlinux.org/index.php/ABS">the ABS wiki page</a>.
     </p>
-    <p>
-    You can view the history of all the PKGBUILD files from the Repository
-    <a href="http://repos.archlinux.org/">WebSVN</a> interface.
-    </p>
+    <p>The SVN repositories have been cloned into git repositories and can be
+    viewed via the cgit interface.
+    <a href="http://projects.archlinux.org/svntogit/packages.git/">All
+    packages</a> are available here except for
+    <a href="http://projects.archlinux.org/svntogit/community.git/">community
+    and multilib</a> which are available in a different repository.</p>
     <p>
     You can also get individual PKGBUILDs directly from SVN. This can be
     especially useful if you need to compile an older version of a package.
     <strong>DO NOT CHECK OUT THE ENTIRE SVN REPO</strong>. Your address may be blocked.
     Use the following commands to check out a specific package:
-    </p>
-    <pre>
-    svn checkout --depth=empty svn://svn.archlinux.org/packages
-    cd packages
-    svn update <your-package-name>
-    </pre>
-    For the community repository, use the following commands instead:
-    <pre>
-    svn checkout --depth=empty svn://svn.archlinux.org/community
-    cd community
-    svn update <your-package-name>
-    </pre>
+    </p>
+
+    <pre>svn checkout --depth=empty svn://svn.archlinux.org/packages
+cd packages
+svn update <your-package-name></pre>
+
+    For the community and multilib repositories, use the following commands
+    instead:
+    <pre>svn checkout --depth=empty svn://svn.archlinux.org/community
+cd community
+svn update <your-package-name></pre>
+
     <p>
     Visit <a href="https://wiki.archlinux.org/index.php?title=Getting_PKGBUILDS_From_SVN">the wiki</a>
     for more tips on checking out and updating svn PKGBUILDs.
diff --git a/templates/todolists/public_list.html b/templates/todolists/public_list.html
index 97a0325a..5b957692 100644
--- a/templates/todolists/public_list.html
+++ b/templates/todolists/public_list.html
@@ -5,44 +5,64 @@
 {% block content %}
 <div class="box">
     <h2>Developer Todo Lists</h2>
-    <div id="public_todo_lists">
-        {% for list in todo_lists %}
-        <h4>{{list.name}}</h4>
-        <div class="todo_list">
-            <p>{{list.description|safe|linebreaks}}</p>
-            <table "todo-pkglist-{{ list.id }}" class="results todo-table">
-                <thead>
-                    <tr>
-                        <th>Name</th>
-                        <th>Arch</th>
-                        <th>Repo</th>
-                        <th>Maintainer</th>
-                        <th>Status</th>
-                    </tr>
-                </thead>
-                <tbody>
-                    {% for pkg in list.packages %}
-                    <tr class="{% cycle 'odd' 'even' %}">
-                        <td><a href="{{ pkg.pkg.get_absolute_url }}"
-                            title="View package details for {{ pkg.pkg.pkgname }}">{{ pkg.pkg.pkgname }}</a></td>
-                        <td>{{ pkg.pkg.arch.name }}</td>
-                        <td>{{ pkg.pkg.repo.name|capfirst }}</td>
-                        <td>{{ pkg.pkg.maintainers|join:', ' }}</td>
-                        <td>
-                            {% if pkg.complete %}
-                            <span class="complete">Complete</a>
-                            {% else %}
-                            <span class="incomplete">Incomplete</a>
-                            {% endif %}
-                        </td>
-                    </tr>
-                    {% endfor %}
-                </tbody>
-            </table>
-        </div>
-        {% endfor %}
+    <div id="public_todo_lists_toc">
+        <h3>Open Developer Todo Lists</h3>
+        <p>Todo lists are used by the developers when a rebuild of a set of
+        packages is needed. This is common when a library has an .so version
+        bump; during a toolchain rebuild, or a general cleanup of packages in
+        the repositories. The progress can be tracked here. Only todo lists
+        with currently incomplete packages are shown.</p>
+        {% if todo_lists %}<ul>
+        {% for list in todo_lists %}
+        <li><a href="#{{ list.id }}">{{ list.name }}</a></li>
+        {% endfor %}
+        </ul>{% else %}
+        <p>There are currently no incomplete developer todo lists.</p>
+        {% endif %}
     </div>
 </div>
+{% if todo_lists %}
+<div id="public_todo_lists">
+    {% for list in todo_lists %}
+    <div class="box">
+        <a name="{{ list.id }}"></a>
+        <h4>{{ list.name }}</h4>
+        <div class="todo_list">
+            <div>{{ list.description|safe|linebreaks }}</div>
+            <table id="todo-pkglist-{{ list.id }}" class="results todo-table">
+                <thead>
+                    <tr>
+                        <th>Name</th>
+                        <th>Arch</th>
+                        <th>Repo</th>
+                        <th>Maintainer</th>
+                        <th>Status</th>
+                    </tr>
+                </thead>
+                <tbody>
+                    {% for pkg in list.packages %}
+                    <tr class="{% cycle 'odd' 'even' %}">
+                        <td><a href="{{ pkg.pkg.get_absolute_url }}"
+                            title="View package details for {{ pkg.pkg.pkgname }}">{{ pkg.pkg.pkgname }}</a></td>
+                        <td>{{ pkg.pkg.arch.name }}</td>
+                        <td>{{ pkg.pkg.repo.name|capfirst }}</td>
+                        <td>{{ pkg.pkg.maintainers|join:', ' }}</td>
+                        <td>
+                            {% if pkg.complete %}
+                            <span class="complete">Complete</span>
+                            {% else %}
+                            <span class="incomplete">Incomplete</span>
+                            {% endif %}
+                        </td>
+                    </tr>
+                    {% endfor %}
+                </tbody>
+            </table>
+        </div>
+    </div>
+    {% endfor %}
+</div>
+{% endif %}
 {% load cdn %}{% jquery %}
 <script type="text/javascript" src="/media/jquery.tablesorter.min.js"></script>
 <script type="text/javascript">
diff --git a/todolists/urls.py b/todolists/urls.py
new file mode 100644
index 00000000..187d4820
--- /dev/null
+++ b/todolists/urls.py
@@ -0,0 +1,13 @@
+from django.conf.urls.defaults import patterns
+
+urlpatterns = patterns('todolists.views',
+    (r'^$', 'list'),
+    (r'^(\d+)/$', 'view'),
+    (r'^add/$', 'add'),
+    (r'^edit/(?P<list_id>\d+)/$', 'edit'),
+    (r'^flag/(\d+)/(\d+)/$', 'flag'),
+    (r'^delete/(?P<object_id>\d+)/$',
+        'delete_todolist'),
+)
+
+# vim: set ts=4 sw=4 et:
diff --git a/urls.py b/urls.py
--- a/urls.py
+++ b/urls.py
@@ -4,7 +4,6 @@
 from django.contrib import admin
 from django.views.generic.simple import direct_to_template

-from main.models import Todolist
 from feeds import PackageFeed, NewsFeed
 import sitemaps

@@ -17,78 +16,35 @@ sitemaps = {

 admin.autodiscover()

-urlpatterns = patterns('',
-    (r'^packages/flaghelp/$', 'packages.views.flaghelp'),
-    (r'^packages/signoffs/$', 'packages.views.signoffs'),
-    (r'^packages/signoff_package/(?P<arch>[A-z0-9]+)/(?P<pkgname>[A-z0-9\-+.]+)/$',
-        'packages.views.signoff_package'),
-    (r'^packages/update/$', 'packages.views.update'),
-
-    # Preference is for the packages/ url below, but search is kept
-    # because other projects link to it
-    (r'^packages/search/$', 'packages.views.search'),
-    (r'^packages/search/(?P<page>\d+)/$', 'packages.views.search'),
-    (r'^packages/differences/$', 'packages.views.arch_differences'),
-    (r'^packages/$', 'packages.views.search'),
-    (r'^packages/(?P<page>\d+)/$', 'packages.views.search'),
-
-    (r'^packages/(?P<name>[A-z0-9\-+.]+)/$',
-        'packages.views.details'),
-    (r'^packages/(?P<repo>[A-z0-9\-]+)/(?P<name>[A-z0-9\-+.]+)/$',
-        'packages.views.details'),
-    (r'^packages/(?P<repo>[A-z0-9\-]+)/(?P<arch>[A-z0-9]+)/(?P<name>[A-z0-9\-+.]+)/$',
-        'packages.views.details'),
-    (r'^packages/(?P<repo>[A-z0-9\-]+)/(?P<arch>[A-z0-9]+)/(?P<name>[A-z0-9\-+.]+)/files/$',
-        'packages.views.files'),
-    (r'^packages/(?P<repo>[A-z0-9\-]+)/(?P<arch>[A-z0-9]+)/(?P<name>[A-z0-9\-+.]+)/maintainer/$',
-        'packages.views.getmaintainer'),
-    (r'^packages/(?P<repo>[A-z0-9\-]+)/(?P<arch>[A-z0-9]+)/(?P<name>[A-z0-9\-+.]+)/flag/$',
-        'packages.views.flag'),
-    (r'^packages/(?P<repo>[A-z0-9\-]+)/(?P<arch>[A-z0-9]+)/(?P<name>[A-z0-9\-+.]+)/unflag/$',
-        'packages.views.unflag'),
-    (r'^packages/(?P<repo>[A-z0-9\-]+)/(?P<arch>[A-z0-9]+)/(?P<name>[A-z0-9\-+.]+)/download/$',
-        'packages.views.download'),
-
-    (r'^groups/$', 'packages.views.groups'),
+urlpatterns = patterns('packages.views',
+    (r'^groups/$', 'groups'),
     (r'^groups/(?P<arch>[A-z0-9]+)/(?P<name>[A-z0-9\-+.]+)/$',
-        'packages.views.group_details'),
-
-    (r'^todo/(\d+)/$', 'todolists.views.view'),
-    (r'^todo/add/$', 'todolists.views.add'),
-    (r'^todo/edit/(?P<list_id>\d+)/$', 'todolists.views.edit'),
-    (r'^todo/flag/(\d+)/(\d+)/$', 'todolists.views.flag'),
-    (r'^todo/delete/(?P<object_id>\d+)/$',
-        'todolists.views.delete_todolist'),
-    (r'^todo/$', 'todolists.views.list'),
-    (r'^todolists/$', 'todolists.views.public_list'),
-
-    (r'^news/add/$', 'news.views.add'),
-    (r'^news/preview/$', 'news.views.preview'),
-    # old news URLs, permanent redirect view so we don't break all links
-    (r'^news/(?P<object_id>\d+)/$', 'news.views.view_redirect'),
-    (r'^news/(?P<slug>[-\w]+)/$', 'news.views.view'),
-    (r'^news/(?P<slug>[-\w]+)/edit/$', 'news.views.edit'),
-    (r'^news/(?P<slug>[-\w]+)/delete/$', 'news.views.delete'),
-    (r'^news/$', 'news.views.news_list', {}, 'news-list'),
-
-    (r'^mirrors/$', 'mirrors.views.mirrors', {}, 'mirrors-list'),
-    (r'^mirrors/status/$', 'mirrors.views.status', {}, 'mirror-status'),
-    (r'^mirrors/(?P<name>[\.\-\w]+)/$', 'mirrors.views.mirror_details'),
-
-    (r'^mirrorlist/$', 'mirrors.views.generate_mirrorlist', {}, 'mirrorlist'),
-    (r'^mirrorlist/all/$', 'mirrors.views.find_mirrors', {'countries': ['all']}),
-    (r'^mirrorlist/all/ftp/$', 'mirrors.views.find_mirrors',
-        {'countries': ['all'], 'protocols': ['ftp']}),
-    (r'^mirrorlist/all/http/$', 'mirrors.views.find_mirrors',
-        {'countries': ['all'], 'protocols': ['http']}),
+        'group_details'),

-    (r'^devel/$', 'devel.views.index'),
-    (r'^devel/notify/$', 'devel.views.change_notify'),
-    (r'^devel/profile/$', 'devel.views.change_profile'),
+    (r'^opensearch/packages/$', 'opensearch', {}, 'opensearch-packages'),
+)
+
+urlpatterns += patterns('todolists.views',
+    (r'^todolists/$', 'public_list'),
+)

-    (r'^devel/newuser/$', 'devel.views.new_user_form'),
+urlpatterns += patterns('mirrors.views',
+    (r'^mirrors/status/$', 'status', {}, 'mirror-status'),
+    (r'^mirrors/status/json/$', 'status_json', {}, 'mirror-status-json'),
+
+    (r'^mirrors/$', 'mirrors', {}, 'mirrors-list'),
+    (r'^mirrors/(?P<name>[\.\-\w]+)/$', 'mirror_details'),
+
+    (r'^mirrorlist/$', 'generate_mirrorlist', {}, 'mirrorlist'),
+    (r'^mirrorlist/all/$', 'find_mirrors', {'countries': ['all']}),
+    (r'^mirrorlist/all/ftp/$', 'find_mirrors',
+        {'countries': ['all'], 'protocols': ['ftp']}),
+    (r'^mirrorlist/all/http/$', 'find_mirrors',
+        {'countries': ['all'], 'protocols': ['http']}),
+)

 # Feeds and sitemaps
+urlpatterns += patterns('',
     (r'^feeds/$', 'public.views.feeds', {}, 'feeds-list'),
     (r'^feeds/news/$', NewsFeed()),
     (r'^feeds/packages/$', PackageFeed()),
@@ -100,20 +56,23 @@ urlpatterns = patterns('',
         {'sitemaps': sitemaps}),
     (r'^sitemap-(?P<section>.+)\.xml$',
         'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
+)

 # Authentication / Admin
-    (r'^login/$', 'django.contrib.auth.views.login', {
+urlpatterns += patterns('django.contrib.auth.views',
+    (r'^login/$', 'login', {
         'template_name': 'registration/login.html'}),
-    (r'^accounts/login/$', 'django.contrib.auth.views.login', {
+    (r'^accounts/login/$', 'login', {
         'template_name': 'registration/login.html'}),
-    (r'^logout/$', 'django.contrib.auth.views.logout', {
+    (r'^logout/$', 'logout', {
         'template_name': 'registration/logout.html'}),
-    (r'^accounts/logout/$', 'django.contrib.auth.views.logout', {
+    (r'^accounts/logout/$', 'logout', {
         'template_name': 'registration/logout.html'}),
-    (r'^admin/', include(admin.site.urls)),
+)

-# (mostly) Static Pages
-    (r'^$', 'public.views.index', {}, 'index'),
+# Public pages
+urlpatterns += patterns('public.views',
+    (r'^$', 'index', {}, 'index'),
     (r'^about/$', direct_to_template, {'template': 'public/about.html'}, 'page-about'),
     (r'^art/$', direct_to_template, {'template': 'public/art.html'}, 'page-art'),
     (r'^svn/$', direct_to_template, {'template': 'public/svn.html'}, 'page-svn'),
@@ -121,12 +80,22 @@ urlpatterns = patterns('',
     (r'^fellows/$', 'public.views.userlist', { 'type':'Fellows' }, 'page-fellows'),
     (r'^donate/$', 'public.views.donate', {}, 'page-donate'),
     (r'^download/$', 'public.views.download', {}, 'page-download'),
-    (r'^opensearch/packages/$', 'packages.views.opensearch', {}, 'opensearch-packages'),
+)

-# Some django internals we use
+# Includes and other remaining stuff
+urlpatterns += patterns('',
+    (r'^admin/', include(admin.site.urls)),
     (r'^jsi18n/$', 'django.views.i18n.null_javascript_catalog'),

-# Static content
-    (r'^media/(.*)$', 'django.views.static.serve', {'document_root': settings.DEPLOY_PATH+'/media'})
+
+    (r'^devel/', include('devel.urls')),
+    (r'^news/', include('news.urls')),
+    (r'^packages/', include('packages.urls')),
+    (r'^todo/', include('todolists.urls')),
 )
-
+
+if settings.DEBUG == True:
+    urlpatterns += patterns('',
+        (r'^media/(.*)$', 'django.views.static.serve',
+            {'document_root': settings.DEPLOY_PATH+'/media'}))
+
 # vim: set ts=4 sw=4 et: