-rw-r--r--  devel/management/commands/reporead.py         | 41
-rw-r--r--  media/archweb.js                               |  9
-rw-r--r--  mirrors/management/commands/mirrorcheck.py     | 12
-rw-r--r--  mirrors/management/commands/mirrorresolv.py    |  6
-rw-r--r--  urls.py                                        |  4
5 files changed, 39 insertions(+), 33 deletions(-)
diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py
index b8b075a6..e31478c5 100644
--- a/devel/management/commands/reporead.py
+++ b/devel/management/commands/reporead.py
@@ -178,8 +178,8 @@ def populate_pkg(dbpkg, repopkg, force=False, timestamp=None):
dbpkg.build_date = datetime.strptime(repopkg.builddate,
'%a %b %d %H:%M:%S %Y')
except ValueError:
- logger.warning('Package %s had unparsable build date %s' % \
- (repopkg.name, repopkg.builddate))
+ logger.warning('Package %s had unparsable build date %s',
+ repopkg.name, repopkg.builddate)
dbpkg.packager_str = repopkg.packager
# attempt to find the corresponding django user for this string
dbpkg.packager = find_user(repopkg.packager)
@@ -198,10 +198,10 @@ def populate_pkg(dbpkg, repopkg, force=False, timestamp=None):
# yes *sigh* i have seen them in pkgbuilds
dpname, dpvcmp = re.match(r"([a-z0-9._+-]+)(.*)", y).groups()
if dpname == repopkg.name:
- logger.warning('Package %s has a depend on itself' % repopkg.name)
+ logger.warning('Package %s has a depend on itself', repopkg.name)
continue
dbpkg.packagedepend_set.create(depname=dpname, depvcmp=dpvcmp)
- logger.debug('Added %s as dep for pkg %s' % (dpname, repopkg.name))
+ logger.debug('Added %s as dep for pkg %s', dpname, repopkg.name)
dbpkg.packagegroup_set.all().delete()
if 'groups' in repopkg.__dict__:
@@ -212,11 +212,10 @@ def populate_pkg(dbpkg, repopkg, force=False, timestamp=None):
def populate_files(dbpkg, repopkg, force=False):
if not force:
if dbpkg.pkgver != repopkg.ver or dbpkg.pkgrel != repopkg.rel:
- logger.info("db version (%s) didn't match repo version (%s) "
- "for package %s, skipping file list addition" %
- ('-'.join((dbpkg.pkgver, dbpkg.pkgrel)),
- '-'.join((repopkg.ver, repopkg.rel)),
- dbpkg.pkgname))
+ logger.info("db version (%s-%s) didn't match repo version (%s-%s) "
+ "for package %s, skipping file list addition",
+ dbpkg.pkgver, dbpkg.pkgrel, repopkg.ver, repopkg.rel,
+ dbpkg.pkgname)
return
if not dbpkg.files_last_update or not dbpkg.last_update:
pass
@@ -225,8 +224,8 @@ def populate_files(dbpkg, repopkg, force=False):
# only delete files if we are reading a DB that contains them
if 'files' in repopkg.__dict__:
dbpkg.packagefile_set.all().delete()
- logger.info("adding %d files for package %s" %
- (len(repopkg.files), dbpkg.pkgname))
+ logger.info("adding %d files for package %s",
+ len(repopkg.files), dbpkg.pkgname)
for x in repopkg.files:
dbpkg.packagefile_set.create(path=x)
dbpkg.files_last_update = datetime.now()
@@ -241,7 +240,7 @@ def db_update(archname, reponame, pkgs, options):
pkgs -- A list of Pkg objects.
"""
- logger.info('Updating Arch: %s' % archname)
+ logger.info('Updating Arch: %s', archname)
force = options.get('force', False)
filesonly = options.get('filesonly', False)
repository = Repo.objects.get(name__iexact=reponame)
@@ -259,17 +258,17 @@ def db_update(archname, reponame, pkgs, options):
logger.debug("Creating sets")
dbset = set([pkg.pkgname for pkg in dbpkgs])
syncset = set([pkg.name for pkg in pkgs])
- logger.info("%d packages in current web DB" % len(dbset))
- logger.info("%d packages in new updating db" % len(syncset))
+ logger.info("%d packages in current web DB", len(dbset))
+ logger.info("%d packages in new updating db", len(syncset))
in_sync_not_db = syncset - dbset
- logger.info("%d packages in sync not db" % len(in_sync_not_db))
+ logger.info("%d packages in sync not db", len(in_sync_not_db))
# Try to catch those random orphaning issues that make Eric so unhappy.
if len(dbset):
dbpercent = 100.0 * len(syncset) / len(dbset)
else:
dbpercent = 0.0
- logger.info("DB package ratio: %.1f%%" % dbpercent)
+ logger.info("DB package ratio: %.1f%%", dbpercent)
# Fewer than 20 packages makes the percentage check unreliable, but it also
# means we expect the repo to fluctuate a lot.
@@ -319,7 +318,7 @@ def db_update(archname, reponame, pkgs, options):
logger.info("Updating package %s in database", p.name)
populate_pkg(dbp, p, force=force, timestamp=timestamp)
- logger.info('Finished updating Arch: %s' % archname)
+ logger.info('Finished updating Arch: %s', archname)
def parse_info(iofile):
@@ -384,14 +383,14 @@ def parse_repo(repopath):
p = pkgs.setdefault(pkgid, Pkg(reponame))
p.populate(data)
except UnicodeDecodeError, e:
- logger.warn("Could not correctly decode %s, skipping file" % \
+ logger.warn("Could not correctly decode %s, skipping file",
tarinfo.name)
data_file.close()
logger.debug("Done parsing file %s", fname)
repodb.close()
- logger.info("Finished repo parsing, %d total packages" % len(pkgs))
+ logger.info("Finished repo parsing, %d total packages", len(pkgs))
return (reponame, pkgs.values())
def validate_arch(arch):
@@ -415,8 +414,8 @@ def read_repo(primary_arch, repo_file, options):
packages_arches[package.arch].append(package)
else:
# we don't include mis-arched packages
- logger.warning("Package %s arch = %s" % (
- package.name,package.arch))
+ logger.warning("Package %s arch = %s",
+ package.name,package.arch)
logger.info('Starting database updates.')
for (arch, pkgs) in packages_arches.items():
db_update(arch, repo, pkgs, options)
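
The reporead.py hunks above all make the same change: instead of pre-formatting log messages with the % operator, the values are passed to the logger as extra arguments, so interpolation is deferred until a handler actually emits the record. A minimal sketch of the two styles, using a hypothetical log_dep() helper purely for illustration:

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger('reporead_example')

    def log_dep(pkgname, depname):
        # Old style: the message is built eagerly, even when DEBUG is
        # disabled and the record is simply thrown away.
        logger.debug('Added %s as dep for pkg %s' % (depname, pkgname))
        # New style: the logging module interpolates only when a handler
        # emits the record, and keeps the raw arguments on the record.
        logger.debug('Added %s as dep for pkg %s', depname, pkgname)

    log_dep('archweb', 'django')

With the level set to INFO neither debug call prints anything, but only the second one avoids building the string at all.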
diff --git a/media/archweb.js b/media/archweb.js
index 330eeff9..52e817a4 100644
--- a/media/archweb.js
+++ b/media/archweb.js
@@ -58,9 +58,12 @@ if (typeof $.tablesorter !== 'undefined') {
var matches = this.re.exec(s);
if (!matches) return 0;
/* skip group 6, group 7 is optional seconds */
- if (matches[7] == undefined) matches[7] = '0';
- return $.tablesorter.formatFloat(new Date(
- matches[1],matches[2],matches[3],matches[4],matches[5],matches[7]).getTime());
+ if (matches[7] == undefined) matches[7] = 0;
+ /* The awesomeness of the JS date constructor. Month needs to be
+ * between 0-11, because things have to be difficult. */
+ var date = new Date(matches[1], matches[2] - 1, matches[3],
+ matches[4], matches[5], matches[7]);
+ return $.tablesorter.formatFloat(date.getTime());
},
type: 'numeric'
});
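
The archweb.js hunk fixes an off-by-one in the table-sorter date parser: the JavaScript Date constructor takes a zero-based month (0-11), so the matched month has to be shifted down by one, and the optional seconds default becomes a number instead of a string. For contrast, a Python sketch of parsing a similar timestamp, where months stay 1-based; the cell format matched by this.re is not shown above, so the format string here is only an assumption:

    import calendar
    from datetime import datetime

    # Python's strptime keeps months in 1-12, unlike new Date(y, m, ...)
    # in JavaScript, which expects m in 0-11.
    parsed = datetime.strptime('2010-06-21 18:30:00', '%Y-%m-%d %H:%M:%S')
    print(calendar.timegm(parsed.timetuple()))  # epoch seconds, treated as UTC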
diff --git a/mirrors/management/commands/mirrorcheck.py b/mirrors/management/commands/mirrorcheck.py
index d7aad4e2..44d2b364 100644
--- a/mirrors/management/commands/mirrorcheck.py
+++ b/mirrors/management/commands/mirrorcheck.py
@@ -68,7 +68,7 @@ def parse_rfc3339_datetime(time_string):
def check_mirror_url(mirror_url):
url = mirror_url.url + 'lastsync'
- logger.info("checking URL %s" % url)
+ logger.info("checking URL %s", url)
log = MirrorLog(url=mirror_url, check_time=datetime.utcnow())
try:
start = time.time()
@@ -86,7 +86,7 @@ def check_mirror_url(mirror_url):
# it is bad news to try logging the lastsync value;
# sometimes we get a crazy-encoded web page.
logger.info("attempting to parse generated lastsync file"
- " from mirror %s" % url)
+ " from mirror %s", url)
parsed_time = parse_rfc3339_datetime(data)
log.last_sync = parsed_time
@@ -95,7 +95,7 @@ def check_mirror_url(mirror_url):
log.error = "Could not parse time from lastsync"
log.is_success = False
log.duration = end - start
- logger.debug("success: %s, %.2f" % (url, log.duration))
+ logger.debug("success: %s, %.2f", url, log.duration)
except urllib2.HTTPError, e:
if e.code == 404:
# we have a duration, just not a success
@@ -103,7 +103,7 @@ def check_mirror_url(mirror_url):
log.duration = end - start
log.is_success = False
log.error = str(e)
- logger.debug("failed: %s, %s" % (url, log.error))
+ logger.debug("failed: %s, %s", url, log.error)
except urllib2.URLError, e:
log.is_success = False
log.error = e.reason
@@ -116,11 +116,11 @@ def check_mirror_url(mirror_url):
log.error = "Connection timed out."
elif isinstance(e.reason, socket.error):
log.error = e.reason.args[1]
- logger.debug("failed: %s, %s" % (url, log.error))
+ logger.debug("failed: %s, %s", url, log.error)
except socket.timeout, e:
log.is_success = False
log.error = "Connection timed out."
- logger.debug("failed: %s, %s" % (url, log.error))
+ logger.debug("failed: %s, %s", url, log.error)
return log
diff --git a/mirrors/management/commands/mirrorresolv.py b/mirrors/management/commands/mirrorresolv.py
index bcb24a52..8cbd51a3 100644
--- a/mirrors/management/commands/mirrorresolv.py
+++ b/mirrors/management/commands/mirrorresolv.py
@@ -43,13 +43,15 @@ def resolve_mirrors():
for mirrorurl in MirrorUrl.objects.filter(mirror__active=True):
try:
hostname = urlparse(mirrorurl.url).hostname
- logger.debug("resolving %3i (%s)" % (mirrorurl.id, hostname))
+ logger.debug("resolving %3i (%s)", mirrorurl.id, hostname)
info = socket.getaddrinfo(hostname, None, 0, socket.SOCK_STREAM)
families = [x[0] for x in info]
mirrorurl.has_ipv4 = socket.AF_INET in families
mirrorurl.has_ipv6 = socket.AF_INET6 in families
+ logger.debug("%s: v4: %s v6: %s", hostname,
+ mirrorurl.has_ipv4, mirrorurl.has_ipv6)
mirrorurl.save()
except socket.error, e:
- logger.warn("error resolving %s: %s" % (hostname, e))
+ logger.warn("error resolving %s: %s", hostname, e)
# vim: set ts=4 sw=4 et:
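
The mirrorresolv.py hunk adds a debug line reporting the IPv4/IPv6 flags it has just derived from socket.getaddrinfo. A small standalone sketch of that address-family check, with a hypothetical helper name and example hostname chosen only for illustration (it performs a live DNS lookup, so it needs network access):

    import socket

    def address_families(hostname):
        # Resolve the host once and report which address families it offers,
        # mirroring the has_ipv4/has_ipv6 flags set in resolve_mirrors().
        info = socket.getaddrinfo(hostname, None, 0, socket.SOCK_STREAM)
        families = set(entry[0] for entry in info)
        return socket.AF_INET in families, socket.AF_INET6 in families

    has_v4, has_v6 = address_families('www.archlinux.org')
    print('v4: %s v6: %s' % (has_v4, has_v6))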
diff --git a/urls.py b/urls.py
index bcbc78dc..a2c86d57 100644
--- a/urls.py
+++ b/urls.py
@@ -1,3 +1,5 @@
+import os.path
+
from django.conf.urls.defaults import *
from django.conf import settings
from django.contrib import admin
@@ -97,6 +99,6 @@ urlpatterns += patterns('',
if settings.DEBUG == True:
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve',
- {'document_root': settings.DEPLOY_PATH+'/media'}))
+ {'document_root': os.path.join(settings.DEPLOY_PATH, 'media')}))
# vim: set ts=4 sw=4 et:
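
Finally, the urls.py change builds the static media document_root with os.path.join instead of string concatenation. A tiny sketch of the difference, using a hypothetical DEPLOY_PATH value purely for illustration:

    import os.path

    DEPLOY_PATH = '/srv/http/archweb'   # assumed value, not from settings

    # Concatenation hard-codes the separator; a trailing slash on the
    # configured path produces a doubled one:
    print(DEPLOY_PATH + '/media')              # /srv/http/archweb/media
    print('/srv/http/archweb/' + '/media')     # /srv/http/archweb//media

    # os.path.join supplies the separator itself and copes with either form:
    print(os.path.join(DEPLOY_PATH, 'media'))         # /srv/http/archweb/media
    print(os.path.join(DEPLOY_PATH + '/', 'media'))   # /srv/http/archweb/media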