Diffstat (limited to 'extra/coadde/cron-jobs')
-rwxr-xr-x  extra/coadde/cron-jobs/check_archlinux/check_packages.py   508
-rwxr-xr-x  extra/coadde/cron-jobs/check_archlinux/parse_pkgbuilds.sh  153
-rwxr-xr-x  extra/coadde/cron-jobs/devlist-mailer                       28
-rwxr-xr-x  extra/coadde/cron-jobs/ftpdir-cleanup                       88
-rwxr-xr-x  extra/coadde/cron-jobs/integrity-check                      32
-rwxr-xr-x  extra/coadde/cron-jobs/make_repo_torrents                   70
-rwxr-xr-x  extra/coadde/cron-jobs/repo-sanity-check                    57
-rwxr-xr-x  extra/coadde/cron-jobs/sourceballs                         151
-rw-r--r--  extra/coadde/cron-jobs/sourceballs.force                     4
-rw-r--r--  extra/coadde/cron-jobs/sourceballs.skip                     29
10 files changed, 1120 insertions, 0 deletions
diff --git a/extra/coadde/cron-jobs/check_archlinux/check_packages.py b/extra/coadde/cron-jobs/check_archlinux/check_packages.py
new file mode 100755
index 0000000..ac0194f
--- /dev/null
+++ b/extra/coadde/cron-jobs/check_archlinux/check_packages.py
@@ -0,0 +1,508 @@
+#!/usr/bin/env python2
+#
+# check_archlinux.py
+#
+# Original script by Scott Horowitz <stonecrest@gmail.com>
+# Rewritten by Xavier Chantry <shiningxc@gmail.com>
+#
+# This script currently checks for a number of issues in your ABS tree:
+# 1. Directories with missing PKGBUILDs
+# 2. Invalid PKGBUILDs (e.g., a bash syntax error)
+# 3. PKGBUILD names that don't match their directory
+# 4. Duplicate PKGBUILDs
+# 5. Invalid archs in PKGBUILDs
+# 6. Missing (make-)dependencies
+# 7. Repo hierarchy violations (e.g., a core package that depends on
+#    a non-core package)
+# 8. Circular dependencies
+
+import os,commands,getopt,sys,tarfile
+
+import ctypes
+_alpm = ctypes.cdll.LoadLibrary("libalpm.so")
+
+DBEXT='.db.tar.gz'
+
+packages = {} # pkgname : PacmanPackage
+repopkgs = {} # pkgname : PacmanPackage
+provisions = {} # provision : PacmanPackage
+pkgdeps,makepkgdeps = {},{} # PacmanPackage : list of the PacmanPackage dependencies
+invalid_pkgbuilds = []
+missing_pkgbuilds = []
+dups = []
+
+dbonly = []
+absonly = []
+
+mismatches = []
+missing_deps = []
+missing_makedeps = []
+invalid_archs = []
+dep_hierarchy = []
+makedep_hierarchy = []
+circular_deps = [] # pkgname>dep1>dep2>...>pkgname
+checked_deps = []
+
+class PacmanPackage:
+ def __init__(self):
+ self.name,self.version = "",""
+ self.base = ""
+ self.path,self.repo = "",""
+ self.deps,self.makedeps = [],[]
+ self.provides,self.conflicts = [],[]
+ self.archs = []
+
+class Depend:
+ def __init__(self,name,version,mod):
+ self.name = name
+ self.version = version
+ self.mod = mod
+
+def parse_pkgbuilds(repos,arch):
+ for absroot in absroots:
+ for repo in repos:
+ cmd = os.path.dirname(os.path.realpath(sys.argv[0])) + '/parse_pkgbuilds.sh '
+ cmd += arch + ' ' + absroot + '/' + repo
+ (status,output) = commands.getstatusoutput(cmd)
+ if status != 0:
+			print "Error: failed to run '%s'" % cmd
+			sys.exit(1)
+ parse_data(repo,output)
+
+def parse_data(repo,data):
+ attrname = None
+
+ for line in data.split('\n'):
+ if line.startswith('%'):
+ attrname = line.strip('%').lower()
+ elif line.strip() == '':
+ attrname = None
+ elif attrname == "invalid":
+ if repo in repos:
+ invalid_pkgbuilds.append(line)
+ elif attrname == "missing":
+ if repo in repos:
+ missing_pkgbuilds.append(line)
+ elif attrname == "name":
+ pkg = PacmanPackage()
+ pkg.name = line
+ pkg.repo = repo
+ dup = None
+ if pkg.name in packages:
+ dup = packages[pkg.name]
+ else:
+ packages[pkg.name] = pkg
+ elif attrname == "base":
+ pkg.base = line
+ elif attrname == "version":
+ pkg.version = line
+ elif attrname == "path":
+ pkg.path = line
+			if dup is not None and (pkg.repo in repos or dup.repo in repos):
+ dups.append(pkg.path + " vs. " + dup.path)
+ elif attrname == "arch":
+ pkg.archs.append(line)
+ elif attrname == "depends":
+ pkg.deps.append(line)
+ elif attrname == "makedepends":
+ pkg.makedeps.append(line)
+ elif attrname == "conflicts":
+ pkg.conflicts.append(line)
+ elif attrname == "provides":
+ pkg.provides.append(line)
+
+def parse_dbs(repos,arch):
+ dbpkgs = {}
+ for repo in repos:
+ pkgs = set([])
+ db = tarfile.open(os.path.join(repodir,repo,'os',arch,repo + DBEXT))
+ for line in db.getnames():
+ if not '/' in line:
+ pkgs.add(line.rsplit('-',2)[0])
+ dbpkgs[repo] = pkgs
+	return dbpkgs
+
+def splitdep(dep):
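+	"""
+	Split a dependency string into a Depend (name, version, mod), e.g.:
+
+	>>> d = splitdep("glibc>=2.2.5")
+	>>> (d.name, d.version, d.mod)
+	('glibc', '2.2.5', '>=')
+	"""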
+ name = dep
+ version = ""
+ mod = ""
+ for char in (">=", "<=", "=", ">", "<"):
+ pos = dep.find(char)
+ if pos > -1:
+ name = dep[:pos]
+ version = dep[pos:].replace(char, "")
+ mod = char
+ break
+ return Depend(name,version,mod)
+
+def splitprov(prov):
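+	"""
+	Split a provision string into (name, version), e.g.:
+
+	>>> splitprov("readline=6.0")
+	('readline', '6.0')
+	"""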
+ name = prov
+ version = ""
+ pos = prov.find("=")
+ if pos > -1:
+ name = prov[:pos]
+ version = prov[pos:].replace("=", "")
+ return (name,version)
+
+def vercmp(v1,mod,v2):
+ """
+ >>> vercmp("1.0", "<=", "2.0")
+ True
+ >>> vercmp("1:1.0", ">", "2.0")
+ True
+ >>> vercmp("1.0.2", ">=", "2.1.0")
+ False
+ """
+ s1 = ctypes.c_char_p(v1)
+ s2 = ctypes.c_char_p(v2)
+ res = _alpm.alpm_pkg_vercmp(s1,s2)
+ if res == 0:
+ return (mod.find("=") > -1)
+ elif res < 0:
+ return (mod.find("<") > -1)
+ elif res > 0:
+ return (mod.find(">") > -1)
+ return False
+
+
+def depcmp(name,version,dep):
+ if name != dep.name:
+ return False
+ if dep.version == "" or dep.mod == "":
+ return True
+ if version == "":
+ return False
+ return vercmp(version,dep.mod,dep.version)
+
+def provcmp(pkg,dep):
+ for prov in pkg.provides:
+ (provname,provver) = splitprov(prov)
+ if depcmp(provname,provver,dep):
+ return True
+ return False
+
+def verify_dep(dep):
+ dep = splitdep(dep)
+ if dep.name in packages:
+ pkg = packages[dep.name]
+ if depcmp(pkg.name,pkg.version,dep):
+ return [pkg]
+ if dep.name in provisions:
+ provlist = provisions[dep.name]
+ results = []
+ for prov in provlist:
+ if provcmp(prov,dep):
+ results.append(prov)
+ return results
+ return []
+
+def verify_deps(name,repo,deps):
+ pkg_deps = []
+ missdeps = []
+ hierarchy = []
+ for dep in deps:
+ pkglist = verify_dep(dep)
+ if pkglist == []:
+ missdeps.append(repo + "/" + name + " --> '" + dep + "'")
+ else:
+ valid_repos = get_repo_hierarchy(repo)
+ pkgdep = None
+ for pkg in pkglist:
+ if pkg.repo in valid_repos:
+ pkgdep = pkg
+ break
+ if not pkgdep:
+ pkgdep = pkglist[0]
+ hierarchy.append((repo,name,pkgdep))
+
+ pkg_deps.append(pkgdep)
+
+ return (pkg_deps,missdeps,hierarchy)
+
+def compute_deplist(pkg):
+	deplist = []
+	stack = [pkg]
+	while stack != []:
+		dep = stack.pop()
+		if dep in pkgdeps:
+			for dep2 in pkgdeps[dep]:
+				if dep2 not in deplist:
+					deplist.append(dep2)
+					stack.append(dep2)
+		if dep in makepkgdeps:
+			for dep2 in makepkgdeps[dep]:
+				if dep2 not in deplist:
+					deplist.append(dep2)
+					stack.append(dep2)
+	return deplist
+
+def check_hierarchy(deph):
+ hierarchy = []
+ for (repo,name,pkgdep) in deph:
+ deplist = compute_deplist(pkgdep)
+ valid_repos = get_repo_hierarchy(repo)
+ extdeps = []
+ for dep in deplist:
+ if dep.repo not in valid_repos:
+ extdeps.append(dep.name)
+ string = repo + "/" + name + " depends on " + pkgdep.repo + "/" + pkgdep.name + " ("
+ string += "%s extra (make)deps to pull" % len(extdeps)
+ if 0 < len(extdeps) < 10:
+ string += " : " + ' '.join(extdeps)
+ string += ")"
+ hierarchy.append(string)
+ return hierarchy
+
+def get_repo_hierarchy(repo):
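+	# a package in repo X may only depend on packages in the repos listed
+	# for X, e.g. an extra package may depend on core and extra only;
+	# unknown repos fall back to the default below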
+ repo_hierarchy = {'core': ['core'], \
+ 'extra': ['core', 'extra'], \
+ 'community': ['core', 'extra', 'community'], \
+ 'multilib': ['core', 'extra', 'community', 'multilib'] }
+ if repo in repo_hierarchy:
+ return repo_hierarchy[repo]
+ else:
+ return ['core','extra','community']
+
+def verify_archs(name,repo,archs):
+ valid_archs = ['any', 'i686', 'x86_64']
+ invalid_archs = []
+ for arch in archs:
+ if arch not in valid_archs:
+ invalid_archs.append(repo + "/" + name + " --> " + arch)
+ return invalid_archs
+
+def find_scc(packages):
+ # reset all variables
+ global index,S,pkgindex,pkglowlink
+ index = 0
+ S = []
+ pkgindex = {}
+ pkglowlink = {}
+ for pkg in packages:
+ tarjan(pkg)
+
+def tarjan(pkg):
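+	# Tarjan's SCC algorithm: index/lowlink are DFS numbers and S holds the
+	# current search path; when lowlink == index, the packages popped off S
+	# form a strongly connected component, i.e. a dependency cycle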
+	global index,S,pkgindex,pkglowlink
+ pkgindex[pkg] = index
+ pkglowlink[pkg] = index
+ index += 1
+ checked_deps.append(pkg)
+ S.append(pkg)
+ deps = []
+ if pkg in pkgdeps:
+ deps = pkgdeps[pkg]
+ for dep in deps:
+ if dep not in pkgindex:
+ tarjan(dep)
+ pkglowlink[pkg] = min(pkglowlink[pkg],pkglowlink[dep])
+ elif dep in S:
+ pkglowlink[pkg] = min(pkglowlink[pkg],pkgindex[dep])
+ if pkglowlink[pkg] == pkgindex[pkg]:
+ dep = S.pop()
+ if pkg == dep:
+ return
+ path = pkg.name
+ while pkg != dep:
+ path = dep.repo + "/" + dep.name + ">" + path
+ dep = S.pop()
+ path = dep.name + ">" + path
+ if pkg.repo in repos:
+ circular_deps.append(path)
+
+def print_heading(heading):
+ print ""
+ print "=" * (len(heading) + 4)
+ print "= " + heading + " ="
+ print "=" * (len(heading) + 4)
+
+def print_subheading(subheading):
+ print ""
+ print subheading
+ print "-" * (len(subheading) + 2)
+
+def print_missdeps(pkgname,missdeps) :
+ for d in missdeps:
+ print pkgname + " : " + d
+
+def print_result(results, subheading):
+	if len(results) > 0:
+		results.sort()
+		print_subheading(subheading)
+		for item in results:
+			print item
+
+def print_results():
+ print_result(missing_pkgbuilds, "Missing PKGBUILDs")
+ print_result(invalid_pkgbuilds, "Invalid PKGBUILDs")
+ print_result(mismatches, "Mismatched Pkgnames")
+ print_result(dups, "Duplicate PKGBUILDs")
+ print_result(invalid_archs, "Invalid Archs")
+ print_result(missing_deps, "Missing Dependencies")
+ print_result(missing_makedeps, "Missing Makedepends")
+ print_result(dep_hierarchy, "Repo Hierarchy for Dependencies")
+ print_result(makedep_hierarchy, "Repo Hierarchy for Makedepends")
+ print_result(circular_deps, "Circular Dependencies")
+ print_result(dbonly, "Packages found in db, but not in tree")
+ print_result(absonly,"Packages found in tree, but not in db")
+ print_subheading("Summary")
+ print "Missing PKGBUILDs: ", len(missing_pkgbuilds)
+ print "Invalid PKGBUILDs: ", len(invalid_pkgbuilds)
+ print "Mismatching PKGBUILD names: ", len(mismatches)
+ print "Duplicate PKGBUILDs: ", len(dups)
+ print "Invalid archs: ", len(invalid_archs)
+ print "Missing (make)dependencies: ", len(missing_deps)+len(missing_makedeps)
+ print "Repo hierarchy problems: ", len(dep_hierarchy)+len(makedep_hierarchy)
+ print "Circular dependencies: ", len(circular_deps)
+ print "In db, but not in tree: ", len(dbonly)
+ print "In tree, but not in db: ", len(absonly)
+ print ""
+
+def print_usage():
+ print ""
+ print "Usage: ./check_packages.py [OPTION]"
+ print ""
+ print "Options:"
+ print " --abs-tree=<path[,path]> Check the specified tree(s) (default : /var/abs)"
+ print " --repos=<r1,r2,...> Check the specified repos (default : core,extra)"
+ print " --arch=<i686|x86_64> Check the specified arch (default : i686)"
+ print " --repo-dir=<path> Check the dbs at the specified path (default : /srv/ftp)"
+ print " -h, --help Show this help and exit"
+ print ""
+ print "Examples:"
+ print "\n Check core and extra in existing abs tree:"
+ print " ./check_packages.py --abs-tree=/var/abs --repos=core,extra --arch=i686"
+ print "\n Check community:"
+ print " ./check_packages.py --abs-tree=/var/abs --repos=community --arch=i686"
+ print ""
+
+if __name__ == "__main__":
+ ## Default path to the abs root directory
+ absroots = ["/var/abs"]
+ ## Default list of repos to check
+ repos = ['core', 'extra']
+ ## Default arch
+ arch = "i686"
+ ## Default repodir
+ repodir = "/srv/ftp"
+
+	try:
+		opts, args = getopt.getopt(sys.argv[1:], "h", ["abs-tree=", "repos=",
+			"arch=", "repo-dir=", "help"])
+	except getopt.GetoptError:
+		print_usage()
+		sys.exit(1)
+	for o, a in opts:
+		if o == "--abs-tree":
+			absroots = a.split(',')
+		elif o == "--repos":
+			repos = a.split(",")
+		elif o == "--arch":
+			arch = a
+		elif o == "--repo-dir":
+			repodir = a
+		elif o in ("-h", "--help"):
+			print_usage()
+			sys.exit(0)
+	if args != []:
+		print_usage()
+		sys.exit(1)
+
+	for absroot in absroots:
+		if not os.path.isdir(absroot):
+			print "Error: the abs tree %s does not exist" % absroot
+			sys.exit(1)
+		for repo in repos:
+			repopath = absroot + "/" + repo
+			if not os.path.isdir(repopath):
+				print "Warning: the repository %s does not exist in %s" % (repo, absroot)
+
+	if not os.path.isdir(repodir):
+		print "Error: the repository directory %s does not exist" % repodir
+		sys.exit(1)
+	for repo in repos:
+		path = os.path.join(repodir,repo,'os',arch,repo + DBEXT)
+		if not os.path.isfile(path):
+			print "Error: repo DB %s: file not found" % path
+			sys.exit(1)
+		if not tarfile.is_tarfile(path):
+			print "Error: cannot open repo DB %s, not a valid tar file" % path
+			sys.exit(1)
+ # repos which need to be loaded
+ loadrepos = set([])
+ for repo in repos:
+ loadrepos = loadrepos | set(get_repo_hierarchy(repo))
+
+ print_heading("Integrity Check " + arch + " of " + ",".join(repos))
+ print("\nPerforming integrity checks...")
+
+ print("==> parsing pkgbuilds")
+ parse_pkgbuilds(loadrepos,arch)
+
+ # fill provisions
+ for name,pkg in packages.iteritems():
+ for prov in pkg.provides:
+ provname=prov.split("=")[0]
+ if provname not in provisions:
+ provisions[provname] = []
+ provisions[provname].append(pkg)
+
+ # fill repopkgs
+ for name,pkg in packages.iteritems():
+ if pkg.repo in repos:
+ repopkgs[name] = pkg
+
+ print("==> parsing db files")
+ dbpkgs = parse_dbs(repos,arch)
+
+ print("==> checking mismatches")
+ for name,pkg in repopkgs.iteritems():
+ pkgdirname = pkg.path.split("/")[-1]
+ if name != pkgdirname and pkg.base != pkgdirname:
+ mismatches.append(name + " vs. " + pkg.path)
+
+ print("==> checking archs")
+ for name,pkg in repopkgs.iteritems():
+ archs = verify_archs(name,pkg.repo,pkg.archs)
+ invalid_archs.extend(archs)
+
+ deph,makedeph = [],[]
+
+ print("==> checking dependencies")
+ for name,pkg in repopkgs.iteritems():
+ (deps,missdeps,hierarchy) = verify_deps(name,pkg.repo,pkg.deps)
+ pkgdeps[pkg] = deps
+ missing_deps.extend(missdeps)
+ deph.extend(hierarchy)
+
+ print("==> checking makedepends")
+ for name,pkg in repopkgs.iteritems():
+ (makedeps,missdeps,hierarchy) = verify_deps(name,pkg.repo,pkg.makedeps)
+ makepkgdeps[pkg] = makedeps
+ missing_makedeps.extend(missdeps)
+ makedeph.extend(hierarchy)
+
+ print("==> checking hierarchy")
+ dep_hierarchy = check_hierarchy(deph)
+ makedep_hierarchy = check_hierarchy(makedeph)
+
+ print("==> checking for circular dependencies")
+ # make sure pkgdeps is filled for every package
+ for name,pkg in packages.iteritems():
+ if pkg not in pkgdeps:
+ (deps,missdeps,_) = verify_deps(name,pkg.repo,pkg.deps)
+ pkgdeps[pkg] = deps
+ find_scc(repopkgs.values())
+
+ print("==> checking for differences between db files and pkgbuilds")
+ for repo in repos:
+ for pkg in dbpkgs[repo]:
+ if not (pkg in repopkgs and repopkgs[pkg].repo == repo):
+ dbonly.append("%s/%s" % (repo,pkg))
+ for name,pkg in repopkgs.iteritems():
+ if not name in dbpkgs[pkg.repo]:
+ absonly.append("%s/%s" % (pkg.repo,name))
+
+ print_results()
diff --git a/extra/coadde/cron-jobs/check_archlinux/parse_pkgbuilds.sh b/extra/coadde/cron-jobs/check_archlinux/parse_pkgbuilds.sh
new file mode 100755
index 0000000..b857ac8
--- /dev/null
+++ b/extra/coadde/cron-jobs/check_archlinux/parse_pkgbuilds.sh
@@ -0,0 +1,153 @@
+#!/bin/bash
+
+# Usage : parse_pkgbuilds.sh <arch> <pkgbuilds_dir1> [pkgbuilds_dir2 ...]
+# Example : parse_pkgbuilds.sh i686 /var/abs/core /var/abs/extra
+
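+# Override exit: a sourced PKGBUILD that calls exit must not be able to
+# terminate this parser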
+exit() { return; }
+
+splitpkg_overrides=('depends' 'optdepends' 'provides' 'conflicts')
+variables=('pkgname' 'pkgbase' 'epoch' 'pkgver' 'pkgrel' 'makedepends' 'arch' "${splitpkg_overrides[@]}")
+readonly -a variables splitpkg_overrides
+
+backup_package_variables() {
+ for var in "${splitpkg_overrides[@]}"; do
+ indirect="${var}_backup"
+ eval "${indirect}=(\"\${$var[@]}\")"
+ done
+}
+
+restore_package_variables() {
+ for var in "${splitpkg_overrides[@]}"; do
+ indirect="${var}_backup"
+ if [ -n "${!indirect}" ]; then
+ eval "${var}=(\"\${$indirect[@]}\")"
+ else
+ unset "${var}"
+ fi
+ done
+}
+
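+# Emit one package's metadata as %FIELD%/value blocks, the same layout
+# that check_packages.py's parse_data() expects on stdout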
+print_info() {
+ echo -e "%NAME%\n$pkgname\n"
+ if [ -n "$epoch" ]; then
+ echo -e "%VERSION%\n$epoch:$pkgver-$pkgrel\n"
+ else
+ echo -e "%VERSION%\n$pkgver-$pkgrel\n"
+ fi
+ echo -e "%PATH%\n$dir\n"
+
+ if [ -n "$pkgbase" ]; then
+ echo -e "%BASE%\n$pkgbase\n"
+ fi
+
+ if [ -n "$arch" ]; then
+ echo "%ARCH%"
+ for i in "${arch[@]}"; do echo "$i"; done
+ echo ""
+ fi
+ if [ -n "$depends" ]; then
+ echo "%DEPENDS%"
+ for i in "${depends[@]}"; do
+ echo "$i"
+ done
+ echo ""
+ fi
+ if [ -n "$makedepends" ]; then
+ echo "%MAKEDEPENDS%"
+ for i in "${makedepends[@]}"; do
+ echo "$i"
+ done
+ echo ""
+ fi
+ if [ -n "$conflicts" ]; then
+ echo "%CONFLICTS%"
+ for i in "${conflicts[@]}"; do echo "$i"; done
+ echo ""
+ fi
+ if [ -n "$provides" ]; then
+ echo "%PROVIDES%"
+ for i in "${provides[@]}"; do echo "$i"; done
+ echo ""
+ fi
+}
+
+source_pkgbuild() {
+ ret=0
+ dir=$1
+ pkgbuild=$dir/PKGBUILD
+ for var in "${variables[@]}"; do
+ unset "${var}"
+ done
+ source "$pkgbuild" &>/dev/null || ret=$?
+
+ # ensure $pkgname and $pkgver variables were found
+ if [ $ret -ne 0 -o -z "$pkgname" -o -z "$pkgver" ]; then
+ echo -e "%INVALID%\n$pkgbuild\n"
+ return 1
+ fi
+
+ if [ "${#pkgname[@]}" -gt "1" ]; then
+ pkgbase=${pkgbase:-${pkgname[0]}}
+ for pkg in "${pkgname[@]}"; do
+ if [ "$(type -t "package_${pkg}")" != "function" ]; then
+ echo -e "%INVALID%\n$pkgbuild\n"
+ return 1
+ else
+ backup_package_variables
+ pkgname=$pkg
+ while IFS= read -r line; do
+ var=${line%%=*}
+ var="${var#"${var%%[![:space:]]*}"}" # remove leading whitespace characters
+ for realvar in "${variables[@]}"; do
+ if [ "$var" == "$realvar" ]; then
+ eval $line
+ break
+ fi
+ done
+ done < <(type "package_${pkg}")
+ print_info
+ restore_package_variables
+ fi
+ done
+ else
+ echo
+ print_info
+ fi
+
+ return 0
+}
+
+find_pkgbuilds() {
+	# Skip over some dirs
+ local d="${1##*/}"
+ if [ "$d" = "CVS" -o "$d" = ".svn" ]; then
+ return
+ fi
+
+ if [ -f "$1/PKGBUILD" ]; then
+ source_pkgbuild "$1"
+ return
+ fi
+ empty=1
+ for dir in "$1"/*; do
+ if [ -d "$dir" ]; then
+ find_pkgbuilds "$dir"
+ unset empty
+ fi
+ done
+ if [ -n "$empty" ]; then
+ echo -e "%MISSING%\n$1\n"
+ fi
+}
+
+if [ -z "$1" -o -z "$2" ]; then
+	# 'exit' was overridden above, so call the real builtin
+	builtin exit 1
+fi
+
+CARCH=$1
+shift
+for dir in "$@"; do
+ find_pkgbuilds "$dir"
+done
+
+builtin exit 0
diff --git a/extra/coadde/cron-jobs/devlist-mailer b/extra/coadde/cron-jobs/devlist-mailer
new file mode 100755
index 0000000..1a05521
--- /dev/null
+++ b/extra/coadde/cron-jobs/devlist-mailer
@@ -0,0 +1,28 @@
+#!/bin/bash
+# Helper that mails its stdin to arch-dev
+# It sends nothing if the input is empty
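+# Usage: some-report-command | devlist-mailer [subject] [recipient]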
+
+LIST="arch-dev-public@archlinux.org"
+#LIST="aaronmgriffin@gmail.com"
+FROM="repomaint@archlinux.org"
+
+SUBJECT="Repository Maintenance $(date +"%d-%m-%Y")"
+if [ $# -ge 1 ]; then
+ SUBJECT="$1 $(date +"%d-%m-%Y")"
+fi
+
+if [ $# -ge 2 ]; then
+ LIST="$2"
+fi
+
+stdin="$(cat)"
+# unquoted $stdin collapses whitespace so we only mail real content
+if [ -n "$(echo $stdin)" ]; then
+
+ echo "Subject: $SUBJECT
+To: $LIST
+From: $FROM
+
+$stdin" | /usr/sbin/sendmail -F"$FROM" "$LIST"
+
+fi
diff --git a/extra/coadde/cron-jobs/ftpdir-cleanup b/extra/coadde/cron-jobs/ftpdir-cleanup
new file mode 100755
index 0000000..4a2b418
--- /dev/null
+++ b/extra/coadde/cron-jobs/ftpdir-cleanup
@@ -0,0 +1,88 @@
+#!/bin/bash
+
+. "$(dirname "$(readlink -e "$0")")/../config"
+. "$(dirname "$(readlink -e "$0")")/../db-functions"
+
+clean_pkg() {
+ local pkg
+ local target
+
+ if ! "${CLEANUP_DRYRUN}"; then
+ for pkg in "$@"; do
+ if [ -h "$pkg" ]; then
+ rm -f "$pkg" "$pkg.sig"
+ else
+ mv_acl "$pkg" "$CLEANUP_DESTDIR/${pkg##*/}"
+ if [ -e "$pkg.sig" ]; then
+ mv_acl "$pkg.sig" "$CLEANUP_DESTDIR/${pkg##*/}.sig"
+ fi
+ touch "${CLEANUP_DESTDIR}/${pkg##*/}"
+ fi
+ done
+ fi
+}
+
+"${CLEANUP_DRYRUN}" && warning 'dry run mode is active'
+
+for repo in "${PKGREPOS[@]}"; do
+ for arch in "${ARCHES[@]}"; do
+ if [ ! -f "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" ]; then
+ continue
+ fi
+ # get a list of actual available package files
+ find "${FTP_BASE}/${repo}/os/${arch}" -xtype f -name "*${PKGEXT}" -printf '%f\n' | sort > "${WORKDIR}/repo-${repo}-${arch}"
+ # get a list of package files defined in the repo db
+ bsdtar -xOf "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" | awk '/^%FILENAME%/{getline;print}' | sort > "${WORKDIR}/db-${repo}-${arch}"
+
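+		# comm -13: names only in the db list, i.e. packages the db still
+		# references but whose files are gone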
+ missing_pkgs=($(comm -13 "${WORKDIR}/repo-${repo}-${arch}" "${WORKDIR}/db-${repo}-${arch}"))
+ if [ ${#missing_pkgs[@]} -ge 1 ]; then
+ error "Missing packages in [%s] (%s)..." "${repo}" "${arch}"
+ for missing_pkg in "${missing_pkgs[@]}"; do
+ msg2 '%s' "${missing_pkg}"
+ done
+ fi
+
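+		# comm -23: files on disk that the db no longer references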
+ old_pkgs=($(comm -23 "${WORKDIR}/repo-${repo}-${arch}" "${WORKDIR}/db-${repo}-${arch}"))
+ if [ ${#old_pkgs[@]} -ge 1 ]; then
+ msg "Removing old packages from [%s] (%s)..." "${repo}" "${arch}"
+ for old_pkg in "${old_pkgs[@]}"; do
+ msg2 '%s' "${old_pkg}"
+ clean_pkg "${FTP_BASE}/${repo}/os/${arch}/${old_pkg}"
+ done
+ fi
+ done
+done
+
+# get a list of all available packages in the package pool
+find "$FTP_BASE/${PKGPOOL}" -name "*${PKGEXT}" -printf '%f\n' | sort > "${WORKDIR}/pool"
+# create a list of packages in our db
+cat "${WORKDIR}/db-"* | sort -u > "${WORKDIR}/db"
+
+old_pkgs=($(comm -23 "${WORKDIR}/pool" "${WORKDIR}/db"))
+if [ ${#old_pkgs[@]} -ge 1 ]; then
+ msg "Removing old packages from package pool..."
+ for old_pkg in "${old_pkgs[@]}"; do
+ msg2 '%s' "${old_pkg}"
+ clean_pkg "$FTP_BASE/${PKGPOOL}/${old_pkg}"
+ done
+fi
+
+old_pkgs=($(find "${CLEANUP_DESTDIR}" -type f -name "*${PKGEXT}" -mtime +"${CLEANUP_KEEP}" -printf '%f\n'))
+if [ ${#old_pkgs[@]} -ge 1 ]; then
+ msg "Removing old packages from the cleanup directory..."
+ for old_pkg in "${old_pkgs[@]}"; do
+ msg2 '%s' "${old_pkg}"
+ if ! "${CLEANUP_DRYRUN}"; then
+ rm -f "${CLEANUP_DESTDIR}/${old_pkg}"
+ rm -f "${CLEANUP_DESTDIR}/${old_pkg}.sig"
+ fi
+ done
+fi
+
+for repo in "${PKGREPOS[@]}"; do
+ for arch in "${ARCHES[@]}"; do
+ repo_unlock "${repo}" "${arch}"
+ done
+done
+
+script_unlock
diff --git a/extra/coadde/cron-jobs/integrity-check b/extra/coadde/cron-jobs/integrity-check
new file mode 100755
index 0000000..7459380
--- /dev/null
+++ b/extra/coadde/cron-jobs/integrity-check
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+dirname="$(dirname "$(readlink -e "$0")")"
+
+. "${dirname}/../config"
+. "${dirname}/../db-functions"
+
+script_lock
+
+if [ $# -ne 1 ]; then
+ die "usage: %s <mailto>" "${0##*/}"
+fi
+mailto=$1
+
+check() {
+ "${dirname}"/check_archlinux/check_packages.py \
+ --repos="${repos}" \
+ --abs-tree="/srv/abs/rsync/${arch},/srv/abs/rsync/any" \
+ --repo-dir="${FTP_BASE}" \
+ --arch="${arch}" \
+ 2>&1 | "${dirname}"/devlist-mailer "Integrity Check ${arch}: ${repos}" "${mailto}"
+}
+
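+# check() reads the repos/arch variables set before each invocation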
+repos='core,extra,community'
+arch='i686'
+check
+
+repos='core,extra,community,multilib'
+arch='x86_64'
+check
+
+script_unlock
diff --git a/extra/coadde/cron-jobs/make_repo_torrents b/extra/coadde/cron-jobs/make_repo_torrents
new file mode 100755
index 0000000..2eb0978
--- /dev/null
+++ b/extra/coadde/cron-jobs/make_repo_torrents
@@ -0,0 +1,70 @@
+#!/bin/bash
+# Copyright (C) 2014 Joseph Graham <joseph@t67.eu>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+# This script finds any updated packages and calls
+# `make_individual_torrent' for each of them.
+
+username=$( id -un )
+
+case "${username}" in
+ repo | root )
+ true
+ ;;
+ * )
+ echo "This script must be run as repo user or root user."
+ echo "ByeBye!"
+ exit 1
+ ;;
+esac
+
+# pacman repos can't contain two different packages with the same file
+# name, so it's safe to keep all the torrents in a single directory.
+script_directory="$(dirname "$(readlink -e "$0")")/.."
+. "$(dirname "$(readlink -e "$0")")/../config"
+public_location="$FTP_BASE/"
+torrent_location="$FTP_BASE/torrents/"
+
+cd "${torrent_location}"
+
+# Find any directories that might have packages in them
+find "${public_location}" -name 'os' -type 'd' |
+while read -r dir
+do
+	# Find any packages
+	find "${dir}" -name '*.pkg.tar.xz' |
+	while read -r pkg
+ do
+ pkg_name="${pkg##*/}"
+
+ if [[ -h "${pkg}" ]] # check if it's a symbolic link
+ then
+ # We get the target of the symlink
+ pkg=$( readlink -f "${pkg}" )
+ fi
+
+ # If a .torrent file does not already exist for this package, we call
+ # `make_individual_torrent' to make it.
+ if ! [[ -f "${torrent_location}${pkg_name}.torrent" ]]
+ then
+ "$script_directory/make_individual_torrent" "${pkg}" "${public_location}"
+ fi
+ done
+done
+
+if [[ "${username}" == root ]]
+then
+ chown repo *
+fi
diff --git a/extra/coadde/cron-jobs/repo-sanity-check b/extra/coadde/cron-jobs/repo-sanity-check
new file mode 100755
index 0000000..239f042
--- /dev/null
+++ b/extra/coadde/cron-jobs/repo-sanity-check
@@ -0,0 +1,57 @@
+#!/bin/bash
+# Solves issue165... on the old roundup install. From the database
+# backups, the title was "Older/deprecated packages never leave the
+# repo", I don't know how the body of the issue is stored in the DB,
+# but the title says enough, I think.
+
+. "$(dirname "$(readlink -e "$0")")/../config"
+. "$(dirname "$(readlink -e "$0")")/../db-functions"
+
+# Traverse all repos
+for _repo in "${PKGREPOS[@]}"; do
+ msg "Cleaning up [%s]" "${_repo}"
+
+ # Find all pkgnames on this repo's abs
+ on_abs=($(
+ find "${SVNREPO}/${_repo}" -name PKGBUILD | \
+ while read pkgbuild; do
+ source "${pkgbuild}" >/dev/null 2>&1
+ # cleanup to save memory
+ unset build package source md5sums pkgdesc pkgver pkgrel epoch \
+ url license arch depends makedepends optdepends options \
+ >/dev/null 2>&1
+
+ # also cleanup package functions
+ for _pkg in "${pkgname[@]}"; do
+				unset "package_${_pkg}" >/dev/null 2>&1
+ done
+
+ # this fills the on_abs array
+ echo "${pkgname[@]}"
+ done
+ ))
+
+	# skip this repo if its ABS tree is empty
+	if [ ${#on_abs[*]} -eq 0 ]; then
+		warning "[%s]'s ABS tree is empty, skipping" "${_repo}"
+		continue
+	fi
+
+ # Find all pkgnames on repos
+ on_repo=($(
+ find "${FTP_BASE}/${_repo}" -name "*.pkg.tar.?z" \
+ -printf "%f\n" | sed "s/^\(.\+\)-[^-]\+-[^-]\+-[^-]\+$/\1/"
+ ))
+
+	# Compare them: whatever is in the repos but not in abs should be removed
+ remove=($(comm -13 \
+ <(printf '%s\n' "${on_abs[@]}" | sort -u) \
+ <(printf '%s\n' "${on_repo[@]}" | sort -u) ))
+
+	# Remove them from the databases, ftpdir-cleanup will take care of the rest
+	if [ ${#remove[@]} -ge 1 ]; then
+		find "${FTP_BASE}/${_repo}" -name "*.db.tar.?z" \
+			-exec repo-remove {} "${remove[@]}" \; >/dev/null 2>&1
+
+		msg2 "Removed the following packages:"
+		plain '%s' "${remove[@]}"
+	fi
+done
diff --git a/extra/coadde/cron-jobs/sourceballs b/extra/coadde/cron-jobs/sourceballs
new file mode 100755
index 0000000..c12a128
--- /dev/null
+++ b/extra/coadde/cron-jobs/sourceballs
@@ -0,0 +1,151 @@
+#!/bin/bash
+
+dirname="$(dirname "$(readlink -e "$0")")"
+. "${dirname}/../config"
+. "${dirname}/../db-functions"
+pushd "${WORKDIR}" >/dev/null
+
+script_lock
+
+for repo in "${PKGREPOS[@]}"; do
+ for arch in "${ARCHES[@]}"; do
+ repo_lock "${repo}" "${arch}" || exit 1
+ done
+done
+
+# Adjust the nice level to run at a lower priority
+renice +10 -p $$ > /dev/null
+
+# Create a readable file for each repo with the following format
+# <pkgbase|pkgname> <pkgver>-<pkgrel> <arch> <license>[ <license>]
+for repo in "${PKGREPOS[@]}"; do
+ for arch in "${ARCHES[@]}"; do
+ # Repo does not exist; skip it
+ if [ ! -f "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" ]; then
+ continue
+ fi
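+		# %BASE%, when present, overrides %NAME% so split packages yield
+		# one line per pkgbase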
+ bsdtar -xOf "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" \
+ | awk '/^%NAME%/ { getline b };
+ /^%BASE%/ { getline b };
+ /^%VERSION%/ { getline v };
+ /^%LICENSE%/,/^$/ {
+ if ( !/^%LICENSE%/ ) { l=l" "$0 }
+ };
+ /^%ARCH%/ {
+ getline a;
+ printf "%s %s %s %s\n", b, v, a, l;
+ l="";
+ }'
+ done | sort -u > "${WORKDIR}/db-${repo}"
+done
+
+for repo in "${PKGREPOS[@]}"; do
+ for arch in "${ARCHES[@]}"; do
+ repo_unlock "${repo}" "${arch}"
+ done
+done
+
+# Create a list of all available source package file names
+find "${FTP_BASE}/${SRCPOOL}" -xtype f -name "*${SRCEXT}" -printf '%f\n' | sort -u > "${WORKDIR}/available-src-pkgs"
+
+# Check for all packages if we need to build a source package
+for repo in "${PKGREPOS[@]}"; do
+ newpkgs=()
+ failedpkgs=()
+	while read -r line; do
+		# intentional word splitting: <pkgbase> <pkgver> <arch> <license...>
+		pkginfo=(${line})
+ pkgbase=${pkginfo[0]}
+ pkgver=${pkginfo[1]}
+ pkgarch=${pkginfo[2]}
+ pkglicense=("${pkginfo[@]:3}")
+
+ # Should this package be skipped?
+ if grep -Fqx "${pkgbase}" "${dirname}/sourceballs.skip"; then
+ continue
+ fi
+ # Commenting out, we'll sourceball everything
+ # Check if the license or .force file does not enforce creating a source package
+# if ! (chk_license ${pkglicense[@]} || grep -Fqx "${pkgbase}" "${dirname}/sourceballs.force"); then
+# continue
+# fi
+ # Store the expected file name of the source package
+ echo "${pkgbase}-${pkgver}${SRCEXT}" >> "${WORKDIR}/expected-src-pkgs"
+
+		# Build the source package if it's not already there
+ if ! grep -Fqx "${pkgbase}-${pkgver}${SRCEXT}" "${WORKDIR}/available-src-pkgs"; then
+ # Check if we had failed before
+ if in_array "${pkgbase}-${pkgver}${SRCEXT}" "${failedpkgs[@]}"; then
+ continue
+ fi
+
+ # Get the sources from xbs
+ mkdir -p -m0770 "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}"
+ cp -a "$(xbs releasepath "${pkgbase}" "${repo}" "${pkgarch}")" \
+ "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/${pkgbase}" >/dev/null 2>&1
+ if [ $? -ge 1 ]; then
+ failedpkgs+=("${pkgbase}-${pkgver}${SRCEXT}")
+ continue
+ fi
+
+ # Build the actual source package
+ pushd "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/${pkgbase}" >/dev/null
+ SRCPKGDEST=. makepkg --nocolor --allsource --ignorearch --skippgpcheck >"${WORKDIR}/${pkgbase}.log" 2>&1
+ if [ $? -eq 0 ] && [ -f "${pkgbase}-${pkgver}${SRCEXT}" ]; then
+ mv_acl "${pkgbase}-${pkgver}${SRCEXT}" "${FTP_BASE}/${SRCPOOL}/${pkgbase}-${pkgver}${SRCEXT}"
+ # Avoid creating the same source package for every arch
+ echo "${pkgbase}-${pkgver}${SRCEXT}" >> "${WORKDIR}/available-src-pkgs"
+ newpkgs+=("${pkgbase}-${pkgver}${SRCEXT}")
+ else
+ failedpkgs+=("${pkgbase}-${pkgver}${SRCEXT}")
+ cat "${WORKDIR}/${pkgbase}.log" >> "${WORKDIR}/makepkg-fail.log"
+ fi
+ popd >/dev/null
+ fi
+ done < "${WORKDIR}/db-${repo}"
+
+ if [ ${#newpkgs[@]} -ge 1 ]; then
+ msg "Adding source packages for [%s]..." "${repo}"
+ for new_pkg in "${newpkgs[@]}"; do
+ msg2 '%s' "${new_pkg}"
+ done
+ fi
+ if [ ${#failedpkgs[@]} -ge 1 ]; then
+ msg "Failed to create source packages for [%s]..." "${repo}"
+ for failed_pkg in "${failedpkgs[@]}"; do
+ msg2 '%s' "${failed_pkg}"
+ done
+ fi
+done
+
+# Cleanup old source packages
+sort -u "${WORKDIR}/expected-src-pkgs" > "${WORKDIR}/expected-src-pkgs.sort"
+sort -u "${WORKDIR}/available-src-pkgs" > "${WORKDIR}/available-src-pkgs.sort"
+old_pkgs=($(comm -23 "${WORKDIR}/available-src-pkgs.sort" "${WORKDIR}/expected-src-pkgs.sort"))
+
+if [ ${#old_pkgs[@]} -ge 1 ]; then
+ msg "Removing old source packages..."
+ "${SOURCE_CLEANUP_DRYRUN}" && warning 'dry run mode is active'
+ for old_pkg in "${old_pkgs[@]}"; do
+ msg2 '%s' "${old_pkg}"
+ if ! "${SOURCE_CLEANUP_DRYRUN}"; then
+ mv_acl "$FTP_BASE/${SRCPOOL}/${old_pkg}" "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}"
+ touch "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}"
+ fi
+ done
+fi
+
+old_pkgs=($(find "${SOURCE_CLEANUP_DESTDIR}" -type f -name "*${SRCEXT}" -mtime +"${SOURCE_CLEANUP_KEEP}" -printf '%f\n'))
+if [ ${#old_pkgs[@]} -ge 1 ]; then
+ msg "Removing old source packages from the cleanup directory..."
+ for old_pkg in "${old_pkgs[@]}"; do
+ msg2 '%s' "${old_pkg}"
+ "${SOURCE_CLEANUP_DRYRUN}" || rm -f "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}"
+ done
+fi
+
+if [ -f "${WORKDIR}/makepkg-fail.log" ]; then
+ msg "Log of failed packages"
+ cat "${WORKDIR}/makepkg-fail.log"
+fi
+
+script_unlock
diff --git a/extra/coadde/cron-jobs/sourceballs.force b/extra/coadde/cron-jobs/sourceballs.force
new file mode 100644
index 0000000..badf15d
--- /dev/null
+++ b/extra/coadde/cron-jobs/sourceballs.force
@@ -0,0 +1,4 @@
+faad2
+wxgtk
+wxpython
+glhack
diff --git a/extra/coadde/cron-jobs/sourceballs.skip b/extra/coadde/cron-jobs/sourceballs.skip
new file mode 100644
index 0000000..14d6f4b
--- /dev/null
+++ b/extra/coadde/cron-jobs/sourceballs.skip
@@ -0,0 +1,29 @@
+nexuiz-data
+torcs-data
+tremulous-data
+ufoai-data
+frogatto-data
+vdrift-data
+naev-data
+btanks-data
+wesnoth-data
+texlive-bin
+texlive-bibtexextra
+texlive-core
+texlive-fontsextra
+texlive-formatsextra
+texlive-games
+texlive-genericextra
+texlive-htmlxml
+texlive-humanities
+texlive-langcjk
+texlive-langcyrillic
+texlive-langextra
+texlive-langgreek
+texlive-latexextra
+texlive-music
+texlive-pictures
+texlive-plainextra
+texlive-pstricks
+texlive-publishers
+texlive-science