Diffstat (limited to 'extra/legacy/cron-jobs')
-rwxr-xr-x  extra/legacy/cron-jobs/check_archlinux/check_packages.py   | 508
-rwxr-xr-x  extra/legacy/cron-jobs/check_archlinux/parse_pkgbuilds.sh  | 153
-rwxr-xr-x  extra/legacy/cron-jobs/integrity-check                     |  32
-rwxr-xr-x  extra/legacy/cron-jobs/make_repo_torrents                  |  70
4 files changed, 0 insertions, 763 deletions
diff --git a/extra/legacy/cron-jobs/check_archlinux/check_packages.py b/extra/legacy/cron-jobs/check_archlinux/check_packages.py
deleted file mode 100755
index ac0194f..0000000
--- a/extra/legacy/cron-jobs/check_archlinux/check_packages.py
+++ /dev/null
@@ -1,508 +0,0 @@
-#!/usr/bin/env python2
-#
-# check_archlinux.py
-#
-# Original script by Scott Horowitz <stonecrest@gmail.com>
-# Rewritten by Xavier Chantry <shiningxc@gmail.com>
-#
-# This script currently checks for a number of issues in your ABS tree:
-# 1. Directories with missing PKGBUILDs
-# 2. Invalid PKGBUILDs (a bash syntax error, for instance)
-# 3. PKGBUILD names that don't match their directory
-# 4. Duplicate PKGBUILDs
-# 5. Invalid archs in PKGBUILDs
-# 6. Missing (make-)dependencies
-# 7. Hierarchy of repos (e.g., that a core package doesn't depend on
-# a non-core package)
-# 8. Circular dependencies
-
-import os,re,commands,getopt,sys,tarfile
-import pdb
-
-import ctypes
-_alpm = ctypes.cdll.LoadLibrary("libalpm.so")
-
-DBEXT='.db.tar.gz'
-
-packages = {} # pkgname : PacmanPackage
-repopkgs = {} # pkgname : PacmanPackage
-provisions = {} # provision : PacmanPackage
-pkgdeps,makepkgdeps = {},{} # PacmanPackage : list of the PacmanPackage dependencies
-invalid_pkgbuilds = []
-missing_pkgbuilds = []
-dups = []
-
-dbonly = []
-absonly = []
-
-mismatches = []
-missing_deps = []
-missing_makedeps = []
-invalid_archs = []
-dep_hierarchy = []
-makedep_hierarchy = []
-circular_deps = [] # pkgname>dep1>dep2>...>pkgname
-checked_deps = []
-
-class PacmanPackage:
- def __init__(self):
- self.name,self.version = "",""
- self.base = ""
- self.path,self.repo = "",""
- self.deps,self.makedeps = [],[]
- self.provides,self.conflicts = [],[]
- self.archs = []
-
-class Depend:
- def __init__(self,name,version,mod):
- self.name = name
- self.version = version
- self.mod = mod
-
-def parse_pkgbuilds(repos,arch):
- for absroot in absroots:
- for repo in repos:
- cmd = os.path.dirname(os.path.realpath(sys.argv[0])) + '/parse_pkgbuilds.sh '
- cmd += arch + ' ' + absroot + '/' + repo
- (status,output) = commands.getstatusoutput(cmd)
- if status != 0:
- print "Error : failed to run '%s'" % cmd
- sys.exit()
- parse_data(repo,output)
-
-def parse_data(repo,data):
- attrname = None
-
- for line in data.split('\n'):
- if line.startswith('%'):
- attrname = line.strip('%').lower()
- elif line.strip() == '':
- attrname = None
- elif attrname == "invalid":
- if repo in repos:
- invalid_pkgbuilds.append(line)
- elif attrname == "missing":
- if repo in repos:
- missing_pkgbuilds.append(line)
- elif attrname == "name":
- pkg = PacmanPackage()
- pkg.name = line
- pkg.repo = repo
- dup = None
- if pkg.name in packages:
- dup = packages[pkg.name]
- else:
- packages[pkg.name] = pkg
- elif attrname == "base":
- pkg.base = line
- elif attrname == "version":
- pkg.version = line
- elif attrname == "path":
- pkg.path = line
- if dup != None and (pkg.repo in repos or dup.repo in repos):
- dups.append(pkg.path + " vs. " + dup.path)
- elif attrname == "arch":
- pkg.archs.append(line)
- elif attrname == "depends":
- pkg.deps.append(line)
- elif attrname == "makedepends":
- pkg.makedeps.append(line)
- elif attrname == "conflicts":
- pkg.conflicts.append(line)
- elif attrname == "provides":
- pkg.provides.append(line)
-
-def parse_dbs(repos,arch):
- dbpkgs = {}
- for repo in repos:
- pkgs = set([])
- db = tarfile.open(os.path.join(repodir,repo,'os',arch,repo + DBEXT))
- for line in db.getnames():
- if not '/' in line:
- pkgs.add(line.rsplit('-',2)[0])
- dbpkgs[repo] = pkgs
- return(dbpkgs)
-
-def splitdep(dep):
- name = dep
- version = ""
- mod = ""
- for char in (">=", "<=", "=", ">", "<"):
- pos = dep.find(char)
- if pos > -1:
- name = dep[:pos]
- version = dep[pos:].replace(char, "")
- mod = char
- break
- return Depend(name,version,mod)
-
-def splitprov(prov):
- name = prov
- version = ""
- pos = prov.find("=")
- if pos > -1:
- name = prov[:pos]
- version = prov[pos:].replace("=", "")
- return (name,version)
-
-def vercmp(v1,mod,v2):
- """
- >>> vercmp("1.0", "<=", "2.0")
- True
- >>> vercmp("1:1.0", ">", "2.0")
- True
- >>> vercmp("1.0.2", ">=", "2.1.0")
- False
- """
- s1 = ctypes.c_char_p(v1)
- s2 = ctypes.c_char_p(v2)
- res = _alpm.alpm_pkg_vercmp(s1,s2)
- if res == 0:
- return (mod.find("=") > -1)
- elif res < 0:
- return (mod.find("<") > -1)
- elif res > 0:
- return (mod.find(">") > -1)
- return False
-
-
-def depcmp(name,version,dep):
- if name != dep.name:
- return False
- if dep.version == "" or dep.mod == "":
- return True
- if version == "":
- return False
- return vercmp(version,dep.mod,dep.version)
-
-def provcmp(pkg,dep):
- for prov in pkg.provides:
- (provname,provver) = splitprov(prov)
- if depcmp(provname,provver,dep):
- return True
- return False
-
-def verify_dep(dep):
- dep = splitdep(dep)
- if dep.name in packages:
- pkg = packages[dep.name]
- if depcmp(pkg.name,pkg.version,dep):
- return [pkg]
- if dep.name in provisions:
- provlist = provisions[dep.name]
- results = []
- for prov in provlist:
- if provcmp(prov,dep):
- results.append(prov)
- return results
- return []
-
-def verify_deps(name,repo,deps):
- pkg_deps = []
- missdeps = []
- hierarchy = []
- for dep in deps:
- pkglist = verify_dep(dep)
- if pkglist == []:
- missdeps.append(repo + "/" + name + " --> '" + dep + "'")
- else:
- valid_repos = get_repo_hierarchy(repo)
- pkgdep = None
- for pkg in pkglist:
- if pkg.repo in valid_repos:
- pkgdep = pkg
- break
- if not pkgdep:
- pkgdep = pkglist[0]
- hierarchy.append((repo,name,pkgdep))
-
- pkg_deps.append(pkgdep)
-
- return (pkg_deps,missdeps,hierarchy)
-
-def compute_deplist(pkg):
- list = []
- stack = [pkg]
- while stack != []:
- dep = stack.pop()
- if dep in pkgdeps:
- for dep2 in pkgdeps[dep]:
- if dep2 not in list:
- list.append(dep2)
- stack.append(dep2)
- if dep in makepkgdeps:
- for dep2 in makepkgdeps[dep]:
- if dep2 not in list:
- list.append(dep2)
- stack.append(dep2)
- return list
-
-def check_hierarchy(deph):
- hierarchy = []
- for (repo,name,pkgdep) in deph:
- deplist = compute_deplist(pkgdep)
- valid_repos = get_repo_hierarchy(repo)
- extdeps = []
- for dep in deplist:
- if dep.repo not in valid_repos:
- extdeps.append(dep.name)
- string = repo + "/" + name + " depends on " + pkgdep.repo + "/" + pkgdep.name + " ("
- string += "%s extra (make)deps to pull" % len(extdeps)
- if 0 < len(extdeps) < 10:
- string += " : " + ' '.join(extdeps)
- string += ")"
- hierarchy.append(string)
- return hierarchy
-
-def get_repo_hierarchy(repo):
- repo_hierarchy = {'core': ['core'], \
- 'extra': ['core', 'extra'], \
- 'community': ['core', 'extra', 'community'], \
- 'multilib': ['core', 'extra', 'community', 'multilib'] }
- if repo in repo_hierarchy:
- return repo_hierarchy[repo]
- else:
- return ['core','extra','community']
-
-def verify_archs(name,repo,archs):
- valid_archs = ['any', 'i686', 'x86_64']
- invalid_archs = []
- for arch in archs:
- if arch not in valid_archs:
- invalid_archs.append(repo + "/" + name + " --> " + arch)
- return invalid_archs
-
-def find_scc(packages):
- # reset all variables
- global index,S,pkgindex,pkglowlink
- index = 0
- S = []
- pkgindex = {}
- pkglowlink = {}
- cycles = []
- for pkg in packages:
- tarjan(pkg)
-
-def tarjan(pkg):
- global index,S,pkgindex,pkglowlink,cycles
- pkgindex[pkg] = index
- pkglowlink[pkg] = index
- index += 1
- checked_deps.append(pkg)
- S.append(pkg)
- deps = []
- if pkg in pkgdeps:
- deps = pkgdeps[pkg]
- for dep in deps:
- if dep not in pkgindex:
- tarjan(dep)
- pkglowlink[pkg] = min(pkglowlink[pkg],pkglowlink[dep])
- elif dep in S:
- pkglowlink[pkg] = min(pkglowlink[pkg],pkgindex[dep])
- if pkglowlink[pkg] == pkgindex[pkg]:
- dep = S.pop()
- if pkg == dep:
- return
- path = pkg.name
- while pkg != dep:
- path = dep.repo + "/" + dep.name + ">" + path
- dep = S.pop()
- path = dep.name + ">" + path
- if pkg.repo in repos:
- circular_deps.append(path)
-
-def print_heading(heading):
- print ""
- print "=" * (len(heading) + 4)
- print "= " + heading + " ="
- print "=" * (len(heading) + 4)
-
-def print_subheading(subheading):
- print ""
- print subheading
- print "-" * (len(subheading) + 2)
-
-def print_missdeps(pkgname,missdeps) :
- for d in missdeps:
- print pkgname + " : " + d
-
-def print_result(list, subheading):
- if len(list) > 0:
- list.sort()
- print_subheading(subheading)
- for item in list:
- print item
-
-def print_results():
- print_result(missing_pkgbuilds, "Missing PKGBUILDs")
- print_result(invalid_pkgbuilds, "Invalid PKGBUILDs")
- print_result(mismatches, "Mismatched Pkgnames")
- print_result(dups, "Duplicate PKGBUILDs")
- print_result(invalid_archs, "Invalid Archs")
- print_result(missing_deps, "Missing Dependencies")
- print_result(missing_makedeps, "Missing Makedepends")
- print_result(dep_hierarchy, "Repo Hierarchy for Dependencies")
- print_result(makedep_hierarchy, "Repo Hierarchy for Makedepends")
- print_result(circular_deps, "Circular Dependencies")
- print_result(dbonly, "Packages found in db, but not in tree")
- print_result(absonly,"Packages found in tree, but not in db")
- print_subheading("Summary")
- print "Missing PKGBUILDs: ", len(missing_pkgbuilds)
- print "Invalid PKGBUILDs: ", len(invalid_pkgbuilds)
- print "Mismatching PKGBUILD names: ", len(mismatches)
- print "Duplicate PKGBUILDs: ", len(dups)
- print "Invalid archs: ", len(invalid_archs)
- print "Missing (make)dependencies: ", len(missing_deps)+len(missing_makedeps)
- print "Repo hierarchy problems: ", len(dep_hierarchy)+len(makedep_hierarchy)
- print "Circular dependencies: ", len(circular_deps)
- print "In db, but not in tree: ", len(dbonly)
- print "In tree, but not in db: ", len(absonly)
- print ""
-
-def print_usage():
- print ""
- print "Usage: ./check_packages.py [OPTION]"
- print ""
- print "Options:"
- print " --abs-tree=<path[,path]> Check the specified tree(s) (default : /var/abs)"
- print " --repos=<r1,r2,...> Check the specified repos (default : core,extra)"
- print " --arch=<i686|x86_64> Check the specified arch (default : i686)"
- print " --repo-dir=<path> Check the dbs at the specified path (default : /srv/ftp)"
- print " -h, --help Show this help and exit"
- print ""
- print "Examples:"
- print "\n Check core and extra in existing abs tree:"
- print " ./check_packages.py --abs-tree=/var/abs --repos=core,extra --arch=i686"
- print "\n Check community:"
- print " ./check_packages.py --abs-tree=/var/abs --repos=community --arch=i686"
- print ""
-
-if __name__ == "__main__":
- ## Default path to the abs root directory
- absroots = ["/var/abs"]
- ## Default list of repos to check
- repos = ['core', 'extra']
- ## Default arch
- arch = "i686"
- ## Default repodir
- repodir = "/srv/ftp"
-
- try:
- opts, args = getopt.getopt(sys.argv[1:], "", ["abs-tree=", "repos=",
- "arch=", "repo-dir="])
- except getopt.GetoptError:
- print_usage()
- sys.exit()
- if opts != []:
- for o, a in opts:
- if o in ("--abs-tree"):
- absroots = a.split(',')
- elif o in ("--repos"):
- repos = a.split(",")
- elif o in ("--arch"):
- arch = a
- elif o in ("--repo-dir"):
- repodir = a
- else:
- print_usage()
- sys.exit()
- if args != []:
- print_usage()
- sys.exit()
-
- for absroot in absroots:
- if not os.path.isdir(absroot):
- print "Error : the abs tree " + absroot + " does not exist"
- sys.exit()
- for repo in repos:
- repopath = absroot + "/" + repo
- if not os.path.isdir(repopath):
- print("Warning : the repository " + repo + " does not exist in " + absroot)
-
- if not os.path.isdir(repodir):
- print "Error: the repository directory %s does not exist" % repodir
- sys.exit()
- for repo in repos:
- path = os.path.join(repodir,repo,'os',arch,repo + DBEXT)
- if not os.path.isfile(path):
- print "Error : repo DB %s : File not found" % path
- sys.exit()
- if not tarfile.is_tarfile(path):
- print "Error : Cant open repo DB %s, not a valid tar file" % path
- sys.exit()
- # repos which need to be loaded
- loadrepos = set([])
- for repo in repos:
- loadrepos = loadrepos | set(get_repo_hierarchy(repo))
-
- print_heading("Integrity Check " + arch + " of " + ",".join(repos))
- print("\nPerforming integrity checks...")
-
- print("==> parsing pkgbuilds")
- parse_pkgbuilds(loadrepos,arch)
-
- # fill provisions
- for name,pkg in packages.iteritems():
- for prov in pkg.provides:
- provname=prov.split("=")[0]
- if provname not in provisions:
- provisions[provname] = []
- provisions[provname].append(pkg)
-
- # fill repopkgs
- for name,pkg in packages.iteritems():
- if pkg.repo in repos:
- repopkgs[name] = pkg
-
- print("==> parsing db files")
- dbpkgs = parse_dbs(repos,arch)
-
- print("==> checking mismatches")
- for name,pkg in repopkgs.iteritems():
- pkgdirname = pkg.path.split("/")[-1]
- if name != pkgdirname and pkg.base != pkgdirname:
- mismatches.append(name + " vs. " + pkg.path)
-
- print("==> checking archs")
- for name,pkg in repopkgs.iteritems():
- archs = verify_archs(name,pkg.repo,pkg.archs)
- invalid_archs.extend(archs)
-
- deph,makedeph = [],[]
-
- print("==> checking dependencies")
- for name,pkg in repopkgs.iteritems():
- (deps,missdeps,hierarchy) = verify_deps(name,pkg.repo,pkg.deps)
- pkgdeps[pkg] = deps
- missing_deps.extend(missdeps)
- deph.extend(hierarchy)
-
- print("==> checking makedepends")
- for name,pkg in repopkgs.iteritems():
- (makedeps,missdeps,hierarchy) = verify_deps(name,pkg.repo,pkg.makedeps)
- makepkgdeps[pkg] = makedeps
- missing_makedeps.extend(missdeps)
- makedeph.extend(hierarchy)
-
- print("==> checking hierarchy")
- dep_hierarchy = check_hierarchy(deph)
- makedep_hierarchy = check_hierarchy(makedeph)
-
- print("==> checking for circular dependencies")
- # make sure pkgdeps is filled for every package
- for name,pkg in packages.iteritems():
- if pkg not in pkgdeps:
- (deps,missdeps,_) = verify_deps(name,pkg.repo,pkg.deps)
- pkgdeps[pkg] = deps
- find_scc(repopkgs.values())
-
- print("==> checking for differences between db files and pkgbuilds")
- for repo in repos:
- for pkg in dbpkgs[repo]:
- if not (pkg in repopkgs and repopkgs[pkg].repo == repo):
- dbonly.append("%s/%s" % (repo,pkg))
- for name,pkg in repopkgs.iteritems():
- if not name in dbpkgs[pkg.repo]:
- absonly.append("%s/%s" % (pkg.repo,name))
-
- print_results()
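The version checks above leaned entirely on libalpm's alpm_pkg_vercmp(), loaded through ctypes. A minimal Python 3 sketch of that binding (not part of the deleted script; it assumes libalpm.so is installed and adds the explicit byte-string handling Python 3 requires):

import ctypes

_alpm = ctypes.CDLL("libalpm.so")  # same library the deleted script loaded
_alpm.alpm_pkg_vercmp.argtypes = [ctypes.c_char_p, ctypes.c_char_p]
_alpm.alpm_pkg_vercmp.restype = ctypes.c_int

def alpm_vercmp(v1, v2):
    """Return <0, 0 or >0, mirroring alpm_pkg_vercmp()."""
    return _alpm.alpm_pkg_vercmp(v1.encode(), v2.encode())

if __name__ == "__main__":
    # Same behaviour the deleted vercmp() doctest relied on:
    # an epoch ("1:") outranks any plain pkgver.
    assert alpm_vercmp("1:1.0", "2.0") > 0
    assert alpm_vercmp("1.0", "2.0") < 0
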
diff --git a/extra/legacy/cron-jobs/check_archlinux/parse_pkgbuilds.sh b/extra/legacy/cron-jobs/check_archlinux/parse_pkgbuilds.sh
deleted file mode 100755
index b857ac8..0000000
--- a/extra/legacy/cron-jobs/check_archlinux/parse_pkgbuilds.sh
+++ /dev/null
@@ -1,153 +0,0 @@
-#!/bin/bash
-
-# Usage : parse_pkgbuilds.sh <arch> <pkgbuilds_dir1> [<pkgbuilds_dir2> ...]
-# Example : parse_pkgbuilds.sh i686 /var/abs/core /var/abs/extra
-
-exit() { return; }
-
-splitpkg_overrides=('depends' 'optdepends' 'provides' 'conflicts')
-variables=('pkgname' 'pkgbase' 'epoch' 'pkgver' 'pkgrel' 'makedepends' 'arch' "${splitpkg_overrides[@]}")
-readonly -a variables splitpkg_overrides
-
-backup_package_variables() {
- for var in "${splitpkg_overrides[@]}"; do
- indirect="${var}_backup"
- eval "${indirect}=(\"\${$var[@]}\")"
- done
-}
-
-restore_package_variables() {
- for var in "${splitpkg_overrides[@]}"; do
- indirect="${var}_backup"
- if [ -n "${!indirect}" ]; then
- eval "${var}=(\"\${$indirect[@]}\")"
- else
- unset "${var}"
- fi
- done
-}
-
-print_info() {
- echo -e "%NAME%\n$pkgname\n"
- if [ -n "$epoch" ]; then
- echo -e "%VERSION%\n$epoch:$pkgver-$pkgrel\n"
- else
- echo -e "%VERSION%\n$pkgver-$pkgrel\n"
- fi
- echo -e "%PATH%\n$dir\n"
-
- if [ -n "$pkgbase" ]; then
- echo -e "%BASE%\n$pkgbase\n"
- fi
-
- if [ -n "$arch" ]; then
- echo "%ARCH%"
- for i in "${arch[@]}"; do echo "$i"; done
- echo ""
- fi
- if [ -n "$depends" ]; then
- echo "%DEPENDS%"
- for i in "${depends[@]}"; do
- echo "$i"
- done
- echo ""
- fi
- if [ -n "$makedepends" ]; then
- echo "%MAKEDEPENDS%"
- for i in "${makedepends[@]}"; do
- echo "$i"
- done
- echo ""
- fi
- if [ -n "$conflicts" ]; then
- echo "%CONFLICTS%"
- for i in "${conflicts[@]}"; do echo "$i"; done
- echo ""
- fi
- if [ -n "$provides" ]; then
- echo "%PROVIDES%"
- for i in "${provides[@]}"; do echo "$i"; done
- echo ""
- fi
-}
-
-source_pkgbuild() {
- ret=0
- dir=$1
- pkgbuild=$dir/PKGBUILD
- for var in "${variables[@]}"; do
- unset "${var}"
- done
- source "$pkgbuild" &>/dev/null || ret=$?
-
- # ensure $pkgname and $pkgver variables were found
- if [ $ret -ne 0 -o -z "$pkgname" -o -z "$pkgver" ]; then
- echo -e "%INVALID%\n$pkgbuild\n"
- return 1
- fi
-
- if [ "${#pkgname[@]}" -gt "1" ]; then
- pkgbase=${pkgbase:-${pkgname[0]}}
- for pkg in "${pkgname[@]}"; do
- if [ "$(type -t "package_${pkg}")" != "function" ]; then
- echo -e "%INVALID%\n$pkgbuild\n"
- return 1
- else
- backup_package_variables
- pkgname=$pkg
- while IFS= read -r line; do
- var=${line%%=*}
- var="${var#"${var%%[![:space:]]*}"}" # remove leading whitespace characters
- for realvar in "${variables[@]}"; do
- if [ "$var" == "$realvar" ]; then
- eval $line
- break
- fi
- done
- done < <(type "package_${pkg}")
- print_info
- restore_package_variables
- fi
- done
- else
- echo
- print_info
- fi
-
- return 0
-}
-
-find_pkgbuilds() {
- #Skip over some dirs
- local d="${1##*/}"
- if [ "$d" = "CVS" -o "$d" = ".svn" ]; then
- return
- fi
-
- if [ -f "$1/PKGBUILD" ]; then
- source_pkgbuild "$1"
- return
- fi
- empty=1
- for dir in "$1"/*; do
- if [ -d "$dir" ]; then
- find_pkgbuilds "$dir"
- unset empty
- fi
- done
- if [ -n "$empty" ]; then
- echo -e "%MISSING%\n$1\n"
- fi
-}
-
-if [ -z "$1" -o -z "$2" ]; then
- exit 1
-fi
-
-CARCH=$1
-shift
-for dir in "$@"; do
- find_pkgbuilds "$dir"
-done
-
-exit 0
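The parser above emits a plain record stream: each field is announced by a %FIELD% header (%NAME%, %VERSION%, %DEPENDS%, ..., plus %INVALID% and %MISSING%) and terminated by a blank line, which is what check_packages.py's parse_data() consumed. A minimal Python 3 sketch of reading that stream (illustrative only; the script path and ABS directory are assumptions):

import subprocess
from collections import defaultdict

def parse_pkgbuild_stream(arch, *dirs):
    """Run parse_pkgbuilds.sh and tally every value under its %FIELD% header."""
    out = subprocess.run(
        ["./parse_pkgbuilds.sh", arch, *dirs],
        capture_output=True, text=True, check=True,
    ).stdout
    fields = defaultdict(list)
    current = None
    for line in out.splitlines():
        if line.startswith("%"):
            current = line.strip("%").lower()
        elif not line.strip():
            current = None      # blank line closes the current field block
        elif current is not None:
            fields[current].append(line)
    return fields

if __name__ == "__main__":
    recs = parse_pkgbuild_stream("x86_64", "/var/abs/core")
    print(len(recs["name"]), "packages parsed,",
          len(recs["invalid"]), "invalid PKGBUILDs")
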
diff --git a/extra/legacy/cron-jobs/integrity-check b/extra/legacy/cron-jobs/integrity-check
deleted file mode 100755
index 7459380..0000000
--- a/extra/legacy/cron-jobs/integrity-check
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/bin/bash
-
-dirname="$(dirname "$(readlink -e "$0")")"
-
-. "${dirname}/../config"
-. "${dirname}/../db-functions"
-
-script_lock
-
-if [ $# -ne 1 ]; then
- die "usage: %s <mailto>" "${0##*/}"
-fi
-mailto=$1
-
-check() {
- "${dirname}"/check_archlinux/check_packages.py \
- --repos="${repos}" \
- --abs-tree="/srv/abs/rsync/${arch},/srv/abs/rsync/any" \
- --repo-dir="${FTP_BASE}" \
- --arch="${arch}" \
- 2>&1 | "${dirname}"/devlist-mailer "Integrity Check ${arch}: ${repos}" "${mailto}"
-}
-
-repos='core,extra,community'
-arch='i686'
-check
-
-repos='core,extra,community,multilib'
-arch='x86_64'
-check
-
-script_unlock
diff --git a/extra/legacy/cron-jobs/make_repo_torrents b/extra/legacy/cron-jobs/make_repo_torrents
deleted file mode 100755
index 2eb0978..0000000
--- a/extra/legacy/cron-jobs/make_repo_torrents
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/bin/bash
-# Copyright (C) 2014 Joseph Graham <joseph@t67.eu>
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-# This script finds any updated packages and calls
-# `make_individual_torrent' for each of them.
-
-username=$( id -un )
-
-case "${username}" in
- repo | root )
- true
- ;;
- * )
- echo "This script must be run as repo user or root user."
- echo "ByeBye!"
- exit 1
- ;;
-esac
-
-# pacman doesn't support multiple different packages of the same name,
-# so it's OK to just stuff all the torrents into a single directory.
-script_directory="$(dirname "$(readlink -e "$0")")/.."
-. "$(dirname "$(readlink -e "$0")")/../config"
-public_location="$FTP_BASE/"
-torrent_location="$FTP_BASE/torrents/"
-
-cd "${torrent_location}"
-
-# Find any directories that might have packages in them
-find "${public_location}" -name 'os' -type 'd' |
-while read dir
-do
- # Find any packages
- find "${dir}" -name '*\.pkg\.tar\.xz' |
- while read pkg
- do
- pkg_name="${pkg##*/}"
-
- if [[ -h "${pkg}" ]] # check if it's a symbolic link
- then
- # We get the target of the symlink
- pkg=$( readlink -f "${pkg}" )
- fi
-
- # If a .torrent file does not already exist for this package, we call
- # `make_individual_torrent' to make it.
- if ! [[ -f "${torrent_location}${pkg_name}.torrent" ]]
- then
- "$script_directory/make_individual_torrent" "${pkg}" "${public_location}"
- fi
- done
-done
-
-if [[ "${username}" == root ]]
-then
- chown repo *
-fi