Diffstat:
 -rw-r--r--   .gitignore                        7
 -rw-r--r--   TODO                             10
 -rw-r--r--   __init__.py                       0
 -rwxr-xr-x   clean_repo.py                    99
 -rw-r--r--   config                           41
 -rw-r--r--   config.local.gerolde              4
 -rw-r--r--   config.local.sigurd               4
 -rwxr-xr-x   config.py                        68
 -rwxr-xr-x   create-repo                      24
 -rwxr-xr-x   cron-jobs/repo-sanity-check      56
 -rwxr-xr-x   cron-jobs/sourceballs            30
 -rwxr-xr-x   cron-jobs/sourceballs2           91
 -rwxr-xr-x   cron-jobs/update-abs-tarballs     7
 -rwxr-xr-x   db-check-nonfree                 46
 -rw-r--r--   db-functions                     29
 -rwxr-xr-x   db-remove                        12
 -rwxr-xr-x   db-update                        20
 -rwxr-xr-x   filter.py                       204
 -rwxr-xr-x   libremessages                    77
 -rwxr-xr-x   list_nonfree_in_db.py            28
 -rw-r--r--   local_config.example             26
 -rwxr-xr-x   mkrepo                           24
 -rwxr-xr-x   repo-add                        561
 -rwxr-xr-x   repo-remove                     561
 -rwxr-xr-x   repo-restore-to-normal           58
 -rwxr-xr-x   repo-update                      55
 -rw-r--r--   test/__init__.py                  0
 -rw-r--r--   test/blacklist_sample             2
 -rw-r--r--   test/core.db.tar.gz             bin 0 -> 1345 bytes
 -rw-r--r--   test/depends                      4
 -rw-r--r--   test/desc                        39
 -rw-r--r--   test/rsync_output_sample         14
 -rw-r--r--   test/test_filter.py             196
 -rwxr-xr-x   yf-update                        18
 -rw-r--r--   yf/PKGBUILD                      28
 -rw-r--r--   yf/your-freedom.install          32
36 files changed, 2409 insertions(+), 66 deletions(-)
diff --git a/.gitignore b/.gitignore
index f47d96a..dd17455 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,10 @@
*~
+*.pyc
+local_config
/config.local
test/packages/*/*.pkg.tar.?z
+\#*#
+.#*
+yftime
+src*
+pkg*
\ No newline at end of file
diff --git a/TODO b/TODO
new file mode 100644
index 0000000..3219b1c
--- /dev/null
+++ b/TODO
@@ -0,0 +1,10 @@
+* Test Suite for clean_repo.py
+
+ - Review all repos
+ - Remove all blacklisted packages
+ - Get pending list right
+ - Extract licenses correctly
+
+* Fix db-move
+
+ - Make it use abslibre
\ No newline at end of file
diff --git a/__init__.py b/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/__init__.py
diff --git a/clean_repo.py b/clean_repo.py
new file mode 100755
index 0000000..cc8e811
--- /dev/null
+++ b/clean_repo.py
@@ -0,0 +1,99 @@
+#! /usr/bin/python
+#-*- encoding: utf-8 -*-
+from repm.filter import *
+import argparse
+
+def mkpending(packages_iterable, pending_file, blacklisted_names,
+ whitelisted_names):
+ """ Determine wich packages are pending for license auditing."""
+ search = tuple(blacklisted_names +
+ whitelisted_names)
+
+ try:
+ fsock=open(pending_file, "r")
+ pkgs=[pkg for pkg in packages_iterable
+ if pkg["name"] not in listado(pending_file)]
+ for line in fsock.readlines():
+ if line:
+ pkg=Package()
+ pkg["name"]=line.split(":")[0]
+ pkg["license"]=":".join(line.split(":")[1:])
+ pkgs.append(pkg)
+ pkgs=[pkg for pkg in pkgs if pkg["name"] not in search
+ and "custom" in pkg["license"]]
+ fsock=open(pending_file, "w")
+ fsock.write("\n".join([pkg["name"] + ":" + pkg["location"] +
+ ":" + pkg["license"]
+ for pkg in pkgs]) + "\n")
+ fsock.close()
+ except(IOError):
+ printf("Can't read or write %s" % pending_file)
+ return pkgs
+
+def remove_from_blacklist(path_to_db, blacklisted_names):
+ """ Check the blacklist and remove packages on the db"""
+ if "~" in path_to_db:
+ path_to_db=(os.path.expanduser(path_to_db))
+
+ pkgs=[pkg for pkg in pkginfo_from_db(path_to_db) if
+ pkg["name"] in blacklisted_names]
+ if pkgs:
+ lista=" ".join([pkg["name"] for pkg in pkgs])
+ cmd = "repo-remove " + path_to_db + " " + lista
+ printf(cmd)
+ a = check_output(cmd.split())
+ return pkgs
+
+def cleanup_nonfree_in_dir(directory, blacklisted_names):
+ if "~" in directory:
+ directory=(os.path.expanduser(directory))
+ pkglist=list()
+ pkgs=pkginfo_from_files_in_dir(directory)
+ for package in pkgs:
+ if package["name"] in blacklisted_names:
+ os.remove(package["location"])
+ pkglist.append(package)
+ return pkglist
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ prog="clean_repo",
+ description="Clean a repo db and packages",)
+
+ parser.add_argument("-k", "--blacklist-file", type=str,
+ help="File containing blacklisted names",
+ required=True,)
+
+ group_dir=parser.add_argument_group("Clean non-free packages in dir")
+ group_dir.add_argument("-d", "--directory", type=str,
+ help="directory to clean")
+
+ group_db=parser.add_argument_group("Clean non-free packages in db",
+ "All these arguments need to be specified for db cleaning:")
+ group_db.add_argument("-b", "--database", type=str,
+ help="dabatase to clean")
+ group_db.add_argument("-p", "--pending-file", type=str,
+ help="File in which to write pending list")
+ group_db.add_argument("-w", "--whitelist-file", type=str,
+ help="File containing whitelisted names")
+
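+ # Example invocation (illustrative paths):
+ # clean_repo.py -k blacklist.txt -d ~/staging
+ # clean_repo.py -k blacklist.txt -b repo.db.tar.gz -p pending.txt -w whitelist.txt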
+ args=parser.parse_args()
+
+ if args.database and not (args.pending_file and args.whitelist_file):
+ parser.print_help()
+ exit(1)
+
+ blacklisted=listado(args.blacklist_file)
+
+ if args.database:
+ whitelisted=listado(args.whitelist_file)
+ pkgs=pkginfo_from_db(args.database)
+ pending_names=[pkg["name"] for pkg in
+ mkpending(pkgs, args.pending_file,
+ blacklisted, whitelisted)]
+
+ if args.directory and args.database:
+ cleanup_nonfree_in_dir(args.directory, (blacklisted + pending_names))
+ elif args.directory:
+ cleanup_nonfree_in_dir(args.directory, blacklisted)
+
diff --git a/config b/config
index 4abddd5..35c7963 100644
--- a/config
+++ b/config
@@ -1,34 +1,43 @@
-FTP_BASE="/srv/ftp"
-SVNREPO=''
-PKGREPOS=()
-PKGPOOL=''
-SRCPOOL=''
+#!/bin/bash
+FTP_BASE="/home/parabolavnx/parabolagnulinux.org/repo"
+ARCH_BASE="/home/parabolavnx/parabolagnulinux.org/repo"
+SVNREPO="/home/parabolavnx/parabolagnulinux.org/abslibre"
-CLEANUP_DESTDIR="/srv/package-cleanup"
+# Repos from Arch
+ARCHREPOS=('core' 'extra' 'community' 'testing' 'multilib')
+# Official Parabola repos
+OURREPOS=('libre' 'libre-testing')
+# User repos
+USERREPOS=('~fauno' '~smv' '~xihh' '~mtjm')
+# Community project repos
+PROJREPOS=('social' 'elementary' 'kernels' 'radio' 'security')
+PKGREPOS=(${ARCHREPOS[@]} ${OURREPOS[@]} ${USERREPOS[@]} ${PROJREPOS[@]})
+PKGPOOL='pool/packages'
+SRCPOOL='sources/packages'
+
+CLEANUP_DESTDIR="$FTP_BASE/old/packages"
CLEANUP_DRYRUN=false
# Time in days to keep moved packages
CLEANUP_KEEP=30
-SOURCE_CLEANUP_DESTDIR="/srv/source-cleanup"
+SOURCE_CLEANUP_DESTDIR="$FTP_BASE/old/sources"
SOURCE_CLEANUP_DRYRUN=false
# Time in days to keep moved sourcepackages
-SOURCE_CLEANUP_KEEP=14
+SOURCE_CLEANUP_KEEP=30
REQUIRE_SIGNATURE=true
LOCK_DELAY=10
LOCK_TIMEOUT=300
-STAGING="$HOME/staging"
-TMPDIR="/srv/tmp"
-ARCHES=(i686 x86_64)
+STAGING="$FTP_BASE/staging"
+TMPDIR="$HOME/tmp"
+ARCHARCHES=(i686 x86_64)
+ARCHES=(${ARCHARCHES[@]} mips64el)
DBEXT=".db.tar.gz"
FILESEXT=".files.tar.gz"
PKGEXT=".pkg.tar.?z"
SRCEXT=".src.tar.gz"
-# Allowed licenses: get sourceballs only for licenses in this array
-ALLOWED_LICENSES=('GPL' 'GPL1' 'GPL2' 'LGPL' 'LGPL1' 'LGPL2' 'LGPL2.1')
-
-# Override default config with config.local
-[ -f "$(dirname ${BASH_SOURCE[0]})/config.local" ] && . "$(dirname ${BASH_SOURCE[0]})/config.local"
+MAKEPKGCONF="$HOME/etc/makepkg.conf"
+BLACKLIST_FILE="$HOME/blacklist/blacklist.txt"
diff --git a/config.local.gerolde b/config.local.gerolde
deleted file mode 100644
index 4501a93..0000000
--- a/config.local.gerolde
+++ /dev/null
@@ -1,4 +0,0 @@
-PKGREPOS=('core' 'extra' 'testing' 'staging' 'kde-unstable' 'gnome-unstable')
-PKGPOOL='pool/packages'
-SRCPOOL='sources/packages'
-SVNREPO='file:///srv/svn-packages'
diff --git a/config.local.sigurd b/config.local.sigurd
deleted file mode 100644
index d28aa37..0000000
--- a/config.local.sigurd
+++ /dev/null
@@ -1,4 +0,0 @@
-PKGREPOS=('community' 'community-testing' 'community-staging' 'multilib' 'multilib-testing')
-PKGPOOL='pool/community'
-SRCPOOL='sources/community'
-SVNREPO='file:///srv/svn-packages'
diff --git a/config.py b/config.py
new file mode 100755
index 0000000..4e218a5
--- /dev/null
+++ b/config.py
@@ -0,0 +1,68 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+try:
+ from subprocess import check_output
+except(ImportError):
+ from commands import getoutput
+ def check_output(*popenargs,**kwargs):
+ cmd=" ".join(*popenargs)
+ return getoutput(cmd)
+import os
+
+
+# Rsync commands
+
+def printf(text, logfile=False):
+ """Print the text to screen and, if a logfile is given, append it there."""
+ print (str(text) + "\n")
+ if logfile:
+ try:
+ log = open(logfile, 'a')
+ log.write("\n" + str(text) + "\n")
+ log.close()
+ except IOError:
+ print("Can't open %s" % logfile)
+
+
+# Classes and Exceptions
+class NonValidFile(ValueError): pass
+class NonValidDir(ValueError): pass
+class NonValidCommand(ValueError): pass
+
+class Package:
+ """ An object that has information about a package. """
+ package_info=dict()
+
+ def __init__(self):
+ self.package_info={ "name" : False,
+ "version" : False,
+ "release" : False,
+ "arch" : False,
+ "license" : False,
+ "location": False,
+ "depends" : False,}
+
+ def __setitem__(self, key, item):
+ if key in self.package_info.keys():
+ return self.package_info.__setitem__(key, item)
+ else:
+ raise ValueError("Package has no %s attribute" % key)
+
+ def __getitem__(self, key):
+ return self.package_info.__getitem__(key)
+
+ def __unicode__(self):
+ return str(self.package_info)
+
+ def __repr__(self):
+ return str(self.package_info)
+
+ def __eq__(self,x):
+ if not isinstance(x, Package):
+ return False
+ for key in self.package_info.keys():
+ if x[key] != self.package_info[key]:
+ return False
+ else:
+ return True
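+
+# Example (illustrative):
+# pkg = Package()
+# pkg["name"] = "pacman"
+# pkg["version"] = "3.5.0"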
diff --git a/create-repo b/create-repo
new file mode 100755
index 0000000..58842c3
--- /dev/null
+++ b/create-repo
@@ -0,0 +1,24 @@
+#!/bin/bash
+# Creates repository structure
+
+. "$(dirname $0)/db-functions"
+. "$(dirname $0)/config"
+
+if [ $# -eq 0 ]; then
+ msg "Usage: $0 repo1 [repo2 ... repoX]"
+ exit 1
+fi
+
+msg "Creating repos..."
+for _repo in $@; do
+ msg2 "Creating [${_repo}]"
+ mkdir -p "${FTP_BASE}/staging/${_repo}" || \
+ error "Failed creating staging dir"
+
+ for _arch in ${ARCHES[@]}; do
+ mkdir -p "${FTP_BASE}/${_repo}/os/${_arch}" || \
+ error "Failed creating ${_arch} dir"
+ done
+done
+
+msg "Don't forget to add them to the PKGREPOS array on $(dirname $0)/config"
diff --git a/cron-jobs/repo-sanity-check b/cron-jobs/repo-sanity-check
new file mode 100755
index 0000000..1ba90a6
--- /dev/null
+++ b/cron-jobs/repo-sanity-check
@@ -0,0 +1,56 @@
+#!/bin/bash
+# Solves issue165
+
+. "$(dirname $0)/../db-functions"
+. "$(dirname $0)/../config"
+
+# Traverse all repos
+for _repo in ${PKGREPOS[@]}; do
+ msg "Cleaning up [${_repo}]"
+
+# Find all pkgnames on this repo's abs
+ on_abs=($(
+ find ${SVNREPO}/${_repo} -name PKGBUILD | \
+ while read pkgbuild; do
+ source ${pkgbuild} >/dev/null 2>&1
+# cleanup to save memory
+ unset build package source md5sums pkgdesc pkgver pkgrel epoch \
+ url license arch depends makedepends optdepends options \
+ >/dev/null 2>&1
+
+# also cleanup package functions
+ for _pkg in ${pkgname[@]}; do
+ unset package_${_pkg} >/dev/null 2>&1
+ done
+
+# this fills the on_abs array
+ echo ${pkgname[@]}
+ done
+ ))
+
+# quit if abs is empty
+ if [ ${#on_abs[*]} -eq 0 ]; then
+ warning "[${_repo}]'s ABS tree is empty, skipping"
+ break
+ fi
+
+# Find all pkgnames on repos
+ on_repo=($(
+ find ${FTP_BASE}/${_repo} -name "*.pkg.tar.?z" -printf "%f\n" | \
+ sed "s/^\(.\+\)-[^-]\+-[^-]\+-[^-]\+$/\1/"
+ ))
+
+# Compares them, whatever is on repos but not on abs should be removed
+ remove=($(comm -13 <(echo ${on_abs[@]} | tr ' ' "\n" | sort -u) \
+ <(echo ${on_repo[@]} | tr ' ' "\n" | sort -u)))
+
+# Remove them from databases, ftpdir-cleanup will take care of the rest
+ find ${FTP_BASE}/${_repo} -name "*.db.tar.?z" -exec \
+ repo-remove {} ${remove[@]} >/dev/null 2>&1 \;
+
+ msg2 "Removed the following packages:"
+ plain "$(echo ${remove[@]} | tr ' ' "\n")"
+
+done
+
+exit $?
diff --git a/cron-jobs/sourceballs b/cron-jobs/sourceballs
index b55de05..ee074bd 100755
--- a/cron-jobs/sourceballs
+++ b/cron-jobs/sourceballs
@@ -21,10 +21,10 @@ renice +10 -p $$ > /dev/null
for repo in ${PKGREPOS[@]}; do
for arch in ${ARCHES[@]}; do
# Repo does not exist; skip it
- if [ ! -f "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" ]; then
+ if [ ! -f "${ARCH_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" ]; then
continue
fi
- bsdtar -xOf "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" \
+ bsdtar -xOf "${ARCH_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" \
| awk '/^%NAME%/ { getline b };
/^%BASE%/ { getline b };
/^%VERSION%/ { getline v };
@@ -46,7 +46,7 @@ for repo in ${PKGREPOS[@]}; do
done
# Create a list of all available source package file names
-find "${FTP_BASE}/${SRCPOOL}" -xtype f -name "*${SRCEXT}" -printf '%f\n' | sort -u > "${WORKDIR}/available-src-pkgs"
+find "${ARCH_BASE}/${SRCPOOL}" -xtype f -name "*${SRCEXT}" -printf '%f\n' | sort -u > "${WORKDIR}/available-src-pkgs"
# Check for all packages if we need to build a source package
for repo in ${PKGREPOS[@]}; do
@@ -59,14 +59,15 @@ for repo in ${PKGREPOS[@]}; do
pkgarch=${pkginfo[2]}
pkglicense=(${pkginfo[@]:3})
- # Should this package be skipped?
+ # Should this packages be skipped?
if grep -Fqx "${pkgbase}" "${dirname}/sourceballs.skip"; then
continue
fi
+ # Commenting out, we'll sourceball everything
# Check if the license or .force file does not enforce creating a source package
- if ! (chk_license ${pkglicense[@]} || grep -Fqx "${pkgbase}" "${dirname}/sourceballs.force"); then
- continue
- fi
+# if ! (chk_license ${pkglicense[@]} || grep -Fqx "${pkgbase}" "${dirname}/sourceballs.force"); then
+# continue
+# fi
# Store the expected file name of the source package
echo "${pkgbase}-${pkgver}${SRCEXT}" >> "${WORKDIR}/expected-src-pkgs"
@@ -79,8 +80,13 @@ for repo in ${PKGREPOS[@]}; do
# Get the sources from svn
mkdir -p "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}"
- svn export -q "${SVNREPO}/${pkgbase}/repos/${repo}-${pkgarch}" \
- "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/${pkgbase}" >/dev/null 2>&1
+ #svn export -q "${SVNREPO}/${pkgbase}/repos/${repo}-${pkgarch}" \
+ # "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/${pkgbase}" >/dev/null 2>&1
+
+ # Try the repo's own abslibre dir first, then fall back to [libre] and [libre-testing]
+ cp -r "${SVNREPO}/$repo/${pkgbase}" "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/" >/dev/null 2>&1 || \
+ cp -r "${SVNREPO}/libre/${pkgbase}" "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/" >/dev/null 2>&1 || \
+ cp -r "${SVNREPO}/libre-testing/${pkgbase}" "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/" >/dev/null 2>&1
if [ $? -ge 1 ]; then
failedpkgs[${#failedpkgs[*]}]="${pkgbase}-${pkgver}${SRCEXT}"
continue
@@ -88,9 +94,9 @@ for repo in ${PKGREPOS[@]}; do
# Build the actual source package
pushd "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/${pkgbase}" >/dev/null
- makepkg --nocolor --allsource --ignorearch >/dev/null 2>&1
+ makepkg --nocolor --allsource --ignorearch # >/dev/null 2>&1
if [ $? -eq 0 ] && [ -f "${pkgbase}-${pkgver}${SRCEXT}" ]; then
- mv "${pkgbase}-${pkgver}${SRCEXT}" "${FTP_BASE}/${SRCPOOL}"
+ mv "${pkgbase}-${pkgver}${SRCEXT}" "${ARCH_BASE}/${SRCPOOL}"
# Avoid creating the same source package for every arch
echo "${pkgbase}-${pkgver}${SRCEXT}" >> "${WORKDIR}/available-src-pkgs"
newpkgs[${#newpkgs[*]}]="${pkgbase}-${pkgver}${SRCEXT}"
@@ -126,7 +132,7 @@ if [ ${#old_pkgs[@]} -ge 1 ]; then
for old_pkg in ${old_pkgs[@]}; do
msg2 "${old_pkg}"
if ! ${SOURCE_CLEANUP_DRYRUN}; then
- mv "$FTP_BASE/${SRCPOOL}/${old_pkg}" "${SOURCE_CLEANUP_DESTDIR}"
+ mv "$ARCH_BASE/${SRCPOOL}/${old_pkg}" "${SOURCE_CLEANUP_DESTDIR}"
touch "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}"
fi
done
diff --git a/cron-jobs/sourceballs2 b/cron-jobs/sourceballs2
new file mode 100755
index 0000000..5e228fc
--- /dev/null
+++ b/cron-jobs/sourceballs2
@@ -0,0 +1,91 @@
+#!/bin/bash
+# Steps
+# Traverse ABSLibre
+# Makepkg --allsource every package
+# Remove the old sourceballs
+
+dirname="$(dirname $(readlink -e $0))"
+. "${dirname}/../db-functions"
+. "${dirname}/../config"
+. "${MAKEPKGCONF}"
+
+pushd "${WORKDIR}" >/dev/null
+
+script_lock
+
+#adjust the nice level to run at a lower priority
+renice +10 -p $$ > /dev/null
+
+# Create a list of all available source package file names
+find "${ARCH_BASE}/${SRCPOOL}" -xtype f -name "*${SRCEXT}" -printf '%f\n' | sort -u > "${WORKDIR}/available-src-pkgs"
+
+pushd "${SVNREPO}" >/dev/null
+
+for repo in ${PKGREPOS[@]}; do
+ msg "Sourceballing [${repo}]"
+
+ pushd $repo >/dev/null
+ find -maxdepth 1 -type d | while read pkg; do
+ pushd "${SVNREPO}/$repo/$pkg" >/dev/null
+
+ [[ ! -e PKGBUILD ]] && {
+ warning "$repo/$pkg is not a package"
+ continue
+ }
+
+# Unset the previous data
+ unset pkgbase pkgname pkgver pkgrel
+ source PKGBUILD
+
+ unset build package url pkgdesc source md5sums depends makedepends \
+ optdepends license arch options check mksource
+
+ for _pkg in ${pkgname[@]}; do
+ unset package_${_pkg} >/dev/null 2>&1
+ done
+
+ pkgbase=${pkgbase:-$pkgname}
+ srcfile="${pkgbase}-${pkgver}-${pkgrel}${SRCEXT}"
+
+ echo "${srcfile}" >> "${WORKDIR}/expected-src-pkgs"
+
+ # Skip already sourceballed
+ [ -e "${SRCPKGDEST}/${srcfile}" ] && continue
+
+ makepkg --allsource --ignorearch -c >/dev/null 2>&1
+
+ [ $? -ne 0 ] && plain ${srcfile}
+
+ done # end find pkgs
+ popd >/dev/null
+
+done # end repos
+
+# Cleanup old source packages
+cat "${WORKDIR}/expected-src-pkgs" | sort -u > "${WORKDIR}/expected-src-pkgs.sort"
+cat "${WORKDIR}/available-src-pkgs" | sort -u > "${WORKDIR}/available-src-pkgs.sort"
+old_pkgs=($(comm -23 "${WORKDIR}/available-src-pkgs.sort" "${WORKDIR}/expected-src-pkgs.sort"))
+
+if [ ${#old_pkgs[@]} -ge 1 ]; then
+ msg "Removing old source packages..."
+ ${SOURCE_CLEANUP_DRYRUN} && warning 'dry run mode is active'
+ for old_pkg in ${old_pkgs[@]}; do
+ msg2 "${old_pkg}"
+ if ! ${SOURCE_CLEANUP_DRYRUN}; then
+ mv "$ARCH_BASE/${SRCPOOL}/${old_pkg}" "${SOURCE_CLEANUP_DESTDIR}"
+ touch "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}"
+ fi
+ done
+fi
+
+old_pkgs=($(find ${SOURCE_CLEANUP_DESTDIR} -type f -name "*${SRCEXT}" -mtime +${SOURCE_CLEANUP_KEEP} -printf '%f\n'))
+if [ ${#old_pkgs[@]} -ge 1 ]; then
+ msg "Removing old source packages from the cleanup directory..."
+ for old_pkg in ${old_pkgs[@]}; do
+ msg2 "${old_pkg}"
+ ${SOURCE_CLEANUP_DRYRUN} || rm -f "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}"
+ done
+fi
+
+script_unlock
+
diff --git a/cron-jobs/update-abs-tarballs b/cron-jobs/update-abs-tarballs
new file mode 100755
index 0000000..824ac34
--- /dev/null
+++ b/cron-jobs/update-abs-tarballs
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+. "$(dirname $0)/../config"
+
+rsync -av --exclude=staging/ parabolagnulinux.org::abstar/ ${FTP_BASE}/
+
+exit $?
diff --git a/db-check-nonfree b/db-check-nonfree
new file mode 100755
index 0000000..ab6491d
--- /dev/null
+++ b/db-check-nonfree
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+. "$(dirname $0)/db-functions"
+. "$(dirname $0)/config"
+
+if [ $# -ge 1 ]; then
+ warning "Calling $(basename $0) with a specific repository is not supported"
+ exit 1
+fi
+
+# TODO: this might lock too much (architectures)
+for repo in ${repos[@]}; do
+ for pkgarch in ${ARCHES[@]}; do
+ repo_lock ${repo} ${pkgarch} || exit 1
+ done
+done
+
+msg "Check nonfree in repo:"
+nonfree=($(cut -d: -f1 ${BLACKLIST_FILE} | sort -u))
+for repo in ${ARCHREPOS[@]}; do
+ for pkgarch in ${ARCHES[@]}; do
+ msg2 "$repo $pkgarch"
+ if [ ! -f "${FTP_BASE}/${repo}/os/${pkgarch}/${repo}${DBEXT}" ]; then
+ continue
+ fi
+ unset dbpkgs
+ unset cleanpkgs
+ cleanpkgs=()
+ dbpkgs=($(bsdtar -xOf "${FTP_BASE}/${repo}/os/${pkgarch}/${repo}${DBEXT}" | awk '/^%NAME%/{getline;print}' | sort -u ))
+ for pkgname in ${dbpkgs[@]}; do
+ if in_array ${pkgname} ${nonfree[@]}; then
+ cleanpkgs+=(${pkgname})
+ fi
+ done
+ if [ ${#cleanpkgs[@]} -ge 1 ]; then
+ msg2 "Unfree: ${cleanpkgs[@]}"
+ arch_repo_remove "${repo}" "${pkgarch}" ${cleanpkgs[@]}
+ fi
+ done
+done
+
+for repo in ${repos[@]}; do
+ for pkgarch in ${ARCHES[@]}; do
+ repo_unlock ${repo} ${pkgarch}
+ done
+done
diff --git a/db-functions b/db-functions
index a3e2168..4dddcb5 100644
--- a/db-functions
+++ b/db-functions
@@ -87,7 +87,7 @@ get_full_version() {
script_lock() {
local LOCKDIR="$TMPDIR/.scriptlock.$(basename $0)"
if ! mkdir "$LOCKDIR" >/dev/null 2>&1 ; then
- local _owner="$(/usr/bin/stat -c %U $LOCKDIR)"
+ local _owner="$(stat -c %U $LOCKDIR)"
error "Script $(basename $0) is already locked by $_owner."
exit 1
else
@@ -178,7 +178,7 @@ repo_lock () {
_count=0
while [ $_count -le $_trial ] || $_lockblock ; do
if ! mkdir "$LOCKDIR" >/dev/null 2>&1 ; then
- _owner="$(/usr/bin/stat -c %U $LOCKDIR)"
+ _owner="$(stat -c %U $LOCKDIR)"
warning "Repo [${1}] (${2}) is already locked by $_owner. "
msg2 "Retrying in $LOCK_DELAY seconds..."
else
@@ -211,7 +211,7 @@ repo_unlock () { #repo_unlock <repo-name> <arch>
_grep_pkginfo() {
local _ret
- _ret="$(/usr/bin/bsdtar -xOqf "$1" .PKGINFO | /bin/grep -m 1 "^${2} = ")"
+ _ret="$(bsdtar -xOqf "$1" .PKGINFO | grep -m 1 "^${2} = ")"
echo "${_ret#${2} = }"
}
@@ -375,6 +375,7 @@ check_splitpkgs() {
for pkgfile in ${pkgfiles[@]}; do
issplitpkg "${pkgfile}" || continue
local _pkgbase="$(getpkgbase ${pkgfile})"
+ msg2 "Checking $_pkgbase"
local _pkgname="$(getpkgname ${pkgfile})"
local _pkgarch="$(getpkgarch ${pkgfile})"
mkdir -p "${repo}/${_pkgarch}/${_pkgbase}"
@@ -382,9 +383,15 @@ check_splitpkgs() {
if [ ! -f "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}" ]; then
mkdir -p "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}"
- svn export -q "${SVNREPO}/${_pkgbase}/repos/${repo}-${_pkgarch}/PKGBUILD" \
- "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}" >/dev/null
- [ $? -ge 1 ] && return 1
+
+ cp -r ${SVNREPO}/$repo/$_pkgbase/PKGBUILD "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}" >/dev/null 2>&1 || \
+ cp -r ${SVNREPO}/libre/$_pkgbase/PKGBUILD "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}" >/dev/null 2>&1 || \
+ cp -r ${SVNREPO}/libre-testing/$_pkgbase/PKGBUILD "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/$_pkgbase">/dev/null 2>&1
+
+ [[ $? -ge 1 ]] && {
+ echo "Failed $_pkgbase-$_pkgver-$_pkgarch"
+ return 1
+ }
fi
local svnnames=($(. "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}"; echo ${pkgname[@]}))
@@ -473,7 +480,7 @@ set_repo_permission() {
local filesfile="${FTP_BASE}/${repo}/os/${arch}/${repo}${FILESEXT}"
if [ -w "${dbfile}" ]; then
- local group=$(/usr/bin/stat --printf='%G' "$(dirname "${dbfile}")")
+ local group=$(stat --printf='%G' "$(dirname "${dbfile}")")
chgrp $group "${dbfile}" || error "Could not change group of ${dbfile} to $group"
chgrp $group "${filesfile}" || error "Could not change group of ${filesfile} to $group"
chmod g+w "${dbfile}" || error "Could not set write permission for group $group to ${dbfile}"
@@ -490,9 +497,9 @@ arch_repo_add() {
# package files might be relative to repo dir
pushd "${FTP_BASE}/${repo}/os/${arch}" >/dev/null
- /usr/bin/repo-add -q "${repo}${DBEXT}" ${pkgs[@]} \
+ repo-add -q "${repo}${DBEXT}" ${pkgs[@]} >/dev/null \
|| error "repo-add ${repo}${DBEXT} ${pkgs[@]}"
- /usr/bin/repo-add -f -q "${repo}${FILESEXT}" ${pkgs[@]} \
+ repo-add -f -q "${repo}${FILESEXT}" ${pkgs[@]} \
|| error "repo-add -f ${repo}${FILESEXT} ${pkgs[@]}"
popd >/dev/null
set_repo_permission "${repo}" "${arch}"
@@ -509,9 +516,9 @@ arch_repo_remove() {
error "No database found at '${dbfile}'"
return 1
fi
- /usr/bin/repo-remove -q "${dbfile}" ${pkgs[@]} \
+ repo-remove -q "${dbfile}" ${pkgs[@]} >/dev/null \
|| error "repo-remove ${dbfile} ${pkgs[@]}"
- /usr/bin/repo-remove -q "${filesfile}" ${pkgs[@]} \
+ repo-remove -q "${filesfile}" ${pkgs[@]} \
|| error "repo-remove ${filesfile} ${pkgs[@]}"
set_repo_permission "${repo}" "${arch}"
}
diff --git a/db-remove b/db-remove
index b44eb33..ccfeb36 100755
--- a/db-remove
+++ b/db-remove
@@ -12,9 +12,6 @@ pkgbase="$1"
repo="$2"
arch="$3"
-ftppath="$FTP_BASE/$repo/os"
-svnrepo="$repo-$arch"
-
if ! check_repo_permission $repo; then
die "You don't have permission to remove packages from ${repo}"
fi
@@ -30,14 +27,11 @@ for tarch in ${tarches[@]}; do
done
msg "Removing $pkgbase from [$repo]..."
-/usr/bin/svn checkout -q "${SVNREPO}/${pkgbase}" "${WORKDIR}/svn/${pkgbase}" >/dev/null
-if [ -d "${WORKDIR}/svn/$pkgbase/repos/$svnrepo" ]; then
- pkgnames=($(. "${WORKDIR}/svn/$pkgbase/repos/$svnrepo/PKGBUILD"; echo ${pkgname[@]}))
- /usr/bin/svn rm --force -q "${WORKDIR}/svn/$pkgbase/repos/$svnrepo"
- /usr/bin/svn commit -q "${WORKDIR}/svn/$pkgbase" -m "$(basename $0): $pkgbase removed by $(id -un)"
+if [ -d "${SVNREPO}/$repo/$pkgbase" ]; then
+ pkgnames=($(. "${SVNREPO}/$repo/$pkgbase/PKGBUILD"; echo ${pkgname[@]}))
else
- warning "$pkgbase not found in $svnrepo"
+ warning "$pkgbase not found in $repo"
warning "Removing only $pkgbase from the repo"
warning "If it was a split package you have to remove the others yourself!"
pkgnames=($pkgbase)
diff --git a/db-update b/db-update
index 4b9c78f..86eaa2e 100755
--- a/db-update
+++ b/db-update
@@ -35,16 +35,14 @@ for repo in ${repos[@]}; do
if ! check_pkgfile "${pkg}"; then
die "Package ${repo}/$(basename ${pkg}) is not consistent with its meta data"
fi
- if ! check_pkgsvn "${pkg}" "${repo}"; then
- die "Package ${repo}/$(basename ${pkg}) is not consistent with svn repository"
- fi
- if ! check_pkgrepos "${pkg}"; then
- die "Package ${repo}/$(basename ${pkg}) already exists in another repository"
- fi
+ #if ! check_pkgrepos "${pkg}"; then
+ # die "Package ${repo}/$(basename ${pkg}) already exists in another repository"
+ #fi
done
- if ! check_splitpkgs ${repo} ${pkgs[@]}; then
- die "Missing split packages for ${repo}"
- fi
+ # This is fucking obnoxious
+# if ! check_splitpkgs ${repo} ${pkgs[@]}; then
+# die "Missing split packages for ${repo}"
+# fi
else
die "Could not read ${STAGING}"
fi
@@ -71,7 +69,7 @@ for repo in ${repos[@]}; do
if [ -f "$FTP_BASE/${PKGPOOL}/${pkgfile}.sig" ]; then
ln -s "../../../${PKGPOOL}/${pkgfile}.sig" "$FTP_BASE/$repo/os/${pkgarch}"
fi
- add_pkgs[${#add_pkgs[*]}]=${pkgfile}
+ add_pkgs[${#add_pkgs[*]}]=${pkgfile}
done
if [ ${#add_pkgs[@]} -ge 1 ]; then
arch_repo_add "${repo}" "${pkgarch}" ${add_pkgs[@]}
@@ -84,3 +82,5 @@ for repo in ${repos[@]}; do
repo_unlock ${repo} ${pkgarch}
done
done
+
+
diff --git a/filter.py b/filter.py
new file mode 100755
index 0000000..5d90bdd
--- /dev/null
+++ b/filter.py
@@ -0,0 +1,204 @@
+#! /usr/bin/python
+#-*- encoding: utf-8 -*-
+from glob import glob
+from repm.config import *
+import tarfile
+
+def listado(filename, start=0, end=None):
+ """Get a list of package names (or fields) from a file."""
+ fsock = open(filename, "r")
+ lista = fsock.read().split("\n")
+ fsock.close()
+ if end is not None:
+ return [":".join(pkg.split(":")[start:end]).rstrip()
+ for pkg in lista if pkg]
+ else:
+ return [pkg.split(":")[start].rstrip()
+ for pkg in lista if pkg]
+
+def pkginfo_from_filename(filename):
+ """ Generates a Package object with info from a filename,
+ filename can be relative or absolute
+
+ Parameters:
+ ----------
+ filename -> str Must contain .pkg.tar.
+
+ Returns:
+ ----------
+ pkg -> Package object"""
+ if ".pkg.tar." not in filename:
+ raise NonValidFile("File is not a pacman package")
+ pkg = Package()
+ pkg["location"] = filename
+ fileattrs = os.path.basename(filename).split("-")
+ pkg["arch"] = fileattrs.pop(-1).split(".")[0]
+ pkg["release"] = fileattrs.pop(-1)
+ pkg["version"] = fileattrs.pop(-1)
+ pkg["name"] = "-".join(fileattrs)
+ return pkg
+
+def pkginfo_from_desc(info_from_desc, pkg=None):
+ """ Returns pkginfo from the contents of a desc file.
+
+ Parameters:
+ ----------
+ info_from_desc -> str Contents of a desc file
+
+ Returns:
+ ----------
+ pkg -> Package object"""
+ if pkg is None:
+ pkg = Package()
+ info=info_from_desc.rsplit()
+ info_map={"name" :("%NAME%" , None),
+ "version" :("%VERSION%" , 0 ),
+ "release" :("%VERSION%" , 1 ),
+ "arch" :("%ARCH%" , None),
+ "license" :("%LICENSE%" , None),
+ "location":("%FILENAME%", None),}
+
+ for key in info_map.keys():
+ field,pos=info_map[key]
+ pkg[key]=info[info.index(field)+1]
+ if pos is not None:
+ pkg[key]=pkg[key].split("-")[pos]
+ return pkg
+
+def pkginfo_from_rsync_output(rsync_output):
+ """ Generates a list of packages and versions from an rsync output
+ which uses the --list-only and --no-motd options.
+
+ Parameters:
+ ----------
+ rsync_output -> str Contains output from rsync
+
+ Returns:
+ ----------
+ package_list -> tuple Contains Package objects. """
+
+ def package_or_link(line):
+ """ Take info out of filename """
+ location_field = 4
+ return pkginfo_from_filename(line.rsplit()[location_field])
+
+ def do_nothing(line):
+ pass
+
+ options = { "d": do_nothing,
+ "l": package_or_link,
+ "-": package_or_link,
+ " ": do_nothing}
+
+ package_list=list()
+
+ lines=[x for x in rsync_output.split("\n") if ".pkg.tar" in x]
+
+ for line in lines:
+ pkginfo=options[line[0]](line)
+ if pkginfo:
+ package_list.append(pkginfo)
+
+ return tuple(package_list)
+
+def pkginfo_from_files_in_dir(directory):
+ """ Returns pkginfo from filenames of packages in dir
+ wich has .pkg.tar. on them
+
+ Parameters:
+ ----------
+ directory -> str Directory must exist
+
+ Returns:
+ ----------
+ package_list -> tuple Contains Package objects """
+ package_list=list()
+
+ if not os.path.isdir(directory):
+ raise NonValidDir
+
+ for filename in glob(os.path.join(directory,"*")):
+ if ".pkg.tar." in filename:
+ package_list.append(pkginfo_from_filename(filename))
+ return tuple(package_list)
+
+def pkginfo_from_db(path_to_db):
+ """ Get pkginfo from db.
+
+ Parameters:
+ ----------
+ path_to_db -> str Path to file
+
+ Output:
+ ----------
+ package_list -> tuple of Package objects"""
+ package_list=list()
+
+ if not os.path.isfile(path_to_db):
+ raise NonValidFile(path_to_db + " is not a file")
+
+ try:
+ dbsock = tarfile.open(path_to_db, 'r:gz')
+ except tarfile.ReadError:
+ raise NonValidFile("No valid db_file %s or not readable"
+ % path_to_db)
+ try:
+ desc_files=[desc for desc in dbsock.getnames()
+ if "/desc" in desc]
+ for name in desc_files:
+ desc=dbsock.extractfile(name).read().decode("UTF-8")
+ package_list.append(pkginfo_from_desc(desc))
+ finally:
+ dbsock.close()
+ return package_list
+
+def rsyncBlacklist_from_blacklist(packages_iterable,
+ blacklisted_names,
+ exclude_file):
+ """ Generate an exclude list for rsync
+
+ Parameters:
+ ----------
+ package_iterable -> list or tuple Contains Package objects
+ blacklisted_names-> list or tuple Contains blacklisted names
+ exclude_file -> str Path to file
+
+ Output:
+ ----------
+ pkgs -> list of blacklisted package locations """
+ pkgs=[pkg["location"] for pkg in packages_iterable
+ if isinstance(pkg, Package)
+ and pkg["name"] in blacklisted_names]
+ if exclude_file:
+ try:
+ fsock = open(exclude_file,"w")
+ fsock.write("\n".join(pkgs) + "\n")
+ fsock.close()
+ except IOError:
+ printf("%s wasn't written" % exclude_file)
+ exit(1)
+ return pkgs
+
+
+if __name__ == "__main__":
+ import argparse
+ parser=argparse.ArgumentParser()
+ parser.add_argument("-r", "--rsync-exclude-file", type=str,
+ help="File in which to generate exclude list",
+ required=True,)
+ parser.add_argument("-k", "--blacklist-file", type=str,
+ help="File containing blacklisted names",
+ required=True,)
+ parser.add_argument("-f", "--rsout-file", type=str,
+ help="This file will be read to get a pkg list",
+ required=True,)
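+ # Example invocation (illustrative paths):
+ # filter.py -f rsout.txt -k blacklist.txt -r rsyncBlacklist.txt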
+ args=parser.parse_args()
+ try:
+ fsock=open(args.rsout_file, "r")
+ rsout=fsock.read()
+ fsock.close()
+ except IOError:
+ print("%s is not readable" % args.rsout_file)
+ exit(1)
+ packages=pkginfo_from_rsync_output(rsout)
+ rsyncBlacklist_from_blacklist(packages, listado(args.blacklist_file),
+ args.rsync_exclude_file)
diff --git a/libremessages b/libremessages
new file mode 100755
index 0000000..9fbbc2b
--- /dev/null
+++ b/libremessages
@@ -0,0 +1,77 @@
+# Copyright (c) 2006-2010 Pacman Development Team <pacman-dev@archlinux.org>
+# Copyright (c) 2002-2006 by Judd Vinet <jvinet@zeroflux.org>
+# Copyright (c) 2005 by Aurelien Foret <orelien@chez.com>
+# Copyright (c) 2006 by Miklos Vajna <vmiklos@frugalware.org>
+# Copyright (c) 2005 by Christian Hamar <krics@linuxforum.hu>
+# Copyright (c) 2006 by Alex Smith <alex@alex-smith.me.uk>
+# Copyright (c) 2006 by Andras Voroskoi <voroskoi@frugalware.org>
+# Copyright (c) 2011 by Joshua Haase <hahj87@gmail.com>
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+# gettext initialization
+export TEXTDOMAIN='libretools'
+export TEXTDOMAINDIR='/usr/share/locale'
+
+# check if messages are to be printed using color
+unset ALL_OFF BOLD BLUE GREEN RED YELLOW
+
+if tput setaf 0 &>/dev/null; then
+ ALL_OFF="$(tput sgr0)"
+ BOLD="$(tput bold)"
+ BLUE="${BOLD}$(tput setaf 4)"
+ GREEN="${BOLD}$(tput setaf 2)"
+ RED="${BOLD}$(tput setaf 1)"
+ YELLOW="${BOLD}$(tput setaf 3)"
+ PURPLE="${ALL_OFF}$(tput setaf 5)"
+else
+ ALL_OFF="\033[1;0m"
+ BOLD="\033[1;1m"
+ BLUE="${BOLD}\033[1;34m"
+ GREEN="${BOLD}\033[1;32m"
+ RED="${BOLD}\033[1;31m"
+ YELLOW="${BOLD}\033[1;33m"
+ PURPLE="${BOLD}\033[1;30;40m"
+fi
+
+stdnull() {
+ local action=$1;
+ eval "${action} >/dev/null 2>&1"
+}
+
+plain() {
+ local mesg=$1; shift
+ printf "${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
+}
+
+msg() {
+ local mesg=$1; shift
+ printf "${GREEN}==>${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
+}
+
+msg2() {
+ local mesg=$1; shift
+ printf "${BLUE} ->${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
+}
+
+warning() {
+ local mesg=$1; shift
+ printf "${YELLOW}==> $(gettext "WARNING:")${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
+}
+
+error() {
+ local mesg=$1; shift
+ printf "${RED}==> $(gettext "ERROR:")${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
+}
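+
+# Example usage (illustrative; these helpers take a printf-style format string plus arguments):
+# msg "Cleaning up [%s]" "$repo"
+# error "Package %s not found" "$pkgname"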
+
diff --git a/list_nonfree_in_db.py b/list_nonfree_in_db.py
new file mode 100755
index 0000000..598a2e7
--- /dev/null
+++ b/list_nonfree_in_db.py
@@ -0,0 +1,28 @@
+#! /usr/bin/python
+#-*- encoding: utf-8 -*-
+from repm.filter import *
+import argparse
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ prog="nonfree_in_db",
+ description="Cleans nonfree files on repo",)
+
+ parser.add_argument("-k", "--blacklist-file", type=str,
+ help="File containing blacklisted names",
+ required=True,)
+
+ parser.add_argument("-b", "--database", type=str,
+ help="dabatase to clean",
+ required=True,)
+
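+ # Example invocation (illustrative paths):
+ # list_nonfree_in_db.py -k blacklist.txt -b repo.db.tar.gz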
+ args=parser.parse_args()
+
+ if not (args.blacklist_file and args.database):
+ parser.print_help()
+ exit(1)
+
+ blacklist=listado(args.blacklist_file)
+ pkgs=pkginfo_from_db(args.database)
+
+ print(" ".join([pkg["name"] for pkg in pkgs if pkg["name"] in blacklist]))
diff --git a/local_config.example b/local_config.example
new file mode 100644
index 0000000..2280cc2
--- /dev/null
+++ b/local_config.example
@@ -0,0 +1,26 @@
+# Mirror options
+mirror="mirrors.eu.kernel.org"
+mirrorpath="mirrors/archlinux"
+
+# Directories: they should not end with a trailing /
+paraboladir=~/parabolagnulinux.org
+tempdir=~/tmp
+archdb=${tempdir}/db
+docs_dir=${paraboladir}/docs
+repodir=${paraboladir}/repo
+licenses_dir=${docs_dir}/pending_licenses
+# End Directories
+
+# Files
+logname=${paraboladir}/log/$(date -u +%Y%m%d-%H:%M)-repo-maintainer.log
+rsout_file=${tempdir}/rsout
+rsync_not_needed=${tempdir}/rsync_not_needed
+
+rsync_blacklist=${docs_dir}/rsyncBlacklist
+
+blacklist=${docs_dir}/blacklist.txt
+whitelist=${docs_dir}/whitelist.txt
+
+# Rsync commands
+rsync_list_command="rsync -rptgoL --exclude='*.abs.tar.*' --list-only --no-motd "
+rsync_update_command="rsync -rptgoL --exclude='*.abs.tar.*' --no-motd "
diff --git a/mkrepo b/mkrepo
new file mode 100755
index 0000000..5f704cc
--- /dev/null
+++ b/mkrepo
@@ -0,0 +1,24 @@
+#!/bin/bash
+# Author: Nicolás Reynolds <fauno@kiwwwi.com.ar>
+# License: GPLv3+
+# Description: A script to quickly create new [repos]
+
+source $(dirname $0)/config
+source $(dirname $0)/local_config
+
+# TODO it would be simpler to expand arrays to {element1,element2,etc}
+for repo in $@; do
+
+ echo ":: Creating [$repo]"
+ mkdir -pv ${repodir}/{staging/,}${repo}
+
+ for arch in ${ARCHES[@]}; do
+ mkdir -pv ${repodir}/${repo}/os/${arch}
+ done
+
+done
+
+echo ":: All done. Add the repo to the parabolaweb admin page"
+echo " and the get_repos script on the same server."
+
+exit $?
diff --git a/repo-add b/repo-add
new file mode 100755
index 0000000..c4bf96f
--- /dev/null
+++ b/repo-add
@@ -0,0 +1,561 @@
+#!/bin/bash
+#
+# repo-add - add a package to a given repo database file
+# repo-remove - remove a package entry from a given repo database file
+# Generated from repo-add.in; do not edit by hand.
+#
+# Copyright (c) 2006-2008 Aaron Griffin <aaron@archlinux.org>
+# Copyright (c) 2007-2008 Dan McGee <dan@archlinux.org>
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+LICENSESDIR=/home/parabolavnx/licenses
+
+# gettext initialization
+export TEXTDOMAIN='pacman'
+export TEXTDOMAINDIR='/usr/share/locale'
+
+myver='3.5.0'
+confdir='/home/parabolavnx/etc'
+
+QUIET=0
+DELTA=0
+WITHFILES=0
+REPO_DB_FILE=
+LOCKFILE=
+CLEAN_LOCK=0
+
+# ensure we have a sane umask set
+umask 0022
+
+msg() {
+ (( QUIET )) && return
+ local mesg=$1; shift
+ printf "==> ${mesg}\n" "$@" >&1
+}
+
+msg2() {
+ (( QUIET )) && return
+ local mesg=$1; shift
+ printf " -> ${mesg}\n" "$@" >&1
+}
+
+warning() {
+ local mesg=$1; shift
+ printf "==> $(gettext "WARNING:") ${mesg}\n" "$@" >&2
+}
+
+error() {
+ local mesg=$1; shift
+ printf "==> $(gettext "ERROR:") ${mesg}\n" "$@" >&2
+}
+
+# print usage instructions
+usage() {
+ printf "repo-add, repo-remove (pacman) %s\n\n" "$myver"
+ printf "$(gettext "Usage: repo-add [-d] [-f] [-q] <path-to-db> <package|delta> ...\n")"
+ printf "$(gettext "Usage: repo-remove [-q] <path-to-db> <packagename|delta> ...\n\n")"
+ printf "$(gettext "\
+repo-add will update a package database by reading a package file.\n\
+Multiple packages to add can be specified on the command line.\n\n")"
+ printf "$(gettext "\
+repo-remove will update a package database by removing the package name\n\
+specified on the command line from the given repo database. Multiple\n\
+packages to remove can be specified on the command line.\n\n")"
+ printf "$(gettext "\
+Use the -q/--quiet flag to minimize output to basic messages, warnings,\n\
+and errors.\n\n")"
+ printf "$(gettext "\
+Use the -d/--delta flag to automatically generate and add a delta file\n\
+between the old entry and the new one, if the old package file is found\n\
+next to the new one.\n\n")"
+ printf "$(gettext "\
+Use the -f/--files flag to update a database including file entries.\n\n")"
+ echo "$(gettext "Example: repo-add /path/to/repo.db.tar.gz pacman-3.0.0.pkg.tar.gz")"
+ echo "$(gettext "Example: repo-remove /path/to/repo.db.tar.gz kernel26")"
+}
+
+version() {
+ printf "repo-add, repo-remove (pacman) %s\n\n" "$myver"
+ printf "$(gettext "\
+Copyright (C) 2006-2008 Aaron Griffin <aaron@archlinux.org>.\n\
+Copyright (c) 2007-2008 Dan McGee <dan@archlinux.org>.\n\n\
+This is free software; see the source for copying conditions.\n\
+There is NO WARRANTY, to the extent permitted by law.\n")"
+}
+
+# write a list entry
+# arg1 - Entry name
+# arg2 - List
+# arg3 - File to write to
+write_list_entry() {
+ if [[ -n $2 ]]; then
+ echo "%$1%" >>$3
+ echo -e $2 >>$3
+ fi
+}
+
+find_pkgentry()
+{
+ local pkgname=$1
+ local pkgentry
+ for pkgentry in $tmpdir/$pkgname*; do
+ name=${pkgentry##*/}
+ if [[ ${name%-*-*} = $pkgname ]]; then
+ echo $pkgentry
+ return 0
+ fi
+ done
+ return 1
+}
+
+# Get the package name from the delta filename
+get_delta_pkgname() {
+ local tmp
+
+ tmp=${1##*/}
+ echo ${tmp%-*-*_to*}
+}
+
+# write a delta entry
+# arg1 - path to delta file
+db_write_delta()
+{
+ deltafile="$1"
+ pkgname="$(get_delta_pkgname $deltafile)"
+
+ pkgentry=$(find_pkgentry $pkgname)
+ if [[ -z $pkgentry ]]; then
+ error "$(gettext "No database entry for package '%s'.")" "$pkgname"
+ return 1
+ fi
+ deltas="$pkgentry/deltas"
+ if [[ ! -f $deltas ]]; then
+ echo -e "%DELTAS%" >$deltas
+ fi
+ # get md5sum and compressed size of package
+ md5sum="$(openssl dgst -md5 "$deltafile")"
+ md5sum="${md5sum##* }"
+ csize=$(stat -L -c %s "$deltafile")
+
+ oldfile=$(xdelta3 printhdr $deltafile | grep "XDELTA filename (source)" | sed 's/.*: *//')
+ newfile=$(xdelta3 printhdr $deltafile | grep "XDELTA filename (output)" | sed 's/.*: *//')
+
+ if grep -q "$oldfile.*$newfile" $deltas; then
+ sed -i.backup "/$oldfile.*$newfile/d" $deltas && rm -f $deltas.backup
+ fi
+ msg2 "$(gettext "Adding 'deltas' entry : %s -> %s")" "$oldfile" "$newfile"
+ echo ${deltafile##*/} $md5sum $csize $oldfile $newfile >> $deltas
+
+ return 0
+} # end db_write_delta
+
+# remove a delta entry
+# arg1 - path to delta file
+db_remove_delta()
+{
+ deltafile="$1"
+ filename=${deltafile##*/}
+ pkgname="$(get_delta_pkgname $deltafile)"
+
+ pkgentry=$(find_pkgentry $pkgname)
+ if [[ -z $pkgentry ]]; then
+ return 1
+ fi
+ deltas="$pkgentry/deltas"
+ if [[ ! -f $deltas ]]; then
+ return 1
+ fi
+ if grep -q "$filename" $deltas; then
+ sed -i.backup "/$filename/d" $deltas && rm -f $deltas.backup
+ msg2 "$(gettext "Removing existing entry '%s'...")" "$filename"
+ return 0
+ fi
+
+ return 1
+} # end db_remove_delta
+
+# write an entry to the pacman database
+# arg1 - path to package
+db_write_entry()
+{
+ # blank out all variables
+ local pkgfile="$1"
+ local pkgname pkgver pkgdesc csize size md5sum url arch builddate packager \
+ _groups _licenses _replaces _depends _conflicts _provides _optdepends
+
+ local OLDIFS="$IFS"
+ # IFS (field separator) is only the newline character
+ IFS="
+"
+
+ # read info from the zipped package
+ local line var val
+ for line in $(bsdtar -xOqf "$pkgfile" .PKGINFO |
+ grep -v '^#' | sed 's|\(\w*\)\s*=\s*\(.*\)|\1 \2|'); do
+ # bash awesomeness here- var is always one word, val is everything else
+ var=${line%% *}
+ val=${line#* }
+ declare $var="$val"
+ case "$var" in
+ group) _groups="$_groups$group\n" ;;
+ license) _licenses="$_licenses$license\n" ;;
+ replaces) _replaces="$_replaces$replaces\n" ;;
+ depend) _depends="$_depends$depend\n" ;;
+ conflict) _conflicts="$_conflicts$conflict\n" ;;
+ provides) _provides="$_provides$provides\n" ;;
+ optdepend) _optdepends="$_optdepends$optdepend\n" ;;
+ esac
+ done
+
+ IFS=$OLDIFS
+
+ # get md5sum and compressed size of package
+ md5sum="$(openssl dgst -md5 "$pkgfile")"
+ md5sum="${md5sum##* }"
+ csize=$(stat -L -c %s "$pkgfile")
+
+ # ensure $pkgname and $pkgver variables were found
+ if [[ -z $pkgname || -z $pkgver ]]; then
+ error "$(gettext "Invalid package file '%s'.")" "$pkgfile"
+ return 1
+ fi
+
+ pushd "$tmpdir" >/dev/null
+ if [[ -d $pkgname-$pkgver ]]; then
+ warning "$(gettext "An entry for '%s' already existed")" "$pkgname-$pkgver"
+ else
+ if (( DELTA )); then
+ pkgentry=$(find_pkgentry $pkgname)
+ if [[ -n $pkgentry ]]; then
+ local oldfilename=$(grep -A1 FILENAME $pkgentry/desc | tail -n1)
+ local oldfile="$(dirname $1)/$oldfilename"
+ fi
+ fi
+ fi
+
+ # remove an existing entry if it exists, ignore failures
+ db_remove_entry "$pkgname"
+
+ # create package directory
+ mkdir "$pkgname-$pkgver"
+ pushd "$pkgname-$pkgver" >/dev/null
+
+ # restore an eventual deltas file
+ [[ -f ../$pkgname.deltas ]] && mv "../$pkgname.deltas" deltas
+
+ # create desc entry
+ msg2 "$(gettext "Creating '%s' db entry...")" 'desc'
+ echo -e "%FILENAME%\n$(basename "$1")\n" >>desc
+ echo -e "%NAME%\n$pkgname\n" >>desc
+ [[ -n $pkgbase ]] && echo -e "%BASE%\n$pkgbase\n" >>desc
+ echo -e "%VERSION%\n$pkgver\n" >>desc
+ [[ -n $pkgdesc ]] && echo -e "%DESC%\n$pkgdesc\n" >>desc
+ write_list_entry "GROUPS" "$_groups" "desc"
+ [[ -n $csize ]] && echo -e "%CSIZE%\n$csize\n" >>desc
+ [[ -n $size ]] && echo -e "%ISIZE%\n$size\n" >>desc
+
+ # compute checksums
+ msg2 "$(gettext "Computing md5 checksums...")"
+ echo -e "%MD5SUM%\n$md5sum\n" >>desc
+
+ [[ -n $url ]] && echo -e "%URL%\n$url\n" >>desc
+ write_list_entry "LICENSE" "$_licenses" "desc"
+ [[ -n $arch ]] && echo -e "%ARCH%\n$arch\n" >>desc
+ [[ -n $builddate ]] && echo -e "%BUILDDATE%\n$builddate\n" >>desc
+ [[ -n $packager ]] && echo -e "%PACKAGER%\n$packager\n" >>desc
+ write_list_entry "REPLACES" "$_replaces" "desc"
+
+ # create depends entry
+ msg2 "$(gettext "Creating '%s' db entry...")" 'depends'
+ # create the file even if it will remain empty
+ touch "depends"
+ write_list_entry "DEPENDS" "$_depends" "depends"
+ write_list_entry "CONFLICTS" "$_conflicts" "depends"
+ write_list_entry "PROVIDES" "$_provides" "depends"
+ write_list_entry "OPTDEPENDS" "$_optdepends" "depends"
+
+ popd >/dev/null
+ popd >/dev/null
+
+ # create files file if wanted
+ if (( WITHFILES )); then
+ msg2 "$(gettext "Creating '%s' db entry...")" 'files'
+ local files_path="$tmpdir/$pkgname-$pkgver/files"
+ echo "%FILES%" >$files_path
+ bsdtar --exclude='.*' -tf "$pkgfile" >>$files_path
+ fi
+
+ # create a delta file
+ if (( DELTA )); then
+ if [[ -n $oldfilename ]]; then
+ if [[ -f $oldfile ]]; then
+ delta=$(pkgdelta -q $oldfile $1)
+ if [[ -f $delta ]]; then
+ db_write_delta $delta
+ fi
+ else
+ warning "$(gettext "Old package file not found: %s")" "$oldfilename"
+ fi
+ fi
+ fi
+
+ # Extracts licenses to a common license dir
+ msg "Extracting license"
+ if bsdtar -xOf ${pkgfile} .PKGINFO | grep "license" | grep "custom" ; then
+ if [ -d ${LICENSESDIR}/${pkgname} ]; then
+ rm -r ${LICENSESDIR}/${pkgname}
+ fi
+
+ # Change dir to licenses, and extract them stripping the first part of the path
+ bsdtar -C ${LICENSESDIR}/ --include="usr/share/licenses/" \
+ --strip-components 3 -xf ${pkgfile} >/dev/null 2>&1
+
+ if [ $? -ne 0 ]; then
+ warning "This package doesn't contain a license dir"
+ fi
+ fi
+
+ return 0
+} # end db_write_entry
+
+# remove existing entries from the DB
+# arg1 - package name
+db_remove_entry() {
+ local pkgname=$1
+ local notfound=1
+ local pkgentry=$(find_pkgentry $pkgname)
+ while [[ -n $pkgentry ]]; do
+ notfound=0
+ if [[ -f $pkgentry/deltas ]]; then
+ mv "$pkgentry/deltas" "$tmpdir/$pkgname.deltas"
+ fi
+ msg2 "$(gettext "Removing existing entry '%s'...")" \
+ "$(basename $pkgentry)"
+ rm -rf $pkgentry
+ pkgentry=$(find_pkgentry $pkgname)
+ done
+
+ msg "Removing license"
+ if [ -d ${LICENSESDIR}/${pkgname} ]; then
+ rm -r ${LICENSESDIR}/${pkgname}
+ fi
+
+ return $notfound
+} # end db_remove_entry
+
+check_repo_db()
+{
+ # check lock file
+ if ( set -o noclobber; echo "$$" > "$LOCKFILE") 2> /dev/null; then
+ CLEAN_LOCK=1
+ else
+ error "$(gettext "Failed to acquire lockfile: %s.")" "$LOCKFILE"
+ [[ -f $LOCKFILE ]] && error "$(gettext "Held by process %s")" "$(cat $LOCKFILE)"
+ exit 1
+ fi
+
+ if [[ -f $REPO_DB_FILE ]]; then
+ # there are two situations we can have here- a DB with some entries,
+ # or a DB with no contents at all.
+ if ! bsdtar -tqf "$REPO_DB_FILE" '*/desc' >/dev/null 2>&1; then
+ # check empty case
+ if [[ -n $(bsdtar -tqf "$REPO_DB_FILE" '*' 2>/dev/null) ]]; then
+ error "$(gettext "Repository file '%s' is not a proper pacman database.")" "$REPO_DB_FILE"
+ exit 1
+ fi
+ fi
+ msg "$(gettext "Extracting database to a temporary location...")"
+ bsdtar -xf "$REPO_DB_FILE" -C "$tmpdir"
+ else
+ case "$cmd" in
+ repo-remove)
+ error "$(gettext "Repository file '%s' was not found.")" "$REPO_DB_FILE"
+ exit 1
+ ;;
+ repo-add)
+ # check if the file can be created (write permission, directory existence, etc)
+ if ! touch "$REPO_DB_FILE"; then
+ error "$(gettext "Repository file '%s' could not be created.")" "$REPO_DB_FILE"
+ exit 1
+ fi
+ rm -f "$REPO_DB_FILE"
+ ;;
+ esac
+ fi
+}
+
+add()
+{
+ if [[ ! -f $1 ]]; then
+ error "$(gettext "File '%s' not found.")" "$1"
+ return 1
+ fi
+
+ if [[ ${1##*.} == "delta" ]]; then
+ deltafile=$1
+ msg "$(gettext "Adding delta '%s'")" "$deltafile"
+ if ! type xdelta3 &>/dev/null; then
+ error "$(gettext "Cannot find the xdelta3 binary! Is xdelta3 installed?")"
+ exit 1
+ fi
+ if db_write_delta "$deltafile"; then
+ return 0
+ else
+ return 1
+ fi
+ fi
+
+ pkgfile=$1
+ if ! bsdtar -tqf "$pkgfile" .PKGINFO >/dev/null 2>&1; then
+ error "$(gettext "'%s' is not a package file, skipping")" "$pkgfile"
+ return 1
+ fi
+
+ msg "$(gettext "Adding package '%s'")" "$pkgfile"
+
+ db_write_entry "$pkgfile"
+}
+
+remove()
+{
+ if [[ ${1##*.} == "delta" ]]; then
+ deltafile=$1
+ msg "$(gettext "Searching for delta '%s'...")" "$deltafile"
+ if db_remove_delta "$deltafile"; then
+ return 0
+ else
+ error "$(gettext "Delta matching '%s' not found.")" "$deltafile"
+ return 1
+ fi
+ fi
+
+ pkgname=$1
+ msg "$(gettext "Searching for package '%s'...")" "$pkgname"
+
+ if db_remove_entry "$pkgname"; then
+ rm -f "$tmpdir/$pkgname.deltas"
+ return 0
+ else
+ error "$(gettext "Package matching '%s' not found.")" "$pkgname"
+ return 1
+ fi
+}
+
+trap_exit()
+{
+ echo
+ error "$@"
+ exit 1
+}
+
+clean_up() {
+ local exit_code=$?
+
+ [[ -d $tmpdir ]] && rm -rf "$tmpdir"
+ (( CLEAN_LOCK )) && [[ -f $LOCKFILE ]] && rm -f "$LOCKFILE"
+
+ exit $exit_code
+}
+
+# PROGRAM START
+
+# determine whether we have gettext; make it a no-op if we do not
+if ! type gettext &>/dev/null; then
+ gettext() {
+ echo "$@"
+ }
+fi
+
+case "$1" in
+ -h|--help) usage; exit 0;;
+ -V|--version) version; exit 0;;
+esac
+
+# figure out what program we are
+cmd="$(basename $0)"
+if [[ $cmd != "repo-add" && $cmd != "repo-remove" ]]; then
+ error "$(gettext "Invalid command name '%s' specified.")" "$cmd"
+ exit 1
+fi
+
+tmpdir=$(mktemp -d /tmp/repo-tools.XXXXXXXXXX) || (\
+ error "$(gettext "Cannot create temp directory for database building.")"; \
+ exit 1)
+
+trap 'clean_up' EXIT
+trap 'trap_exit "$(gettext "TERM signal caught. Exiting...")"' TERM HUP QUIT
+trap 'trap_exit "$(gettext "Aborted by user! Exiting...")"' INT
+trap 'trap_exit "$(gettext "An unknown error has occured. Exiting...")"' ERR
+
+success=0
+# parse arguments
+for arg in "$@"; do
+ case "$arg" in
+ -q|--quiet) QUIET=1;;
+ -d|--delta) DELTA=1;;
+ -f|--files) WITHFILES=1;;
+ *)
+ if [[ -z $REPO_DB_FILE ]]; then
+ REPO_DB_FILE="$arg"
+ LOCKFILE="$REPO_DB_FILE.lck"
+ check_repo_db
+ else
+ case "$cmd" in
+ repo-add) add $arg && success=1 ;;
+ repo-remove) remove $arg && success=1 ;;
+ esac
+ fi
+ ;;
+ esac
+done
+
+# if at least one operation was a success, re-zip database
+if (( success )); then
+ msg "$(gettext "Creating updated database file '%s'")" "$REPO_DB_FILE"
+
+ case "$REPO_DB_FILE" in
+ *tar.gz) TAR_OPT="z" ;;
+ *tar.bz2) TAR_OPT="j" ;;
+ *tar.xz) TAR_OPT="J" ;;
+ *) warning "$(gettext "'%s' does not have a valid archive extension.")" \
+ "$REPO_DB_FILE" ;;
+ esac
+
+ filename=$(basename "$REPO_DB_FILE")
+
+ pushd "$tmpdir" >/dev/null
+ if [[ -n $(ls) ]]; then
+ bsdtar -c${TAR_OPT}f "$filename" *
+ else
+ # we have no packages remaining? zip up some emptiness
+ warning "$(gettext "No packages remain, creating empty database.")"
+ bsdtar -c${TAR_OPT}f "$filename" -T /dev/null
+ fi
+ popd >/dev/null
+
+ [[ -f $REPO_DB_FILE ]] && mv -f "$REPO_DB_FILE" "${REPO_DB_FILE}.old"
+ [[ -f $tmpdir/$filename ]] && mv "$tmpdir/$filename" "$REPO_DB_FILE"
+ dblink="${REPO_DB_FILE%.tar.*}"
+ target=${REPO_DB_FILE##*/}
+ ln -sf "$target" "$dblink" 2>/dev/null || \
+ ln -f "$target" "$dblink" 2>/dev/null || \
+ cp "$REPO_DB_FILE" "$dblink"
+else
+ msg "$(gettext "No packages modified, nothing to do.")"
+ exit 1
+fi
+
+exit 0
+# vim: set ts=2 sw=2 noet:
diff --git a/repo-remove b/repo-remove
new file mode 100755
index 0000000..c4bf96f
--- /dev/null
+++ b/repo-remove
@@ -0,0 +1,561 @@
+#!/bin/bash
+#
+# repo-add - add a package to a given repo database file
+# repo-remove - remove a package entry from a given repo database file
+# Generated from repo-add.in; do not edit by hand.
+#
+# Copyright (c) 2006-2008 Aaron Griffin <aaron@archlinux.org>
+# Copyright (c) 2007-2008 Dan McGee <dan@archlinux.org>
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+LICENSESDIR=/home/parabolavnx/licenses
+
+# gettext initialization
+export TEXTDOMAIN='pacman'
+export TEXTDOMAINDIR='/usr/share/locale'
+
+myver='3.5.0'
+confdir='/home/parabolavnx/etc'
+
+QUIET=0
+DELTA=0
+WITHFILES=0
+REPO_DB_FILE=
+LOCKFILE=
+CLEAN_LOCK=0
+
+# ensure we have a sane umask set
+umask 0022
+
+msg() {
+ (( QUIET )) && return
+ local mesg=$1; shift
+ printf "==> ${mesg}\n" "$@" >&1
+}
+
+msg2() {
+ (( QUIET )) && return
+ local mesg=$1; shift
+ printf " -> ${mesg}\n" "$@" >&1
+}
+
+warning() {
+ local mesg=$1; shift
+ printf "==> $(gettext "WARNING:") ${mesg}\n" "$@" >&2
+}
+
+error() {
+ local mesg=$1; shift
+ printf "==> $(gettext "ERROR:") ${mesg}\n" "$@" >&2
+}
+
+# print usage instructions
+usage() {
+ printf "repo-add, repo-remove (pacman) %s\n\n" "$myver"
+ printf "$(gettext "Usage: repo-add [-d] [-f] [-q] <path-to-db> <package|delta> ...\n")"
+ printf "$(gettext "Usage: repo-remove [-q] <path-to-db> <packagename|delta> ...\n\n")"
+ printf "$(gettext "\
+repo-add will update a package database by reading a package file.\n\
+Multiple packages to add can be specified on the command line.\n\n")"
+ printf "$(gettext "\
+repo-remove will update a package database by removing the package name\n\
+specified on the command line from the given repo database. Multiple\n\
+packages to remove can be specified on the command line.\n\n")"
+ printf "$(gettext "\
+Use the -q/--quiet flag to minimize output to basic messages, warnings,\n\
+and errors.\n\n")"
+ printf "$(gettext "\
+Use the -d/--delta flag to automatically generate and add a delta file\n\
+between the old entry and the new one, if the old package file is found\n\
+next to the new one.\n\n")"
+ printf "$(gettext "\
+Use the -f/--files flag to update a database including file entries.\n\n")"
+ echo "$(gettext "Example: repo-add /path/to/repo.db.tar.gz pacman-3.0.0.pkg.tar.gz")"
+ echo "$(gettext "Example: repo-remove /path/to/repo.db.tar.gz kernel26")"
+}
+
+version() {
+ printf "repo-add, repo-remove (pacman) %s\n\n" "$myver"
+ printf "$(gettext "\
+Copyright (C) 2006-2008 Aaron Griffin <aaron@archlinux.org>.\n\
+Copyright (c) 2007-2008 Dan McGee <dan@archlinux.org>.\n\n\
+This is free software; see the source for copying conditions.\n\
+There is NO WARRANTY, to the extent permitted by law.\n")"
+}
+
+# write a list entry
+# arg1 - Entry name
+# arg2 - List
+# arg3 - File to write to
+write_list_entry() {
+ if [[ -n $2 ]]; then
+ echo "%$1%" >>$3
+ echo -e $2 >>$3
+ fi
+}
+
+find_pkgentry()
+{
+ local pkgname=$1
+ local pkgentry
+ for pkgentry in $tmpdir/$pkgname*; do
+ name=${pkgentry##*/}
+ if [[ ${name%-*-*} = $pkgname ]]; then
+ echo $pkgentry
+ return 0
+ fi
+ done
+ return 1
+}
+
+# Get the package name from the delta filename
+get_delta_pkgname() {
+ local tmp
+
+ tmp=${1##*/}
+ echo ${tmp%-*-*_to*}
+}
+
+# write a delta entry
+# arg1 - path to delta file
+db_write_delta()
+{
+ deltafile="$1"
+ pkgname="$(get_delta_pkgname $deltafile)"
+
+ pkgentry=$(find_pkgentry $pkgname)
+ if [[ -z $pkgentry ]]; then
+ error "$(gettext "No database entry for package '%s'.")" "$pkgname"
+ return 1
+ fi
+ deltas="$pkgentry/deltas"
+ if [[ ! -f $deltas ]]; then
+ echo -e "%DELTAS%" >$deltas
+ fi
+ # get md5sum and compressed size of package
+ md5sum="$(openssl dgst -md5 "$deltafile")"
+ md5sum="${md5sum##* }"
+ csize=$(stat -L -c %s "$deltafile")
+
+ oldfile=$(xdelta3 printhdr $deltafile | grep "XDELTA filename (source)" | sed 's/.*: *//')
+ newfile=$(xdelta3 printhdr $deltafile | grep "XDELTA filename (output)" | sed 's/.*: *//')
+
+ if grep -q "$oldfile.*$newfile" $deltas; then
+ sed -i.backup "/$oldfile.*$newfile/d" $deltas && rm -f $deltas.backup
+ fi
+ msg2 "$(gettext "Adding 'deltas' entry : %s -> %s")" "$oldfile" "$newfile"
+ echo ${deltafile##*/} $md5sum $csize $oldfile $newfile >> $deltas
+
+ return 0
+} # end db_write_delta
+
+# remove a delta entry
+# arg1 - path to delta file
+db_remove_delta()
+{
+ deltafile="$1"
+ filename=${deltafile##*/}
+ pkgname="$(get_delta_pkgname $deltafile)"
+
+ pkgentry=$(find_pkgentry $pkgname)
+ if [[ -z $pkgentry ]]; then
+ return 1
+ fi
+ deltas="$pkgentry/deltas"
+ if [[ ! -f $deltas ]]; then
+ return 1
+ fi
+ if grep -q "$filename" $deltas; then
+ sed -i.backup "/$filename/d" $deltas && rm -f $deltas.backup
+ msg2 "$(gettext "Removing existing entry '%s'...")" "$filename"
+ return 0
+ fi
+
+ return 1
+} # end db_remove_delta
+
+# write an entry to the pacman database
+# arg1 - path to package
+db_write_entry()
+{
+ # blank out all variables
+ local pkgfile="$1"
+ local pkgname pkgver pkgdesc csize size md5sum url arch builddate packager \
+ _groups _licenses _replaces _depends _conflicts _provides _optdepends
+
+ local OLDIFS="$IFS"
+ # IFS (field separator) is only the newline character
+ IFS="
+"
+
+ # read info from the zipped package
+ local line var val
+ for line in $(bsdtar -xOqf "$pkgfile" .PKGINFO |
+ grep -v '^#' | sed 's|\(\w*\)\s*=\s*\(.*\)|\1 \2|'); do
+ # bash awesomeness here- var is always one word, val is everything else
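+ # e.g. the .PKGINFO line "depend = glibc>=2.13" arrives here as "depend glibc>=2.13",
+ # so var becomes "depend" and val becomes "glibc>=2.13"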
+ var=${line%% *}
+ val=${line#* }
+ declare $var="$val"
+ case "$var" in
+ group) _groups="$_groups$group\n" ;;
+ license) _licenses="$_licenses$license\n" ;;
+ replaces) _replaces="$_replaces$replaces\n" ;;
+ depend) _depends="$_depends$depend\n" ;;
+ conflict) _conflicts="$_conflicts$conflict\n" ;;
+ provides) _provides="$_provides$provides\n" ;;
+ optdepend) _optdepends="$_optdepends$optdepend\n" ;;
+ esac
+ done
+
+ IFS=$OLDIFS
+
+ # get md5sum and compressed size of package
+ md5sum="$(openssl dgst -md5 "$pkgfile")"
+ md5sum="${md5sum##* }"
+ csize=$(stat -L -c %s "$pkgfile")
+
+ # ensure $pkgname and $pkgver variables were found
+ if [[ -z $pkgname || -z $pkgver ]]; then
+ error "$(gettext "Invalid package file '%s'.")" "$pkgfile"
+ return 1
+ fi
+
+ pushd "$tmpdir" >/dev/null
+ if [[ -d $pkgname-$pkgver ]]; then
+ warning "$(gettext "An entry for '%s' already existed")" "$pkgname-$pkgver"
+ else
+ if (( DELTA )); then
+ pkgentry=$(find_pkgentry $pkgname)
+ if [[ -n $pkgentry ]]; then
+ local oldfilename=$(grep -A1 FILENAME $pkgentry/desc | tail -n1)
+ local oldfile="$(dirname $1)/$oldfilename"
+ fi
+ fi
+ fi
+
+ # remove an existing entry if it exists, ignore failures
+ db_remove_entry "$pkgname"
+
+ # create package directory
+ mkdir "$pkgname-$pkgver"
+ pushd "$pkgname-$pkgver" >/dev/null
+
+ # restore any existing deltas file
+ [[ -f ../$pkgname.deltas ]] && mv "../$pkgname.deltas" deltas
+
+ # create desc entry
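+ # each desc field is stored as a %FIELD% header, its value, and a blank line;
+ # test/desc in this repository shows a complete example entry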
+ msg2 "$(gettext "Creating '%s' db entry...")" 'desc'
+ echo -e "%FILENAME%\n$(basename "$1")\n" >>desc
+ echo -e "%NAME%\n$pkgname\n" >>desc
+ [[ -n $pkgbase ]] && echo -e "%BASE%\n$pkgbase\n" >>desc
+ echo -e "%VERSION%\n$pkgver\n" >>desc
+ [[ -n $pkgdesc ]] && echo -e "%DESC%\n$pkgdesc\n" >>desc
+ write_list_entry "GROUPS" "$_groups" "desc"
+ [[ -n $csize ]] && echo -e "%CSIZE%\n$csize\n" >>desc
+ [[ -n $size ]] && echo -e "%ISIZE%\n$size\n" >>desc
+
+ # compute checksums
+ msg2 "$(gettext "Computing md5 checksums...")"
+ echo -e "%MD5SUM%\n$md5sum\n" >>desc
+
+ [[ -n $url ]] && echo -e "%URL%\n$url\n" >>desc
+ write_list_entry "LICENSE" "$_licenses" "desc"
+ [[ -n $arch ]] && echo -e "%ARCH%\n$arch\n" >>desc
+ [[ -n $builddate ]] && echo -e "%BUILDDATE%\n$builddate\n" >>desc
+ [[ -n $packager ]] && echo -e "%PACKAGER%\n$packager\n" >>desc
+ write_list_entry "REPLACES" "$_replaces" "desc"
+
+ # create depends entry
+ msg2 "$(gettext "Creating '%s' db entry...")" 'depends'
+ # create the file even if it will remain empty
+ touch "depends"
+ write_list_entry "DEPENDS" "$_depends" "depends"
+ write_list_entry "CONFLICTS" "$_conflicts" "depends"
+ write_list_entry "PROVIDES" "$_provides" "depends"
+ write_list_entry "OPTDEPENDS" "$_optdepends" "depends"
+
+ popd >/dev/null
+ popd >/dev/null
+
+ # create files file if wanted
+ if (( WITHFILES )); then
+ msg2 "$(gettext "Creating '%s' db entry...")" 'files'
+ local files_path="$tmpdir/$pkgname-$pkgver/files"
+ echo "%FILES%" >$files_path
+ bsdtar --exclude='.*' -tf "$pkgfile" >>$files_path
+ fi
+
+ # create a delta file
+ if (( DELTA )); then
+ if [[ -n $oldfilename ]]; then
+ if [[ -f $oldfile ]]; then
+ delta=$(pkgdelta -q $oldfile $1)
+ if [[ -f $delta ]]; then
+ db_write_delta $delta
+ fi
+ else
+ warning "$(gettext "Old package file not found: %s")" "$oldfilename"
+ fi
+ fi
+ fi
+
+ # Extracts licenses to a common license dir
+ msg "Extracting license"
+ if bsdtar -xOf ${pkgfile} .PKGINFO | grep "license" | grep "custom" ; then
+ if [ -d ${LICENSESDIR}/${pkgname} ]; then
+ rm -r ${LICENSESDIR}/${pkgname}
+ fi
+
+ # Change dir to licenses, and extract them stripping the first part of the path
+ bsdtar -C ${LICENSESDIR}/ --include="usr/share/licenses/" \
+ --strip-components 3 -xf ${pkgfile} >/dev/null 2>&1
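+ # with --strip-components 3 an archived path such as usr/share/licenses/${pkgname}/COPYING
+ # lands in ${LICENSESDIR}/${pkgname}/COPYING (COPYING is just an illustrative file name)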
+
+ if [ $? -ne 0 ]; then
+ warning "This package doesn't contain a license dir"
+ fi
+ fi
+
+ return 0
+} # end db_write_entry
+
+# remove existing entries from the DB
+# arg1 - package name
+db_remove_entry() {
+ local pkgname=$1
+ local notfound=1
+ local pkgentry=$(find_pkgentry $pkgname)
+ while [[ -n $pkgentry ]]; do
+ notfound=0
+ if [[ -f $pkgentry/deltas ]]; then
+ mv "$pkgentry/deltas" "$tmpdir/$pkgname.deltas"
+ fi
+ msg2 "$(gettext "Removing existing entry '%s'...")" \
+ "$(basename $pkgentry)"
+ rm -rf $pkgentry
+ pkgentry=$(find_pkgentry $pkgname)
+ done
+
+ msg "Removing license"
+ if [ -d ${LICENSESDIR}/${pkgname} ]; then
+ rm -r ${LICENSESDIR}/${pkgname}
+ fi
+
+ return $notfound
+} # end db_remove_entry
+
+check_repo_db()
+{
+ # check lock file
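+ # with noclobber set, the redirection fails if the lockfile already exists,
+ # so creating the lock is an atomic test-and-set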
+ if ( set -o noclobber; echo "$$" > "$LOCKFILE") 2> /dev/null; then
+ CLEAN_LOCK=1
+ else
+ error "$(gettext "Failed to acquire lockfile: %s.")" "$LOCKFILE"
+ [[ -f $LOCKFILE ]] && error "$(gettext "Held by process %s")" "$(cat $LOCKFILE)"
+ exit 1
+ fi
+
+ if [[ -f $REPO_DB_FILE ]]; then
+ # there are two situations we can have here- a DB with some entries,
+ # or a DB with no contents at all.
+ if ! bsdtar -tqf "$REPO_DB_FILE" '*/desc' >/dev/null 2>&1; then
+ # check empty case
+ if [[ -n $(bsdtar -tqf "$REPO_DB_FILE" '*' 2>/dev/null) ]]; then
+ error "$(gettext "Repository file '%s' is not a proper pacman database.")" "$REPO_DB_FILE"
+ exit 1
+ fi
+ fi
+ msg "$(gettext "Extracting database to a temporary location...")"
+ bsdtar -xf "$REPO_DB_FILE" -C "$tmpdir"
+ else
+ case "$cmd" in
+ repo-remove)
+ error "$(gettext "Repository file '%s' was not found.")" "$REPO_DB_FILE"
+ exit 1
+ ;;
+ repo-add)
+ # check if the file can be created (write permission, directory existence, etc)
+ if ! touch "$REPO_DB_FILE"; then
+ error "$(gettext "Repository file '%s' could not be created.")" "$REPO_DB_FILE"
+ exit 1
+ fi
+ rm -f "$REPO_DB_FILE"
+ ;;
+ esac
+ fi
+}
+
+add()
+{
+ if [[ ! -f $1 ]]; then
+ error "$(gettext "File '%s' not found.")" "$1"
+ return 1
+ fi
+
+ if [[ ${1##*.} == "delta" ]]; then
+ deltafile=$1
+ msg "$(gettext "Adding delta '%s'")" "$deltafile"
+ if ! type xdelta3 &>/dev/null; then
+ error "$(gettext "Cannot find the xdelta3 binary! Is xdelta3 installed?")"
+ exit 1
+ fi
+ if db_write_delta "$deltafile"; then
+ return 0
+ else
+ return 1
+ fi
+ fi
+
+ pkgfile=$1
+ if ! bsdtar -tqf "$pkgfile" .PKGINFO >/dev/null 2>&1; then
+ error "$(gettext "'%s' is not a package file, skipping")" "$pkgfile"
+ return 1
+ fi
+
+ msg "$(gettext "Adding package '%s'")" "$pkgfile"
+
+ db_write_entry "$pkgfile"
+}
+
+remove()
+{
+ if [[ ${1##*.} == "delta" ]]; then
+ deltafile=$1
+ msg "$(gettext "Searching for delta '%s'...")" "$deltafile"
+ if db_remove_delta "$deltafile"; then
+ return 0
+ else
+ error "$(gettext "Delta matching '%s' not found.")" "$deltafile"
+ return 1
+ fi
+ fi
+
+ pkgname=$1
+ msg "$(gettext "Searching for package '%s'...")" "$pkgname"
+
+ if db_remove_entry "$pkgname"; then
+ rm -f "$tmpdir/$pkgname.deltas"
+ return 0
+ else
+ error "$(gettext "Package matching '%s' not found.")" "$pkgname"
+ return 1
+ fi
+}
+
+trap_exit()
+{
+ echo
+ error "$@"
+ exit 1
+}
+
+clean_up() {
+ local exit_code=$?
+
+ [[ -d $tmpdir ]] && rm -rf "$tmpdir"
+ (( CLEAN_LOCK )) && [[ -f $LOCKFILE ]] && rm -f "$LOCKFILE"
+
+ exit $exit_code
+}
+
+# PROGRAM START
+
+# determine whether we have gettext; make it a no-op if we do not
+if ! type gettext &>/dev/null; then
+ gettext() {
+ echo "$@"
+ }
+fi
+
+case "$1" in
+ -h|--help) usage; exit 0;;
+ -V|--version) version; exit 0;;
+esac
+
+# figure out what program we are
+cmd="$(basename $0)"
+if [[ $cmd != "repo-add" && $cmd != "repo-remove" ]]; then
+ error "$(gettext "Invalid command name '%s' specified.")" "$cmd"
+ exit 1
+fi
+
+tmpdir=$(mktemp -d /tmp/repo-tools.XXXXXXXXXX) || (\
+ error "$(gettext "Cannot create temp directory for database building.")"; \
+ exit 1)
+
+trap 'clean_up' EXIT
+trap 'trap_exit "$(gettext "TERM signal caught. Exiting...")"' TERM HUP QUIT
+trap 'trap_exit "$(gettext "Aborted by user! Exiting...")"' INT
+trap 'trap_exit "$(gettext "An unknown error has occurred. Exiting...")"' ERR
+
+success=0
+# parse arguments
+for arg in "$@"; do
+ case "$arg" in
+ -q|--quiet) QUIET=1;;
+ -d|--delta) DELTA=1;;
+ -f|--files) WITHFILES=1;;
+ *)
+ if [[ -z $REPO_DB_FILE ]]; then
+ REPO_DB_FILE="$arg"
+ LOCKFILE="$REPO_DB_FILE.lck"
+ check_repo_db
+ else
+ case "$cmd" in
+ repo-add) add $arg && success=1 ;;
+ repo-remove) remove $arg && success=1 ;;
+ esac
+ fi
+ ;;
+ esac
+done
+
+# if at least one operation was a success, re-zip database
+if (( success )); then
+ msg "$(gettext "Creating updated database file '%s'")" "$REPO_DB_FILE"
+
+ case "$REPO_DB_FILE" in
+ *tar.gz) TAR_OPT="z" ;;
+ *tar.bz2) TAR_OPT="j" ;;
+ *tar.xz) TAR_OPT="J" ;;
+ *) warning "$(gettext "'%s' does not have a valid archive extension.")" \
+ "$REPO_DB_FILE" ;;
+ esac
+
+ filename=$(basename "$REPO_DB_FILE")
+
+ pushd "$tmpdir" >/dev/null
+ if [[ -n $(ls) ]]; then
+ bsdtar -c${TAR_OPT}f "$filename" *
+ else
+ # we have no packages remaining? zip up some emptiness
+ warning "$(gettext "No packages remain, creating empty database.")"
+ bsdtar -c${TAR_OPT}f "$filename" -T /dev/null
+ fi
+ popd >/dev/null
+
+ [[ -f $REPO_DB_FILE ]] && mv -f "$REPO_DB_FILE" "${REPO_DB_FILE}.old"
+ [[ -f $tmpdir/$filename ]] && mv "$tmpdir/$filename" "$REPO_DB_FILE"
+ dblink="${REPO_DB_FILE%.tar.*}"
+ target=${REPO_DB_FILE##*/}
+ ln -sf "$target" "$dblink" 2>/dev/null || \
+ ln -f "$target" "$dblink" 2>/dev/null || \
+ cp "$REPO_DB_FILE" "$dblink"
+else
+ msg "$(gettext "No packages modified, nothing to do.")"
+ exit 1
+fi
+
+exit 0
+# vim: set ts=2 sw=2 noet:
diff --git a/repo-restore-to-normal b/repo-restore-to-normal
new file mode 100755
index 0000000..9463731
--- /dev/null
+++ b/repo-restore-to-normal
@@ -0,0 +1,58 @@
+#!/bin/bash
+# Solves issue165
+
+. "$(dirname $0)/db-functions"
+. "$(dirname $0)/config"
+
+CLEANUP_DESTDIR=/home/parabolavnx/repo/pool/restore
+PKGREPOS=(community)
+
+# Find all pkgnames on old with pkgver-pkgrels
+#on_repo=($(find ${CLEANUP_DESTDIR} -name "*.pkg.tar.?z" -printf "%f\n" | \
+# sed "s/^\(.\+-[^-]\+-[^-]\+\)-[^-]\+$/\1/"))
+
+# Traverse all repos
+for _repo in ${PKGREPOS[@]}; do
+ msg "Restoring [${_repo}]"
+
+# Find all pkgnames on this repo's abs
+ on_abs=($(
+ find ${SVNREPO}/${_repo} -name PKGBUILD | \
+ while read pkgbuild; do
+ unset pkgname pkgver pkgrel
+ source ${pkgbuild} >/dev/null 2>&1
+# cleanup to save memory
+ unset build package source md5sums pkgdesc epoch \
+ url license arch depends makedepends optdepends options \
+ >/dev/null 2>&1
+
+# also cleanup package functions
+ for _pkg in ${pkgname[@]}; do
+ unset package_${_pkg} >/dev/null 2>&1
+# this fills the on_abs array
+ echo ${_pkg}-${pkgver}-${pkgrel}
+ done
+
+ done
+ ))
+
+# quit if abs is empty
+ if [ ${#on_abs[*]} -eq 0 ]; then
+ warning "[${_repo}]'s ABS tree is empty, skipping"
+ continue
+ fi
+
+# Compares them, whatever is on abs should be restored
+# restore=($(comm -12 <(echo ${on_abs[@]} | tr ' ' "\n" | sort -u) \
+# <(echo ${on_repo[@]} | tr ' ' "\n" | sort -u)))
+
+ msg2 "Restoring the following packages:"
+# plain "$(echo ${restore[@]} | tr ' ' "\n")"
+
+ for _pkg in ${on_abs[@]}; do
+ find ${CLEANUP_DESTDIR} -name "${_pkg}*" -exec cp -v '{}' ${STAGING}/${_repo} \;
+ done
+
+done
+
+exit $?
diff --git a/repo-update b/repo-update
new file mode 100755
index 0000000..a44ae87
--- /dev/null
+++ b/repo-update
@@ -0,0 +1,55 @@
+#!/bin/bash
+# -*- coding: utf-8 -*-
+source ~/.bashrc
+source $(dirname $0)/config
+source $(dirname $0)/local_config
+source $(dirname $0)/libremessages
+
+for repo in ${ARCHREPOS[@]}; do
+ msg "Syncing ${repo}"
+ for arch in ${ARCHARCHES[@]} 'any'; do
+ msg2 "${repo} ${arch}"
+ # makes a file containing rsync output for filter.py
+ ${rsync_list_command} \
+ rsync://${mirror}/${mirrorpath}/${repo}/os/${arch}/ \
+ ${repodir}/staging/${repo}/ > ${rsout_file}
+ # reads blacklist and rsout_file and makes an rsync exclude-from
+ # list
+ filter.py -r ${rsync_blacklist} -k ${blacklist} \
+ -f ${rsout_file}
+ # list files in ${repodir}/${repo} and write their names to
+ # rsync_not_needed for use as an rsync exclude-from list
+ find ${repodir}/${repo} -name "*${PKGEXT}" \
+ -fprintf ${rsync_not_needed} '%f\n'
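+ # ${rsync_blacklist} and ${rsync_not_needed} are plain lists, one file name
+ # per line, which the rsync call below skips via --exclude-from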
+ # Actual rsync command
+ ${rsync_update_command} \
+ --exclude-from=${rsync_blacklist} \
+ --exclude-from=${rsync_not_needed} \
+ rsync://${mirror}/${mirrorpath}/${repo}/os/${arch}/ \
+ ${repodir}/staging/${repo}/
+ done
+ for arch in ${ARCHARCHES[@]}; do
+ msg2 "Making pending list for $repo $arch"
+ # if there is a db in repo (db is created on rsync)
+ if [ -r ${repodir}/staging/${repo}/os/${arch}/${repo}${DBEXT} ]; then
+ # clean_repo builds the pending list from the packages in the db and
+ # removes blacklisted packages from the db
+ $(dirname $0)/clean_repo.py -k ${blacklist} -w ${whitelist} \
+ -p ${docs_dir}/pending-${repo}.txt \
+ -b ${repodir}/staging/${repo}/${repo}${DBEXT} \
+ -d ${repodir}/staging/${repo}
+ fi
+ done
+ # if any nonfree files got past the filter, this command deletes them
+ msg2 "Fallback cleaning repo"
+ $(dirname $0)/clean_repo.py -k ${blacklist} -d ${repodir}/staging/${repo}
+done
+
+msg "Removing leftover files..."
+find ${repodir}/staging/ -type f \! -name "*${PKGEXT}" -delete
+# Staging should not have symbolic links
+find ${repodir}/staging/ -type l -delete
+
+$(dirname $0)/db-update
+$(dirname $0)/db-check-nonfree
+
diff --git a/test/__init__.py b/test/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/__init__.py
diff --git a/test/blacklist_sample b/test/blacklist_sample
new file mode 100644
index 0000000..2a02af6
--- /dev/null
+++ b/test/blacklist_sample
@@ -0,0 +1,2 @@
+alex:alex-libre: A comment goes here
+gmime22 ::Non free dependencies \ No newline at end of file
diff --git a/test/core.db.tar.gz b/test/core.db.tar.gz
new file mode 100644
index 0000000..5eb2081
--- /dev/null
+++ b/test/core.db.tar.gz
Binary files differ
diff --git a/test/depends b/test/depends
new file mode 100644
index 0000000..7ff3ad4
--- /dev/null
+++ b/test/depends
@@ -0,0 +1,4 @@
+%DEPENDS%
+glibc>=2.13
+zlib
+
diff --git a/test/desc b/test/desc
new file mode 100644
index 0000000..abba644
--- /dev/null
+++ b/test/desc
@@ -0,0 +1,39 @@
+%FILENAME%
+binutils-2.21-4-x86_64.pkg.tar.xz
+
+%NAME%
+binutils
+
+%VERSION%
+2.21-4
+
+%DESC%
+A set of programs to assemble and manipulate binary and object files
+
+%GROUPS%
+base
+
+%CSIZE%
+3412892
+
+%ISIZE%
+17571840
+
+%MD5SUM%
+4e666f87c78998f4839f33dc06d2043a
+
+%URL%
+http://www.gnu.org/software/binutils/
+
+%LICENSE%
+GPL
+
+%ARCH%
+x86_64
+
+%BUILDDATE%
+1297240369
+
+%PACKAGER%
+Allan McRae <allan@archlinux.org>
+
diff --git a/test/rsync_output_sample b/test/rsync_output_sample
new file mode 100644
index 0000000..72d9cd0
--- /dev/null
+++ b/test/rsync_output_sample
@@ -0,0 +1,14 @@
+dr-xr-sr-x 4096 2010/09/11 11:37:10 .
+-rw-r--r-- 11 2011/02/08 00:00:01 lastsync
+drwxrwxr-x 15 2010/09/11 11:28:50 community-staging
+drwxrwxr-x 30 2010/09/11 11:28:50 community-staging/os
+drwxrwxr-x 8192 2011/02/07 17:00:01 community-staging/os/i686
+lrwxrwxrwx 52 2010/12/23 16:51:01 community-staging/os/i686/alex-2.3.4-1-i686.pkg.tar.xz -> ../../../pool/community/alex-2.3.4-1-i686.pkg.tar.xz
+lrwxrwxrwx 27 2011/02/07 14:02:54 community-staging/os/i686/community-staging.db -> community-staging.db.tar.gz
+-rw-rw-r-- 2237 2011/02/07 14:02:54 community-staging/os/i686/community-staging.db.tar.gz
+-rw-rw-r-- 3209 2011/02/07 14:00:13 community-staging/os/i686/community-staging.db.tar.gz.old
+drwxrwxr-x 15 2009/07/22 15:07:56 community
+drwxrwxr-x 40 2009/08/04 15:57:42 community/os
+drwxrwsr-x 36864 2011/02/03 05:00:01 community/os/any
+-rw-rw-r-- 303336 2010/07/16 10:06:28 community/os/any/any2dvd-0.34-4-any.pkg.tar.xz
+-rw-rw-r-- 221664 2010/03/28 15:55:48 community/os/x86_64/gmime22-2.2.26-1-x86_64.pkg.tar.xz
diff --git a/test/test_filter.py b/test/test_filter.py
new file mode 100644
index 0000000..d8006f9
--- /dev/null
+++ b/test/test_filter.py
@@ -0,0 +1,196 @@
+# -*- encoding: utf-8 -*-
+""" """
+
+__author__ = "Joshua Ismael Haase Hernández <hahj87@gmail.com>"
+__version__ = "$Revision: 1.1 $"
+__date__ = "$Date: 2011/02/08 $"
+__copyright__ = "Copyright (c) 2011 Joshua Ismael Haase Hernández"
+__license__ = "GPL3+"
+
+from repm.config import *
+from repm.filter import *
+import unittest
+
+class pkginfo_from_file_KnownValues(unittest.TestCase):
+ # (filename, name, version, release, arch)
+ # filename is location
+ known=(
+ ("community-testing/os/i686/inputattach-1.24-3-i686.pkg.tar.xz","inputattach","1.24","3","i686"),
+ ("community-testing/os/i686/ngspice-22-1-i686.pkg.tar.xz","ngspice","22","1","i686"),
+ ("community-testing/os/i686/tmux-1.4-2-i686.pkg.tar.xz","tmux","1.4","2","i686"),
+ ("community-testing/os/i686/tor-0.2.1.29-2-i686.pkg.tar.xz","tor","0.2.1.29","2","i686"),
+ ("../../../pool/community/tor-0.2.1.29-2-i686.pkg.tar.xz","tor","0.2.1.29","2","i686"),
+ ("community-testing/os/x86_64/inputattach-1.24-3-x86_64.pkg.tar.xz","inputattach","1.24","3","x86_64"),
+ ("../../../pool/community/inputattach-1.24-3-x86_64.pkg.tar.xz","inputattach","1.24","3","x86_64"),
+ ("tor-0.2.1.29-2-x86_64.pkg.tar.xz","tor","0.2.1.29","2","x86_64"),
+ )
+
+ def generate_results(self, example_tuple, attr):
+ location, name, version, release, arch = example_tuple
+ return pkginfo_from_filename(location)[attr], locals()[attr]
+
+ def testReturnPackageObject(self):
+ for i in self.known:
+ location, name, version, release, arch = i
+ self.assertIsInstance(pkginfo_from_filename(location),Package)
+
+ def testNames(self):
+ for i in self.known:
+ k,v = self.generate_results(example_tuple=i,attr="name")
+ self.assertEqual(k, v)
+
+ def testVersions(self):
+ for i in self.known:
+ k,v = self.generate_results(example_tuple=i,attr="version")
+ self.assertEqual(k, v)
+
+ def testArchs(self):
+ for i in self.known:
+ k,v = self.generate_results(example_tuple=i,attr="arch")
+ self.assertEqual(k, v)
+
+ def testReleases(self):
+ for i in self.known:
+ k,v = self.generate_results(example_tuple=i,attr="release")
+ self.assertEqual(k, v)
+
+ def testLocations(self):
+ for i in self.known:
+ k,v = self.generate_results(example_tuple=i,attr="location")
+ self.assertEqual(k, v)
+
+class pkginfo_from_file_BadInput(unittest.TestCase):
+ bad=("community-testing/os/i686/community-testing.db",
+ "community-testing/os/i686/community-testing.db.tar.gz",
+ "community-testing/os/i686/community-testing.db.tar.gz.old",
+ "community-testing/os/i686/community-testing.files",
+ "community-testing/os/i686/community-testing.files.tar.gz",
+ "community-testing/os/x86_64")
+
+ def testBadInput(self):
+ for i in self.bad:
+ self.assertRaises(NonValidFile,pkginfo_from_filename,i)
+
+class pkginfoFromRsyncOutput(unittest.TestCase):
+ example_package_list=(Package(),Package(),Package())
+ example_package_list[0].package_info={ "name" : "alex",
+ "version" : "2.3.4",
+ "release" : "1",
+ "arch" : "i686",
+ "license" : False,
+ "location": "community-staging/os/i686/alex-2.3.4-1-i686.pkg.tar.xz",
+ "depends" : False,}
+ example_package_list[1].package_info={ "name" : "any2dvd",
+ "version" : "0.34",
+ "release" : "4",
+ "arch" : "any",
+ "license" : False,
+ "location": "community/os/any/any2dvd-0.34-4-any.pkg.tar.xz",
+ "depends" : False,}
+ example_package_list[2].package_info={ "name" : "gmime22",
+ "version" : "2.2.26",
+ "release" : "1",
+ "arch" : "x86_64",
+ "license" : False,
+ "location": "community/os/x86_64/gmime22-2.2.26-1-x86_64.pkg.tar.xz",
+ "depends" : False,}
+
+ try:
+ output_file = open("rsync_output_sample")
+ rsync_out= output_file.read()
+ output_file.close()
+ except IOError: print("There is no rsync_output_sample file")
+
+ pkglist = pkginfo_from_rsync_output(rsync_out)
+
+ def testOutputArePackages(self):
+ if not self.pkglist:
+ self.fail("not pkglist:" + str(self.pkglist))
+ for pkg in self.pkglist:
+ self.assertIsInstance(pkg,Package)
+
+ def testPackageInfo(self):
+ if not self.pkglist:
+ self.fail("Pkglist doesn't exist: " + str(self.pkglist))
+ self.assertEqual(self.pkglist,self.example_package_list)
+
+class generateRsyncBlacklist(unittest.TestCase):
+ example_package_list=(Package(),Package(),Package())
+ example_package_list[0].package_info={ "name" : "alex",
+ "version" : "2.3.4",
+ "release" : "1",
+ "arch" : "i686",
+ "license" : False,
+ "location": "community-staging/os/i686/alex-2.3.4-1-i686.pkg.tar.xz",
+ "depends" : False,}
+ example_package_list[1].package_info={ "name" : "any2dvd",
+ "version" : "0.34",
+ "release" : "4",
+ "arch" : "any",
+ "license" : False,
+ "location": "community/os/any/any2dvd-0.34-4-any.pkg.tar.xz",
+ "depends" : False,}
+ example_package_list[2].package_info={ "name" : "gmime22",
+ "version" : "2.2.26",
+ "release" : "1",
+ "arch" : "x86_64",
+ "license" : False,
+ "location": "community/os/x86_64/gmime22-2.2.26-1-x86_64.pkg.tar.xz",
+ "depends" : False,}
+
+ def testListado(self):
+ self.assertEqual(listado("blacklist_sample"),["alex","gmime22"])
+
+ def testExcludeFiles(self):
+ a=rsyncBlacklist_from_blacklist(self.example_package_list,
+ listado("blacklist_sample"),
+ False)
+ b=[self.example_package_list[0]["location"],self.example_package_list[2]["location"]]
+ self.assertEqual(a,b)
+
+class pkginfo_from_descKnownValues(unittest.TestCase):
+ pkgsample=Package()
+ pkgsample.package_info={"name" : "binutils",
+ "version" : "2.21",
+ "release" : "4",
+ "arch" : "x86_64",
+ "license" : "GPL",
+ "location": "binutils-2.21-4-x86_64.pkg.tar.xz",
+ "depends" : False,}
+ fsock=open("desc")
+ pkggen=pkginfo_from_desc(fsock.read())
+ fsock.close()
+ def testPkginfoFromDesc(self):
+ if self.pkggen is None:
+ self.fail("return value is None")
+ self.assertEqual(self.pkgsample,self.pkggen)
+
+class pkginfo_from_db(unittest.TestCase):
+ archdb = os.path.join("./workdir")
+ example_package_list=(Package(),Package(),Package())
+ example_package_list[0].package_info={ "name" : "acl",
+ "version" : "2.2.49",
+ "release" : "2",
+ "arch" : "x86_64",
+ "license" : ("LGPL",),
+ "location": "acl-2.2.49-2-x86_64.pkg.tar.xz",
+ "depends" : ("attr>=2.4.41"),}
+ example_package_list[1].package_info={ "name" : "glibc",
+ "version" : "2.13",
+ "release" : "4",
+ "arch" : "x86_64",
+ "license" : ("GPL","LGPL"),
+ "location": "glibc-2.13-4-x86_64.pkg.tar.xz",
+ "depends" : ("linux-api-headers>=2.6.37","tzdata",),}
+ example_package_list[2].package_info={ "name" : "",
+ "version" : "2.2.26",
+ "release" : "1",
+ "arch" : "x86_64",
+ "license" : False,
+ "location": "",
+ "depends" : False,}
+
+
+if __name__ == "__main__":
+ unittest.main()
+
diff --git a/yf-update b/yf-update
new file mode 100755
index 0000000..9c2131e
--- /dev/null
+++ b/yf-update
@@ -0,0 +1,18 @@
+#!/bin/bash
+source $(dirname $0)/local_config
+source $(dirname $0)/config
+source $(dirname $0)/libremessages
+
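+# rebuild the your-freedom package only when the blacklist is newer than the
+# timestamp recorded in the yftime file during the previous run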
+blacklist_mtime=$(printf "%.0f" $(find ${blacklist} -printf "%T@"))
+last_bl_mtime=$(cat $(dirname $0)/yftime)
+
+if [ $blacklist_mtime -gt $last_bl_mtime ]; then
+ pushd $(dirname $0)/yf
+ makepkg -f
+ find . -name "*${PKGEXT}" -exec mv {} ${STAGING}/libre \;
+ popd
+ echo ${blacklist_mtime} > $(dirname $0)/yftime
+ msg2 "built and staged"
+else
+ msg2 "nothing to do"
+fi
diff --git a/yf/PKGBUILD b/yf/PKGBUILD
new file mode 100644
index 0000000..cc6f07b
--- /dev/null
+++ b/yf/PKGBUILD
@@ -0,0 +1,28 @@
+# Maintainer: Parabola Project <dev@list.parabolagnulinux.org>
+pkgname=your-freedom
+pkgver=$(LC_ALL=C date -u +%Y%m%d)
+pkgrel=1
+pkgdesc="This package conflicts with every unfree package known to date."
+arch=('any')
+url="https://parabolagnulinux.org"
+license=('GPL')
+groups=('base')
+install=${pkgname}.install
+source=()
+md5sums=()
+noextract=()
+
+build() {
+ cd ${srcdir}
+ source ~/repm/local_config
+ install -d ${pkgdir}/usr/share/doc/${pkgname}
+ install -m644 $blacklist $whitelist ${pkgdir}/usr/share/doc/${pkgname}/
+}
+
+package() {
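+ # blacklist.txt lines are expected to look like "name:libre-replacement:comment"
+ # (see test/blacklist_sample); conflict only with names whose second field is
+ # empty, i.e. blacklisted packages that have no libre replacement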
+ conflicts=($(cut -d: -f1,2 ${pkgdir}/usr/share/doc/${pkgname}/blacklist.txt | \
+ sed "s/:$//" | \
+ grep -v ":" | \
+ sort -u
+ ))
+}
diff --git a/yf/your-freedom.install b/yf/your-freedom.install
new file mode 100644
index 0000000..49ae045
--- /dev/null
+++ b/yf/your-freedom.install
@@ -0,0 +1,32 @@
+
+pre_install() {
+ cat <<EOM
+ == IMPORTANT NOTICE ==
+
+ This package will help you identify unfree packages that are installed
+ on your system at the time of its installation, as well as protect
+ you from (accidentally) installing them.
+
+ Also, if any other unfree package is identified, later updates will ask
+ you for its removal.
+
+ Keep in mind that, if you want to retain certain unfree packages
+ installed on your system, you'll have to remove your-freedom :)
+
+ If you find out any of the following:
+ * Cascade package removal due to unfree dependencies,
+ * Your-freedom conflicting with a *-libre package,
+ * Etc.
+
+ Please report back to the Parabola Project on the usual channels:
+ * http://bugs.parabolagnulinux.org
+ * irc://freenode.net/#parabola
+ * mailto:dev@list.parabolagnulinux.org
+EOM
+}
+
+pre_upgrade() {
+ pre_install
+}
+
+# vim:set ts=2 sw=2 et: