Diffstat (limited to 'staging')
-rw-r--r--  staging/alpine/2.00-lpam.patch | 13
-rw-r--r--  staging/alpine/CVE-2008-5514.patch | 20
-rw-r--r--  staging/alpine/PKGBUILD | 44
-rw-r--r--  staging/anjuta/PKGBUILD | 35
-rw-r--r--  staging/anjuta/anjuta.install | 21
-rw-r--r--  staging/cifs-utils/PKGBUILD | 26
-rw-r--r--  staging/cups/PKGBUILD | 110
-rwxr-xr-x  staging/cups/cups | 38
-rw-r--r--  staging/cups/cups-avahi.patch | 1089
-rw-r--r--  staging/cups/cups.install | 15
-rw-r--r--  staging/cups/cups.logrotate | 8
-rw-r--r--  staging/cups/cups.pam | 3
-rw-r--r--  staging/cvs/PKGBUILD | 39
-rw-r--r--  staging/cvs/cvs-1.11.23-cve-2010-3846.patch | 167
-rw-r--r--  staging/cvs/cvs-1.11.23-getline64.patch | 34
-rw-r--r--  staging/cvs/cvs.install | 20
-rw-r--r--  staging/cyrus-sasl-plugins/PKGBUILD | 52
-rw-r--r--  staging/cyrus-sasl-plugins/cyrus-sasl-2.1.22-gcc44.patch | 24
-rw-r--r--  staging/cyrus-sasl-plugins/cyrus-sasl-2.1.23-db5-fix.patch | 23
-rw-r--r--  staging/cyrus-sasl/PKGBUILD | 53
-rw-r--r--  staging/cyrus-sasl/cyrus-sasl-2.1.23+db-5.0.patch | 24
-rw-r--r--  staging/cyrus-sasl/cyrus-sasl-2.1.23-gcc4.patch | 21
-rw-r--r--  staging/cyrus-sasl/saslauthd | 50
-rw-r--r--  staging/cyrus-sasl/saslauthd.conf.d | 1
-rw-r--r--  staging/dovecot/PKGBUILD | 74
-rw-r--r--  staging/dovecot/dovecot.install | 57
-rwxr-xr-x  staging/dovecot/dovecot.sh | 36
-rw-r--r--  staging/evolution-data-server/PKGBUILD | 29
-rw-r--r--  staging/evolution-exchange/PKGBUILD | 33
-rw-r--r--  staging/evolution-exchange/evolution-exchange.install | 17
-rw-r--r--  staging/gnome-control-center/PKGBUILD | 34
-rw-r--r--  staging/gnome-control-center/gnome-control-center.install | 19
-rw-r--r--  staging/gnome-settings-daemon/PKGBUILD | 33
-rw-r--r--  staging/gnome-settings-daemon/gnome-settings-daemon.install | 18
-rw-r--r--  staging/gnome-vfs/PKGBUILD | 45
-rw-r--r--  staging/gnome-vfs/gnome-vfs.install | 17
-rw-r--r--  staging/gnome-vfs/gnutls-config.patch | 25
-rw-r--r--  staging/gtk2/PKGBUILD | 50
-rw-r--r--  staging/gtk2/gtk2.install | 16
-rw-r--r--  staging/gtk2/xid-collision-debug.patch | 15
-rw-r--r--  staging/gtk3/PKGBUILD | 36
-rw-r--r--  staging/gtk3/gtk3.install | 15
-rw-r--r--  staging/gtk3/settings.ini | 2
-rw-r--r--  staging/kdelibs/PKGBUILD | 62
-rw-r--r--  staging/kdelibs/abs-syntax-highlight.patch | 12
-rw-r--r--  staging/kdelibs/archlinux-menu.patch | 22
-rw-r--r--  staging/kdelibs/kde-applications-menu.patch | 22
-rw-r--r--  staging/kdelibs/kdelibs.install | 12
-rw-r--r--  staging/krb5/CVE-2010-4022.patch | 19
-rw-r--r--  staging/krb5/CVE-2011-0281.0282.0283.patch | 126
-rw-r--r--  staging/krb5/CVE-2011-0284.patch | 13
-rw-r--r--  staging/krb5/CVE-2011-0285.patch | 39
-rw-r--r--  staging/krb5/PKGBUILD | 64
-rw-r--r--  staging/krb5/kadmind.rc | 40
-rw-r--r--  staging/krb5/krb5-kdc.rc | 40
-rw-r--r--  staging/libgnomecups/PKGBUILD | 31
-rw-r--r--  staging/libgnomecups/libgnomecups_0.2.3-ignore-ipp-not-found.patch | 11
-rw-r--r--  staging/libgnomeprint/PKGBUILD | 28
-rw-r--r--  staging/librpcsecgss/PKGBUILD | 27
-rw-r--r--  staging/librpcsecgss/librpcsecgss-0.18-heimdal.patch | 33
-rw-r--r--  staging/libtirpc/PKGBUILD | 32
-rw-r--r--  staging/libtirpc/libtirpc-0.2.1-fortify.patch | 18
-rw-r--r--  staging/mutt/PKGBUILD | 47
-rw-r--r--  staging/mutt/install | 8
-rw-r--r--  staging/neon/PKGBUILD | 29
-rw-r--r--  staging/nss_ldap/PKGBUILD | 33
-rw-r--r--  staging/openssh/PKGBUILD | 56
-rwxr-xr-x  staging/openssh/sshd | 48
-rw-r--r--  staging/openssh/sshd.confd | 4
-rw-r--r--  staging/openssh/sshd.pam | 10
-rw-r--r--  staging/samba/PKGBUILD | 139
-rw-r--r--  staging/samba/fix-ipv6-mount.patch | 11
-rwxr-xr-x  staging/samba/samba | 54
-rw-r--r--  staging/samba/samba.conf.d | 7
-rw-r--r--  staging/samba/samba.logrotate | 9
-rw-r--r--  staging/samba/samba.pam | 3
-rw-r--r--  staging/samba/swat.xinetd | 10
-rw-r--r--  staging/subversion/PKGBUILD | 92
-rw-r--r--  staging/subversion/subversion.rpath.fix.patch | 10
-rw-r--r--  staging/subversion/subversion.suppress.deprecation.warnings.patch | 22
-rw-r--r--  staging/subversion/svn | 11
-rw-r--r--  staging/subversion/svnmerge.py | 2370
-rwxr-xr-x  staging/subversion/svnserve | 42
-rw-r--r--  staging/subversion/svnserve.conf | 7
-rw-r--r--  staging/wireshark/PKGBUILD | 54
-rw-r--r--  staging/wireshark/wireshark-gtk.install | 11
-rw-r--r--  staging/wireshark/wireshark.install | 18
-rw-r--r--  staging/xfprint/PKGBUILD | 35
-rw-r--r--  staging/xfprint/xfprint-manager-fix.diff | 29
-rw-r--r--  staging/xfprint/xfprint.install | 11
90 files changed, 0 insertions, 6332 deletions
diff --git a/staging/alpine/2.00-lpam.patch b/staging/alpine/2.00-lpam.patch
deleted file mode 100644
index 69e66d7ef..000000000
--- a/staging/alpine/2.00-lpam.patch
+++ /dev/null
@@ -1,13 +0,0 @@
---- alpine-2.00.orig/alpine/Makefile.in
-+++ alpine-2.00/alpine/Makefile.in
-@@ -169,7 +169,7 @@
- LIBICONV = @LIBICONV@
- LIBINTL = @LIBINTL@
- LIBOBJS = @LIBOBJS@
--LIBS = @LIBS@
-+LIBS = @LIBS@ -lpam
- LIBTOOL = @LIBTOOL@
- LN = @LN@
- LN_S = @LN_S@
- LTLIBICONV = @LTLIBICONV@
- LTLIBINTL = @LTLIBINTL@
diff --git a/staging/alpine/CVE-2008-5514.patch b/staging/alpine/CVE-2008-5514.patch
deleted file mode 100644
index 594bea0b2..000000000
--- a/staging/alpine/CVE-2008-5514.patch
+++ /dev/null
@@ -1,20 +0,0 @@
---- alpine-2.00/imap/src/c-client/rfc822.c
-+++ alpine-2.00/imap/src/c-client/rfc822.c
-@@ -1351,6 +1351,7 @@
-
- static long rfc822_output_char (RFC822BUFFER *buf,int c)
- {
-+ if ((buf->cur == buf->end) && !rfc822_output_flush (buf)) return NIL;
- *buf->cur++ = c; /* add character, soutr buffer if full */
- return (buf->cur == buf->end) ? rfc822_output_flush (buf) : LONGT;
- }
-@@ -1374,7 +1375,8 @@
- len -= i;
- }
- /* soutr buffer now if full */
-- if (len && !rfc822_output_flush (buf)) return NIL;
-+ if ((len || (buf->cur == buf->end)) && !rfc822_output_flush (buf))
-+ return NIL;
- }
- return LONGT;
- }
diff --git a/staging/alpine/PKGBUILD b/staging/alpine/PKGBUILD
deleted file mode 100644
index 6b2f9ee73..000000000
--- a/staging/alpine/PKGBUILD
+++ /dev/null
@@ -1,44 +0,0 @@
-# $Id: PKGBUILD 121113 2011-04-29 02:56:31Z eric $
-# Maintainer: Eric Bélanger <eric@archlinux.org>
-
-pkgname=alpine
-pkgver=2.00
-pkgrel=13
-_patchlevel=79
-pkgdesc="The Apache-licensed PINE (a tool for reading, sending, and managing electronic messages)"
-arch=('i686' 'x86_64')
-url="http://www.washington.edu/alpine/"
-license=('APACHE')
-depends=('libldap' 'krb5' 'gettext')
-optdepends=('aspell: for spell-checking support')
-provides=('pine')
-conflicts=('pine')
-replaces=('pine')
-options=('!makeflags')
-source=(ftp://ftp.cac.washington.edu/${pkgname}/${pkgname}.tar.bz2 \
- http://staff.washington.edu/chappa/alpine/patches/alpine-${pkgver}/all_${_patchlevel}.patch.gz \
- 2.00-lpam.patch CVE-2008-5514.patch)
-md5sums=('84e44cbf71ed674800a5d57eed9c1c52'
- 'd7dffd121c9a1cac4c458c0ff71df1ce'
- 'cd3911c16fc6a072e853c0ccfc35857c'
- '1b52a54a656979116c09fb1d948a4325')
-sha1sums=('dcbd3c5419954f484ccf706feaba31ce48cdebc4'
- 'd3acbf0e46c50feb2e822ef3bdc0a0f43c007294'
- '1b39525f91ebd5a9de5a1e04f5554f6fa5f58ae3'
- 'bc61d76a237ff42b00b3f60f2e6fc5c45e261dbb')
-
-build() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- patch -p1 < ../all_${_patchlevel}.patch
- patch -p1 < ../2.00-lpam.patch
- patch -p1 < ../CVE-2008-5514.patch
- ./configure --prefix=/usr --without-passfile --without-tcl \
- --disable-shared --with-system-pinerc=/etc/alpine.d/pine.conf \
- --with-system-fixed-pinerc=/etc/alpine.d/pine.conf.fixed
- make
-}
-
-package() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- make DESTDIR="${pkgdir}" install
-}
diff --git a/staging/anjuta/PKGBUILD b/staging/anjuta/PKGBUILD
deleted file mode 100644
index 98a1c6730..000000000
--- a/staging/anjuta/PKGBUILD
+++ /dev/null
@@ -1,35 +0,0 @@
-# $Id: PKGBUILD 122589 2011-05-04 18:41:26Z ibiru $
-# Maintainer: Andreas Radke <andyrtr@archlinux.org>
-# Contributor: Harley Laue <losinggeneration@yahoo.com>
-
-pkgname=anjuta
-pkgver=3.0.1.0
-pkgrel=2
-pkgdesc="GNOME Integrated Development Environment (IDE)"
-arch=('i686' 'x86_64')
-license=('GPL')
-depends=('vte3' 'gdl' 'autogen' 'devhelp' 'gtksourceview3' 'glade' 'libgda'
- 'subversion' 'gnome-icon-theme' 'hicolor-icon-theme' 'vala' 'graphviz'
- 'python2' 'shared-mime-info' 'desktop-file-utils')
-makedepends=('gnome-doc-utils' 'intltool' 'gobject-introspection')
-url="http://anjuta.sourceforge.net/"
-install=anjuta.install
-source=(ftp://ftp.gnome.org/pub/gnome/sources/${pkgname}/${pkgver%.*.*}/${pkgname}-${pkgver}.tar.bz2)
-options=('!libtool' '!emptydirs')
-sha256sums=('dbda189f453e0f9baecb35e85d59df92613668f7f4be08207d635cff3abb7261')
-
-build() {
- cd "${srcdir}/anjuta-${pkgver}"
- PYTHON=/usr/bin/python2 ./configure --prefix=/usr --sysconfdir=/etc \
- --localstatedir=/var --disable-scrollkeeper
- make
-}
-
-package(){
- cd "${srcdir}/anjuta-${pkgver}"
-
- make DESTDIR="${pkgdir}" install
-
- sed -i "1s|#!/usr/bin/python$|&2|" \
- $pkgdir/usr/share/anjuta/project/{pygtk,python}/src/main.py
-}
diff --git a/staging/anjuta/anjuta.install b/staging/anjuta/anjuta.install
deleted file mode 100644
index c6fcfec11..000000000
--- a/staging/anjuta/anjuta.install
+++ /dev/null
@@ -1,21 +0,0 @@
-post_install() {
- usr/bin/glib-compile-schemas usr/share/glib-2.0/schemas
- update-desktop-database -q
- gtk-update-icon-cache -q -t -f usr/share/icons/hicolor
- gtk-update-icon-cache -q -t -f usr/share/icons/gnome
- update-mime-database usr/share/mime > /dev/null
-}
-
-pre_upgrade() {
- if (( $(vercmp $2 2.90.0) < 0 )); then
- usr/sbin/gconfpkg --uninstall anjuta
- fi
-}
-
-post_upgrade() {
- post_install
-}
-
-post_remove() {
- post_install
-}
diff --git a/staging/cifs-utils/PKGBUILD b/staging/cifs-utils/PKGBUILD
deleted file mode 100644
index 9c33cefba..000000000
--- a/staging/cifs-utils/PKGBUILD
+++ /dev/null
@@ -1,26 +0,0 @@
-# $Id: PKGBUILD 122574 2011-05-04 17:23:19Z ibiru $
-# Maintainer: Tobias Powalowski <tpowa@archlinux.org>
-pkgname=cifs-utils
-pkgver=4.9
-pkgrel=3
-pkgdesc="CIFS filesystem user-space tools"
-arch=(i686 x86_64)
-url="http://wiki.samba.org/index.php/LinuxCIFS_utils"
-license=('GPL')
-depends=('libcap' 'keyutils' 'krb5' 'talloc')
-source=(ftp://ftp.samba.org/pub/linux-cifs/cifs-utils/$pkgname-$pkgver.tar.bz2)
-md5sums=('908d904e6b9e58f09f530de151a88ef8')
-
-build() {
- cd "$srcdir/$pkgname-$pkgver"
- ./configure --prefix=/usr
- make
-}
-
-package() {
- cd "$srcdir/$pkgname-$pkgver"
- make DESTDIR="$pkgdir" install
- rm -r $pkgdir/usr/bin
- # set mount.cifs uid, to enable none root mounting form fstab
- chmod +s $pkgdir/sbin/mount.cifs
-}
diff --git a/staging/cups/PKGBUILD b/staging/cups/PKGBUILD
deleted file mode 100644
index 64ab5d109..000000000
--- a/staging/cups/PKGBUILD
+++ /dev/null
@@ -1,110 +0,0 @@
-# $Id: PKGBUILD 121078 2011-04-29 00:03:07Z stephane $
-# Maintainer: Andreas Radke <andyrtr@archlinux.org>
-
-pkgbase="cups"
-pkgname=('libcups' 'cups')
-pkgver=1.4.6
-pkgrel=2
-arch=('i686' 'x86_64')
-license=('GPL')
-url="http://www.cups.org/"
-makedepends=('libtiff>=3.9.2-2' 'libpng>=1.4.0' 'acl' 'openslp' 'pam' 'xdg-utils' 'krb5' 'gnutls>=2.8.3' 'poppler>=0.12.3'
- 'xinetd' 'gzip' 'autoconf' 'php' 'libusb-compat' 'dbus-core' 'avahi' 'hicolor-icon-theme')
-source=(ftp://ftp.easysw.com/pub/cups/${pkgver}/cups-${pkgver}-source.tar.bz2
- cups-avahi.patch
- cups cups.logrotate cups.pam)
-#options=('!emptydirs')
-md5sums=('de8fb5a29c36554925c0c6a6e2c0dae1'
- '8ebd390197501ffd709f0ee546937fd5'
- '5c85b7d8d2ddd02c2c64955cebbf55ea'
- 'f861b18f4446c43918c8643dcbbd7f6d'
- '96f82c38f3f540b53f3e5144900acf17')
-
-# move client.conf man page for next update to the client pkg.
-
-build() {
- cd ${srcdir}/${pkgbase}-${pkgver}
- # Avahi support in the dnssd backend. patch from Fedora
- patch -Np1 -i ${srcdir}/cups-avahi.patch || return 1
-
- # Rebuild configure script for --enable-avahi.
- aclocal -I config-scripts
- autoconf -I config-scripts
-
- ./configure --prefix=/usr --sysconfdir=/etc --localstatedir=/var \
- --with-logdir=/var/log/cups -with-docdir=/usr/share/cups/doc \
- --with-cups-user=daemon --with-cups-group=lp --enable-pam=yes \
- --disable-ldap --libdir=/usr/lib --enable-raw-printing \
- --enable-dbus --with-dbusdir=/etc/dbus-1 --enable-ssl=yes --enable-gnutls --disable-threads --enable-avahi\
- --with-php=/usr/bin/php-cgi --with-pdftops=pdftops --with-optim="$CFLAGS"
- make || return 1
-}
-
-package_libcups() {
-pkgdesc="The CUPS Printing System - client libraries and headers"
-depends=('gnutls>=2.8.3' 'libtiff>=3.9.2-2' 'libpng>=1.4.0' 'krb5' 'avahi')
-
- cd ${srcdir}/${pkgbase}-${pkgver}
- make BUILDROOT=${pkgdir} install-headers install-libs || return 1
- # put this into the libs pkg to make other software find the libs(no pkg-config file included)
- mkdir -p ${pkgdir}/usr/bin
- install -m755 ${srcdir}/${pkgbase}-${pkgver}/cups-config ${pkgdir}/usr/bin/cups-config
-}
-
-package_cups() {
-pkgdesc="The CUPS Printing System - deamon package"
-install=cups.install
-backup=(etc/cups/cupsd.conf
- etc/cups/mime.convs
- etc/cups/mime.types
- etc/cups/snmp.conf
- etc/cups/printers.conf
- etc/cups/classes.conf
- etc/cups/client.conf
- etc/cups/subscriptions.conf
- etc/dbus-1/system.d/cups.conf
- etc/logrotate.d/cups
- etc/pam.d/cups
- etc/xinetd.d/cups-lpd)
-depends=('acl' 'openslp' 'pam' "libcups>=${pkgver}" 'xdg-utils' 'poppler>=0.12.3' 'libusb-compat' 'dbus-core' 'hicolor-icon-theme')
-optdepends=('php: for included phpcups.so module')
-
- cd ${srcdir}/${pkgbase}-${pkgver}
- make BUILDROOT=${pkgdir} install-data install-exec || return 1
-
- # this one we ship in the libcups pkg
- rm -f ${pkgdir}/usr/bin/cups-config
-
- # kill the sysv stuff
- rm -rf ${pkgdir}/etc/rc*.d
- rm -rf ${pkgdir}/etc/init.d
- install -D -m755 ../cups ${pkgdir}/etc/rc.d/cups
- install -D -m644 ../cups.logrotate ${pkgdir}/etc/logrotate.d/cups
- install -D -m644 ../cups.pam ${pkgdir}/etc/pam.d/cups
-
- # fix perms on /var/spool and /etc
- chmod 755 ${pkgdir}/var/spool
- chmod 755 ${pkgdir}/etc
-
- # serial backend needs to run as root (http://bugs.archlinux.org/task/20396)
- chmod 700 ${pkgdir}/usr/lib/cups/backend/serial
-
- # install ssl directory where to store the certs, solves some samba issues
- install -dm700 -g lp ${pkgdir}/etc/cups/ssl
- install -dm511 -g lp ${pkgdir}/var/run/cups/certs
-
- # install some more configuration files that will get filled by cupsd
- touch ${pkgdir}/etc/cups/printers.conf
- touch ${pkgdir}/etc/cups/classes.conf
- touch ${pkgdir}/etc/cups/client.conf
- echo "# see 'man client.conf'" >> ${pkgdir}/etc/cups/client.conf
- echo "ServerName /var/run/cups/cups.sock # alternative: ServerName hostname-or-ip-address[:port] of a remote server" >> ${pkgdir}/etc/cups/client.conf
- touch ${pkgdir}/etc/cups/subscriptions.conf
- chgrp lp ${pkgdir}/etc/cups/{printers.conf,classes.conf,client.conf,subscriptions.conf}
-
- # fix .desktop file
- sed -i 's|^Exec=htmlview http://localhost:631/|Exec=xdg-open http://localhost:631/|g' ${pkgdir}/usr/share/applications/cups.desktop
-
- # compress some driver files, adopted from Fedora
- find ${pkgdir}/usr/share/cups/model -name "*.ppd" | xargs gzip -n9f
-}
diff --git a/staging/cups/cups b/staging/cups/cups
deleted file mode 100755
index 4afaf5a7c..000000000
--- a/staging/cups/cups
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/bin/bash
-
-. /etc/rc.conf
-. /etc/rc.d/functions
-
-PID=`pidof -o %PPID /usr/sbin/cupsd`
-case "$1" in
- start)
- stat_busy "Starting CUPS Daemon"
- [ -z "$PID" ] && /usr/sbin/cupsd
- if [ $? -gt 0 ]; then
- stat_fail
- else
- echo $(pidof -o %PPID -x /usr/sbin/cupsd) > /var/run/cups.pid
- add_daemon cups
- stat_done
- fi
- ;;
- stop)
- stat_busy "Stopping CUPS Daemon"
- [ ! -z "$PID" ] && kill $PID &> /dev/null
- if [ $? -gt 0 ]; then
- stat_fail
- else
- rm /var/run/cups.pid
- rm_daemon cups
- stat_done
- fi
- ;;
- restart)
- $0 stop
- sleep 1
- $0 start
- ;;
- *)
- echo "usage: $0 {start|stop|restart}"
-esac
-exit 0
diff --git a/staging/cups/cups-avahi.patch b/staging/cups/cups-avahi.patch
deleted file mode 100644
index cf1056a75..000000000
--- a/staging/cups/cups-avahi.patch
+++ /dev/null
@@ -1,1089 +0,0 @@
-diff -up cups-1.4.5/backend/dnssd.c.avahi cups-1.4.5/backend/dnssd.c
---- cups-1.4.5/backend/dnssd.c.avahi 2009-08-08 00:27:12.000000000 +0200
-+++ cups-1.4.5/backend/dnssd.c 2010-11-12 13:13:31.000000000 +0100
-@@ -22,6 +22,7 @@
- * exec_backend() - Execute the backend that corresponds to the
- * resolved service name.
- * get_device() - Create or update a device.
-+* find_device()
- * query_callback() - Process query data.
- * sigterm_handler() - Handle termination signals...
- * unquote() - Unquote a name string.
-@@ -33,7 +34,18 @@
-
- #include "backend-private.h"
- #include <cups/array.h>
--#include <dns_sd.h>
-+#ifdef HAVE_DNSSD
-+# include <dns_sd.h>
-+#endif /* HAVE_DNSSD */
-+#ifdef HAVE_AVAHI
-+# include <avahi-client/client.h>
-+# include <avahi-client/lookup.h>
-+# include <avahi-common/simple-watch.h>
-+# include <avahi-common/domain.h>
-+# include <avahi-common/error.h>
-+# include <avahi-common/malloc.h>
-+#define kDNSServiceMaxDomainName AVAHI_DOMAIN_NAME_MAX
-+#endif /* HAVE_AVAHI */
-
-
- /*
-@@ -52,7 +64,12 @@ typedef enum
-
- typedef struct
- {
-+#ifdef HAVE_DNSSD
- DNSServiceRef ref; /* Service reference for resolve */
-+#endif /* HAVE_DNSSD */
-+#ifdef HAVE_AVAHI
-+ int resolved; /* Did we resolve the device? */
-+#endif /* HAVE_AVAHI */
- char *name, /* Service name */
- *domain, /* Domain name */
- *fullName, /* Full name */
-@@ -64,6 +81,20 @@ typedef struct
- sent; /* Did we list the device? */
- } cups_device_t;
-
-+typedef struct
-+{
-+ char key[256];
-+ char value[256];
-+
-+#ifdef HAVE_DNSSD
-+ const uint8_t *data;
-+ const uint8_t *datanext;
-+ const uint8_t *dataend;
-+#else /* HAVE_AVAHI */
-+ AvahiStringList *txt;
-+#endif /* HAVE_DNSSD */
-+} cups_txt_records_t;
-+
-
- /*
- * Local globals...
-@@ -77,6 +108,7 @@ static int job_canceled = 0;
- * Local functions...
- */
-
-+#ifdef HAVE_DNSSD
- static void browse_callback(DNSServiceRef sdRef,
- DNSServiceFlags flags,
- uint32_t interfaceIndex,
-@@ -92,12 +124,6 @@ static void browse_local_callback(DNSSe
- const char *regtype,
- const char *replyDomain,
- void *context);
--static int compare_devices(cups_device_t *a, cups_device_t *b);
--static void exec_backend(char **argv);
--static cups_device_t *get_device(cups_array_t *devices,
-- const char *serviceName,
-- const char *regtype,
-- const char *replyDomain);
- static void query_callback(DNSServiceRef sdRef,
- DNSServiceFlags flags,
- uint32_t interfaceIndex,
-@@ -106,9 +132,111 @@ static void query_callback(DNSServiceRe
- uint16_t rrclass, uint16_t rdlen,
- const void *rdata, uint32_t ttl,
- void *context);
-+#endif /* HAVE_DNSSD */
-+#ifdef HAVE_AVAHI
-+static void avahi_client_callback (AvahiClient *client,
-+ AvahiClientState state,
-+ void *context);
-+static void avahi_browse_callback (AvahiServiceBrowser *browser,
-+ AvahiIfIndex interface,
-+ AvahiProtocol protocol,
-+ AvahiBrowserEvent event,
-+ const char *serviceName,
-+ const char *regtype,
-+ const char *replyDomain,
-+ AvahiLookupResultFlags flags,
-+ void *context);
-+#endif /* HAVE_AVAHI */
-+
-+static cups_device_t * find_device (cups_array_t *devices,
-+ cups_txt_records_t *txt,
-+ cups_device_t *dkey);
-+static int compare_devices(cups_device_t *a, cups_device_t *b);
-+static void exec_backend(char **argv);
-+static cups_device_t *get_device(cups_array_t *devices,
-+ const char *serviceName,
-+ const char *regtype,
-+ const char *replyDomain);
- static void sigterm_handler(int sig);
- static void unquote(char *dst, const char *src, size_t dstsize);
-
-+#ifdef HAVE_AVAHI
-+static AvahiSimplePoll *simple_poll = NULL;
-+static int avahi_got_callback;
-+#endif /* HAVE_AVAHI */
-+
-+
-+/*
-+ * cups_txt_records_t access functions
-+ */
-+static cups_txt_records_t *
-+next_txt_record (cups_txt_records_t *txt)
-+{
-+#ifdef HAVE_DNSSD
-+ txt->data = txt->datanext;
-+#else /* HAVE_AVAHI */
-+ txt->txt = avahi_string_list_get_next (txt->txt);
-+ if (txt->txt == NULL)
-+ return NULL;
-+#endif /* HAVE_DNSSD */
-+
-+ return txt;
-+}
-+
-+static int
-+parse_txt_record_pair (cups_txt_records_t *txt)
-+{
-+#ifdef HAVE_DNSSD
-+ uint8_t datalen;
-+ uint8_t *data = txt->data;
-+ char *ptr;
-+
-+ /*
-+ * Read a key/value pair starting with an 8-bit length. Since the
-+ * length is 8 bits and the size of the key/value buffers is 256, we
-+ * don't need to check for overflow...
-+ */
-+
-+ datalen = *data++;
-+ if (!datalen || (data + datalen) >= txt->dataend)
-+ return NULL;
-+ txt->datanext = data + datalen;
-+
-+ for (ptr = txt->key; data < txt->datanext && *data != '='; data ++)
-+ *ptr++ = *data;
-+ *ptr = '\0';
-+
-+ if (data < txt->datanext && *data == '=')
-+ {
-+ data++;
-+
-+ if (data < datanext)
-+ memcpy (txt->value, data, txt->datanext - data);
-+ value[txt->datanext - data] = '\0';
-+ }
-+ else
-+ return 1;
-+#else /* HAVE_AVAHI */
-+ char *key, *value;
-+ size_t len;
-+ avahi_string_list_get_pair (txt->txt, &key, &value, &len);
-+ if (len > sizeof (txt->value) - 1)
-+ len = sizeof (txt->value) - 1;
-+
-+ memcpy (txt->value, value, len);
-+ txt->value[len] = '\0';
-+ len = strlen (key);
-+ if (len > sizeof (txt->key) - 1)
-+ len = sizeof (txt->key) - 1;
-+
-+ memcpy (txt->key, key, len);
-+ txt->key[len] = '\0';
-+ avahi_free (key);
-+ avahi_free (value);
-+#endif /* HAVE_AVAHI */
-+
-+ return 0;
-+}
-
- /*
- * 'main()' - Browse for printers.
-@@ -119,6 +247,13 @@ main(int argc, /* I - Number of comm
- char *argv[]) /* I - Command-line arguments */
- {
- const char *name; /* Backend name */
-+ cups_array_t *devices; /* Device array */
-+ cups_device_t *device; /* Current device */
-+ char uriName[1024]; /* Unquoted fullName for URI */
-+#ifdef HAVE_DNSSD
-+ int fd; /* Main file descriptor */
-+ fd_set input; /* Input set for select() */
-+ struct timeval timeout; /* Timeout for select() */
- DNSServiceRef main_ref, /* Main service reference */
- fax_ipp_ref, /* IPP fax service reference */
- ipp_ref, /* IPP service reference */
-@@ -130,12 +265,11 @@ main(int argc, /* I - Number of comm
- pdl_datastream_ref, /* AppSocket service reference */
- printer_ref, /* LPD service reference */
- riousbprint_ref; /* Remote IO service reference */
-- int fd; /* Main file descriptor */
-- fd_set input; /* Input set for select() */
-- struct timeval timeout; /* Timeout for select() */
-- cups_array_t *devices; /* Device array */
-- cups_device_t *device; /* Current device */
-- char uriName[1024]; /* Unquoted fullName for URI */
-+#endif /* HAVE_DNSSD */
-+#ifdef HAVE_AVAHI
-+ AvahiClient *client;
-+ int error;
-+#endif /* HAVE_AVAHI */
- #if defined(HAVE_SIGACTION) && !defined(HAVE_SIGSET)
- struct sigaction action; /* Actions for POSIX signals */
- #endif /* HAVE_SIGACTION && !HAVE_SIGSET */
-@@ -194,6 +328,49 @@ main(int argc, /* I - Number of comm
- * Browse for different kinds of printers...
- */
-
-+#ifdef HAVE_AVAHI
-+ if ((simple_poll = avahi_simple_poll_new ()) == NULL)
-+ {
-+ perror ("ERROR: Unable to create avahi simple poll object");
-+ return (1);
-+ }
-+
-+ client = avahi_client_new (avahi_simple_poll_get (simple_poll),
-+ 0, avahi_client_callback, NULL, &error);
-+ if (!client)
-+ {
-+ perror ("ERROR: Unable to create avahi client");
-+ return (1);
-+ }
-+
-+ avahi_service_browser_new (client, AVAHI_IF_UNSPEC,
-+ AVAHI_PROTO_UNSPEC,
-+ "_fax-ipp._tcp", NULL, 0,
-+ avahi_browse_callback, devices);
-+ avahi_service_browser_new (client, AVAHI_IF_UNSPEC,
-+ AVAHI_PROTO_UNSPEC,
-+ "_ipp._tcp", NULL, 0,
-+ avahi_browse_callback, devices);
-+ avahi_service_browser_new (client, AVAHI_IF_UNSPEC,
-+ AVAHI_PROTO_UNSPEC,
-+ "_ipp-tls._tcp", NULL, 0,
-+ avahi_browse_callback, devices);
-+ avahi_service_browser_new (client, AVAHI_IF_UNSPEC,
-+ AVAHI_PROTO_UNSPEC,
-+ "_pdl-datastream._tcp",
-+ NULL, 0,
-+ avahi_browse_callback,
-+ devices);
-+ avahi_service_browser_new (client, AVAHI_IF_UNSPEC,
-+ AVAHI_PROTO_UNSPEC,
-+ "_printer._tcp", NULL, 0,
-+ avahi_browse_callback, devices);
-+ avahi_service_browser_new (client, AVAHI_IF_UNSPEC,
-+ AVAHI_PROTO_UNSPEC,
-+ "_riousbprint._tcp", NULL, 0,
-+ avahi_browse_callback, devices);
-+#endif /* HAVE_AVAHI */
-+#ifdef HAVE_DNSSD
- if (DNSServiceCreateConnection(&main_ref) != kDNSServiceErr_NoError)
- {
- perror("ERROR: Unable to create service connection");
-@@ -245,6 +422,7 @@ main(int argc, /* I - Number of comm
- riousbprint_ref = main_ref;
- DNSServiceBrowse(&riousbprint_ref, kDNSServiceFlagsShareConnection, 0,
- "_riousbprint._tcp", NULL, browse_callback, devices);
-+#endif /* HAVE_DNSSD */
-
- /*
- * Loop until we are killed...
-@@ -252,6 +430,9 @@ main(int argc, /* I - Number of comm
-
- while (!job_canceled)
- {
-+ int announce = 0;
-+
-+#ifdef HAVE_DNSSD
- FD_ZERO(&input);
- FD_SET(fd, &input);
-
-@@ -271,11 +452,35 @@ main(int argc, /* I - Number of comm
- }
- else
- {
-+ announce = 1;
-+ }
-+#else /* HAVE_AVAHI */
-+ int r;
-+ avahi_got_callback = 0;
-+ r = avahi_simple_poll_iterate (simple_poll, 1);
-+ if (r != 0 && r != EINTR)
-+ {
-+ /*
-+ * We've been told to exit the loop. Perhaps the connection to
-+ * avahi failed.
-+ */
-+
-+ break;
-+ }
-+
-+ if (avahi_got_callback)
-+ announce = 1;
-+#endif /* HAVE_DNSSD */
-+
-+ if (announce)
-+ {
- /*
- * Announce any devices we've found...
- */
-
-+#ifdef HAVE_DNSSD
- DNSServiceErrorType status; /* DNS query status */
-+#endif /* HAVE_DNSSD */
- cups_device_t *best; /* Best matching device */
- char device_uri[1024]; /* Device URI */
- int count; /* Number of queries */
-@@ -285,6 +490,7 @@ main(int argc, /* I - Number of comm
- best = NULL, count = 0;
- device;
- device = (cups_device_t *)cupsArrayNext(devices))
-+#ifdef HAVE_DNSSD
- if (!device->ref && !device->sent)
- {
- /*
-@@ -313,14 +519,23 @@ main(int argc, /* I - Number of comm
- count ++;
- }
- }
-- else if (!device->sent)
-+ else
-+#endif /* HAVE_DNSSD */
-+#ifdef HAVE_AVAHI
-+ if (!device->resolved)
-+ continue;
-+ else
-+#endif /* HAVE_AVAHI */
-+ if (!device->sent)
- {
-+#ifdef HAVE_DNSSD
- /*
- * Got the TXT records, now report the device...
- */
-
- DNSServiceRefDeallocate(device->ref);
- device->ref = 0;
-+#endif /* HAVE_DNSSD */
-
- if (!best)
- best = device;
-@@ -372,6 +587,7 @@ main(int argc, /* I - Number of comm
- * 'browse_callback()' - Browse devices.
- */
-
-+#ifdef HAVE_DNSSD
- static void
- browse_callback(
- DNSServiceRef sdRef, /* I - Service reference */
-@@ -405,12 +621,14 @@ browse_callback(
-
- get_device((cups_array_t *)context, serviceName, regtype, replyDomain);
- }
-+#endif /* HAVE_DNSSD */
-
-
- /*
- * 'browse_local_callback()' - Browse local devices.
- */
-
-+#ifdef HAVE_DNSSD
- static void
- browse_local_callback(
- DNSServiceRef sdRef, /* I - Service reference */
-@@ -456,6 +674,7 @@ browse_local_callback(
- device->fullName);
- device->sent = 1;
- }
-+#endif /* HAVE_DNSSD */
-
-
- /*
-@@ -528,6 +747,32 @@ exec_backend(char **argv) /* I - Comman
- exit(CUPS_BACKEND_STOP);
- }
-
-+static int
-+device_type (const char *regtype)
-+{
-+#ifdef HAVE_AVAHI
-+ if (!strcmp(regtype, "_ipp._tcp") ||
-+ !strcmp(regtype, "_ipp-tls._tcp"))
-+ return (CUPS_DEVICE_IPP);
-+ else if (!strcmp(regtype, "_fax-ipp._tcp"))
-+ return (CUPS_DEVICE_FAX_IPP);
-+ else if (!strcmp(regtype, "_printer._tcp"))
-+ return (CUPS_DEVICE_PDL_DATASTREAM);
-+#else
-+ if (!strcmp(regtype, "_ipp._tcp.") ||
-+ !strcmp(regtype, "_ipp-tls._tcp."))
-+ return (CUPS_DEVICE_IPP);
-+ else if (!strcmp(regtype, "_fax-ipp._tcp."))
-+ return (CUPS_DEVICE_FAX_IPP);
-+ else if (!strcmp(regtype, "_printer._tcp."))
-+ return (CUPS_DEVICE_PRINTER);
-+ else if (!strcmp(regtype, "_pdl-datastream._tcp."))
-+ return (CUPS_DEVICE_PDL_DATASTREAM);
-+#endif /* HAVE_AVAHI */
-+
-+ return (CUPS_DEVICE_RIOUSBPRINT);
-+}
-+
-
- /*
- * 'get_device()' - Create or update a device.
-@@ -550,18 +795,7 @@ get_device(cups_array_t *devices, /* I -
- */
-
- key.name = (char *)serviceName;
--
-- if (!strcmp(regtype, "_ipp._tcp.") ||
-- !strcmp(regtype, "_ipp-tls._tcp."))
-- key.type = CUPS_DEVICE_IPP;
-- else if (!strcmp(regtype, "_fax-ipp._tcp."))
-- key.type = CUPS_DEVICE_FAX_IPP;
-- else if (!strcmp(regtype, "_printer._tcp."))
-- key.type = CUPS_DEVICE_PRINTER;
-- else if (!strcmp(regtype, "_pdl-datastream._tcp."))
-- key.type = CUPS_DEVICE_PDL_DATASTREAM;
-- else
-- key.type = CUPS_DEVICE_RIOUSBPRINT;
-+ key.type = device_type (regtype);
-
- for (device = cupsArrayFind(devices, &key);
- device;
-@@ -581,8 +815,14 @@ get_device(cups_array_t *devices, /* I -
- free(device->domain);
- device->domain = strdup(replyDomain);
-
-+#ifdef HAVE_DNSSD
- DNSServiceConstructFullName(fullName, device->name, regtype,
- replyDomain);
-+#else /* HAVE_AVAHI */
-+ avahi_service_name_join (fullName, kDNSServiceMaxDomainName,
-+ serviceName, regtype, replyDomain);
-+#endif /* HAVE_DNSSD */
-+
- free(device->fullName);
- device->fullName = strdup(fullName);
- }
-@@ -602,6 +842,9 @@ get_device(cups_array_t *devices, /* I -
- device->domain = strdup(replyDomain);
- device->type = key.type;
- device->priority = 50;
-+#ifdef HAVE_AVAHI
-+ device->resolved = 0;
-+#endif /* HAVE_AVAHI */
-
- cupsArrayAdd(devices, device);
-
-@@ -609,7 +852,13 @@ get_device(cups_array_t *devices, /* I -
- * Set the "full name" of this service, which is used for queries...
- */
-
-+#ifdef HAVE_DNSSD
- DNSServiceConstructFullName(fullName, serviceName, regtype, replyDomain);
-+#else /* HAVE_AVAHI */
-+ avahi_service_name_join (fullName, kDNSServiceMaxDomainName,
-+ serviceName, regtype, replyDomain);
-+#endif /* HAVE_DNSSD */
-+
- device->fullName = strdup(fullName);
-
- return (device);
-@@ -620,6 +869,7 @@ get_device(cups_array_t *devices, /* I -
- * 'query_callback()' - Process query data.
- */
-
-+#ifdef HAVE_DNSSD
- static void
- query_callback(
- DNSServiceRef sdRef, /* I - Service reference */
-@@ -639,7 +889,7 @@ query_callback(
- *ptr; /* Pointer into string */
- cups_device_t dkey, /* Search key */
- *device; /* Device */
--
-+ cups_txt_records_t txt;
-
- fprintf(stderr, "DEBUG2: query_callback(sdRef=%p, flags=%x, "
- "interfaceIndex=%d, errorCode=%d, fullName=\"%s\", "
-@@ -673,84 +923,212 @@ query_callback(
- if ((ptr = strstr(name, "._")) != NULL)
- *ptr = '\0';
-
-- if (strstr(fullName, "_ipp._tcp.") ||
-- strstr(fullName, "_ipp-tls._tcp."))
-- dkey.type = CUPS_DEVICE_IPP;
-- else if (strstr(fullName, "_fax-ipp._tcp."))
-- dkey.type = CUPS_DEVICE_FAX_IPP;
-- else if (strstr(fullName, "_printer._tcp."))
-- dkey.type = CUPS_DEVICE_PRINTER;
-- else if (strstr(fullName, "_pdl-datastream._tcp."))
-- dkey.type = CUPS_DEVICE_PDL_DATASTREAM;
-+ dkey.type = device_type (fullName);
-+
-+ txt.data = rdata;
-+ txt.dataend = rdata + rdlen;
-+ device = find_device ((cups_array_t *) context, &txt, &dkey);
-+ if (!device)
-+ fprintf(stderr, "DEBUG: Ignoring TXT record for \"%s\"...\n", fullName);
-+}
-+#endif /* HAVE_DNSSD */
-+
-+#ifdef HAVE_AVAHI
-+static void
-+avahi_client_callback(AvahiClient *client,
-+ AvahiClientState state,
-+ void *context)
-+{
-+ /*
-+ * If the connection drops, quit.
-+ */
-+
-+ if (state == AVAHI_CLIENT_FAILURE)
-+ {
-+ fprintf (stderr, "ERROR: Avahi connection failed\n");
-+ avahi_simple_poll_quit (simple_poll);
-+ }
-+}
-+
-+static void
-+avahi_query_callback(AvahiServiceResolver *resolver,
-+ AvahiIfIndex interface,
-+ AvahiProtocol protocol,
-+ AvahiResolverEvent event,
-+ const char *name,
-+ const char *type,
-+ const char *domain,
-+ const char *host_name,
-+ const AvahiAddress *address,
-+ uint16_t port,
-+ AvahiStringList *txt,
-+ AvahiLookupResultFlags flags,
-+ void *context)
-+{
-+ AvahiClient *client;
-+ cups_device_t key,
-+ *device;
-+ char uqname[1024],
-+ *ptr;
-+ cups_txt_records_t txtr;
-+
-+ client = avahi_service_resolver_get_client (resolver);
-+ if (event != AVAHI_RESOLVER_FOUND)
-+ {
-+ if (event == AVAHI_RESOLVER_FAILURE)
-+ {
-+ fprintf (stderr, "ERROR: %s\n",
-+ avahi_strerror (avahi_client_errno (client)));
-+ }
-+
-+ avahi_service_resolver_free (resolver);
-+ return;
-+ }
-+
-+ /*
-+ * Set search key for device.
-+ */
-+
-+ key.name = uqname;
-+ unquote (uqname, name, sizeof (uqname));
-+ if ((ptr = strstr(name, "._")) != NULL)
-+ *ptr = '\0';
-+
-+ key.domain = (char *) domain;
-+ key.type = device_type (type);
-+
-+ /*
-+ * Find the device and the the TXT information.
-+ */
-+
-+ txtr.txt = txt;
-+ device = find_device ((cups_array_t *) context, &txtr, &key);
-+ if (device)
-+ {
-+ /*
-+ * Let the main loop know to announce the device.
-+ */
-+
-+ device->resolved = 1;
-+ avahi_got_callback = 1;
-+ }
- else
-- dkey.type = CUPS_DEVICE_RIOUSBPRINT;
-+ fprintf (stderr, "DEBUG: Ignoring TXT record for \"%s\"...\n", name);
-+
-+ avahi_service_resolver_free (resolver);
-+}
-+
-+static void
-+avahi_browse_callback(AvahiServiceBrowser *browser,
-+ AvahiIfIndex interface,
-+ AvahiProtocol protocol,
-+ AvahiBrowserEvent event,
-+ const char *name,
-+ const char *type,
-+ const char *domain,
-+ AvahiLookupResultFlags flags,
-+ void *context)
-+{
-+ AvahiClient *client = avahi_service_browser_get_client (browser);
-+
-+ switch (event)
-+ {
-+ case AVAHI_BROWSER_FAILURE:
-+ fprintf (stderr, "ERROR: %s\n",
-+ avahi_strerror (avahi_client_errno (client)));
-+ avahi_simple_poll_quit (simple_poll);
-+ return;
-+
-+ case AVAHI_BROWSER_NEW:
-+ /*
-+ * This object is new on the network.
-+ */
-+
-+ if (flags & AVAHI_LOOKUP_RESULT_LOCAL)
-+ {
-+ /*
-+ * This comes from the local machine so ignore it.
-+ */
-+
-+ fprintf (stderr, "DEBUG: ignoring local service %s\n", name);
-+ }
-+ else
-+ {
-+ /*
-+ * Create a device entry for it if it doesn't yet exist.
-+ */
-+
-+ get_device ((cups_array_t *)context, name, type, domain);
-+
-+ /*
-+ * Now look for a TXT entry.
-+ */
-+
-+ if (avahi_service_resolver_new (client, interface, protocol,
-+ name, type, domain,
-+ AVAHI_PROTO_UNSPEC, 0,
-+ avahi_query_callback, context) == NULL)
-+ {
-+ fprintf (stderr, "ERROR: failed to resolve service %s: %s\n",
-+ name, avahi_strerror (avahi_client_errno (client)));
-+ }
-+ }
-+
-+ break;
-
-- for (device = cupsArrayFind(devices, &dkey);
-+ case AVAHI_BROWSER_REMOVE:
-+ case AVAHI_BROWSER_ALL_FOR_NOW:
-+ case AVAHI_BROWSER_CACHE_EXHAUSTED:
-+ break;
-+ }
-+}
-+#endif /* HAVE_AVAHI */
-+
-+static cups_device_t *
-+find_device (cups_array_t *devices,
-+ cups_txt_records_t *txt,
-+ cups_device_t *dkey)
-+{
-+ cups_device_t *device;
-+ char *ptr;
-+
-+ for (device = cupsArrayFind(devices, dkey);
- device;
- device = cupsArrayNext(devices))
- {
-- if (strcasecmp(device->name, dkey.name) ||
-- strcasecmp(device->domain, dkey.domain))
-+ if (strcasecmp(device->name, dkey->name) ||
-+ strcasecmp(device->domain, dkey->domain))
- {
- device = NULL;
- break;
- }
-- else if (device->type == dkey.type)
-+ else if (device->type == dkey->type)
- {
- /*
- * Found it, pull out the priority and make and model from the TXT
- * record and save it...
- */
-
-- const uint8_t *data, /* Pointer into data */
-- *datanext, /* Next key/value pair */
-- *dataend; /* End of entire TXT record */
-- uint8_t datalen; /* Length of current key/value pair */
-- char key[256], /* Key string */
-- value[256], /* Value string */
-- make_and_model[512],
-+ char make_and_model[512],
- /* Manufacturer and model */
- model[256], /* Model */
-- device_id[2048];/* 1284 device ID */
--
-+ device_id[2048]; /* 1284 device ID */
-
- device_id[0] = '\0';
- make_and_model[0] = '\0';
-
- strcpy(model, "Unknown");
-
-- for (data = rdata, dataend = data + rdlen;
-- data < dataend;
-- data = datanext)
-+ for (;;)
- {
-- /*
-- * Read a key/value pair starting with an 8-bit length. Since the
-- * length is 8 bits and the size of the key/value buffers is 256, we
-- * don't need to check for overflow...
-- */
--
-- datalen = *data++;
--
-- if (!datalen || (data + datalen) >= dataend)
-- break;
--
-- datanext = data + datalen;
-+ char *key;
-+ char *value;
-
-- for (ptr = key; data < datanext && *data != '='; data ++)
-- *ptr++ = *data;
-- *ptr = '\0';
--
-- if (data < datanext && *data == '=')
-- {
-- data ++;
--
-- if (data < datanext)
-- memcpy(value, data, datanext - data);
-- value[datanext - data] = '\0';
-- }
-- else
-- continue;
-+ if (parse_txt_record_pair (txt))
-+ goto next;
-
-+ key = txt->key;
-+ value = txt->value;
- if (!strncasecmp(key, "usb_", 4))
- {
- /*
-@@ -805,6 +1183,10 @@ query_callback(
- if (device->type == CUPS_DEVICE_PRINTER)
- device->sent = 1;
- }
-+
-+ next:
-+ if (next_txt_record (txt) == NULL)
-+ break;
- }
-
- if (device->device_id)
-@@ -854,11 +1236,9 @@ query_callback(
- }
- }
-
-- if (!device)
-- fprintf(stderr, "DEBUG: Ignoring TXT record for \"%s\"...\n", fullName);
-+ return device;
- }
-
--
- /*
- * 'sigterm_handler()' - Handle termination signals...
- */
-diff -up cups-1.4.5/config.h.in.avahi cups-1.4.5/config.h.in
---- cups-1.4.5/config.h.in.avahi 2010-08-13 06:11:46.000000000 +0200
-+++ cups-1.4.5/config.h.in 2010-11-12 13:13:31.000000000 +0100
-@@ -344,6 +344,13 @@
-
-
- /*
-+ * Do we have Avahi for DNS Service Discovery?
-+ */
-+
-+#undef HAVE_AVAHI
-+
-+
-+/*
- * Do we have <sys/ioctl.h>?
- */
-
-diff -up cups-1.4.5/config-scripts/cups-dnssd.m4.avahi cups-1.4.5/config-scripts/cups-dnssd.m4
---- cups-1.4.5/config-scripts/cups-dnssd.m4.avahi 2009-08-29 00:54:34.000000000 +0200
-+++ cups-1.4.5/config-scripts/cups-dnssd.m4 2010-11-12 13:13:31.000000000 +0100
-@@ -27,6 +27,21 @@ AC_ARG_WITH(dnssd-includes, [ --with-dn
- DNSSDLIBS=""
- DNSSD_BACKEND=""
-
-+AC_ARG_ENABLE(avahi, [ --enable-avahi turn on DNS Service Discovery support, default=no],
-+ [if test x$enable_avahi = xyes; then
-+ AC_MSG_CHECKING(for Avahi)
-+ if $PKGCONFIG --exists avahi-client; then
-+ AC_MSG_RESULT(yes)
-+ CFLAGS="$CFLAGS `$PKGCONFIG --cflags avahi-client`"
-+ DNSSDLIBS="`$PKGCONFIG --libs avahi-client`"
-+ DNSSD_BACKEND="dnssd"
-+ AC_DEFINE(HAVE_AVAHI)
-+ enable_dnssd=no
-+ else
-+ AC_MSG_RESULT(no)
-+ fi
-+ fi])
-+
- if test x$enable_dnssd != xno; then
- AC_CHECK_HEADER(dns_sd.h, [
- case "$uname" in
-diff -up cups-1.4.5/cups/http-support.c.avahi cups-1.4.5/cups/http-support.c
---- cups-1.4.5/cups/http-support.c.avahi 2010-10-02 00:40:38.000000000 +0200
-+++ cups-1.4.5/cups/http-support.c 2010-11-12 13:28:45.000000000 +0100
-@@ -55,6 +55,11 @@
- # include <dns_sd.h>
- # include <poll.h>
- #endif /* HAVE_DNSSD */
-+#ifdef HAVE_AVAHI
-+# include <avahi-client/client.h>
-+# include <avahi-client/lookup.h>
-+# include <avahi-common/simple-watch.h>
-+#endif /* HAVE_AVAHI */
-
-
- /*
-@@ -121,6 +126,24 @@ static void resolve_callback(DNSService
- void *context);
- #endif /* HAVE_DNSSD */
-
-+#ifdef HAVE_AVAHI
-+static void avahi_resolve_uri_client_cb(AvahiClient *client,
-+ AvahiClientState state,
-+ void *simple_poll);
-+static void avahi_resolve_uri_resolver_cb(AvahiServiceResolver *resolver,
-+ AvahiIfIndex interface,
-+ AvahiProtocol protocol,
-+ AvahiResolverEvent event,
-+ const char *name,
-+ const char *type,
-+ const char *domain,
-+ const char *host_name,
-+ const AvahiAddress *address,
-+ uint16_t port,
-+ AvahiStringList *txt,
-+ AvahiLookupResultFlags flags,
-+ void *context);
-+#endif /* HAVE_AVAHI */
-
- /*
- * 'httpAssembleURI()' - Assemble a uniform resource identifier from its
-@@ -1351,16 +1374,27 @@ _httpResolveURI(
-
- if (strstr(hostname, "._tcp"))
- {
-+#if defined(HAVE_DNSSD) || defined(HAVE_AVAHI)
-+ char *regtype, /* Pointer to type in hostname */
-+ *domain; /* Pointer to domain in hostname */
- #ifdef HAVE_DNSSD
- DNSServiceRef ref, /* DNS-SD master service reference */
- domainref, /* DNS-SD service reference for domain */
- localref; /* DNS-SD service reference for .local */
- int domainsent = 0, /* Send the domain resolve? */
- offline = 0; /* offline-report state set? */
-- char *regtype, /* Pointer to type in hostname */
-- *domain; /* Pointer to domain in hostname */
- _http_uribuf_t uribuf; /* URI buffer */
- struct pollfd polldata; /* Polling data */
-+#else /* HAVE_AVAHI */
-+ AvahiSimplePoll *simple_poll;
-+ AvahiClient *client;
-+ int error;
-+ struct
-+ {
-+ AvahiSimplePoll *poll;
-+ _http_uribuf_t uribuf;
-+ } user_data;
-+#endif /* HAVE_DNSSD */
-
-
- if (logit)
-@@ -1398,8 +1432,13 @@ _httpResolveURI(
- if (domain)
- *domain++ = '\0';
-
-+#ifdef HAVE_DNSSD
- uribuf.buffer = resolved_uri;
- uribuf.bufsize = resolved_size;
-+#else
-+ user_data.uribuf.buffer = resolved_uri;
-+ user_data.uribuf.bufsize = resolved_size;
-+#endif
-
- resolved_uri[0] = '\0';
-
-@@ -1414,6 +1453,7 @@ _httpResolveURI(
-
- uri = NULL;
-
-+#ifdef HAVE_DNSSD
- if (DNSServiceCreateConnection(&ref) == kDNSServiceErr_NoError)
- {
- localref = ref;
-@@ -1500,6 +1540,36 @@ _httpResolveURI(
-
- DNSServiceRefDeallocate(ref);
- }
-+#else /* HAVE_AVAHI */
-+ if ((simple_poll = avahi_simple_poll_new ()) != NULL)
-+ {
-+ if ((client = avahi_client_new (avahi_simple_poll_get (simple_poll),
-+ 0, avahi_resolve_uri_client_cb,
-+ &simple_poll, &error)) != NULL)
-+ {
-+ user_data.poll = simple_poll;
-+ if (avahi_service_resolver_new (client, AVAHI_IF_UNSPEC,
-+ AVAHI_PROTO_UNSPEC, hostname,
-+ regtype, domain, AVAHI_PROTO_UNSPEC, 0,
-+ avahi_resolve_uri_resolver_cb,
-+ &user_data) != NULL)
-+ {
-+ avahi_simple_poll_loop (simple_poll);
-+
-+ /*
-+ * Collect the result.
-+ */
-+
-+ if (resolved_uri[0])
-+ uri = resolved_uri;
-+ }
-+
-+ avahi_client_free (client);
-+ }
-+
-+ avahi_simple_poll_free (simple_poll);
-+ }
-+#endif /* HAVE_DNSSD */
-
- if (logit)
- {
-@@ -1511,13 +1581,13 @@ _httpResolveURI(
- fputs("STATE: -connecting-to-device,offline-report\n", stderr);
- }
-
--#else
-+#else /* HAVE_DNSSD || HAVE_AVAHI */
- /*
- * No DNS-SD support...
- */
-
- uri = NULL;
--#endif /* HAVE_DNSSD */
-+#endif /* HAVE_DNSSD || HAVE_AVAHI */
-
- if (logit && !uri)
- _cupsLangPuts(stderr, _("Unable to find printer!\n"));
-@@ -1722,6 +1792,105 @@ resolve_callback(
- }
- #endif /* HAVE_DNSSD */
-
-+#ifdef HAVE_AVAHI
-+static void
-+avahi_resolve_uri_client_cb (AvahiClient *client,
-+ AvahiClientState state,
-+ void *simple_poll)
-+{
-+ DEBUG_printf(("avahi_resolve_uri_client_callback(client=%p, state=%d, "
-+ "simple_poll=%p)\n", client, state, simple_poll));
-+
-+ /*
-+ * If the connection drops, quit.
-+ */
-+
-+ if (state == AVAHI_CLIENT_FAILURE)
-+ avahi_simple_poll_quit (simple_poll);
-+}
-+
-+static void
-+avahi_resolve_uri_resolver_cb (AvahiServiceResolver *resolver,
-+ AvahiIfIndex interface,
-+ AvahiProtocol protocol,
-+ AvahiResolverEvent event,
-+ const char *name,
-+ const char *type,
-+ const char *domain,
-+ const char *host_name,
-+ const AvahiAddress *address,
-+ uint16_t port,
-+ AvahiStringList *txt,
-+ AvahiLookupResultFlags flags,
-+ void *context)
-+{
-+ const char *scheme; /* URI scheme */
-+ char rp[256]; /* Remote printer */
-+ AvahiStringList *pair;
-+ char *value;
-+ size_t valueLen = 0;
-+ char addr[AVAHI_ADDRESS_STR_MAX];
-+ struct
-+ {
-+ AvahiSimplePoll *poll;
-+ _http_uribuf_t uribuf;
-+ } *poll_uribuf = context;
-+
-+ DEBUG_printf(("avahi_resolve_uri_resolver_callback(resolver=%p, "
-+ "interface=%d, protocol=%d, event=%d, name=\"%s\", "
-+ "type=\"%s\", domain=\"%s\", host_name=\"%s\", address=%p, "
-+ "port=%d, txt=%p, flags=%d, context=%p)\n",
-+ resolver, interface, protocol, event, name, type, domain,
-+ host_name, address, port, txt, flags, context));
-+
-+ if (event != AVAHI_RESOLVER_FOUND)
-+ {
-+ avahi_service_resolver_free (resolver);
-+ avahi_simple_poll_quit (poll_uribuf->poll);
-+ return;
-+ }
-+
-+ /*
-+ * Figure out the scheme from the full name...
-+ */
-+
-+ if (strstr(type, "_ipp."))
-+ scheme = "ipp";
-+ else if (strstr(type, "_printer."))
-+ scheme = "lpd";
-+ else if (strstr(type, "_pdl-datastream."))
-+ scheme = "socket";
-+ else
-+ scheme = "riousbprint";
-+
-+ /*
-+ * Extract the "remote printer key from the TXT record...
-+ */
-+
-+ if ((pair = avahi_string_list_find (txt, "rp")) != NULL)
-+ {
-+ avahi_string_list_get_pair (pair, NULL, &value, &valueLen);
-+ rp[0] = '/';
-+ memcpy (rp + 1, value, valueLen);
-+ rp[valueLen + 1] = '\0';
-+ }
-+ else
-+ rp[0] = '\0';
-+
-+ /*
-+ * Assemble the final device URI...
-+ */
-+
-+ avahi_address_snprint (addr, AVAHI_ADDRESS_STR_MAX, address);
-+ httpAssembleURI(HTTP_URI_CODING_ALL, poll_uribuf->uribuf.buffer,
-+ poll_uribuf->uribuf.bufsize, scheme, NULL,
-+ addr, port, rp);
-+ DEBUG_printf(("avahi_resolve_uri_resolver_callback: Resolved URI is \"%s\"\n",
-+ poll_uribuf->uribuf.buffer));
-+ avahi_simple_poll_quit (poll_uribuf->poll);
-+}
-+#endif /* HAVE_AVAHI */
-+
-
- /*
- * End of "$Id: http-support.c 9322 2010-10-01 22:40:38Z mike $".
diff --git a/staging/cups/cups.install b/staging/cups/cups.install
deleted file mode 100644
index e92e17ed3..000000000
--- a/staging/cups/cups.install
+++ /dev/null
@@ -1,15 +0,0 @@
-post_install() {
- xdg-icon-resource forceupdate --theme hicolor 2> /dev/null
- echo ">> If you use an HTTPS connection to CUPS, the first time you access"
- echo ">> the interface it may take a very long time before the site comes up."
- echo ">> This is because the first request triggers the generation of the CUPS"
- echo ">> SSL certificates which can be a very time-consuming job."
-}
-
-post_upgrade() {
- xdg-icon-resource forceupdate --theme hicolor 2> /dev/null
-}
-
-post_remove() {
- xdg-icon-resource forceupdate --theme hicolor 2> /dev/null
-}
diff --git a/staging/cups/cups.logrotate b/staging/cups/cups.logrotate
deleted file mode 100644
index 9c49bbdaf..000000000
--- a/staging/cups/cups.logrotate
+++ /dev/null
@@ -1,8 +0,0 @@
-/var/log/cups/*log {
- missingok
- notifempty
- delaycompress
- postrotate
- /bin/kill -HUP `cat /var/run/cups.pid 2>/dev/null` 2>/dev/null || true
- endscript
-}
diff --git a/staging/cups/cups.pam b/staging/cups/cups.pam
deleted file mode 100644
index 53724d1f8..000000000
--- a/staging/cups/cups.pam
+++ /dev/null
@@ -1,3 +0,0 @@
-auth required pam_unix.so
-account required pam_unix.so
-session required pam_unix.so
diff --git a/staging/cvs/PKGBUILD b/staging/cvs/PKGBUILD
deleted file mode 100644
index 76f71a2a1..000000000
--- a/staging/cvs/PKGBUILD
+++ /dev/null
@@ -1,39 +0,0 @@
-# $Id: PKGBUILD 121072 2011-04-28 22:54:58Z stephane $
-# Contributor: dorphell <dorphell@archlinux.org>
-
-pkgname=cvs
-pkgver=1.11.23
-pkgrel=7
-pkgdesc="Concurrent Versions System - a source control system"
-arch=(i686 x86_64)
-url="http://cvs.nongnu.org"
-license=('GPL')
-depends=('krb5')
-optdepends=('openssh: for using cvs over ssh' 'inetutils: for using cvs over rsh')
-install=cvs.install
-source=(ftp://ftp.gnu.org/non-gnu/cvs/source/stable/${pkgver}/${pkgname}-${pkgver}.tar.bz2
- cvs-1.11.23-getline64.patch
- cvs-1.11.23-cve-2010-3846.patch)
-sha256sums=('400f51b59d85116e79b844f2d5dbbad4759442a789b401a94aa5052c3d7a4aa9'
- '9126d7992ace943980ad8a10d5a09aeb6f1eeeb9b921fc796fe31de7b1c220cf'
- 'c6506d0a5efc7b0cab6415f26e070ec214fb9781fac8d295506f4d0825431a8f')
-
-build() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- unset EDITOR VISUAL
-
- patch -Np1 -i ../cvs-1.11.23-getline64.patch
-
- # CVE-2010-3864, see https://www.redhat.com/security/data/cve/CVE-2010-3846.html
- patch -Np1 -i ../cvs-1.11.23-cve-2010-3846.patch
-
- ./configure --prefix=/usr
- make
-}
-
-package() {
- cd "${srcdir}/${pkgname}-${pkgver}"
-
- make DESTDIR="${pkgdir}" install
- rm "${pkgdir}"/usr/share/info/dir
-}
diff --git a/staging/cvs/cvs-1.11.23-cve-2010-3846.patch b/staging/cvs/cvs-1.11.23-cve-2010-3846.patch
deleted file mode 100644
index e1560cef8..000000000
--- a/staging/cvs/cvs-1.11.23-cve-2010-3846.patch
+++ /dev/null
@@ -1,167 +0,0 @@
-From b122edcb68ff05bb6eb22f6e50423e7f1050841b Mon Sep 17 00:00:00 2001
-From: Larry Jones <lawrence.jones@siemens.com>
-Date: Thu, 21 Oct 2010 10:08:16 +0200
-Subject: [PATCH] Fix for CVE-2010-3846
-MIME-Version: 1.0
-Content-Type: text/plain; charset=UTF-8
-Content-Transfer-Encoding: 8bit
-
-Mallformed RCS revision (delete after the end of input file, or overlayed
-deleted regions) screws output file image size computation. This leads to
-write attempt after the allocated memory opening hiden memory corruption
-driven by CVS server.
-
-Signed-off-by: Petr Písař <ppisar@redhat.com>
----
- src/rcs.c | 52 +++++++++++++++++++++++++++++-----------------------
- 1 files changed, 29 insertions(+), 23 deletions(-)
-
-diff --git a/src/rcs.c b/src/rcs.c
-index 7d0d078..2f88f85 100644
---- a/src/rcs.c
-+++ b/src/rcs.c
-@@ -7128,7 +7128,7 @@ apply_rcs_changes (orig_lines, diffbuf, difflen, name, addvers, delvers)
- struct deltafrag *dfhead;
- struct deltafrag **dftail;
- struct deltafrag *df;
-- unsigned long numlines, lastmodline, offset;
-+ unsigned long numlines, offset;
- struct linevector lines;
- int err;
-
-@@ -7202,12 +7202,12 @@ apply_rcs_changes (orig_lines, diffbuf, difflen, name, addvers, delvers)
-
- /* New temp data structure to hold new org before
- copy back into original structure. */
-- lines.nlines = lines.lines_alloced = numlines;
-+ lines.lines_alloced = numlines;
- lines.vector = xmalloc (numlines * sizeof *lines.vector);
-
- /* We changed the list order to first to last -- so the
- list never gets larger than the size numlines. */
-- lastmodline = 0;
-+ lines.nlines = 0;
-
- /* offset created when adding/removing lines
- between new and original structure */
-@@ -7216,25 +7216,24 @@ apply_rcs_changes (orig_lines, diffbuf, difflen, name, addvers, delvers)
- for (df = dfhead; df != NULL; )
- {
- unsigned int ln;
-- unsigned long deltaend;
-+ unsigned long newpos = df->pos - offset;
-
-- if (df->pos > orig_lines->nlines)
-+ if (newpos < lines.nlines || newpos > numlines)
- err = 1;
-
- /* On error, just free the rest of the list. */
- if (!err)
- {
-- /* Here we need to get to the line where the next insert will
-+ /* Here we need to get to the line where the next change will
- begin, which is DF->pos in ORIG_LINES. We will fill up to
- DF->pos - OFFSET in LINES with original items. */
-- for (deltaend = df->pos - offset;
-- lastmodline < deltaend;
-- lastmodline++)
-+ while (lines.nlines < newpos)
- {
- /* we need to copy from the orig structure into new one */
-- lines.vector[lastmodline] =
-- orig_lines->vector[lastmodline + offset];
-- lines.vector[lastmodline]->refcount++;
-+ lines.vector[lines.nlines] =
-+ orig_lines->vector[lines.nlines + offset];
-+ lines.vector[lines.nlines]->refcount++;
-+ lines.nlines++;
- }
-
- switch (df->type)
-@@ -7246,7 +7245,12 @@ apply_rcs_changes (orig_lines, diffbuf, difflen, name, addvers, delvers)
- struct line *q;
- int nextline_newline;
- size_t nextline_len;
--
-+
-+ if (newpos + df->nlines > numlines)
-+ {
-+ err = 1;
-+ break;
-+ }
- textend = df->new_lines + df->len;
- nextline_newline = 0;
- nextline_text = df->new_lines;
-@@ -7271,8 +7275,7 @@ apply_rcs_changes (orig_lines, diffbuf, difflen, name, addvers, delvers)
- q->has_newline = nextline_newline;
- q->refcount = 1;
- memcpy (q->text, nextline_text, nextline_len);
-- lines.vector[lastmodline++] = q;
-- offset--;
-+ lines.vector[lines.nlines++] = q;
-
- nextline_text = (char *)p + 1;
- nextline_newline = 0;
-@@ -7286,11 +7289,11 @@ apply_rcs_changes (orig_lines, diffbuf, difflen, name, addvers, delvers)
- q->has_newline = nextline_newline;
- q->refcount = 1;
- memcpy (q->text, nextline_text, nextline_len);
-- lines.vector[lastmodline++] = q;
-+ lines.vector[lines.nlines++] = q;
-
- /* For each line we add the offset between the #'s
- decreases. */
-- offset--;
-+ offset -= df->nlines;
- break;
- }
-
-@@ -7301,7 +7304,9 @@ apply_rcs_changes (orig_lines, diffbuf, difflen, name, addvers, delvers)
- if (df->pos + df->nlines > orig_lines->nlines)
- err = 1;
- else if (delvers)
-+ {
- for (ln = df->pos; ln < df->pos + df->nlines; ++ln)
-+ {
- if (orig_lines->vector[ln]->refcount > 1)
- /* Annotate needs this but, since the original
- * vector is disposed of before returning from
-@@ -7309,6 +7314,8 @@ apply_rcs_changes (orig_lines, diffbuf, difflen, name, addvers, delvers)
- * there are multiple references.
- */
- orig_lines->vector[ln]->vers = delvers;
-+ }
-+ }
- break;
- }
- }
-@@ -7328,21 +7335,20 @@ apply_rcs_changes (orig_lines, diffbuf, difflen, name, addvers, delvers)
- else
- {
- /* add the rest of the remaining lines to the data vector */
-- for (; lastmodline < numlines; lastmodline++)
-+ while (lines.nlines < numlines)
- {
- /* we need to copy from the orig structure into new one */
-- lines.vector[lastmodline] = orig_lines->vector[lastmodline
-+ lines.vector[lines.nlines] = orig_lines->vector[lines.nlines
- + offset];
-- lines.vector[lastmodline]->refcount++;
-+ lines.vector[lines.nlines]->refcount++;
-+ lines.nlines++;
- }
-
- /* Move the lines vector to the original structure for output,
- * first deleting the old.
- */
- linevector_free (orig_lines);
-- orig_lines->vector = lines.vector;
-- orig_lines->lines_alloced = numlines;
-- orig_lines->nlines = lines.nlines;
-+ *orig_lines = lines;
- }
-
- return !err;
---
-1.7.2.3
-
diff --git a/staging/cvs/cvs-1.11.23-getline64.patch b/staging/cvs/cvs-1.11.23-getline64.patch
deleted file mode 100644
index 99942e058..000000000
--- a/staging/cvs/cvs-1.11.23-getline64.patch
+++ /dev/null
@@ -1,34 +0,0 @@
---- cvs-1.11.23/lib/getline.c 2005-04-04 22:46:05.000000000 +0200
-+++ cvs-1.11.23/lib/getline.c.old 2008-06-03 19:06:25.000000000 +0200
-@@ -154,7 +154,7 @@
- return ret;
- }
-
--int
-+ssize_t
- getline (lineptr, n, stream)
- char **lineptr;
- size_t *n;
-@@ -163,7 +163,7 @@
- return getstr (lineptr, n, stream, '\n', 0, GETLINE_NO_LIMIT);
- }
-
--int
-+ssize_t
- getline_safe (lineptr, n, stream, limit)
- char **lineptr;
- size_t *n;
---- cvs-1.11.23/lib/getline.h 2005-04-04 22:46:05.000000000 +0200
-+++ cvs-1.11.23/lib/getline.h.old 2008-06-03 19:06:27.000000000 +0200
-@@ -11,9 +11,9 @@
-
- #define GETLINE_NO_LIMIT -1
-
--int
-+ssize_t
- getline __PROTO ((char **_lineptr, size_t *_n, FILE *_stream));
--int
-+ssize_t
- getline_safe __PROTO ((char **_lineptr, size_t *_n, FILE *_stream,
- int limit));
- int
diff --git a/staging/cvs/cvs.install b/staging/cvs/cvs.install
deleted file mode 100644
index f1cdd1f3e..000000000
--- a/staging/cvs/cvs.install
+++ /dev/null
@@ -1,20 +0,0 @@
-infodir=/usr/share/info
-filelist=(cvs.info cvs-info-1 cvs-info-2 cvsclient.info)
-
-post_install() {
- for file in ${filelist[@]}; do
- install-info $infodir/$file $infodir/dir 2> /dev/null
- done
-}
-
-post_upgrade() {
- post_install $1
-}
-
-pre_remove() {
- for file in ${filelist[@]}; do
- install-info --delete $infodir/$file $infodir/dir 2> /dev/null
- done
-}
-
-# vim:set ts=2 sw=2 et:
diff --git a/staging/cyrus-sasl-plugins/PKGBUILD b/staging/cyrus-sasl-plugins/PKGBUILD
deleted file mode 100644
index 5817bb657..000000000
--- a/staging/cyrus-sasl-plugins/PKGBUILD
+++ /dev/null
@@ -1,52 +0,0 @@
-# $Id: PKGBUILD 122144 2011-05-02 13:27:49Z stephane $
-# Maintainer: Jan de Groot <jgc@archlinux.org>
-
-pkgname=cyrus-sasl-plugins
-pkgver=2.1.23
-pkgrel=5
-pkgdesc="Cyrus Simple Authentication Service Layer (SASL) library"
-arch=('i686' 'x86_64')
-url="http://cyrusimap.web.cmu.edu/"
-license=('custom')
-depends=('postgresql-libs>=9.0.3' 'krb5' 'libldap>2.4'
- 'libmysqlclient>=5.5.10')
-source=(ftp://ftp.andrew.cmu.edu/pub/cyrus-mail/cyrus-sasl-${pkgver}.tar.gz
- cyrus-sasl-2.1.22-gcc44.patch
- cyrus-sasl-2.1.23-db5-fix.patch)
-md5sums=('2eb0e48106f0e9cd8001e654f267ecbc'
- '5deb4d67b53ecba20c7887fc8fdebee1'
- '3ae4347705141145f31cf786c38ea9ef')
-options=('!libtool')
-
-build() {
- cd ${srcdir}/cyrus-sasl-${pkgver}
- patch -Np1 -i $srcdir/cyrus-sasl-2.1.22-gcc44.patch
- # from http://bugs.gentoo.org/show_bug.cgi?id=319935
- patch -Np0 -i ${srcdir}/cyrus-sasl-2.1.23-db5-fix.patch
- ./configure --prefix=/usr \
- --sysconfdir=/etc \
- --localstatedir=/var \
- --disable-login \
- --disable-plain \
- --enable-sql \
- --disable-sqlite \
- --enable-gssapi=/usr/include/gssapi \
- --with-mysql=/usr \
- --with-pgsql=/usr \
- --enable-postgresql \
- --enable-ldapdb \
- --with-ldap=/usr
- cd sasldb
- make
- cd ../plugins
- make
-}
-
-package () {
- cd ${srcdir}/cyrus-sasl-${pkgver}/plugins
- make DESTDIR=${pkgdir} install
-
- install -Dm644 ../COPYING ${pkgdir}/usr/share/licenses/${pkgname}/COPYING
-
- rm -f ${pkgdir}/usr/lib/sasl2/libsasldb.*
-}
diff --git a/staging/cyrus-sasl-plugins/cyrus-sasl-2.1.22-gcc44.patch b/staging/cyrus-sasl-plugins/cyrus-sasl-2.1.22-gcc44.patch
deleted file mode 100644
index e2621278b..000000000
--- a/staging/cyrus-sasl-plugins/cyrus-sasl-2.1.22-gcc44.patch
+++ /dev/null
@@ -1,24 +0,0 @@
-fix warnings with gcc-4.4
-
-http://bugs.gentoo.org/248738
-
---- cyrus-sasl-2.1.22/plugins/digestmd5.c
-+++ cyrus-sasl-2.1.22/plugins/digestmd5.c
-@@ -2715,7 +2715,7 @@ static sasl_server_plug_t digestmd5_serv
- "DIGEST-MD5", /* mech_name */
- #ifdef WITH_RC4
- 128, /* max_ssf */
--#elif WITH_DES
-+#elif defined(WITH_DES)
- 112,
- #else
- 1,
-@@ -4034,7 +4034,7 @@ static sasl_client_plug_t digestmd5_clie
- "DIGEST-MD5",
- #ifdef WITH_RC4 /* mech_name */
- 128, /* max ssf */
--#elif WITH_DES
-+#elif defined(WITH_DES)
- 112,
- #else
- 1,
diff --git a/staging/cyrus-sasl-plugins/cyrus-sasl-2.1.23-db5-fix.patch b/staging/cyrus-sasl-plugins/cyrus-sasl-2.1.23-db5-fix.patch
deleted file mode 100644
index 2ccd6cdb3..000000000
--- a/staging/cyrus-sasl-plugins/cyrus-sasl-2.1.23-db5-fix.patch
+++ /dev/null
@@ -1,23 +0,0 @@
---- sasldb/db_berkeley.c.orig 2010-10-04 21:11:15.044010468 -0400
-+++ sasldb/db_berkeley.c 2010-10-04 21:12:18.921998718 -0400
-@@ -100,7 +100,7 @@
- ret = db_create(mbdb, NULL, 0);
- if (ret == 0 && *mbdb != NULL)
- {
--#if DB_VERSION_MAJOR == 4 && DB_VERSION_MINOR >= 1
-+#if (DB_VERSION_MAJOR == 4 && DB_VERSION_MINOR >= 1) || DB_VERSION_MAJOR == 5
- ret = (*mbdb)->open(*mbdb, NULL, path, NULL, DB_HASH, flags, 0660);
- #else
- ret = (*mbdb)->open(*mbdb, path, NULL, DB_HASH, flags, 0660);
-
---- utils/dbconverter-2.c.orig 2010-10-04 21:23:39.778000256 -0400
-+++ utils/dbconverter-2.c 2010-10-04 21:24:50.384999893 -0400
-@@ -214,7 +214,7 @@
- ret = db_create(mbdb, NULL, 0);
- if (ret == 0 && *mbdb != NULL)
- {
--#if DB_VERSION_MAJOR == 4 && DB_VERSION_MINOR >= 1
-+#if (DB_VERSION_MAJOR == 4 && DB_VERSION_MINOR >= 1) || DB_VERSION_MAJOR == 5
- ret = (*mbdb)->open(*mbdb, NULL, path, NULL, DB_HASH, DB_CREATE, 0664);
- #else
- ret = (*mbdb)->open(*mbdb, path, NULL, DB_HASH, DB_CREATE, 0664);
diff --git a/staging/cyrus-sasl/PKGBUILD b/staging/cyrus-sasl/PKGBUILD
deleted file mode 100644
index 6d42f9f75..000000000
--- a/staging/cyrus-sasl/PKGBUILD
+++ /dev/null
@@ -1,53 +0,0 @@
-# $Id: PKGBUILD 122168 2011-05-02 15:13:37Z stephane $
-# Maintainer: Jan de Groot <jgc@archlinux.org>
-
-pkgname=cyrus-sasl
-pkgver=2.1.23
-pkgrel=5
-pkgdesc="SASL authentication daemon"
-arch=('i686' 'x86_64')
-license=('custom')
-url="http://asg.web.cmu.edu/cyrus/download/"
-depends=('pam>=1.0.1-2' 'krb5' 'libldap' 'cyrus-sasl-plugins' 'db>=5.0')
-replaces=(cyrus-sasl-mysql cyrus-sasl-pgsql)
-conflicts=(cyrus-sasl-mysql cyrus-sasl-pgsql)
-backup=(etc/conf.d/saslauthd)
-source=(ftp://ftp.andrew.cmu.edu/pub/cyrus-mail/${pkgname}-${pkgver}.tar.gz
- saslauthd
- saslauthd.conf.d
- cyrus-sasl-2.1.23-gcc4.patch
- cyrus-sasl-2.1.23+db-5.0.patch)
-md5sums=('2eb0e48106f0e9cd8001e654f267ecbc'
- '697dfb51206c398bc976ce9f4cffe72d'
- '96d8a2f6189501f8044838e04d5cae7f'
- '3a71688df7d5724cd55a8de17d74f34e'
- '35c189c8e93ad37e3ae3c49386fdeb2c')
-
-build() {
- cd "${srcdir}/${pkgname}-${pkgver}"
-
- # Fix building with db v5.x
- patch -Np1 -i ../cyrus-sasl-2.1.23+db-5.0.patch
-
- # Fix error: #elif with no expression
- patch -Np1 -i ../cyrus-sasl-2.1.23-gcc4.patch
-
- ./configure --prefix=/usr --mandir=/usr/share/man \
- --with-ldap=/usr --with-saslauthd=/var/run/saslauthd \
- --disable-krb4 --with-gss_impl=mit --disable-otp
- cd saslauthd
- make
-}
-
-package() {
- cd "${srcdir}/${pkgname}-${pkgver}/saslauthd"
- make DESTDIR="${pkgdir}" install
- make testsaslauthd
- install -m755 testsaslauthd "${pkgdir}/usr/sbin"
-
- install -dm766 "${pkgdir}/var/run/saslauthd"
- install -Dm755 "${srcdir}/saslauthd" "${pkgdir}/etc/rc.d/saslauthd"
- install -Dm644 "${srcdir}/saslauthd.conf.d" "${pkgdir}/etc/conf.d/saslauthd"
-
- install -Dm644 ../COPYING "${pkgdir}/usr/share/licenses/cyrus-sasl/COPYING"
-}
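
package() above also builds and installs testsaslauthd, which makes it straightforward to confirm that a saslauthd started from /etc/rc.d/saslauthd really authenticates. A minimal smoke test, assuming the daemon is running with the PAM mechanism from /etc/conf.d/saslauthd and using placeholder credentials:

    # ask the running saslauthd to verify a user/password pair;
    # -s names the service to authenticate against
    /usr/sbin/testsaslauthd -u someuser -p somepassword -s login
    # prints roughly "0: OK" on success and "0: NO" on failure
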
diff --git a/staging/cyrus-sasl/cyrus-sasl-2.1.23+db-5.0.patch b/staging/cyrus-sasl/cyrus-sasl-2.1.23+db-5.0.patch
deleted file mode 100644
index 62df3e67e..000000000
--- a/staging/cyrus-sasl/cyrus-sasl-2.1.23+db-5.0.patch
+++ /dev/null
@@ -1,24 +0,0 @@
-diff -Naur cyrus-sasl-2.1.23.ori/sasldb/db_berkeley.c cyrus-sasl-2.1.23/sasldb/db_berkeley.c
---- cyrus-sasl-2.1.23.ori/sasldb/db_berkeley.c 2009-04-28 08:09:18.000000000 -0700
-+++ cyrus-sasl-2.1.23/sasldb/db_berkeley.c 2011-05-02 07:16:42.748675977 -0700
-@@ -100,7 +100,7 @@
- ret = db_create(mbdb, NULL, 0);
- if (ret == 0 && *mbdb != NULL)
- {
--#if DB_VERSION_MAJOR == 4 && DB_VERSION_MINOR >= 1
-+#if (DB_VERSION_MAJOR == 4 && DB_VERSION_MINOR >= 1) || DB_VERSION_MAJOR >= 5
- ret = (*mbdb)->open(*mbdb, NULL, path, NULL, DB_HASH, flags, 0660);
- #else
- ret = (*mbdb)->open(*mbdb, path, NULL, DB_HASH, flags, 0660);
-diff -Naur cyrus-sasl-2.1.23.ori/utils/dbconverter-2.c cyrus-sasl-2.1.23/utils/dbconverter-2.c
---- cyrus-sasl-2.1.23.ori/utils/dbconverter-2.c 2003-02-13 11:56:17.000000000 -0800
-+++ cyrus-sasl-2.1.23/utils/dbconverter-2.c 2011-05-02 07:16:42.748675977 -0700
-@@ -214,7 +214,7 @@
- ret = db_create(mbdb, NULL, 0);
- if (ret == 0 && *mbdb != NULL)
- {
--#if DB_VERSION_MAJOR == 4 && DB_VERSION_MINOR >= 1
-+#if (DB_VERSION_MAJOR == 4 && DB_VERSION_MINOR >= 1) || DB_VERSION_MAJOR >= 5
- ret = (*mbdb)->open(*mbdb, NULL, path, NULL, DB_HASH, DB_CREATE, 0664);
- #else
- ret = (*mbdb)->open(*mbdb, path, NULL, DB_HASH, DB_CREATE, 0664);
diff --git a/staging/cyrus-sasl/cyrus-sasl-2.1.23-gcc4.patch b/staging/cyrus-sasl/cyrus-sasl-2.1.23-gcc4.patch
deleted file mode 100644
index 0d8627b1f..000000000
--- a/staging/cyrus-sasl/cyrus-sasl-2.1.23-gcc4.patch
+++ /dev/null
@@ -1,21 +0,0 @@
-diff -Naur cyrus-sasl-2.1.23.ori/plugins/digestmd5.c cyrus-sasl-2.1.23/plugins/digestmd5.c
---- cyrus-sasl-2.1.23.ori/plugins/digestmd5.c 2009-04-28 08:09:17.000000000 -0700
-+++ cyrus-sasl-2.1.23/plugins/digestmd5.c 2011-05-02 07:56:55.375403814 -0700
-@@ -2715,7 +2715,7 @@
- "DIGEST-MD5", /* mech_name */
- #ifdef WITH_RC4
- 128, /* max_ssf */
--#elif WITH_DES
-+#elif defined(WITH_DES)
- 112,
- #else
- 1,
-@@ -4034,7 +4034,7 @@
- "DIGEST-MD5",
- #ifdef WITH_RC4 /* mech_name */
- 128, /* max ssf */
--#elif WITH_DES
-+#elif defined(WITH_DES)
- 112,
- #else
- 1,
diff --git a/staging/cyrus-sasl/saslauthd b/staging/cyrus-sasl/saslauthd
deleted file mode 100644
index c470c801c..000000000
--- a/staging/cyrus-sasl/saslauthd
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/bin/bash
-
-# source application-specific settings
-[ -f /etc/conf.d/saslauthd ] && . /etc/conf.d/saslauthd
-
-. /etc/rc.conf
-. /etc/rc.d/functions
-
-DAEMON_NAME="saslauthd"
-SASLAUTHD_BIN=/usr/sbin/saslauthd
-SASLAUTHD_PID=`pidof -o %PPID $SASLAUTHD_BIN`
-
-case "$1" in
- start)
- stat_busy "Starting $DAEMON_NAME"
- [ -z "$SASLAUTHD_PID" ] && $SASLAUTHD_BIN $SASLAUTHD_OPTS
- if [ $? -gt 0 ]; then
- stat_fail
- exit 1
- else
- echo `pidof -o %PPID $SASLAUTHD_BIN` > /var/run/$DAEMON_NAME.pid
- fi
- add_daemon $DAEMON_NAME
- stat_done
- ;;
-
- stop)
- stat_busy "Stopping $DAEMON_NAME"
- [ ! -z "$SASLAUTHD_PID" ] && kill $SASLAUTHD_PID &> /dev/null
- if [ $? -gt 0 ]; then
- stat_fail
- exit 1
- else
- rm /var/run/$DAEMON_NAME.pid &> /dev/null
- fi
- rm_daemon $DAEMON_NAME
- stat_done
- ;;
-
- restart)
- $0 stop
- sleep 1
- $0 start
- ;;
-
- *)
- echo "usage: $0 {start|stop|restart}"
-esac
-exit 0
-# vim: ts=2 sw=2 et ft=sh
diff --git a/staging/cyrus-sasl/saslauthd.conf.d b/staging/cyrus-sasl/saslauthd.conf.d
deleted file mode 100644
index b42b5d0b0..000000000
--- a/staging/cyrus-sasl/saslauthd.conf.d
+++ /dev/null
@@ -1 +0,0 @@
-SASLAUTHD_OPTS="-m /var/run/saslauthd -a pam"
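
The two options here are the socket directory (-m, matching the --with-saslauthd path used in the PKGBUILD) and the authentication mechanism (-a). Switching mechanisms only means editing this file; for example, an LDAP-backed setup is a one-line change (the ldap mechanism reads its own config file, commonly /etc/saslauthd.conf, which is an assumption about the build defaults):

    # /etc/conf.d/saslauthd, authenticating against LDAP instead of PAM
    SASLAUTHD_OPTS="-m /var/run/saslauthd -a ldap"
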
diff --git a/staging/dovecot/PKGBUILD b/staging/dovecot/PKGBUILD
deleted file mode 100644
index 7b9a0304c..000000000
--- a/staging/dovecot/PKGBUILD
+++ /dev/null
@@ -1,74 +0,0 @@
-# $Id: PKGBUILD 121982 2011-05-01 01:05:24Z stephane $
-# Contributor: Paul Mattal <paul@mattal.com>
-# Contributor: Federico Quagliata (quaqo) <quaqo@despammed.com>
-# Contributor: GARETTE Emmanuel <gnunux at laposte dot net>
-# Maintainer: Andreas Radke <andyrtr@archlinux.org>
-
-pkgname=dovecot
-pkgver=2.0.12
-pkgrel=2
-pkgdesc="An IMAP and POP3 server written with security primarily in mind"
-arch=('i686' 'x86_64')
-url="http://dovecot.org/"
-license=("LGPL")
-depends=('krb5' 'openssl' 'sqlite3>=3.7.5' 'libmysqlclient>=5.5.10'
- 'postgresql-libs>=9.0.3' 'bzip2' 'expat' 'curl')
-makedepends=('pam>=1.1.1' 'libcap>=2.19' 'libldap>=2.4.22')
-optdepends=('libldap: ldap plugin')
-provides=('imap-server' 'pop3-server')
-options=('!libtool')
-backup=(etc/dovecot/dovecot.conf
- etc/dovecot/conf.d/{10-auth,10-director,10-logging,10-mail,10-master,10-ssl}.conf
- etc/dovecot/conf.d/{15-lda,20-imap,20-lmtp,20-pop3}.conf
- etc/dovecot/conf.d/{90-acl,90-plugin,90-quota}.conf
- etc/dovecot/conf.d/auth-{checkpassword,deny,ldap,master,passwdfile,sql,static,system,vpopmail}.conf.ext
- etc/ssl/dovecot-openssl.cnf)
-install=$pkgname.install
-source=(http://dovecot.org/releases/2.0/${pkgname}-${pkgver}.tar.gz dovecot.sh)
-md5sums=('689e1a8863d4fb2fd252e1a6121dd181'
- 'd020d43eab4ded6fb379dadc570a9490')
-
-build() {
- cd ${srcdir}/$pkgname-$pkgver
-
- # configure with openssl, mysql, and postgresql support
- ./configure --prefix=/usr --sysconfdir=/etc --localstatedir=/var \
- --libexecdir=/usr/lib --with-moduledir=/usr/lib/dovecot/modules \
- --disable-static \
- --with-nss \
- --with-pam \
- --with-mysql \
- --with-pgsql \
- --with-sqlite \
- --with-ssl=openssl --with-ssldir=/etc/dovecot/ssl \
- --with-gssapi \
- --with-ldap=plugin \
- --with-zlib --with-bzlib \
- --with-libcap \
- --with-solr \
- --with-docs
- make
-}
-
-package() {
- cd ${srcdir}/$pkgname-$pkgver
- make DESTDIR=${pkgdir} install
-
- # install the launch script
- install -D -m755 ${srcdir}/$pkgname.sh ${pkgdir}/etc/rc.d/$pkgname
-
- # install example conf files and ssl.conf
- install -d -m755 ${pkgdir}/etc/dovecot/conf.d
- install -m 644 ${pkgdir}/usr/share/doc/dovecot/example-config/conf.d/*.conf ${pkgdir}/etc/dovecot/conf.d
- install -m 644 ${pkgdir}/usr/share/doc/dovecot/example-config/conf.d/*.conf.ext ${pkgdir}/etc/dovecot/conf.d
- install -m 644 ${pkgdir}/usr/share/doc/dovecot/example-config/dovecot.conf ${pkgdir}/etc/dovecot/
- install -d -m755 ${pkgdir}/etc/ssl
- install -m 644 ${srcdir}/$pkgname-$pkgver/doc/dovecot-openssl.cnf ${pkgdir}/etc/ssl/
-
- rm ${pkgdir}/etc/dovecot/README
-
- # install dovecot userdir - https://bugs.archlinux.org/task/20533
- install -d -m755 ${pkgdir}/var/run/dovecot/{login,empty}
- chmod 755 ${pkgdir}/var/run/dovecot
- chmod 750 ${pkgdir}/var/run/dovecot/login
-}
diff --git a/staging/dovecot/dovecot.install b/staging/dovecot/dovecot.install
deleted file mode 100644
index 51d1509ee..000000000
--- a/staging/dovecot/dovecot.install
+++ /dev/null
@@ -1,57 +0,0 @@
-# arg 1: the new package version
-post_install() {
-
- # Make sure the "dovecot" and "dovenull" groups and users exist on this system and have the correct values
-
- # dovecot
- if grep -q "^dovecot:" /etc/group &> /dev/null ; then
- groupmod -g 76 -n dovecot dovecot &> /dev/null
- else
- groupadd -g 76 dovecot &> /dev/null
- fi
-
- if grep -q "^dovecot:" /etc/passwd 2> /dev/null ; then
- usermod -s /sbin/nologin -c "Dovecot user" -d /var/run/dovecot/login -u 76 -g dovecot dovecot &> /dev/null
- else
- useradd -s /sbin/nologin -c "Dovecot user" -d /var/run/dovecot/login -u 76 -g dovecot -m -r dovecot &> /dev/null
- fi
-
- # dovenull
- if grep -q "^dovenull:" /etc/group &> /dev/null ; then
- groupmod -g 74 -n dovenull dovenull &> /dev/null
- else
- groupadd -g 74 dovenull &> /dev/null
- fi
-
- if grep -q "^dovenull:" /etc/passwd 2> /dev/null ; then
- usermod -s /sbin/nologin -c "Dovecot user for completely untrustworthy processes" -d /var/run/dovecot/login -u 74 -g dovenull dovenull &> /dev/null
- else
- useradd -s /sbin/nologin -c "Dovecot user for completely untrustworthy processes" -d /var/run/dovecot/login -u 74 -g dovenull -m -r dovenull &> /dev/null
- fi
-
- # harden some permissions
- chgrp dovenull /var/run/dovecot/login
-}
-
-# arg 1: the new package version
-# arg 2: the old package version
-post_upgrade() {
- post_install $1
- if [ "`vercmp $2 2.0.0`" -lt 0 ]; then
- # important upgrade notice
- echo "> IMPORTANT DOVECOT 2.0 UPGRADE NOTICE"
- echo "> ------------------------------------"
- echo "> see http://wiki2.dovecot.org/Upgrading/2.0"
- echo "> make sure, you convert the dovecot.conf file"
- fi
-
-}
-
-# arg 1: the old package version
-pre_remove() {
- userdel dovecot &> /dev/null
- userdel dovenull &> /dev/null
- groupdel dovecot &> /dev/null || /bin/true
- groupdel dovenull &> /dev/null || /bin/true
- rm -rf /var/run/dovecot/ &> /dev/null || /bin/true
-}
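
The script pins dovecot to UID/GID 76 and dovenull to UID/GID 74, and uses groupmod/usermod to pull any pre-existing entries onto those values. The same idea, reduced to a sketch that uses getent instead of grepping /etc/group and /etc/passwd directly (not the code the package runs, just an illustration):

    # create the dovecot account pair only if it does not exist yet
    getent group dovecot >/dev/null || groupadd -g 76 dovecot
    getent passwd dovecot >/dev/null || \
        useradd -r -u 76 -g dovecot -d /var/run/dovecot/login -s /sbin/nologin dovecot
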
diff --git a/staging/dovecot/dovecot.sh b/staging/dovecot/dovecot.sh
deleted file mode 100755
index b7555fe50..000000000
--- a/staging/dovecot/dovecot.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-
-. /etc/rc.conf
-. /etc/rc.d/functions
-
-PID=`pidof -o %PPID /usr/sbin/dovecot`
-case "$1" in
- start)
- stat_busy "Starting Dovecot"
- [ -z "$PID" ] && /usr/sbin/dovecot
- if [ $? -gt 0 ]; then
- stat_fail
- else
- add_daemon dovecot
- stat_done
- fi
- ;;
- stop)
- stat_busy "Stopping Dovecot"
- [ ! -z "$PID" ] && kill $PID &> /dev/null
- if [ $? -gt 0 ]; then
- stat_fail
- else
- rm_daemon dovecot
- stat_done
- fi
- ;;
- restart)
- $0 stop
- sleep 2
- $0 start
- ;;
- *)
- echo "usage: $0 {start|stop|restart}"
-esac
-exit 0
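
This is the usual Arch initscripts pattern of the period: pidof -o %PPID locates the daemon while excluding the script's own process tree, and add_daemon/rm_daemon record the running state for rc.d. Day-to-day use is the same as for any other rc.d service; a typical setup might be:

    # start the service now; to start it at boot, add dovecot
    # to the DAEMONS array in /etc/rc.conf
    /etc/rc.d/dovecot start
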
diff --git a/staging/evolution-data-server/PKGBUILD b/staging/evolution-data-server/PKGBUILD
deleted file mode 100644
index 195be21c6..000000000
--- a/staging/evolution-data-server/PKGBUILD
+++ /dev/null
@@ -1,29 +0,0 @@
-# $Id: PKGBUILD 122571 2011-05-04 16:49:38Z ibiru $
-# Maintainer: Jan de Groot <jgc@archlinux.org>
-
-pkgname=evolution-data-server
-pkgver=3.0.1
-pkgrel=2
-pkgdesc="Central location for addressbook and calendar storage in the GNOME Desktop"
-arch=('i686' 'x86_64')
-depends=('libsoup' 'nss' 'libgnome-keyring' 'krb5' 'libgweather' 'libical' 'db' 'libgdata')
-makedepends=('intltool' 'gperf' 'gobject-introspection')
-options=('!libtool')
-url="http://www.gnome.org"
-license=('GPL')
-source=(http://ftp.gnome.org/pub/gnome/sources/${pkgname}/3.0/${pkgname}-${pkgver}.tar.bz2)
-sha256sums=('8592be94027a9848ac18670a0e86e48e857539cd2813eb345eda0ace19688dd0')
-
-build() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- ./configure --prefix=/usr --sysconfdir=/etc \
- --localstatedir=/var --with-openldap=yes \
- --libexecdir=/usr/lib/evolution-data-server \
- --with-krb5=/usr --with-libdb=/usr
- make
-}
-
-package() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- make DESTDIR="${pkgdir}" install
-}
diff --git a/staging/evolution-exchange/PKGBUILD b/staging/evolution-exchange/PKGBUILD
deleted file mode 100644
index 963b54508..000000000
--- a/staging/evolution-exchange/PKGBUILD
+++ /dev/null
@@ -1,33 +0,0 @@
-# $Id: PKGBUILD 122586 2011-05-04 18:25:32Z ibiru $
-# Maintainer: Jan de Groot <jgc@archlinux.org>
-
-pkgname=evolution-exchange
-pkgver=3.0.1
-pkgrel=2
-pkgdesc="Ximian Connector Exchange plugin for Evolution"
-arch=('i686' 'x86_64')
-license=('GPL')
-url="http://www.ximian.com"
-depends=('evolution-data-server' 'gtkhtml4' 'gnome-desktop' 'libunique3')
-makedepends=('intltool' 'evolution')
-options=('!libtool' '!emptydirs')
-install=evolution-exchange.install
-source=(http://ftp.gnome.org/pub/gnome/sources/${pkgname}/${pkgver%.*}/${pkgname}-${pkgver}.tar.bz2)
-sha256sums=('4c020f7946f534e3245806f29f48dbc9ccb98afdbc92d7bb5b14a5410668ad02')
-
-build() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- ./configure --prefix=/usr --sysconfdir=/etc --localstatedir=/var \
- --libexecdir=/usr/lib/evolution --disable-static \
- --with-libdb=/usr --with-krb5=/usr
- make
-}
-
-package() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- make DESTDIR="${pkgdir}" install
-
- install -m755 -d "${pkgdir}/usr/share/gconf/schemas"
- gconf-merge-schema "${pkgdir}/usr/share/gconf/schemas/${pkgname}.schemas" --domain evolution-exchange-3.0 ${pkgdir}/etc/gconf/schemas/*.schemas
- rm -f ${pkgdir}/etc/gconf/schemas/*.schemas
-}
diff --git a/staging/evolution-exchange/evolution-exchange.install b/staging/evolution-exchange/evolution-exchange.install
deleted file mode 100644
index 1179887f9..000000000
--- a/staging/evolution-exchange/evolution-exchange.install
+++ /dev/null
@@ -1,17 +0,0 @@
-pkgname=evolution-exchange
-
-post_install() {
- usr/sbin/gconfpkg --install ${pkgname}
-}
-
-pre_upgrade() {
- pre_remove $1
-}
-
-post_upgrade() {
- post_install $1
-}
-
-pre_remove() {
- usr/sbin/gconfpkg --uninstall ${pkgname}
-}
diff --git a/staging/gnome-control-center/PKGBUILD b/staging/gnome-control-center/PKGBUILD
deleted file mode 100644
index 1031b6b1d..000000000
--- a/staging/gnome-control-center/PKGBUILD
+++ /dev/null
@@ -1,34 +0,0 @@
-# $Id: PKGBUILD 122550 2011-05-04 14:36:21Z stephane $
-# Maintainer: Jan de Groot <jgc@archlinux.org>
-
-pkgname=gnome-control-center
-pkgver=3.0.1.1
-pkgrel=2
-pkgdesc="The Control Center for GNOME"
-arch=('i686' 'x86_64')
-depends=('gtk3' 'gsettings-desktop-schemas' 'gnome-menus' 'gnome-desktop' 'gnome-settings-daemon' 'upower' 'libgtop' 'cups-pk-helper' 'accountsservice' 'sound-theme-freedesktop' 'krb5')
-optdepends=('mesa-demos: provides glxinfo for graphics information'
- 'apg: adds password generation for user accounts')
-makedepends=('gnome-doc-utils' 'intltool' 'networkmanager')
-url="http://www.gnome.org"
-groups=('gnome')
-install=gnome-control-center.install
-license=('GPL')
-options=('!libtool' '!emptydirs')
-source=(http://ftp.gnome.org/pub/GNOME/sources/${pkgname}/3.0/${pkgname}-${pkgver}.tar.bz2)
-sha256sums=('b191991d3932b363154e6cf2b5055bc711272065397daee2163b6fb04402ad79')
-
-build() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- ./configure --prefix=/usr --sysconfdir=/etc \
- --localstatedir=/var --disable-static \
- --disable-scrollkeeper --disable-update-mimedb
- make
-}
-
-package() {
- cd "${srcdir}/${pkgname}-${pkgver}"
-
- make DESTDIR="${pkgdir}" install
-}
-
diff --git a/staging/gnome-control-center/gnome-control-center.install b/staging/gnome-control-center/gnome-control-center.install
deleted file mode 100644
index eb703319f..000000000
--- a/staging/gnome-control-center/gnome-control-center.install
+++ /dev/null
@@ -1,19 +0,0 @@
-pkgname=gnome-control-center
-
-post_install() {
- gtk-update-icon-cache -q -t -f usr/share/icons/hicolor
-}
-
-pre_upgrade() {
- if [ -f usr/share/gconf/schemas/${pkgname}.schemas ]; then
- usr/sbin/gconfpkg --uninstall ${pkgname}
- fi
-}
-
-post_upgrade() {
- post_install $1
-}
-
-post_remove() {
- post_install $1
-}
diff --git a/staging/gnome-settings-daemon/PKGBUILD b/staging/gnome-settings-daemon/PKGBUILD
deleted file mode 100644
index 06f71588b..000000000
--- a/staging/gnome-settings-daemon/PKGBUILD
+++ /dev/null
@@ -1,33 +0,0 @@
-# $Id: PKGBUILD 122583 2011-05-04 18:20:31Z ibiru $
-# Maintainer: Jan de Groot <jgc@archlinux.org>
-
-pkgname=gnome-settings-daemon
-pkgver=3.0.1
-pkgrel=2
-pkgdesc="The GNOME Settings daemon"
-arch=('i686' 'x86_64')
-license=('GPL')
-depends=('libgnomekbd' 'gnome-desktop' 'libnotify' 'hicolor-icon-theme'
- 'libcanberra-pulse' 'gsettings-desktop-schemas' 'nss' 'gconf'
- 'dconf' 'pulseaudio' 'pulseaudio-alsa')
-makedepends=('intltool' 'gtk-doc')
-options=('!emptydirs' '!libtool')
-install=gnome-settings-daemon.install
-url="http://www.gnome.org"
-groups=('gnome')
-replaces=(gnome-settings-daemon-pulse)
-conflicts=(gnome-settings-daemon-pulse)
-source=(http://ftp.gnome.org/pub/gnome/sources/${pkgname}/${pkgver%.*}/${pkgname}-${pkgver}.tar.bz2)
-sha256sums=('51cdd0842b907e95c79d4e2b26f554e26fc626f7c2e6c3a14e3fc7954ca91117')
-
-build() {
- cd "$srcdir/$pkgname-$pkgver"
- ./configure --prefix=/usr --sysconfdir=/etc --localstatedir=/var \
- --libexecdir=/usr/bin --disable-static --enable-pulse
- make
-}
-
-package() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- make DESTDIR="${pkgdir}" install
-}
diff --git a/staging/gnome-settings-daemon/gnome-settings-daemon.install b/staging/gnome-settings-daemon/gnome-settings-daemon.install
deleted file mode 100644
index 483fb9450..000000000
--- a/staging/gnome-settings-daemon/gnome-settings-daemon.install
+++ /dev/null
@@ -1,18 +0,0 @@
-post_install() {
- usr/bin/glib-compile-schemas usr/share/glib-2.0/schemas
- gtk-update-icon-cache -q -t -f usr/share/icons/hicolor
-}
-
-pre_upgrade() {
- if (( $(vercmp $2 2.90.0) < 0 )); then
- usr/sbin/gconfpkg --uninstall gnome-settings-daemon
- fi
-}
-
-post_upgrade() {
- post_install
-}
-
-post_remove() {
- post_install
-}
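
pre_upgrade only migrates away from GConf when the installed package predates the GSettings switch, which is what the vercmp guard expresses. vercmp is pacman's version comparator and prints a negative number, zero, or a positive number; the guard behaves like this:

    vercmp 2.32.1 2.90.0   # prints -1: old schemas get removed via gconfpkg
    vercmp 3.0.1 2.90.0    # prints 1: nothing to migrate on a 3.x upgrade
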
diff --git a/staging/gnome-vfs/PKGBUILD b/staging/gnome-vfs/PKGBUILD
deleted file mode 100644
index 3474b4c22..000000000
--- a/staging/gnome-vfs/PKGBUILD
+++ /dev/null
@@ -1,45 +0,0 @@
-# $Id: PKGBUILD 122580 2011-05-04 18:07:34Z ibiru $
-# Maintainer: Jan de Groot <jgc@archlinux.org>
-
-pkgname=gnome-vfs
-pkgver=2.24.4
-pkgrel=3
-pkgdesc="The GNOME Virtual File System"
-arch=(i686 x86_64)
-license=('LGPL')
-depends=('fam' 'gconf' 'bzip2' 'avahi' 'smbclient' 'gnome-mime-data' 'krb5' 'gnutls')
-makedepends=('pkgconfig' 'intltool' 'gtk-doc' 'gnome-common')
-options=('!libtool' '!emptydirs')
-url="http://www.gnome.org"
-install=gnome-vfs.install
-source=(http://ftp.gnome.org/pub/gnome/sources/${pkgname}/2.24/gnome-vfs-${pkgver}.tar.bz2
- gnutls-config.patch)
-sha256sums=('62de64b5b804eb04104ff98fcd6a8b7276d510a49fbd9c0feb568f8996444faa'
- '66c7cfb12995c0dd94a2caea95c7e3c55981993f05a79c585d60915ff131955d')
-
-build() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- #Fix build with new gnutls
- patch -Np1 -i "${srcdir}/gnutls-config.patch"
- libtoolize --force
- gtkdocize
- aclocal
- autoconf
- automake
- ./configure --prefix=/usr --sysconfdir=/etc \
- --localstatedir=/var --disable-static \
- --libexecdir=/usr/lib/gnome-vfs-2.0 \
- --enable-samba --disable-hal \
- --enable-avahi --disable-howl \
- --disable-openssl --enable-gnutls
- make
-}
-
-package() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- make GCONF_DISABLE_MAKEFILE_SCHEMA_INSTALL=1 DESTDIR="${pkgdir}" install
-
- install -d -m755 "${pkgdir}/usr/share/gconf/schemas"
- gconf-merge-schema "${pkgdir}/usr/share/gconf/schemas/${pkgname}.schemas" --domain gnome-vfs-2.0 ${pkgdir}/etc/gconf/schemas/*.schemas
- rm -f ${pkgdir}/etc/gconf/schemas/*.schemas
-}
diff --git a/staging/gnome-vfs/gnome-vfs.install b/staging/gnome-vfs/gnome-vfs.install
deleted file mode 100644
index 793a3e7ab..000000000
--- a/staging/gnome-vfs/gnome-vfs.install
+++ /dev/null
@@ -1,17 +0,0 @@
-pkgname=gnome-vfs
-
-post_install() {
- usr/sbin/gconfpkg --install ${pkgname}
-}
-
-pre_upgrade() {
- pre_remove $1
-}
-
-post_upgrade() {
- post_install $1
-}
-
-pre_remove() {
- usr/sbin/gconfpkg --uninstall ${pkgname}
-}
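
gconfpkg is the small Arch helper that registers the merged schema file created in package() with the GConf database. Its effect is roughly the gconftool-2 sequence below; the helper's exact implementation is an assumption, only the schema path comes from the PKGBUILD:

    # approximate effect of `usr/sbin/gconfpkg --install gnome-vfs`
    export GCONF_CONFIG_SOURCE="$(gconftool-2 --get-default-source)"
    gconftool-2 --makefile-install-rule /usr/share/gconf/schemas/gnome-vfs.schemas
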
diff --git a/staging/gnome-vfs/gnutls-config.patch b/staging/gnome-vfs/gnutls-config.patch
deleted file mode 100644
index f6fa18b17..000000000
--- a/staging/gnome-vfs/gnutls-config.patch
+++ /dev/null
@@ -1,25 +0,0 @@
-# Allow gnome-vfs-2.24 to build with gnutls >= 2.7.0
-# Use pkg-config in place of gnutls' own macro, since that macro is no longer shipped.
---- a/configure.in 2009-03-07 19:59:53.805507753 +0100
-+++ b/configure.in 2009-03-07 18:34:36.928169018 +0100
-@@ -686,14 +686,14 @@
- AC_MSG_ERROR([*** Can't use both openssl and gnutls at the same time. Please pick one only. ***])
- else
- AC_CHECK_HEADER(gcrypt.h,, AC_MSG_ERROR([*** Need gcrypt.h to compile with GnuTLS support ***]))
-- AM_PATH_LIBGNUTLS(1.0.0, [AC_DEFINE(HAVE_GNUTLS, 1, [Define to 1 if GnuTLS is available])
-- have_ssl=true])
-+ PKG_CHECK_MODULES(LIBGNUTLS, gnutls >= 1.0.0, [AC_DEFINE(HAVE_GNUTLS, 1, [Define to 1 if GnuTLS is available])
-+ have_ssl=true])
-
- if test "x${LIBGNUTLS_LIBS}" = "x"; then
-- AM_PATH_LIBGNUTLS(0.5.1, [AC_DEFINE(HAVE_GNUTLS, 1, [Define to 1 if GnuTLS is available])
-- AC_DEFINE(GNUTLS_COMPAT, 1, [FIXME])
-- have_ssl=true],
-- AC_MSG_ERROR([Unable to find GNUTLS]))
-+ PKG_CHECK_MODULES(LIBGNUTLS, gnutls >= 0.5.1, [AC_DEFINE(HAVE_GNUTLS, 1, [Define to 1 if GnuTLS is available])
-+ AC_DEFINE(GNUTLS_COMPAT, 1, [FIXME])
-+ have_ssl=true],
-+ AC_MSG_ERROR([Unable to find GNUTLS]))
- fi
- fi
- fi
diff --git a/staging/gtk2/PKGBUILD b/staging/gtk2/PKGBUILD
deleted file mode 100644
index e67d63520..000000000
--- a/staging/gtk2/PKGBUILD
+++ /dev/null
@@ -1,50 +0,0 @@
-# $Id: PKGBUILD 122323 2011-05-03 20:18:27Z stephane $
-# Maintainer: Jan de Groot <jgc@archlinux.org>
-
-pkgbase=gtk2
-pkgname=('gtk2' 'gtk-update-icon-cache')
-pkgver=2.24.4
-pkgrel=2
-arch=('i686' 'x86_64')
-url="http://www.gtk.org/"
-makedepends=('atk' 'pango' 'libxcursor' 'libxinerama' 'libxrandr' 'libxi' 'libxcomposite' 'libxdamage' 'krb5' 'gnutls'
- 'shared-mime-info' 'cairo' 'libcups' 'gdk-pixbuf2' 'gobject-introspection')
-options=('!libtool' '!docs')
-license=('LGPL')
-source=(http://ftp.gnome.org/pub/gnome/sources/gtk+/2.24/gtk+-${pkgver}.tar.bz2
- xid-collision-debug.patch)
-sha256sums=('7d3033ad83647079977466d3e8f1a7533f47abd5cc693f01b8797ff43dd407a5'
- 'd758bb93e59df15a4ea7732cf984d1c3c19dff67c94b957575efea132b8fe558')
-
-build() {
- cd "${srcdir}/gtk+-${pkgver}"
- patch -Np1 -i "${srcdir}/xid-collision-debug.patch"
-
- CXX=/bin/false ./configure --prefix=/usr \
- --sysconfdir=/etc \
- --localstatedir=/var \
- --with-xinput=yes
- make
-}
-package_gtk2() {
- pkgdesc="The GTK+ Toolkit (v2)"
- install=gtk2.install
- depends=('atk' 'pango' 'libxcursor' 'libxinerama' 'libxrandr' 'libxi' 'libxcomposite' 'libxdamage' 'krb5' 'gnutls' 'shared-mime-info' 'cairo' 'libcups' 'gtk-update-icon-cache')
- backup=(etc/gtk-2.0/gtkrc)
-
- cd "${srcdir}/gtk+-${pkgver}"
-
- make DESTDIR="${pkgdir}" install
- sed -i "s#env python#env python2#" $pkgdir/usr/bin/gtk-builder-convert
- echo 'gtk-fallback-icon-theme = "gnome"' > "${pkgdir}/etc/gtk-2.0/gtkrc"
- #split this out to use with gtk3 too
- rm ${pkgdir}/usr/bin/gtk-update-icon-cache
-}
-package_gtk-update-icon-cache() {
- pkgdesc="The GTK+ update icon cache tool"
- depends=('gdk-pixbuf2')
-
- cd "${srcdir}/gtk+-${pkgver}/gtk"
-
- install -D -m755 gtk-update-icon-cache ${pkgdir}/usr/bin/gtk-update-icon-cache
-}
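
This is a split PKGBUILD: a single build() feeds two package_*() functions, and gtk-update-icon-cache is stripped out of gtk2 and shipped on its own so that gtk3 can depend on it without dragging in GTK+ 2. One makepkg run therefore produces two package files (names below are assumptions derived from pkgname, pkgver and pkgrel):

    makepkg
    # expected artifacts:
    #   gtk2-2.24.4-2-<arch>.pkg.tar.xz
    #   gtk-update-icon-cache-2.24.4-2-<arch>.pkg.tar.xz
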
diff --git a/staging/gtk2/gtk2.install b/staging/gtk2/gtk2.install
deleted file mode 100644
index 4e2b72f1b..000000000
--- a/staging/gtk2/gtk2.install
+++ /dev/null
@@ -1,16 +0,0 @@
-post_install() {
- usr/bin/gtk-query-immodules-2.0 > etc/gtk-2.0/gtk.immodules
-}
-
-pre_upgrade() {
- pre_remove
-}
-
-post_upgrade() {
- post_install
-}
-
-pre_remove() {
- rm -f etc/gtk-2.0/gtk.immodules &>/dev/null
- rm -f etc/gtk-2.0/gdk-pixbuf.loaders &>/dev/null
-}
diff --git a/staging/gtk2/xid-collision-debug.patch b/staging/gtk2/xid-collision-debug.patch
deleted file mode 100644
index d61238c3b..000000000
--- a/staging/gtk2/xid-collision-debug.patch
+++ /dev/null
@@ -1,15 +0,0 @@
---- gtk+-2.18.3/gdk/x11/gdkxid.c 2009-06-19 04:59:18.000000000 +0200
-+++ gtk+-2.18.3/gdk/x11/gdkxid.c.new 2009-07-22 11:30:12.000000000 +0200
-@@ -56,10 +56,10 @@
- if (!display_x11->xid_ht)
- display_x11->xid_ht = g_hash_table_new ((GHashFunc) gdk_xid_hash,
- (GEqualFunc) gdk_xid_equal);
--
-+/*
- if (g_hash_table_lookup (display_x11->xid_ht, xid))
- g_warning ("XID collision, trouble ahead");
--
-+*/
- g_hash_table_insert (display_x11->xid_ht, xid, data);
- }
-
diff --git a/staging/gtk3/PKGBUILD b/staging/gtk3/PKGBUILD
deleted file mode 100644
index 5550647fc..000000000
--- a/staging/gtk3/PKGBUILD
+++ /dev/null
@@ -1,36 +0,0 @@
-# $Id: PKGBUILD 122547 2011-05-04 14:30:27Z stephane $
-# Maintainer: Ionut Biru <ibiru@archlinux.org>
-
-pkgname=gtk3
-pkgver=3.0.9
-pkgrel=2
-pkgdesc="The GTK+ Toolkit (v3)"
-arch=('i686' 'x86_64')
-url="http://www.gtk.org/"
-install=gtk3.install
-depends=('atk' 'cairo' 'gtk-update-icon-cache' 'gnutls' 'krb5' 'libcups' 'libxcursor' 'libxinerama' 'libxrandr' 'libxi' 'libxcomposite' 'libxdamage' 'pango' 'shared-mime-info')
-makedepends=('gobject-introspection')
-options=('!libtool' '!docs')
-backup=(etc/gtk-3.0/settings.ini)
-license=('LGPL')
-source=(http://ftp.gnome.org/pub/gnome/sources/gtk+/3.0/gtk+-${pkgver}.tar.bz2
- settings.ini)
-sha256sums=('88a9dda6f2a23155ac3d7aca6b414ca3b55e2817b46bcc87733f1a407e16678c'
- 'c214d3dcdcadda3d642112287524ab3e526ad592b70895c9f3e3733c23701621')
-
-build() {
- cd "${srcdir}/gtk+-${pkgver}"
- CXX=/bin/false ./configure --prefix=/usr \
- --sysconfdir=/etc \
- --localstatedir=/var \
- --enable-gtk2-dependency \
- --disable-schemas-compile
- make
-}
-
-package() {
- cd "${srcdir}/gtk+-${pkgver}"
- make DESTDIR="${pkgdir}" install
-
- install -Dm644 "${srcdir}/settings.ini" "${pkgdir}/etc/gtk-3.0/settings.ini"
-}
diff --git a/staging/gtk3/gtk3.install b/staging/gtk3/gtk3.install
deleted file mode 100644
index cd8965d1f..000000000
--- a/staging/gtk3/gtk3.install
+++ /dev/null
@@ -1,15 +0,0 @@
-post_install() {
- usr/bin/gtk-query-immodules-3.0 --update-cache
- usr/bin/glib-compile-schemas usr/share/glib-2.0/schemas
-}
-
-post_upgrade() {
- post_install
-}
-
-pre_remove() {
- rm -f usr/lib/gtk-3.0/3.0.0/immodules.cache
-}
-post_remove() {
- usr/bin/glib-compile-schemas usr/share/glib-2.0/schemas
-}
diff --git a/staging/gtk3/settings.ini b/staging/gtk3/settings.ini
deleted file mode 100644
index 039000d38..000000000
--- a/staging/gtk3/settings.ini
+++ /dev/null
@@ -1,2 +0,0 @@
-[Settings]
-gtk-fallback-icon-theme = gnome
diff --git a/staging/kdelibs/PKGBUILD b/staging/kdelibs/PKGBUILD
deleted file mode 100644
index 0f4a867d4..000000000
--- a/staging/kdelibs/PKGBUILD
+++ /dev/null
@@ -1,62 +0,0 @@
-# $Id: PKGBUILD 122843 2011-05-06 11:48:02Z andrea $
-# Maintainer: Andrea Scarpino <andrea@archlinux.org>
-# Contributor: Pierre Schmitz <pierre@archlinux.de>
-
-pkgname=kdelibs
-pkgver=4.6.3
-pkgrel=2
-pkgdesc="KDE Core Libraries"
-arch=('i686' 'x86_64')
-url='http://www.kde.org'
-license=('GPL' 'LGPL' 'FDL')
-depends=('strigi' 'attica' 'libxss' 'xz' 'openssl' 'soprano' 'krb5'
- 'shared-desktop-ontologies' 'qca' 'libdbusmenu-qt' 'polkit-qt' 'grantlee'
- 'shared-mime-info' 'enchant' 'giflib' 'jasper' 'openexr' 'xdg-utils'
- 'phonon' 'hicolor-icon-theme' 'upower' 'udisks' 'libxcursor'
- 'docbook-xsl')
-makedepends=('pkgconfig' 'cmake' 'automoc4' 'intltool' 'avahi' 'libgl'
- 'hspell')
-replaces=('arts' 'kdelibs-experimental')
-install='kdelibs.install'
-source=("http://download.kde.org/stable/${pkgver}/src/${pkgname}-${pkgver}.tar.bz2"
- 'kde-applications-menu.patch' 'archlinux-menu.patch' 'abs-syntax-highlight.patch')
-sha1sums=('c7fb089c9d52a6b1d9188b9e788753373a3288e4'
- '86ee8c8660f19de8141ac99cd6943964d97a1ed7'
- '63a850ab4196b9d06934f2b4a13acd9f7739bc67'
- 'd994f262356af5b9e4e9619646e471bd98c91efb')
-
-build() {
- cd ${srcdir}/${pkgname}-${pkgver}
-
- # avoid file conflict with gnome-menu
- patch -p1 -i $srcdir/kde-applications-menu.patch
- # add Archlinux menu entry
- patch -p1 -i $srcdir/archlinux-menu.patch
- # add syntax highlighting for PKGBUILD and .install files
- patch -p1 -i $srcdir/abs-syntax-highlight.patch
-
- cd ${srcdir}
- mkdir build
- cd build
- cmake ../${pkgname}-${pkgver} \
- -DCMAKE_BUILD_TYPE=Release \
- -DCMAKE_SKIP_RPATH=ON \
- -DKDE_DISTRIBUTION_TEXT='Arch Linux' \
- -DCMAKE_INSTALL_PREFIX=/usr \
- -DSYSCONF_INSTALL_DIR=/etc \
- -DHTML_INSTALL_DIR=/usr/share/doc/kde/html \
- -DKDE_DEFAULT_HOME='.kde4' \
- -DWITH_FAM=OFF \
- -DKAUTH_BACKEND=PolkitQt-1
- make
-}
-
-package() {
- cd $srcdir/build
- make DESTDIR=$pkgdir install
-
- # cert bundle seems to be hardcoded
- # link it to the one from ca-certificates
- rm -f $pkgdir/usr/share/apps/kssl/ca-bundle.crt
- ln -sf /etc/ssl/certs/ca-certificates.crt $pkgdir/usr/share/apps/kssl/ca-bundle.crt
-}
diff --git a/staging/kdelibs/abs-syntax-highlight.patch b/staging/kdelibs/abs-syntax-highlight.patch
deleted file mode 100644
index 477479a9b..000000000
--- a/staging/kdelibs/abs-syntax-highlight.patch
+++ /dev/null
@@ -1,12 +0,0 @@
-diff -Nura kdelibs-4.3.0.orig/kate/syntax/data/bash.xml kdelibs-4.3.0/kate/syntax/data/bash.xml
---- kdelibs-4.3.0.orig/kate/syntax/data/bash.xml 2009-04-15 12:26:37.000000000 +0200
-+++ kdelibs-4.3.0/kate/syntax/data/bash.xml 2009-07-30 13:24:01.000000000 +0200
-@@ -8,7 +8,7 @@
- <!ENTITY noword "(?![\w$+-])"> <!-- no word, $, + or - following -->
- <!ENTITY pathpart "([\w_@.&#37;*?+-]|\\ )"> <!-- valid character in a file name -->
- ]>
--<language name="Bash" version="2.12" kateversion="2.4" section="Scripts" extensions="*.sh;*.bash;*.ebuild;*.eclass;.bashrc;.bash_profile;.bash_login;.profile" mimetype="application/x-shellscript" casesensitive="1" author="Wilbert Berendsen (wilbert@kde.nl)" license="LGPL">
-+<language name="Bash" version="2.12" kateversion="2.4" section="Scripts" extensions="*.sh;*.bash;*.ebuild;*.eclass;.bashrc;.bash_profile;.bash_login;.profile;PKGBUILD;*.install" mimetype="application/x-shellscript" casesensitive="1" author="Wilbert Berendsen (wilbert@kde.nl)" license="LGPL">
-
- <!-- (c) 2004 by Wilbert Berendsen (wilbert@kde.nl)
- Changes by Matthew Woehlke (mw_triad@users.sourceforge.net)
diff --git a/staging/kdelibs/archlinux-menu.patch b/staging/kdelibs/archlinux-menu.patch
deleted file mode 100644
index 546784fa2..000000000
--- a/staging/kdelibs/archlinux-menu.patch
+++ /dev/null
@@ -1,22 +0,0 @@
---- kdelibs-4.3.98/kded/applications.menu 2010-01-31 19:28:11.000000000 +0000
-+++ kdelibs-4.3.98/kded/applications.menu 2010-01-31 22:25:53.556043077 +0000
-@@ -16,11 +16,19 @@
- </DefaultLayout>
- <Layout>
- <Merge type="menus"/>
-+ <Menuname>Arch Linux</Menuname>
- <Menuname>Applications</Menuname>
- <Merge type="files"/>
- </Layout>
-
- <Menu>
-+ <Name>Arch Linux</Name>
-+ <Directory>Archlinux.directory</Directory>
-+ <Include>
-+ <Category>Archlinux</Category>
-+ </Include>
-+ </Menu>
-+ <Menu>
- <Name>Applications</Name>
- <Directory>kde-unknown.directory</Directory>
- <OnlyUnallocated/>
diff --git a/staging/kdelibs/kde-applications-menu.patch b/staging/kdelibs/kde-applications-menu.patch
deleted file mode 100644
index 4b513298a..000000000
--- a/staging/kdelibs/kde-applications-menu.patch
+++ /dev/null
@@ -1,22 +0,0 @@
---- kdelibs-4.3.98/kded/CMakeLists.txt 2009-10-02 14:55:17.000000000 +0000
-+++ kdelibs-4.3.98/kded/CMakeLists.txt 2010-01-31 22:16:13.946933892 +0000
-@@ -69,7 +69,7 @@
- if (WIN32)
- install( FILES applications.menu DESTINATION ${SHARE_INSTALL_PREFIX}/xdg/menus )
- else (WIN32)
--install( FILES applications.menu DESTINATION ${SYSCONF_INSTALL_DIR}/xdg/menus )
-+install( FILES applications.menu DESTINATION ${SYSCONF_INSTALL_DIR}/xdg/menus RENAME kde-applications.menu )
- endif (WIN32)
- install( FILES kdedmodule.desktop DESTINATION ${SERVICETYPES_INSTALL_DIR} )
- install( FILES kded.upd DESTINATION ${DATA_INSTALL_DIR}/kconf_update )
---- kdelibs-4.3.98/kded/kbuildsycoca.cpp 2009-12-04 23:10:18.000000000 +0000
-+++ kdelibs-4.3.98/kded/kbuildsycoca.cpp 2010-01-31 22:16:13.962766572 +0000
-@@ -302,7 +302,7 @@
- if (!m_trackId.isEmpty())
- g_vfolder->setTrackId(m_trackId);
-
-- VFolderMenu::SubMenu *kdeMenu = g_vfolder->parseMenu("applications.menu", true);
-+ VFolderMenu::SubMenu *kdeMenu = g_vfolder->parseMenu("kde-applications.menu", true);
-
- KServiceGroup::Ptr entry = g_bsgf->addNew("/", kdeMenu->directoryFile, KServiceGroup::Ptr(), false);
- entry->setLayoutInfo(kdeMenu->layoutList);
diff --git a/staging/kdelibs/kdelibs.install b/staging/kdelibs/kdelibs.install
deleted file mode 100644
index ce5c32e1b..000000000
--- a/staging/kdelibs/kdelibs.install
+++ /dev/null
@@ -1,12 +0,0 @@
-post_install() {
- xdg-icon-resource forceupdate --theme hicolor &> /dev/null
- update-mime-database usr/share/mime &> /dev/null
-}
-
-post_upgrade() {
- post_install
-}
-
-post_remove() {
- post_install
-}
\ No newline at end of file
diff --git a/staging/krb5/CVE-2010-4022.patch b/staging/krb5/CVE-2010-4022.patch
deleted file mode 100644
index 30ebf9638..000000000
--- a/staging/krb5/CVE-2010-4022.patch
+++ /dev/null
@@ -1,19 +0,0 @@
-diff -up krb5/src/slave/kpropd.c krb5/src/slave/kpropd.c
---- krb5/src/slave/kpropd.c 2010-12-17 11:14:26.000000000 -0500
-+++ krb5/src/slave/kpropd.c 2010-12-17 11:41:19.000000000 -0500
-@@ -404,11 +404,11 @@ retry:
- }
-
- close(s);
-- if (iproprole == IPROP_SLAVE)
-+ if (iproprole == IPROP_SLAVE) {
- close(finet);
--
-- if ((ret = WEXITSTATUS(status)) != 0)
-- return (ret);
-+ if ((ret = WEXITSTATUS(status)) != 0)
-+ return (ret);
-+ }
- }
- if (iproprole == IPROP_SLAVE)
- break;
diff --git a/staging/krb5/CVE-2011-0281.0282.0283.patch b/staging/krb5/CVE-2011-0281.0282.0283.patch
deleted file mode 100644
index e4623e910..000000000
--- a/staging/krb5/CVE-2011-0281.0282.0283.patch
+++ /dev/null
@@ -1,126 +0,0 @@
-diff --git a/src/kdc/dispatch.c b/src/kdc/dispatch.c
-index 63ff3b3..b4a90bb 100644
---- a/src/kdc/dispatch.c
-+++ b/src/kdc/dispatch.c
-@@ -115,7 +115,8 @@ dispatch(void *cb, struct sockaddr *local_saddr, const krb5_fulladdr *from,
- kdc_insert_lookaside(pkt, *response);
- #endif
-
-- if (is_tcp == 0 && (*response)->length > max_dgram_reply_size) {
-+ if (is_tcp == 0 && *response != NULL &&
-+ (*response)->length > max_dgram_reply_size) {
- too_big_for_udp:
- krb5_free_data(kdc_context, *response);
- retval = make_too_big_error(response);
-diff --git a/src/plugins/kdb/ldap/libkdb_ldap/kdb_ldap.h b/src/plugins/kdb/ldap/libkdb_ldap/kdb_ldap.h
-index d677bb2..a356907 100644
---- a/src/plugins/kdb/ldap/libkdb_ldap/kdb_ldap.h
-+++ b/src/plugins/kdb/ldap/libkdb_ldap/kdb_ldap.h
-@@ -102,14 +102,18 @@ extern void prepend_err_str (krb5_context ctx, const char *s, krb5_error_code er
- #define LDAP_SEARCH(base, scope, filter, attrs) LDAP_SEARCH_1(base, scope, filter, attrs, CHECK_STATUS)
-
- #define LDAP_SEARCH_1(base, scope, filter, attrs, status_check) \
-- do { \
-- st = ldap_search_ext_s(ld, base, scope, filter, attrs, 0, NULL, NULL, &timelimit, LDAP_NO_LIMIT, &result); \
-- if (translate_ldap_error(st, OP_SEARCH) == KRB5_KDB_ACCESS_ERROR) { \
-- tempst = krb5_ldap_rebind(ldap_context, &ldap_server_handle); \
-- if (ldap_server_handle) \
-- ld = ldap_server_handle->ldap_handle; \
-- } \
-- }while (translate_ldap_error(st, OP_SEARCH) == KRB5_KDB_ACCESS_ERROR && tempst == 0); \
-+ tempst = 0; \
-+ st = ldap_search_ext_s(ld, base, scope, filter, attrs, 0, NULL, \
-+ NULL, &timelimit, LDAP_NO_LIMIT, &result); \
-+ if (translate_ldap_error(st, OP_SEARCH) == KRB5_KDB_ACCESS_ERROR) { \
-+ tempst = krb5_ldap_rebind(ldap_context, &ldap_server_handle); \
-+ if (ldap_server_handle) \
-+ ld = ldap_server_handle->ldap_handle; \
-+ if (tempst == 0) \
-+ st = ldap_search_ext_s(ld, base, scope, filter, attrs, 0, \
-+ NULL, NULL, &timelimit, \
-+ LDAP_NO_LIMIT, &result); \
-+ } \
- \
- if (status_check != IGNORE_STATUS) { \
- if (tempst != 0) { \
-diff --git a/src/plugins/kdb/ldap/libkdb_ldap/kdb_ldap_conn.c b/src/plugins/kdb/ldap/libkdb_ldap/kdb_ldap_conn.c
-index 82b0333..84e80ee 100644
---- a/src/plugins/kdb/ldap/libkdb_ldap/kdb_ldap_conn.c
-+++ b/src/plugins/kdb/ldap/libkdb_ldap/kdb_ldap_conn.c
-@@ -302,6 +302,7 @@ krb5_ldap_rebind(krb5_ldap_context *ldap_context,
- {
- krb5_ldap_server_handle *handle = *ldap_server_handle;
-
-+ ldap_unbind_ext_s(handle->ldap_handle, NULL, NULL);
- if ((ldap_initialize(&handle->ldap_handle, handle->server_info->server_name) != LDAP_SUCCESS)
- || (krb5_ldap_bind(ldap_context, handle) != LDAP_SUCCESS))
- return krb5_ldap_request_next_handle_from_pool(ldap_context, ldap_server_handle);
-diff --git a/src/plugins/kdb/ldap/libkdb_ldap/ldap_misc.c b/src/plugins/kdb/ldap/libkdb_ldap/ldap_misc.c
-index 86fa4d1..0f49c86 100644
---- a/src/plugins/kdb/ldap/libkdb_ldap/ldap_misc.c
-+++ b/src/plugins/kdb/ldap/libkdb_ldap/ldap_misc.c
-@@ -487,12 +487,11 @@ is_principal_in_realm(krb5_ldap_context *ldap_context,
- * portion, then the first portion of the principal name SHOULD be
- * "krbtgt". All this check is done in the immediate block.
- */
-- if (searchfor->length == 2)
-- if ((strncasecmp(searchfor->data[0].data, "krbtgt",
-- FIND_MAX(searchfor->data[0].length, strlen("krbtgt"))) == 0) &&
-- (strncasecmp(searchfor->data[1].data, defrealm,
-- FIND_MAX(searchfor->data[1].length, defrealmlen)) == 0))
-+ if (searchfor->length == 2) {
-+ if (data_eq_string(searchfor->data[0], "krbtgt") &&
-+ data_eq_string(searchfor->data[1], defrealm))
- return 0;
-+ }
-
- /* first check the length, if they are not equal, then they are not same */
- if (strlen(defrealm) != searchfor->realm.length)
-diff --git a/src/plugins/kdb/ldap/libkdb_ldap/ldap_principal2.c b/src/plugins/kdb/ldap/libkdb_ldap/ldap_principal2.c
-index 140db1a..552e39a 100644
---- a/src/plugins/kdb/ldap/libkdb_ldap/ldap_principal2.c
-+++ b/src/plugins/kdb/ldap/libkdb_ldap/ldap_principal2.c
-@@ -78,10 +78,10 @@ krb5_error_code
- krb5_ldap_get_principal(krb5_context context, krb5_const_principal searchfor,
- unsigned int flags, krb5_db_entry **entry_ptr)
- {
-- char *user=NULL, *filter=NULL, **subtree=NULL;
-+ char *user=NULL, *filter=NULL, *filtuser=NULL;
- unsigned int tree=0, ntrees=1, princlen=0;
- krb5_error_code tempst=0, st=0;
-- char **values=NULL, *cname=NULL;
-+ char **values=NULL, **subtree=NULL, *cname=NULL;
- LDAP *ld=NULL;
- LDAPMessage *result=NULL, *ent=NULL;
- krb5_ldap_context *ldap_context=NULL;
-@@ -115,12 +115,18 @@ krb5_ldap_get_principal(krb5_context context, krb5_const_principal searchfor,
- if ((st=krb5_ldap_unparse_principal_name(user)) != 0)
- goto cleanup;
-
-- princlen = strlen(FILTER) + strlen(user) + 2 + 1; /* 2 for closing brackets */
-+ filtuser = ldap_filter_correct(user);
-+ if (filtuser == NULL) {
-+ st = ENOMEM;
-+ goto cleanup;
-+ }
-+
-+ princlen = strlen(FILTER) + strlen(filtuser) + 2 + 1; /* 2 for closing brackets */
- if ((filter = malloc(princlen)) == NULL) {
- st = ENOMEM;
- goto cleanup;
- }
-- snprintf(filter, princlen, FILTER"%s))", user);
-+ snprintf(filter, princlen, FILTER"%s))", filtuser);
-
- if ((st = krb5_get_subtree_info(ldap_context, &subtree, &ntrees)) != 0)
- goto cleanup;
-@@ -207,6 +213,9 @@ cleanup:
- if (user)
- free(user);
-
-+ if (filtuser)
-+ free(filtuser);
-+
- if (cname)
- free(cname);
-
diff --git a/staging/krb5/CVE-2011-0284.patch b/staging/krb5/CVE-2011-0284.patch
deleted file mode 100644
index c97727568..000000000
--- a/staging/krb5/CVE-2011-0284.patch
+++ /dev/null
@@ -1,13 +0,0 @@
-diff --git a/src/kdc/do_as_req.c b/src/kdc/do_as_req.c
-index 46b5fa1..464cb6e 100644
---- a/src/kdc/do_as_req.c
-+++ b/src/kdc/do_as_req.c
-@@ -741,6 +741,8 @@ prepare_error_as (struct kdc_request_state *rstate, krb5_kdc_req *request,
- pad->contents = td[size]->data;
- pad->length = td[size]->length;
- pa[size] = pad;
-+ td[size]->data = NULL;
-+ td[size]->length = 0;
- }
- krb5_free_typed_data(kdc_context, td);
- }
diff --git a/staging/krb5/CVE-2011-0285.patch b/staging/krb5/CVE-2011-0285.patch
deleted file mode 100644
index 61039113f..000000000
--- a/staging/krb5/CVE-2011-0285.patch
+++ /dev/null
@@ -1,39 +0,0 @@
-diff --git a/src/kadmin/server/schpw.c b/src/kadmin/server/schpw.c
-index 1124445..0056885 100644
---- a/src/kadmin/server/schpw.c
-+++ b/src/kadmin/server/schpw.c
-@@ -52,6 +52,7 @@ process_chpw_request(context, server_handle, realm, keytab,
-
- ret = 0;
- rep->length = 0;
-+ rep->data = NULL;
-
- auth_context = NULL;
- changepw = NULL;
-@@ -76,8 +77,13 @@ process_chpw_request(context, server_handle, realm, keytab,
- plen = (*ptr++ & 0xff);
- plen = (plen<<8) | (*ptr++ & 0xff);
-
-- if (plen != req->length)
-- return(KRB5KRB_AP_ERR_MODIFIED);
-+ if (plen != req->length) {
-+ ret = KRB5KRB_AP_ERR_MODIFIED;
-+ numresult = KRB5_KPASSWD_MALFORMED;
-+ strlcpy(strresult, "Request length was inconsistent",
-+ sizeof(strresult));
-+ goto chpwfail;
-+ }
-
- /* verify version number */
-
-@@ -531,6 +537,10 @@ cleanup:
- if (local_kaddrs != NULL)
- krb5_free_addresses(server_handle->context, local_kaddrs);
-
-+ if ((*response)->data == NULL) {
-+ free(*response);
-+ *response = NULL;
-+ }
- krb5_kt_close(server_handle->context, kt);
-
- return ret;
diff --git a/staging/krb5/PKGBUILD b/staging/krb5/PKGBUILD
deleted file mode 100644
index 4e3dd2c4f..000000000
--- a/staging/krb5/PKGBUILD
+++ /dev/null
@@ -1,64 +0,0 @@
-# $Id: PKGBUILD 122855 2011-05-06 19:36:38Z stephane $
-# Maintainer: Stéphane Gaudreault <stephane@archlinux.org>
-
-pkgname=krb5
-pkgver=1.9.1
-pkgrel=1
-pkgdesc="The Kerberos network authentication system"
-arch=('i686' 'x86_64')
-url="http://web.mit.edu/kerberos/"
-license=('custom')
-depends=('e2fsprogs' 'libldap' 'keyutils')
-makedepends=('perl')
-provides=('heimdal')
-replaces=('heimdal')
-conflicts=('heimdal')
-backup=('etc/krb5/krb5.conf' 'etc/krb5/kdc.conf')
-source=(http://web.mit.edu/kerberos/dist/${pkgname}/1.9/${pkgname}-${pkgver}-signed.tar
- kadmind.rc
- krb5-kdc.rc)
-sha1sums=('e23a1795a237521493da9cf3443ac8b98a90c066'
- '640e3046c6558313d2be81cf2252afc8622892b0'
- '77d2312ecd8bf12a6e72cc8fd871a8ac93b23393')
-options=('!emptydirs')
-
-build() {
- tar zxvf ${pkgname}-${pkgver}.tar.gz
- cd "${srcdir}/${pkgname}-${pkgver}/src"
-
- export CFLAGS+=" -fPIC -fno-strict-aliasing -fstack-protector-all"
- export CPPFLAGS+=" -I/usr/include/et"
- ./configure --prefix=/usr \
- --sysconfdir=/etc/krb5 \
- --mandir=/usr/share/man \
- --localstatedir=/var/lib \
- --enable-shared \
- --with-system-et \
- --with-system-ss \
- --disable-rpath \
- --without-tcl \
- --enable-dns-for-realm \
- --with-ldap
-
- make
-}
-
-check() {
- # We can't do this in the build directory.
- cd "${srcdir}/${pkgname}-${pkgver}"
- make -C src check
-}
-
-package() {
- cd "${srcdir}/${pkgname}-${pkgver}/src"
- make DESTDIR="${pkgdir}" EXAMPLEDIR="/usr/share/doc/${pkgname}/examples" install
-
- install -D -m 644 config-files/kdc.conf "${pkgdir}"/etc/krb5/kdc.conf
- install -D -m 644 config-files/krb5.conf "${pkgdir}"/etc/krb5/krb5.conf
-
- install -d -m 755 "${pkgdir}"/etc/rc.d
- install -m 755 ../../krb5-kdc.rc "${pkgdir}"/etc/rc.d
- install -m 755 ../../kadmind.rc "${pkgdir}"/etc/rc.d
-
- install -Dm644 "${srcdir}"/${pkgname}-${pkgver}/NOTICE "${pkgdir}"/usr/share/licenses/${pkgname}/LICENSE
-}
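
The source is MIT's "-signed" bundle, which is why build() first extracts ${pkgname}-${pkgver}.tar.gz from inside it. The wrapper tar is expected to hold the real tarball plus a detached signature; only the inner .tar.gz is implied by build(), the .asc name is an assumption:

    # peek inside the downloaded bundle before building
    tar -tf krb5-1.9.1-signed.tar
    #   krb5-1.9.1.tar.gz
    #   krb5-1.9.1.tar.gz.asc   (detached PGP signature, assumed)
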
diff --git a/staging/krb5/kadmind.rc b/staging/krb5/kadmind.rc
deleted file mode 100644
index 45835e35b..000000000
--- a/staging/krb5/kadmind.rc
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash
-
-# general config
-. /etc/rc.conf
-. /etc/rc.d/functions
-
-PID=`pidof -o %PPID /usr/sbin/kadmind`
-case "$1" in
- start)
- stat_busy "Starting Kerberos Admin Daemon"
- if [ -z "$PID" ]; then
- /usr/sbin/kadmind
- fi
- if [ ! -z "$PID" -o $? -gt 0 ]; then
- stat_fail
- else
- add_daemon kadmind
- stat_done
- fi
- ;;
- stop)
- stat_busy "Stopping Kerberos Admin Daemon"
- [ ! -z "$PID" ] && kill $PID &> /dev/null
- if [ $? -gt 0 ]; then
- stat_fail
- else
- rm_daemon kadmind
- stat_done
- fi
- ;;
- restart)
- $0 stop
- sleep 1
- $0 start
- ;;
- *)
- echo "usage: $0 {start|stop|restart}"
- ;;
-esac
-exit 0
diff --git a/staging/krb5/krb5-kdc.rc b/staging/krb5/krb5-kdc.rc
deleted file mode 100644
index 05a03411e..000000000
--- a/staging/krb5/krb5-kdc.rc
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash
-
-# general config
-. /etc/rc.conf
-. /etc/rc.d/functions
-
-PID=`pidof -o %PPID /usr/sbin/krb5kdc`
-case "$1" in
- start)
- stat_busy "Starting Kerberos Authentication"
- if [ -z "$PID" ]; then
- /usr/sbin/krb5kdc
- fi
- if [ ! -z "$PID" -o $? -gt 0 ]; then
- stat_fail
- else
- add_daemon krb5-kdc
- stat_done
- fi
- ;;
- stop)
- stat_busy "Stopping Kerberos Authentication"
- [ ! -z "$PID" ] && kill $PID &> /dev/null
- if [ $? -gt 0 ]; then
- stat_fail
- else
- rm_daemon krb5-kdc
- stat_done
- fi
- ;;
- restart)
- $0 stop
- sleep 1
- $0 start
- ;;
- *)
- echo "usage: $0 {start|stop|restart}"
- ;;
-esac
-exit 0
diff --git a/staging/libgnomecups/PKGBUILD b/staging/libgnomecups/PKGBUILD
deleted file mode 100644
index 157e18f50..000000000
--- a/staging/libgnomecups/PKGBUILD
+++ /dev/null
@@ -1,31 +0,0 @@
-# $Id: PKGBUILD 122592 2011-05-04 18:48:08Z ibiru $
-# Maintainer: Jan de Groot <jgc@archlinux.org>
-# Contributor: Paulius Palevicius <paulius@birzai.com>
-
-pkgname=libgnomecups
-pkgver=0.2.3
-pkgrel=8
-pkgdesc="GNOME cups library"
-arch=(i686 x86_64)
-license=('LGPL' 'GPL')
-url="http://www.gnome.org"
-depends=('libcups' 'glib2' 'krb5' 'gnutls')
-makedepends=('perlxml')
-options=(!libtool)
-source=(http://ftp.gnome.org/pub/GNOME/sources/${pkgname}/0.2/${pkgname}-${pkgver}.tar.bz2
- libgnomecups_0.2.3-ignore-ipp-not-found.patch)
-md5sums=('dc4920c15c9f886f73ea74fbff0ae48b'
- '973a1b9d93013ce431400a14b78f5d94')
-
-build() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- # This avoids generating huge 'IPP request failed with status 1030' lines
- patch -Np1 -i ${srcdir}/libgnomecups_0.2.3-ignore-ipp-not-found.patch
- ./configure --prefix=/usr
- make
-}
-
-package() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- make DESTDIR="${pkgdir}" install
-}
diff --git a/staging/libgnomecups/libgnomecups_0.2.3-ignore-ipp-not-found.patch b/staging/libgnomecups/libgnomecups_0.2.3-ignore-ipp-not-found.patch
deleted file mode 100644
index 797367bf9..000000000
--- a/staging/libgnomecups/libgnomecups_0.2.3-ignore-ipp-not-found.patch
+++ /dev/null
@@ -1,11 +0,0 @@
---- a/libgnomecups/gnome-cups-request.c 2007-01-31 10:49:17.000000000 -0800
-+++ b/libgnomecups/gnome-cups-request.c 2009-11-24 12:49:30.000000000 -0800
-@@ -349,7 +349,7 @@
- if (request->response == NULL)
- status = IPP_INTERNAL_ERROR;
-
-- if (status > IPP_OK_CONFLICT) {
-+ if (status > IPP_OK_CONFLICT && status != IPP_NOT_FOUND) {
- g_warning ("IPP request failed with status %d", status);
- if (request->error != NULL)
- *(request->error) = g_error_new (GNOME_CUPS_ERROR,
diff --git a/staging/libgnomeprint/PKGBUILD b/staging/libgnomeprint/PKGBUILD
deleted file mode 100644
index db2bfedcd..000000000
--- a/staging/libgnomeprint/PKGBUILD
+++ /dev/null
@@ -1,28 +0,0 @@
-# $Id: PKGBUILD 122595 2011-05-04 18:55:04Z ibiru $
-# Maintainer: Jan de Groot <jgc@archlinux.org>
-
-pkgname=libgnomeprint
-pkgver=2.18.8
-pkgrel=2
-pkgdesc="Printing routines for GNOME"
-arch=(i686 x86_64)
-license=('LGPL' 'GPL')
-depends=('pango' 'libart-lgpl' 'libxml2' 'libgnomecups' 'krb5' 'gnutls')
-makedepends=('intltool' 'pkgconfig')
-replaces=('libgnomeprint-cups')
-conflicts=('libgnomeprint-cups')
-url="http://www.gnome.org"
-options=('!libtool')
-source=(http://ftp.gnome.org/pub/gnome/sources/${pkgname}/2.18/${pkgname}-${pkgver}.tar.bz2)
-sha256sums=('1034ec8651051f84d2424e7a1da61c530422cc20ce5b2d9e107e1e46778d9691')
-
-build() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- ./configure --prefix=/usr --disable-static
- make
-}
-
-package() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- make DESTDIR="${pkgdir}" install
-}
diff --git a/staging/librpcsecgss/PKGBUILD b/staging/librpcsecgss/PKGBUILD
deleted file mode 100644
index c16ea175b..000000000
--- a/staging/librpcsecgss/PKGBUILD
+++ /dev/null
@@ -1,27 +0,0 @@
-# $Id: PKGBUILD 121187 2011-04-29 07:12:44Z stephane $
-# Maintainer: Tobias Powalowski <tpowa@archlinux.org>
-# Contributor: Andrew Krawchyk <krawch_a@denison.edu>, Marco Lima <cipparello gmail com>
-
-pkgname=librpcsecgss
-pkgver=0.19
-pkgrel=5
-pkgdesc="Library for RPCSECGSS support"
-arch=('i686' 'x86_64')
-url="http://www.citi.umich.edu/projects/nfsv4/linux/"
-license=('GPL')
-depends=('glibc' 'krb5' 'libgssglue')
-makedepends=('pkg-config' 'autoconf')
-options=('!libtool')
-source=("http://www.citi.umich.edu/projects/nfsv4/linux/${pkgname}/${pkgname}-${pkgver}.tar.gz")
-md5sums=('b45ed565bdc3099023aa35830ec92997')
-
-build() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- ./configure --prefix=/usr
- make
-}
-
-package() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- make DESTDIR="$pkgdir/" install
-}
diff --git a/staging/librpcsecgss/librpcsecgss-0.18-heimdal.patch b/staging/librpcsecgss/librpcsecgss-0.18-heimdal.patch
deleted file mode 100644
index deb039123..000000000
--- a/staging/librpcsecgss/librpcsecgss-0.18-heimdal.patch
+++ /dev/null
@@ -1,33 +0,0 @@
-diff -NaurwB librpcsecgss-0.18.orig/configure.in librpcsecgss-0.18/configure.in
---- librpcsecgss-0.18.orig/configure.in 2008-04-09 00:05:40.000000000 +0200
-+++ librpcsecgss-0.18/configure.in 2008-06-12 19:05:51.000000000 +0200
-@@ -12,10 +12,15 @@
- AC_PROG_RANLIB
-
- # Checks for libraries.
--PKG_CHECK_MODULES([GSSGLUE], [libgssglue >= 0.1], [],
-+PKG_CHECK_MODULES([GSSGLUE], [libgssglue >= 0.1],
-+ [echo GSSGLUE found; GSSAPI_IMPLEMENTATION=libgssglue],
-+ [PKG_CHECK_MODULES([GSSGLUE], [heimdal-gssapi],
-+ [echo HEIMDAL found; GSSAPI_IMPLEMENTATION=heimdal-gssapi],
- [AC_MSG_ERROR([Unable to locate information required to use libgssglue.
- If you have pkgconfig installed, you might try setting environment
-- variable PKG_CONFIG_PATH to /usr/local/lib/pkgconfig])])
-+ variable PKG_CONFIG_PATH to /usr/local/lib/pkgconfig])])])
-+
-+AC_SUBST([GSSAPI_IMPLEMENTATION])
-
- # Checks for header files.
- AC_HEADER_STDC
-diff -NaurwB librpcsecgss-0.18.orig/librpcsecgss.pc.in librpcsecgss-0.18/librpcsecgss.pc.in
---- librpcsecgss-0.18.orig/librpcsecgss.pc.in 2007-09-06 17:39:04.000000000 +0200
-+++ librpcsecgss-0.18/librpcsecgss.pc.in 2008-06-12 19:06:40.000000000 +0200
-@@ -5,7 +5,7 @@
-
- Name: librpcsecgss
- Description: Library that implements rpcsec_gss interface.
--Requires: libgssglue
-+Requires: @GSSAPI_IMPLEMENTATION@
- Version: @PACKAGE_VERSION@
- Libs: -L@libdir@ -lrpcsecgss
- Cflags: -I@includedir@/rpcsecgss
diff --git a/staging/libtirpc/PKGBUILD b/staging/libtirpc/PKGBUILD
deleted file mode 100644
index b222249ff..000000000
--- a/staging/libtirpc/PKGBUILD
+++ /dev/null
@@ -1,32 +0,0 @@
-# $Id: PKGBUILD 121179 2011-04-29 06:50:46Z stephane $
-# Maintainer: Tobias Powalowski <tpowa@archlinux.org>
-pkgname=libtirpc
-pkgver=0.2.1
-pkgrel=3
-pkgdesc="Transport Independent RPC library (SunRPC replacement)"
-arch=(i686 x86_64)
-depends=('glibc' 'libgssglue' 'krb5')
-url="http://libtirpc.sourceforge.net/"
-license=('GPL2')
-source=(http://downloads.sourceforge.net/sourceforge/libtirpc/${pkgname}-${pkgver}.tar.bz2
- libtirpc-0.2.1-fortify.patch)
-backup=('etc/netconfig')
-options=(!libtool)
-
-build() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- # fix http://bugs.gentoo.org/293593
- # https://bugs.archlinux.org/task/20082
- patch -Np1 -i ../libtirpc-0.2.1-fortify.patch
- ./configure --prefix=/usr --enable-gss
- make
-}
-
-package() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- make DESTDIR="${pkgdir}" install
- # install netconfig
- install -D -m644 doc/etc_netconfig "${pkgdir}"/etc/netconfig
-}
-md5sums=('d77eb15f464bf9d6e66259eaf78b2a4e'
- '2e5c067f1651469dfbbdc91d3c9c60e8')
diff --git a/staging/libtirpc/libtirpc-0.2.1-fortify.patch b/staging/libtirpc/libtirpc-0.2.1-fortify.patch
deleted file mode 100644
index 7375bf83e..000000000
--- a/staging/libtirpc/libtirpc-0.2.1-fortify.patch
+++ /dev/null
@@ -1,18 +0,0 @@
-Index: libtirpc-0.2.1/src/getrpcport.c
-===================================================================
---- libtirpc-0.2.1.orig/src/getrpcport.c
-+++ libtirpc-0.2.1/src/getrpcport.c
-@@ -54,11 +54,11 @@ getrpcport(host, prognum, versnum, proto
-
- if ((hp = gethostbyname(host)) == NULL)
- return (0);
-+ if (hp->h_length != sizeof(addr.sin_addr.s_addr))
-+ return (0);
- memset(&addr, 0, sizeof(addr));
- addr.sin_family = AF_INET;
- addr.sin_port = 0;
-- if (hp->h_length > sizeof(addr))
-- hp->h_length = sizeof(addr);
- memcpy(&addr.sin_addr.s_addr, hp->h_addr, (size_t)hp->h_length);
- /* Inconsistent interfaces need casts! :-( */
- return (pmap_getport(&addr, (u_long)prognum, (u_long)versnum,
diff --git a/staging/mutt/PKGBUILD b/staging/mutt/PKGBUILD
deleted file mode 100644
index 9a940857c..000000000
--- a/staging/mutt/PKGBUILD
+++ /dev/null
@@ -1,47 +0,0 @@
-# $Id: PKGBUILD 121107 2011-04-29 00:34:05Z stephane $
-# Contributor: tobias [tobias [at] archlinux.org]
-# Maintainer: Gaetan Bisson <bisson@archlinux.org>
-
-pkgname=mutt
-pkgver=1.5.21
-pkgrel=5
-pkgdesc='Small but very powerful text-based mail client'
-url='http://www.mutt.org/'
-license=('GPL')
-backup=('etc/Muttrc')
-arch=('i686' 'x86_64')
-depends=('gpgme' 'ncurses' 'openssl' 'libsasl' 'gdbm' 'libidn' 'mime-types' 'krb5')
-source=("ftp://ftp.mutt.org/mutt/devel/${pkgname}-${pkgver}.tar.gz")
-sha1sums=('a8475f2618ce5d5d33bff85c0affdf21ab1d76b9')
-
-install=install
-
-build() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- ./configure \
- --prefix=/usr \
- --sysconfdir=/etc \
- --enable-gpgme \
- --enable-pop \
- --enable-imap \
- --enable-smtp \
- --enable-hcache \
- --with-curses=/usr \
- --with-regex \
- --with-gss=/usr \
- --with-ssl=/usr \
- --with-sasl \
- --with-idn \
-
- make
-}
-
-package() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- make DESTDIR="${pkgdir}" install
-
- rm "${pkgdir}"/usr/bin/{flea,muttbug}
- rm "${pkgdir}"/usr/share/man/man1/{flea,muttbug}.1
- rm "${pkgdir}"/etc/mime.types{,.dist}
- install -Dm644 contrib/gpg.rc "${pkgdir}"/etc/Muttrc.gpg.dist
-}
diff --git a/staging/mutt/install b/staging/mutt/install
deleted file mode 100644
index d65675c06..000000000
--- a/staging/mutt/install
+++ /dev/null
@@ -1,8 +0,0 @@
-post_install() {
- cat <<EOF
-
-==> For GPG support, add the following to your muttrc:
-==> source /etc/Muttrc.gpg.dist
-
-EOF
-}
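
The message above is the whole GPG integration step: the PKGBUILD installs mutt's contrib/gpg.rc as /etc/Muttrc.gpg.dist, and users opt in by sourcing it. For a single account that is just:

    # enable the packaged GPG defaults for one user
    echo 'source /etc/Muttrc.gpg.dist' >> ~/.muttrc
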
diff --git a/staging/neon/PKGBUILD b/staging/neon/PKGBUILD
deleted file mode 100644
index c0fe650cf..000000000
--- a/staging/neon/PKGBUILD
+++ /dev/null
@@ -1,29 +0,0 @@
-# $Id: PKGBUILD 121086 2011-04-29 00:26:05Z stephane $
-# Contributor: Tom Newsom <Jeepster@gmx.co.uk>
-# Maintainer: Juergen Hoetzel <juergen@archlinux.org>
-
-# KEEP LIBTOOL FILES!
-pkgname=neon
-pkgver=0.29.3
-pkgrel=3
-pkgdesc="HTTP and WebDAV client library with a C interface"
-arch=('i686' 'x86_64')
-license=('GPL' 'LGPL')
-depends=('krb5' 'expat' 'ca-certificates')
-url="http://www.webdav.org/neon/"
-source=("http://www.webdav.org/neon/${pkgname}-${pkgver}.tar.gz")
-md5sums=('ba1015b59c112d44d7797b62fe7bee51')
-options=('libtool')
-
-build() {
- cd ${srcdir}/${pkgname}-${pkgver}
- ./configure --prefix=/usr \
- --with-expat --enable-shared --disable-static \
- --with-ssl=openssl --with-ca-bundle=/etc/ssl/certs/ca-certificates.crt
- make
-}
-
-package() {
- cd ${srcdir}/${pkgname}-${pkgver}
- make DESTDIR=${pkgdir} install
-}
diff --git a/staging/nss_ldap/PKGBUILD b/staging/nss_ldap/PKGBUILD
deleted file mode 100644
index 0a1f93d38..000000000
--- a/staging/nss_ldap/PKGBUILD
+++ /dev/null
@@ -1,33 +0,0 @@
-# $Id: PKGBUILD 121171 2011-04-29 06:45:19Z stephane $
-# Maintainer: Paul Mattal <paul@archlinux.org>
-# Contributor: Comete <la_comete@tiscali.fr>
-
-pkgname=nss_ldap
-pkgver=265
-pkgrel=2
-pkgdesc="The nss_ldap module provides the means for Linux and Solaris workstations to resolve the entities defined in RFC 2307 from LDAP directories."
-arch=(i686 x86_64)
-url="http://www.padl.com/OSS/nss_ldap.html"
-license=('LGPL')
-depends=('libldap>=2.4.18' 'krb5')
-backup=("etc/nss_ldap.conf")
-source=(http://www.padl.com/download/${pkgname}-${pkgver}.tar.gz)
-md5sums=('c1cb02d1a85538cf16bca6f6a562abe4')
-
-build() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- ./configure \
- --with-ldap-conf-file=/etc/nss_ldap.conf \
- --with-ldap=openldap \
- --libdir=/lib \
- --mandir=/usr/share/man \
- --enable-schema-mapping \
- --enable-rfc2307bis \
- --enable-configurable-krb5-ccname-gssapi
- env PATH=`pwd`:"$PATH" make
-}
-
-package() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- make DESTDIR="${pkgdir}" install
-}
diff --git a/staging/openssh/PKGBUILD b/staging/openssh/PKGBUILD
deleted file mode 100644
index beb09369d..000000000
--- a/staging/openssh/PKGBUILD
+++ /dev/null
@@ -1,56 +0,0 @@
-# $Id: PKGBUILD 122642 2011-05-05 12:30:25Z bisson $
-# Maintainer: Gaetan Bisson <bisson@archlinux.org>
-# Contributor: Aaron Griffin <aaron@archlinux.org>
-# Contributor: judd <jvinet@zeroflux.org>
-
-pkgname=openssh
-pkgver=5.8p2
-pkgrel=4
-pkgdesc='Free version of the SSH connectivity tools'
-arch=('i686' 'x86_64')
-license=('custom:BSD')
-url='http://www.openssh.org/portable.html'
-backup=('etc/ssh/ssh_config' 'etc/ssh/sshd_config' 'etc/pam.d/sshd' 'etc/conf.d/sshd')
-depends=('tcp_wrappers' 'krb5' 'openssl' 'libedit')
-source=("ftp://ftp.openbsd.org/pub/OpenBSD/OpenSSH/portable/${pkgname}-${pkgver}.tar.gz"
- 'sshd.confd'
- 'sshd.pam'
- 'sshd')
-sha1sums=('64798328d310e4f06c9f01228107520adbc8b3e5'
- 'ec102deb69cad7d14f406289d2fc11fee6eddbdd'
- '660092c57bde28bed82078f74011f95fc51c2293'
- '6b7f8ebf0c1cc37137a7d9a53447ac8a0ee6a2b5')
-
-build() {
- cd "${srcdir}/${pkgname}-${pkgver}"
-
- ./configure --prefix=/usr --libexecdir=/usr/lib/ssh \
- --sysconfdir=/etc/ssh --with-tcp-wrappers --with-privsep-user=nobody \
- --with-md5-passwords --with-pam --with-mantype=man --mandir=/usr/share/man \
- --with-xauth=/usr/bin/xauth --with-kerberos5=/usr --with-ssl-engine \
- --with-libedit=/usr/lib --disable-strip # stripping is done by makepkg
- make
-}
-
-package() {
- cd "${srcdir}/${pkgname}-${pkgver}"
- make DESTDIR="${pkgdir}" install
-
- install -Dm755 ../sshd "${pkgdir}"/etc/rc.d/sshd
- install -Dm644 ../sshd.pam "${pkgdir}"/etc/pam.d/sshd
- install -Dm644 ../sshd.confd "${pkgdir}"/etc/conf.d/sshd
- install -Dm644 LICENCE "${pkgdir}/usr/share/licenses/${pkgname}/LICENCE"
-
- rm "${pkgdir}"/usr/share/man/man1/slogin.1
- ln -sf ssh.1.gz "${pkgdir}"/usr/share/man/man1/slogin.1.gz
-
- # additional contrib scripts that we like
- install -Dm755 contrib/findssl.sh "${pkgdir}"/usr/bin/findssl.sh
- install -Dm755 contrib/ssh-copy-id "${pkgdir}"/usr/bin/ssh-copy-id
- install -Dm644 contrib/ssh-copy-id.1 "${pkgdir}"/usr/share/man/man1/ssh-copy-id.1
-
- # PAM is a common, standard feature to have
- sed -i -e '/^#ChallengeResponseAuthentication yes$/c ChallengeResponseAuthentication no' \
- -e '/^#UsePAM no$/c UsePAM yes' \
- "$pkgdir"/etc/ssh/sshd_config
-}
diff --git a/staging/openssh/sshd b/staging/openssh/sshd
deleted file mode 100755
index 2ee1091f0..000000000
--- a/staging/openssh/sshd
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/bin/bash
-
-. /etc/rc.conf
-. /etc/rc.d/functions
-. /etc/conf.d/sshd
-
-PIDFILE=/var/run/sshd.pid
-PID=$(cat $PIDFILE 2>/dev/null)
-if ! readlink -q /proc/$PID/exe | grep -q '^/usr/sbin/sshd'; then
- PID=
- rm $PIDFILE 2>/dev/null
-fi
-
-case "$1" in
- start)
- stat_busy "Starting Secure Shell Daemon"
- [ -f /etc/ssh/ssh_host_key ] || { /usr/bin/ssh-keygen -t rsa1 -N "" -f /etc/ssh/ssh_host_key >/dev/null; }
- [ -f /etc/ssh/ssh_host_rsa_key ] || { /usr/bin/ssh-keygen -t rsa -N "" -f /etc/ssh/ssh_host_rsa_key >/dev/null; }
- [ -f /etc/ssh/ssh_host_dsa_key ] || { /usr/bin/ssh-keygen -t dsa -N "" -f /etc/ssh/ssh_host_dsa_key >/dev/null; }
- [ -f /etc/ssh/ssh_host_ecdsa_key ] || { /usr/bin/ssh-keygen -t ecdsa -N "" -f /etc/ssh/ssh_host_ecdsa_key >/dev/null; }
- [ -d /var/empty ] || mkdir -p /var/empty
- [ -z "$PID" ] && /usr/sbin/sshd $SSHD_ARGS
- if [ $? -gt 0 ]; then
- stat_fail
- else
- add_daemon sshd
- stat_done
- fi
- ;;
- stop)
- stat_busy "Stopping Secure Shell Daemon"
- [ ! -z "$PID" ] && kill $PID &> /dev/null
- if [ $? -gt 0 ]; then
- stat_fail
- else
- rm_daemon sshd
- stat_done
- fi
- ;;
- restart)
- $0 stop
- sleep 1
- $0 start
- ;;
- *)
- echo "usage: $0 {start|stop|restart}"
-esac
-exit 0
diff --git a/staging/openssh/sshd.confd b/staging/openssh/sshd.confd
deleted file mode 100644
index 5ce7c0079..000000000
--- a/staging/openssh/sshd.confd
+++ /dev/null
@@ -1,4 +0,0 @@
-#
-# Parameters to be passed to sshd
-#
-SSHD_ARGS=""
diff --git a/staging/openssh/sshd.pam b/staging/openssh/sshd.pam
deleted file mode 100644
index ae028ceb5..000000000
--- a/staging/openssh/sshd.pam
+++ /dev/null
@@ -1,10 +0,0 @@
-#%PAM-1.0
-#auth required pam_securetty.so #Disable remote root
-auth required pam_unix.so
-auth required pam_env.so
-account required pam_nologin.so
-account required pam_unix.so
-account required pam_time.so
-password required pam_unix.so
-session required pam_unix_session.so
-session required pam_limits.so
diff --git a/staging/samba/PKGBUILD b/staging/samba/PKGBUILD
deleted file mode 100644
index 6bd883503..000000000
--- a/staging/samba/PKGBUILD
+++ /dev/null
@@ -1,139 +0,0 @@
-# $Id: PKGBUILD 122577 2011-05-04 17:48:52Z ibiru $
-# Maintainer: Tobias Powalowski <tpowa@archlinux.org>
-# Contributor: judd <jvinet@zeroflux.org>
-pkgbase=samba
-pkgname=('smbclient' 'samba')
-pkgver=3.5.8
-# We use the 'A' to fake out pacman's version comparators. Samba chooses
-# to append 'a', 'b', etc. to their subsequent releases, which pacman

-# misconstrues as alpha, beta, etc. Bad samba!
-_realver=3.5.8
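-# (e.g. a hypothetical upstream 3.5.8a would be packaged here as pkgver=3.5.8A;
-# this release carries no letter suffix, so pkgver and _realver match)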
-pkgrel=3
-arch=(i686 x86_64)
-url="http://www.samba.org"
-license=('GPL3')
-makedepends=('db' 'popt' 'libcups' 'acl' 'libldap' 'libcap' 'krb5' 'pam' 'fam' 'gnutls>=2.4.1' 'talloc' 'tdb')
-options=(!makeflags)
-source=(http://us1.samba.org/samba/ftp/stable/${pkgbase}-${_realver}.tar.gz
- samba samba.logrotate
- swat.xinetd
- samba.pam
- samba.conf.d
- fix-ipv6-mount.patch)
-### UNINSTALL dmapi package before building!!!
-
-build() {
- cd ${srcdir}/${pkgbase}-${_realver}/source3
- ./configure --prefix=/usr \
- --libdir=/usr/lib/ \
- --localstatedir=/var \
- --with-configdir=/etc/samba \
- --with-lockdir=/var/cache/samba \
- --with-piddir=/var/run/samba \
- --with-fhs \
- --with-pam \
- --with-pam_smbpass \
- --with-pammodulesdir=/lib/security \
- --with-ads \
- --with-acl-support \
- --with-cifsmount \
- --with-libsmbclient \
- --with-syslog \
- --enable-external-libtalloc \
- --disable-dnssd \
- --disable-avahi \
- --with-shared-modules=idmap_ad,idmap_adex,idmap_rid,idmap_hash \
- --enable-external-libtdb
- make
-}
-
-package_smbclient () {
-pkgdesc="Tools to access a server's filespace and printers via SMB"
-depends=('readline' 'popt' 'libldap' 'cifs-utils' 'libcap' 'krb5' 'db' 'e2fsprogs' 'tdb' 'talloc')
- cd ${srcdir}/${pkgbase}-${_realver}/source3
- mkdir -p ${pkgdir}/usr/bin ${pkgdir}/usr/lib
- install -m755 bin/{smbclient,rpcclient,smbspool,smbtree,smbcacls,smbcquotas,smbget,net,nmblookup} ${pkgdir}/usr/bin/
- for i in libnetapi* libwbclient* libsmbclient*;do
- cp -a bin/${i}*.so* ${pkgdir}/usr/lib/
- done
- install -m755 script/smbtar ${pkgdir}/usr/bin/
- mkdir -p ${pkgdir}/usr/lib/cups/backend
- ln -sf /usr/bin/smbspool ${pkgdir}/usr/lib/cups/backend/smb
- mkdir -p ${pkgdir}/usr/include
- install -m644 include/libsmbclient.h ${pkgdir}/usr/include/
- install -m644 lib/netapi/netapi.h ${pkgdir}/usr/include/
- mkdir -p ${pkgdir}/usr/share/man/man{1,7}
- for man in rpcclient smbcacls smbclient smbcquotas smbget \
- smbtree smbtar nmblookup; do
- install -m644 ../docs/manpages/${man}.1 ${pkgdir}/usr/share/man/man1/
- done
- install -m644 ../docs/manpages/libsmbclient.7 ${pkgdir}/usr/share/man/man7/
-}
-
-package_samba () {
-pkgdesc="Tools to access a server's filespace and printers via SMB"
-backup=(etc/logrotate.d/samba
- etc/pam.d/samba
- etc/samba/smb.conf
- etc/xinetd.d/swat
- etc/conf.d/samba)
-depends=('db' 'popt' 'libcups' 'acl' 'libldap' "smbclient>=$pkgver" 'libcap' 'krb5' 'pam' 'fam' 'gnutls' 'e2fsprogs' 'tdb' 'talloc')
- cd ${srcdir}/samba-${_realver}/source3
- mkdir -p ${pkgdir}/var/log/samba
- mkdir -p ${pkgdir}/etc/samba/private
- chmod 700 ${pkgdir}/etc/samba/private
- make DESTDIR=${pkgdir} install
- chmod 644 ${pkgdir}/usr/include/*.h
- rm -rf ${pkgdir}/usr/var
- (cd script; cp installbin.sh i; cat i | sed 's/\/sbin\///' > installbin.sh)
- install -D -m755 ../../samba ${pkgdir}/etc/rc.d/samba
- install -D -m644 ../../samba.conf.d ${pkgdir}/etc/conf.d/samba
- mkdir -p ${pkgdir}/etc/samba
- cat ../examples/smb.conf.default | \
- sed 's|log file = .*$|log file = /var/log/samba/log.%m|g' >${pkgdir}/etc/samba/smb.conf.default
- install -D -m644 ../../samba.logrotate ${pkgdir}/etc/logrotate.d/samba
- install -D -m644 ../../swat.xinetd ${pkgdir}/etc/xinetd.d/swat
- install -D -m644 ../../samba.pam ${pkgdir}/etc/pam.d/samba
- # spool directory
- install -d -m1777 ${pkgdir}/var/spool/samba
- sed -i 's|/usr/spool/samba|/var/spool/samba|g' ${pkgdir}/etc/samba/smb.conf.default
- # fix logrotate
- sed -i -e 's|log.%m|%m.log|g' ${pkgdir}/etc/samba/smb.conf.default
- # nsswitch libraries
- install -D -m755 ${srcdir}/samba-${_realver}/nsswitch/libnss_wins.so ${pkgdir}/lib/libnss_wins.so
- ln -s libnss_wins.so ${pkgdir}/lib/libnss_wins.so.2
- install -D -m755 ${srcdir}/samba-${_realver}/nsswitch/libnss_winbind.so ${pkgdir}/lib/libnss_winbind.so
- # remove conflict files of smbclient
- for man in libsmbclient smbspool \
- mount.cifs net cifs.upcall; do
- rm -f ${pkgdir}/usr/share/man/man8/${man}.8
- done
- for i in libnetapi* libwbclient* libsmbclient*;do
- rm -f ${pkgdir}/usr/lib/$i
- done
- for bin in net \
- nmblookup rpcclient smbcacls smbclient \
- smbcquotas smbget smbspool smbtar smbtree; do
- rm -f ${pkgdir}/usr/bin/$bin
- done
- rm -f ${pkgdir}/usr/sbin/cifs.upcall
- rm -f ${pkgdir}/usr/include/netapi.h
- for man in rpcclient smbcacls smbclient smbcquotas \
- smbtree smbtar nmblookup smbget; do
- rm -f ${pkgdir}/usr/share/man/man1/${man}.1
- done
- for man in tdbbackup tdbdump tdbtool; do
- rm -f ${pkgdir}/usr/share/man/man8/${man}.8
- done
- rm -f ${pkgdir}/usr/share/man/man7/libsmbclient.7
- rm -f ${pkgdir}/usr/include/libsmbclient.h
- # copy ldap example
- install -D -m644 ${srcdir}/samba-${_realver}/examples/LDAP/samba.schema ${pkgdir}/usr/share/doc/samba/examples/LDAP/samba.schema
-}
-md5sums=('355b4530c20997e94aebc74cd6ea5307'
- '891b9be03e7adf65c9a00319c76d33e5'
- '5697da77590ec092cc8a883bae06093c'
- 'a4bbfa39fee95bba2e7ad6b535fae7e6'
- '96f82c38f3f540b53f3e5144900acf17'
- 'f2f2e348acd1ccb566e95fa8a561b828'
- 'c6a38a8c8fa24979e6217aed533358ea')
diff --git a/staging/samba/fix-ipv6-mount.patch b/staging/samba/fix-ipv6-mount.patch
deleted file mode 100644
index 89bef6533..000000000
--- a/staging/samba/fix-ipv6-mount.patch
+++ /dev/null
@@ -1,11 +0,0 @@
---- client/mount.cifs.c.orig 2010-03-03 13:42:02.143936727 +1000
-+++ client/mount.cifs.c 2010-03-04 01:53:22.752879004 +1000
-@@ -1563,7 +1563,7 @@
- }
- }
-
-- if (addr->ai_addr->sa_family == AF_INET6 && addr6->sin6_scope_id) {
-+ if (addr && addr->ai_addr->sa_family == AF_INET6 && addr6->sin6_scope_id) {
- strlcat(options, "%", options_size);
- current_len = strnlen(options, options_size);
- optionstail = options + current_len;
diff --git a/staging/samba/samba b/staging/samba/samba
deleted file mode 100755
index 1b222d6cc..000000000
--- a/staging/samba/samba
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/bin/bash
-
-. /etc/rc.conf
-. /etc/rc.d/functions
-[ -f /etc/conf.d/samba ] && . /etc/conf.d/samba
-
-[ -z "$SAMBA_DAEMONS" ] && SAMBA_DAEMONS=(smbd nmbd)
-
-case "$1" in
- start)
- rc=0
- stat_busy "Starting Samba Server"
- if [ ! -x /var/run/samba ] ; then
- install -m755 -g 81 -o 81 -d /var/run/samba
- fi
- for d in ${SAMBA_DAEMONS[@]}; do
- PID=`pidof -o %PPID /usr/sbin/$d`
- [ -z "$PID" ] && /usr/sbin/$d -D
- rc=$(($rc+$?))
- done
- if [ $rc -gt 0 ]; then
- stat_fail
- else
- add_daemon samba
- stat_done
- fi
- ;;
- stop)
- rc=0
- stat_busy "Stopping Samba Server"
- for d in ${SAMBA_DAEMONS[@]}; do
- PID=`pidof -o %PPID /usr/sbin/$d`
- [ -z "$PID" ] || kill $PID &> /dev/null
- rc=$(($rc+$?))
- done
- if [ $rc -gt 0 ]; then
- stat_fail
- else
- rm /var/run/samba/smbd.pid &>/dev/null
- rm /var/run/samba/nmbd.pid &>/dev/null
- rm /var/run/samba/winbindd.pid &>/dev/null
- rm_daemon samba
- stat_done
- fi
- ;;
- restart)
- $0 stop
- sleep 1
- $0 start
- ;;
- *)
- echo "usage: $0 {start|stop|restart}"
-esac
-exit 0
diff --git a/staging/samba/samba.conf.d b/staging/samba/samba.conf.d
deleted file mode 100644
index 8721f956a..000000000
--- a/staging/samba/samba.conf.d
+++ /dev/null
@@ -1,7 +0,0 @@
-#
-# Configuration for the samba init script
-#
-
-# space separated list of daemons to launch
-SAMBA_DAEMONS=(smbd nmbd)
-#SAMBA_DAEMONS=(smbd nmbd winbindd)
diff --git a/staging/samba/samba.logrotate b/staging/samba/samba.logrotate
deleted file mode 100644
index 581f4c33b..000000000
--- a/staging/samba/samba.logrotate
+++ /dev/null
@@ -1,9 +0,0 @@
-/var/log/samba/log.smbd /var/log/samba/log.nmbd /var/log/samba/*.log {
- notifempty
- missingok
- sharedscripts
- copytruncate
- postrotate
- /bin/kill -HUP `cat /var/run/samba/*.pid 2>/dev/null` 2>/dev/null || true
- endscript
-}
diff --git a/staging/samba/samba.pam b/staging/samba/samba.pam
deleted file mode 100644
index 53724d1f8..000000000
--- a/staging/samba/samba.pam
+++ /dev/null
@@ -1,3 +0,0 @@
-auth required pam_unix.so
-account required pam_unix.so
-session required pam_unix.so
diff --git a/staging/samba/swat.xinetd b/staging/samba/swat.xinetd
deleted file mode 100644
index 2e62a82b2..000000000
--- a/staging/samba/swat.xinetd
+++ /dev/null
@@ -1,10 +0,0 @@
-service swat
-{
- socket_type = stream
- wait = no
- user = root
- server = /usr/sbin/swat
- log_on_success += HOST DURATION
- log_on_failure += HOST
- disable = yes
-}
diff --git a/staging/subversion/PKGBUILD b/staging/subversion/PKGBUILD
deleted file mode 100644
index 0f9c0a70a..000000000
--- a/staging/subversion/PKGBUILD
+++ /dev/null
@@ -1,92 +0,0 @@
-# $Id: PKGBUILD 121110 2011-04-29 01:29:30Z stephane $
-# Maintainer: Paul Mattal <paul@archlinux.org>
-# Contributor: Jason Chu <jason@archlinux.org>
-
-pkgname=subversion
-pkgver=1.6.15
-pkgrel=2
-pkgdesc="Replacement for CVS, another versioning system (SVN)"
-arch=('i686' 'x86_64')
-license=('apache' 'bsd')
-depends=('neon' 'apr-util')
-makedepends=('krb5' 'apache' 'python2' 'perl' 'swig' 'ruby' 'java-runtime'
- 'autoconf' 'sqlite3' 'db' 'e2fsprogs' 'libgnome-keyring' 'kdelibs')
-source=(http://subversion.tigris.org/downloads/$pkgname-$pkgver.tar.bz2
- svnserve svn svnserve.conf svnmerge.py
- subversion.rpath.fix.patch
- subversion.suppress.deprecation.warnings.patch)
-
-backup=('etc/xinetd.d/svn' 'etc/conf.d/svnserve')
-url="http://subversion.apache.org/"
-provides=('svn')
-options=('!makeflags' '!libtool')
-optdepends=('libgnome-keyring' 'kdeutils-kwallet' 'bash-completion: for svn bash completion')
-
-build() {
- cd ${srcdir}/${pkgname}-${pkgver}
-
- export PYTHON=/usr/bin/python2
-
- # apply patches
- patch -p0 < $srcdir/subversion.rpath.fix.patch
- patch -p1 -i $srcdir/subversion.suppress.deprecation.warnings.patch
-
- # configure
- autoreconf
- ./configure --prefix=/usr --with-apr=/usr --with-apr-util=/usr \
- --with-zlib=/usr --with-neon=/usr --with-apxs \
- --with-sqlite=/usr --with-berkeley-db=:/usr/include/:/usr/lib:db-5.1 \
- --enable-javahl --with-gnome-keyring --with-kwallet
-
- # build
- (make external-all && make LT_LDFLAGS="-L$Fdestdir/usr/lib" local-all )
-}
-
-package() {
- cd ${srcdir}/${pkgname}-${pkgver}
-
- # install
- export LD_LIBRARY_PATH=${pkgdir}/usr/lib:$LD_LIBRARY_PATH
- make DESTDIR=${pkgdir} install
-
- make DESTDIR=${pkgdir} swig-py
- make install-swig-py DESTDIR=${pkgdir}
-
- install -d ${pkgdir}/usr/lib/python2.7
- mv ${pkgdir}/usr/lib/svn-python/ ${pkgdir}/usr/lib/python2.7/site-packages
-
- install -d ${pkgdir}/usr/share/subversion
- install -d -m 755 tools/hook-scripts ${pkgdir}/usr/share/subversion/
- rm -f ${pkgdir}/usr/share/subversion/hook-scripts/*.in
-
- make DESTDIR=${pkgdir} swig-pl
- make install-swig-pl DESTDIR=${pkgdir} INSTALLDIRS=vendor
- rm -f ${pkgdir}/usr/lib/perl5/vendor_perl/auto/SVN/_Core/.packlist
- rm -rf ${pkgdir}/usr/lib/perl5/core_perl
-
- make DESTDIR=${pkgdir} swig-rb
- make install-swig-rb DESTDIR=${pkgdir}
-
- make DESTDIR=${pkgdir} javahl
- make DESTDIR=${pkgdir} install-javahl
-
- install -d ${pkgdir}/etc/{rc.d,xinetd.d,conf.d}
-
- install -m 755 ${srcdir}/svnserve ${pkgdir}/etc/rc.d
- install -m 644 ${srcdir}/svn ${pkgdir}/etc/xinetd.d
- install -m 644 ${srcdir}/svnserve.conf ${pkgdir}/etc/conf.d/svnserve
- install -m 755 ${srcdir}/svnmerge.py ${pkgdir}/usr/bin/svnmerge
- install -D -m 644 ${srcdir}/subversion-$pkgver/COPYING \
- ${pkgdir}/usr/share/licenses/$pkgname/LICENSE
-
- # bash completion
- install -Dm 644 ${srcdir}/${pkgname}-${pkgver}/tools/client-side/bash_completion \
- ${pkgdir}/etc/bash_completion.d/subversion
-}
-md5sums=('113fca1d9e4aa389d7dc2b210010fa69'
- 'a2b029e8385007ffb99b437b30521c90'
- 'a0db6dd43af33952739b6ec089852630'
- 'c459e299192552f61578f3438abf0664'
- 'a6371baeda7e224504629ecdda2749b4'
- '6b4340ba9d8845cd8497e013ae01be3f'
- '1166f3b7413d7e7450299b3525680bbe')
diff --git a/staging/subversion/subversion.rpath.fix.patch b/staging/subversion/subversion.rpath.fix.patch
deleted file mode 100644
index ba6ee9e4e..000000000
--- a/staging/subversion/subversion.rpath.fix.patch
+++ /dev/null
@@ -1,10 +0,0 @@
---- Makefile.in.orig 2009-02-16 14:10:48.000000000 -0200
-+++ Makefile.in 2009-06-04 00:56:29.000000000 -0300
-@@ -678,6 +678,7 @@
-
- $(SWIG_PL_DIR)/native/Makefile: $(SWIG_PL_DIR)/native/Makefile.PL
- cd $(SWIG_PL_DIR)/native; $(PERL) Makefile.PL
-+ cd $(SWIG_PL_DIR)/native; sed -i 's|LD_RUN_PATH|DIE_RPATH_DIE|g' Makefile{,.{client,delta,fs,ra,repos,wc}}
-
- swig-pl_DEPS = autogen-swig-pl libsvn_swig_perl \
- $(SWIG_PL_DIR)/native/Makefile
diff --git a/staging/subversion/subversion.suppress.deprecation.warnings.patch b/staging/subversion/subversion.suppress.deprecation.warnings.patch
deleted file mode 100644
index 94ce89b18..000000000
--- a/staging/subversion/subversion.suppress.deprecation.warnings.patch
+++ /dev/null
@@ -1,22 +0,0 @@
-diff -urN subversion-1.6.9/subversion/bindings/swig/python/svn/core.py subversion-1.6.9-fixed/subversion/bindings/swig/python/svn/core.py
---- subversion-1.6.9/subversion/bindings/swig/python/svn/core.py 2009-02-13 11:22:26.000000000 -0500
-+++ subversion-1.6.9-fixed/subversion/bindings/swig/python/svn/core.py 2010-02-08 07:46:29.000000000 -0500
-@@ -19,6 +19,7 @@
- from libsvn.core import *
- import libsvn.core as _libsvncore
- import atexit as _atexit
-+import warnings
-
- class SubversionException(Exception):
- def __init__(self, message=None, apr_err=None, child=None,
-@@ -44,7 +45,9 @@
- Exception.__init__(self, *args)
-
- self.apr_err = apr_err
-- self.message = message
-+ with warnings.catch_warnings():
-+ warnings.simplefilter("ignore", DeprecationWarning)
-+ self.message = message
- self.child = child
- self.file = file
- self.line = line
diff --git a/staging/subversion/svn b/staging/subversion/svn
deleted file mode 100644
index 8988aaf63..000000000
--- a/staging/subversion/svn
+++ /dev/null
@@ -1,11 +0,0 @@
-service svn
-{
- flags = REUSE
- socket_type = stream
- wait = no
- user = root
- server = /usr/bin/svnserve
- server_args = -i
- log_on_failure += USERID
- disable = yes
-}
diff --git a/staging/subversion/svnmerge.py b/staging/subversion/svnmerge.py
deleted file mode 100644
index d8931648f..000000000
--- a/staging/subversion/svnmerge.py
+++ /dev/null
@@ -1,2370 +0,0 @@
-#!/usr/bin/env python2
-# -*- coding: utf-8 -*-
-# Copyright (c) 2005, Giovanni Bajo
-# Copyright (c) 2004-2005, Awarix, Inc.
-# All rights reserved.
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
-#
-# Author: Archie Cobbs <archie at awarix dot com>
-# Rewritten in Python by: Giovanni Bajo <rasky at develer dot com>
-#
-# Acknowledgments:
-# John Belmonte <john at neggie dot net> - metadata and usability
-# improvements
-# Blair Zajac <blair at orcaware dot com> - random improvements
-# Raman Gupta <rocketraman at fastmail dot fm> - bidirectional and transitive
-# merging support
-# Dustin J. Mitchell <dustin at zmanda dot com> - support for multiple
-# location identifier formats
-#
-# $HeadURL$
-# $LastChangedDate$
-# $LastChangedBy$
-# $LastChangedRevision$
-#
-# Requisites:
-# svnmerge.py has been tested with all SVN major versions since 1.1 (both
-# client and server). It is unknown if it works with previous versions.
-#
-# Differences from svnmerge.sh:
-# - More portable: tested as working in FreeBSD and OS/2.
-# - Add double-verbose mode, which shows every svn command executed (-v -v).
-# - "svnmerge avail" now only shows commits in source, not also commits in
-# other parts of the repository.
-# - Add "svnmerge block" to flag some revisions as blocked, so that
-# they will not show up anymore in the available list. Added also
-# the complementary "svnmerge unblock".
-# - "svnmerge avail" has grown two new options:
-# -B to display a list of the blocked revisions
-# -A to display both the blocked and the available revisions.
-# - Improved generated commit message to make it machine parsable even when
-# merging commits which are themselves merges.
-# - Add --force option to skip working copy check
-# - Add --record-only option to "svnmerge merge" to avoid performing
-# an actual merge, yet record that a merge happened.
-# - Can use a variety of location-identifier formats
-#
-# TODO:
-# - Add "svnmerge avail -R": show logs in reverse order
-#
-# Information for Hackers:
-#
-# Identifiers for branches:
-# A branch is identified in three ways within this source:
-# - as a working copy (variable name usually includes 'dir')
-# - as a fully qualified URL
-# - as a path identifier (an opaque string indicating a particular path
-# in a particular repository; variable name includes 'pathid')
-# A "target" is generally user-specified, and may be a working copy or
-# a URL.
-
-import sys, os, getopt, re, types, tempfile, time, locale
-from bisect import bisect
-from xml.dom import pulldom
-
-NAME = "svnmerge"
-if not hasattr(sys, "version_info") or sys.version_info < (2, 0):
- error("requires Python 2.0 or newer")
-
-# Set up the separator used to separate individual log messages from
-# each revision merged into the target location. Also, create a
-# regular expression that will find this same separator in already
-# committed log messages, so that the separator used for this run of
-# svnmerge.py will have one more LOG_SEPARATOR appended to the longest
-# separator found in all the commits.
-LOG_SEPARATOR = 8 * '.'
-LOG_SEPARATOR_RE = re.compile('^((%s)+)' % re.escape(LOG_SEPARATOR),
- re.MULTILINE)
-
-# Each line of the embedded log messages will be prefixed by LOG_LINE_PREFIX.
-LOG_LINE_PREFIX = 2 * ' '
-
-# Set python to the default locale as per environment settings, same as svn
-# TODO we should really parse config and if log-encoding is specified, set
-# the locale to match that encoding
-locale.setlocale(locale.LC_ALL, '')
-
-# We want the svn output (such as svn info) to be non-localized
-# Using LC_MESSAGES should not affect localized output of svn log, for example
-if os.environ.has_key("LC_ALL"):
- del os.environ["LC_ALL"]
-os.environ["LC_MESSAGES"] = "C"
-
-###############################################################################
-# Support for older Python versions
-###############################################################################
-
-# True/False constants are Python 2.2+
-try:
- True, False
-except NameError:
- True, False = 1, 0
-
-def lstrip(s, ch):
- """Replacement for str.lstrip (support for arbitrary chars to strip was
- added in Python 2.2.2)."""
- i = 0
- try:
- while s[i] == ch:
- i = i+1
- return s[i:]
- except IndexError:
- return ""
-
-def rstrip(s, ch):
- """Replacement for str.rstrip (support for arbitrary chars to strip was
- added in Python 2.2.2)."""
- try:
- if s[-1] != ch:
- return s
- i = -2
- while s[i] == ch:
- i = i-1
- return s[:i+1]
- except IndexError:
- return ""
-
-def strip(s, ch):
- """Replacement for str.strip (support for arbitrary chars to strip was
- added in Python 2.2.2)."""
- return lstrip(rstrip(s, ch), ch)
-
-def rsplit(s, sep, maxsplits=0):
- """Like str.rsplit, which is Python 2.4+ only."""
- L = s.split(sep)
- if not 0 < maxsplits <= len(L):
- return L
- return [sep.join(L[0:-maxsplits])] + L[-maxsplits:]
-
-###############################################################################
-
-def kwextract(s):
- """Extract info from a svn keyword string."""
- try:
- return strip(s, "$").strip().split(": ")[1]
- except IndexError:
- return "<unknown>"
-
-__revision__ = kwextract('$Rev$')
-__date__ = kwextract('$Date$')
-
-# Additional options, not (yet?) mapped to command line flags
-default_opts = {
- "svn": "svn",
- "prop": NAME + "-integrated",
- "block-prop": NAME + "-blocked",
- "commit-verbose": True,
- "verbose": 0,
-}
-logs = {}
-
-def console_width():
- """Get the width of the console screen (if any)."""
- try:
- return int(os.environ["COLUMNS"])
- except (KeyError, ValueError):
- pass
-
- try:
- # Call the Windows API (requires ctypes library)
- from ctypes import windll, create_string_buffer
- h = windll.kernel32.GetStdHandle(-11)
- csbi = create_string_buffer(22)
- res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi)
- if res:
- import struct
- (bufx, bufy,
- curx, cury, wattr,
- left, top, right, bottom,
- maxx, maxy) = struct.unpack("hhhhHhhhhhh", csbi.raw)
- return right - left + 1
- except ImportError:
- pass
-
- # Parse the output of stty -a
- if os.isatty(1):
- out = os.popen("stty -a").read()
- m = re.search(r"columns (\d+);", out)
- if m:
- return int(m.group(1))
-
- # sensible default
- return 80
-
-def error(s):
- """Subroutine to output an error and bail."""
- print >> sys.stderr, "%s: %s" % (NAME, s)
- sys.exit(1)
-
-def report(s):
- """Subroutine to output progress message, unless in quiet mode."""
- if opts["verbose"]:
- print "%s: %s" % (NAME, s)
-
-def prefix_lines(prefix, lines):
- """Given a string representing one or more lines of text, insert the
- specified prefix at the beginning of each line, and return the result.
- The input must be terminated by a newline."""
- assert lines[-1] == "\n"
- return prefix + lines[:-1].replace("\n", "\n"+prefix) + "\n"
-
-def recode_stdout_to_file(s):
- if locale.getdefaultlocale()[1] is None or not hasattr(sys.stdout, "encoding") \
- or sys.stdout.encoding is None:
- return s
- u = s.decode(sys.stdout.encoding)
- return u.encode(locale.getdefaultlocale()[1])
-
-class LaunchError(Exception):
- """Signal a failure in execution of an external command. Parameters are the
- exit code of the process, the original command line, and the output of the
- command."""
-
-try:
- """Launch a sub-process. Return its output (both stdout and stderr),
- optionally split by lines (if split_lines is True). Raise a LaunchError
- exception if the exit code of the process is non-zero (failure).
-
- This function has two implementations, one based on subprocess (preferred),
- and one based on popen (for compatibility).
- """
- import subprocess
- import shlex
-
- def launch(cmd, split_lines=True):
- # Requiring python 2.4 or higher, on some platforms we get
- # much faster performance from the subprocess module (where python
- # doesn't try to close an exorbitant number of file descriptors)
- stdout = ""
- stderr = ""
- try:
- if os.name == 'nt':
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE, \
- close_fds=False, stderr=subprocess.PIPE)
- else:
- # Use shlex to break up the parameters intelligently,
- # respecting quotes. shlex can't handle unicode.
- args = shlex.split(cmd.encode('ascii'))
- p = subprocess.Popen(args, stdout=subprocess.PIPE, \
- close_fds=False, stderr=subprocess.PIPE)
- stdoutAndErr = p.communicate()
- stdout = stdoutAndErr[0]
- stderr = stdoutAndErr[1]
- except OSError, inst:
- # Using 1 as failure code; should get actual number somehow? For
- # examples see svnmerge_test.py's TestCase_launch.test_failure and
- # TestCase_launch.test_failurecode.
- raise LaunchError(1, cmd, stdout + " " + stderr + ": " + str(inst))
-
- if p.returncode == 0:
- if split_lines:
- # Setting keepends=True for compatibility with previous logic
- # (where file.readlines() preserves newlines)
- return stdout.splitlines(True)
- else:
- return stdout
- else:
- raise LaunchError(p.returncode, cmd, stdout + stderr)
-except ImportError:
- # support versions of python before 2.4 (slower on some systems)
- def launch(cmd, split_lines=True):
- if os.name not in ['nt', 'os2']:
- import popen2
- p = popen2.Popen4(cmd)
- p.tochild.close()
- if split_lines:
- out = p.fromchild.readlines()
- else:
- out = p.fromchild.read()
- ret = p.wait()
- if ret == 0:
- ret = None
- else:
- ret >>= 8
- else:
- i,k = os.popen4(cmd)
- i.close()
- if split_lines:
- out = k.readlines()
- else:
- out = k.read()
- ret = k.close()
-
- if ret is None:
- return out
- raise LaunchError(ret, cmd, out)
-
-def launchsvn(s, show=False, pretend=False, **kwargs):
- """Launch SVN and grab its output."""
- username = password = configdir = ""
- if opts.get("username", None):
- username = "--username=" + opts["username"]
- if opts.get("password", None):
- password = "--password=" + opts["password"]
- if opts.get("config-dir", None):
- configdir = "--config-dir=" + opts["config-dir"]
- cmd = ' '.join(filter(None, [opts["svn"], "--non-interactive",
- username, password, configdir, s]))
- if show or opts["verbose"] >= 2:
- print cmd
- if pretend:
- return None
- return launch(cmd, **kwargs)
-
-def svn_command(s):
- """Do (or pretend to do) an SVN command."""
- out = launchsvn(s, show=opts["show-changes"] or opts["dry-run"],
- pretend=opts["dry-run"],
- split_lines=False)
- if not opts["dry-run"]:
- print out
-
-def check_dir_clean(dir):
- """Check the current status of dir for local mods."""
- if opts["force"]:
- report('skipping status check because of --force')
- return
- report('checking status of "%s"' % dir)
-
- # Checking with -q does not show unversioned files or external
- # directories. Though it displays a debug message for external
- # directories, after a blank line. So, practically, the first line
- # matters: if it's non-empty there is a modification.
- out = launchsvn("status -q %s" % dir)
- if out and out[0].strip():
- error('"%s" has local modifications; it must be clean' % dir)
-
-class PathIdentifier:
- """Abstraction for a path identifier, so that we can start talking
- about it before we know the form that it takes in the properties (its
- external_form). Objects are referenced in the class variable 'locobjs',
- keyed by all known forms."""
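- # The known forms are, illustratively (this URL is made up): a full URL
- # ("http://svn.example.com/repo/branches/foo"), a uuid-qualified id
- # ("uuid://<repo-uuid>/branches/foo"), and a repo-relative path
- # ("/branches/foo"); from_pathid() below accepts any of these.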
-
- # a map of UUID (or None) to repository root URL.
- repo_hints = {}
-
- # a map from any known string form to the corresponding PathIdentifier
- locobjs = {}
-
- def __init__(self, repo_relative_path, uuid=None, url=None, external_form=None):
- self.repo_relative_path = repo_relative_path
- self.uuid = uuid
- self.url = url
- self.external_form = external_form
-
- def __repr__(self):
- return "<PathIdentifier " + ', '.join('%s=%r' % i for i in self.__dict__.items()) + '>'
-
- def __str__(self):
- """Return a printable string representation"""
- if self.external_form:
- return self.external_form
- if self.url:
- return self.format('url')
- if self.uuid:
- return self.format('uuid')
- return self.format('path')
-
- def from_pathid(pathid_str):
- """convert pathid_str to a PathIdentifier"""
- if not PathIdentifier.locobjs.has_key(pathid_str):
- if is_url(pathid_str):
- # we can determine every form; PathIdentifier.hint knows how to do that
- PathIdentifier.hint(pathid_str)
- elif pathid_str[:7] == 'uuid://':
- mo = re.match('uuid://([^/]*)(.*)', pathid_str)
- if not mo:
- error("Invalid path identifier '%s'" % pathid_str)
- uuid, repo_relative_path = mo.groups()
- pathid = PathIdentifier(repo_relative_path, uuid=uuid)
- # we can cache this by uuid:// pathid and by repo-relative path
- PathIdentifier.locobjs[pathid_str] = PathIdentifier.locobjs[repo_relative_path] = pathid
- elif pathid_str and pathid_str[0] == '/':
- # strip any trailing slashes
- pathid_str = pathid_str.rstrip('/')
- pathid = PathIdentifier(repo_relative_path=pathid_str)
- # we can only cache this by repo-relative path
- PathIdentifier.locobjs[pathid_str] = pathid
- else:
- error("Invalid path identifier '%s'" % pathid_str)
- return PathIdentifier.locobjs[pathid_str]
- from_pathid = staticmethod(from_pathid)
-
- def from_target(target):
- """Convert a target (either a working copy path or an URL) into a
- path identifier."""
- # prime the cache first if we don't know about this target yet
- if not PathIdentifier.locobjs.has_key(target):
- PathIdentifier.hint(target)
-
- try:
- return PathIdentifier.locobjs[target]
- except KeyError:
- error("Could not recognize path identifier '%s'" % target)
- from_target = staticmethod(from_target)
-
- def hint(target):
- """Cache some information about target, as it may be referenced by
- repo-relative path in subversion properties; the cache can help to
- expand such a relative path to a full path identifier."""
- if PathIdentifier.locobjs.has_key(target): return
- if not is_url(target) and not is_wc(target): return
-
- url = target_to_url(target)
-
- root = get_repo_root(url)
- assert root[-1] != "/"
- assert url[:len(root)] == root, "url=%r, root=%r" % (url, root)
- repo_relative_path = url[len(root):]
-
- try:
- uuid = get_svninfo(target)['Repository UUID']
- uuid_pathid = 'uuid://%s%s' % (uuid, repo_relative_path)
- except KeyError:
- uuid = None
- uuid_pathid = None
-
- locobj = PathIdentifier.locobjs.get(url) or \
- (uuid_pathid and PathIdentifier.locobjs.get(uuid_pathid))
- if not locobj:
- locobj = PathIdentifier(repo_relative_path, uuid=uuid, url=url)
-
- PathIdentifier.repo_hints[uuid] = root # (uuid may be None)
-
- PathIdentifier.locobjs[target] = locobj
- PathIdentifier.locobjs[url] = locobj
- if uuid_pathid:
- PathIdentifier.locobjs[uuid_pathid] = locobj
- if not PathIdentifier.locobjs.has_key(repo_relative_path):
- PathIdentifier.locobjs[repo_relative_path] = locobj
- hint = staticmethod(hint)
-
- def format(self, fmt):
- if fmt == 'path':
- return self.repo_relative_path
- elif fmt == 'uuid':
- return "uuid://%s%s" % (self.uuid, self.repo_relative_path)
- elif fmt == 'url':
- return self.url
- else:
- error("Unkonwn path type '%s'" % fmt)
-
- def match_substring(self, str):
- """Test whether str is a substring of any representation of this
- PathIdentifier."""
- if self.repo_relative_path.find(str) >= 0:
- return True
-
- if self.uuid:
- if ("uuid://%s%s" % (self.uuid, self.repo_relative_path)).find(str) >= 0:
- return True
-
- if self.url:
- if (self.url + self.repo_relative_path).find(str) >= 0:
- return True
-
- return False
-
- def get_url(self):
- """Convert a pathid into a URL. If this is not possible, error out."""
- if self.url:
- return self.url
- # if we have a uuid and happen to know the URL for it, use that
- elif self.uuid and PathIdentifier.repo_hints.has_key(self.uuid):
- self.url = PathIdentifier.repo_hints[self.uuid] + self.repo_relative_path
- PathIdentifier.locobjs[self.url] = self
- return self.url
- # if we've only seen one rep, use that (a guess, but an educated one)
- elif not self.uuid and len(PathIdentifier.repo_hints) == 1:
- uuid, root = PathIdentifier.repo_hints.items()[0]
- if uuid:
- self.uuid = uuid
- PathIdentifier.locobjs['uuid://%s%s' % (uuid, self.repo_relative_path)] = self
- self.url = root + self.repo_relative_path
- PathIdentifier.locobjs[self.url] = self
- report("Guessing that '%s' refers to '%s'" % (self, self.url))
- return self.url
- else:
- error("Cannot determine URL for '%s'; " % self +
- "Explicit source argument (-S/--source) required.\n")
-
-class RevisionLog:
- """
- A log of the revisions which affected a given URL between two
- revisions.
- """
-
- def __init__(self, url, begin, end, find_propchanges=False):
- """
- Create a new RevisionLog object, which stores, in self.revs, a list
- of the revisions which affected the specified URL between begin and
- end. If find_propchanges is True, self.propchange_revs will contain a
- list of the revisions which changed properties directly on the
- specified URL. URL must be the URL for a directory in the repository.
- """
- self.url = url
-
- # Setup the log options (--quiet, so we don't show log messages)
- log_opts = '--xml --quiet -r%s:%s "%s"' % (begin, end, url)
- if find_propchanges:
- # The --verbose flag lets us grab merge tracking information
- # by looking at propchanges
- log_opts = "--verbose " + log_opts
-
- # Read the log to look for revision numbers and merge-tracking info
- self.revs = []
- self.propchange_revs = []
- repos_pathid = PathIdentifier.from_target(url)
- for chg in SvnLogParser(launchsvn("log %s" % log_opts,
- split_lines=False)):
- self.revs.append(chg.revision())
- for p in chg.paths():
- if p.action() == 'M' and p.pathid() == repos_pathid.repo_relative_path:
- self.propchange_revs.append(chg.revision())
-
- # Save the range of the log
- self.begin = int(begin)
- if end == "HEAD":
- # If end is not provided, we do not know which is the latest
- # revision in the repository. So we set 'end' to the latest
- # known revision.
- self.end = self.revs[-1]
- else:
- self.end = int(end)
-
- self._merges = None
- self._blocks = None
-
- def merge_metadata(self):
- """
- Return a VersionedProperty object, with a cached view of the merge
- metadata in the range of this log.
- """
-
- # Load merge metadata if necessary
- if not self._merges:
- self._merges = VersionedProperty(self.url, opts["prop"])
- self._merges.load(self)
-
- return self._merges
-
- def block_metadata(self):
- if not self._blocks:
- self._blocks = VersionedProperty(self.url, opts["block-prop"])
- self._blocks.load(self)
-
- return self._blocks
-
-
-class VersionedProperty:
- """
- A read-only, cached view of a versioned property.
-
- self.revs contains a list of the revisions in which the property changes.
- self.values stores the new values at each corresponding revision. If the
- value of the property is unknown, it is set to None.
-
- Initially, we set self.revs to [0] and self.values to [None]. This
- indicates that, as of revision zero, we know nothing about the value of
- the property.
-
- Later, if you run self.load(log), we cache the value of this property over
- the entire range of the log by noting each revision in which the property
- was changed. At the end of the range of the log, we invalidate our cache
- by adding the value "None" to our cache for any revisions which fall out
- of the range of our log.
-
- Once self.revs and self.values are filled, we can find the value of the
- property at any arbitrary revision using a binary search on self.revs.
- Once we find the last revision during which the property was changed,
- we can lookup the associated value in self.values. (If the associated
- value is None, the associated value was not cached and we have to do
- a full propget.)
-
- An example: We know that the 'svnmerge' property was added in r10, and
- changed in r21. We gathered log info up until r40.
-
- revs = [0, 10, 21, 40]
- values = [None, "val1", "val2", None]
-
- What these values say:
- - From r0 to r9, we know nothing about the property.
- - In r10, the property was set to "val1". This property stayed the same
- until r21, when it was changed to "val2".
- - We don't know what happened after r40.
- """
-
- def __init__(self, url, name):
- """View the history of a versioned property at URL with name"""
- self.url = url
- self.name = name
-
- # We know nothing about the value of the property. Setup revs
- # and values to indicate as such.
- self.revs = [0]
- self.values = [None]
-
- # We don't have any revisions cached
- self._initial_value = None
- self._changed_revs = []
- self._changed_values = []
-
- def load(self, log):
- """
- Load the history of property changes from the specified
- RevisionLog object.
- """
-
- # Get the property value before the range of the log
- if log.begin > 1:
- self.revs.append(log.begin-1)
- try:
- self._initial_value = self.raw_get(log.begin-1)
- except LaunchError:
- # The specified URL might not exist before the
- # range of the log. If so, we can safely assume
- # that the property was empty at that time.
- self._initial_value = { }
- self.values.append(self._initial_value)
- else:
- self._initial_value = { }
- self.values[0] = self._initial_value
-
- # Cache the property values in the log range
- old_value = self._initial_value
- for rev in log.propchange_revs:
- new_value = self.raw_get(rev)
- if new_value != old_value:
- self._changed_revs.append(rev)
- self._changed_values.append(new_value)
- self.revs.append(rev)
- self.values.append(new_value)
- old_value = new_value
-
- # Indicate that we know nothing about the value of the property
- # after the range of the log.
- if log.revs:
- self.revs.append(log.end+1)
- self.values.append(None)
-
- def raw_get(self, rev=None):
- """
- Get the property at revision REV. If rev is not specified, get
- the property at revision HEAD.
- """
- return get_revlist_prop(self.url, self.name, rev)
-
- def get(self, rev=None):
- """
- Get the property at revision REV. If rev is not specified, get
- the property at revision HEAD.
- """
-
- if rev is not None:
-
- # Find the index using a binary search
- i = bisect(self.revs, rev) - 1
-
- # Return the value of the property, if it was cached
- if self.values[i] is not None:
- return self.values[i]
-
- # Get the current value of the property
- return self.raw_get(rev)
-
- def changed_revs(self, key=None):
- """
- Get a list of the revisions in which the specified dictionary
- key was changed in this property. If key is not specified,
- return a list of revisions in which any key was changed.
- """
- if key is None:
- return self._changed_revs
- else:
- changed_revs = []
- old_val = self._initial_value
- for rev, val in zip(self._changed_revs, self._changed_values):
- if val.get(key) != old_val.get(key):
- changed_revs.append(rev)
- old_val = val
- return changed_revs
-
- def initialized_revs(self):
- """
- Get a list of the revisions in which keys were added or
- removed in this property.
- """
- initialized_revs = []
- old_len = len(self._initial_value)
- for rev, val in zip(self._changed_revs, self._changed_values):
- if len(val) != old_len:
- initialized_revs.append(rev)
- old_len = len(val)
- return initialized_revs
-
-class RevisionSet:
- """
- A set of revisions, held in dictionary form for easy manipulation. If we
- were to rewrite this script for Python 2.3+, we would subclass this from
- set (or UserSet). As this class does not include branch
- information, it's assumed that one instance will be used per
- branch.
- """
- def __init__(self, parm):
- """Constructs a RevisionSet from a string in property form, or from
- a dictionary whose keys are the revisions. Raises ValueError if the
- input string is invalid."""
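- # Illustrative examples (not from upstream, derived from the parsing
- # below): RevisionSet("3,5-7") holds revisions 3, 5, 6 and 7, and both
- # "-" and ":" are accepted as range separators ("5:7" works the same).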
-
- self._revs = {}
-
- revision_range_split_re = re.compile('[-:]')
-
- if isinstance(parm, types.DictType):
- self._revs = parm.copy()
- elif isinstance(parm, types.ListType):
- for R in parm:
- self._revs[int(R)] = 1
- else:
- parm = parm.strip()
- if parm:
- for R in parm.split(","):
- rev_or_revs = re.split(revision_range_split_re, R)
- if len(rev_or_revs) == 1:
- self._revs[int(rev_or_revs[0])] = 1
- elif len(rev_or_revs) == 2:
- for rev in range(int(rev_or_revs[0]),
- int(rev_or_revs[1])+1):
- self._revs[rev] = 1
- else:
- raise ValueError, 'Ill-formatted revision range: ' + R
-
- def sorted(self):
- revnums = self._revs.keys()
- revnums.sort()
- return revnums
-
- def normalized(self):
- """Returns a normalized version of the revision set, which is an
- ordered list of couples (start,end), with the minimum number of
- intervals."""
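- # e.g. (illustrative): revisions {3,4,5,7} normalize to [(3, 5), (7, 7)].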
- revnums = self.sorted()
- revnums.reverse()
- ret = []
- while revnums:
- s = e = revnums.pop()
- while revnums and revnums[-1] in (e, e+1):
- e = revnums.pop()
- ret.append((s, e))
- return ret
-
- def __str__(self):
- """Convert the revision set to a string, using its normalized form."""
- L = []
- for s,e in self.normalized():
- if s == e:
- L.append(str(s))
- else:
- L.append(str(s) + "-" + str(e))
- return ",".join(L)
-
- def __contains__(self, rev):
- return self._revs.has_key(rev)
-
- def __sub__(self, rs):
- """Compute subtraction as in sets."""
- revs = {}
- for r in self._revs.keys():
- if r not in rs:
- revs[r] = 1
- return RevisionSet(revs)
-
- def __and__(self, rs):
- """Compute intersections as in sets."""
- revs = {}
- for r in self._revs.keys():
- if r in rs:
- revs[r] = 1
- return RevisionSet(revs)
-
- def __nonzero__(self):
- return len(self._revs) != 0
-
- def __len__(self):
- """Return the number of revisions in the set."""
- return len(self._revs)
-
- def __iter__(self):
- return iter(self.sorted())
-
- def __or__(self, rs):
- """Compute set union."""
- revs = self._revs.copy()
- revs.update(rs._revs)
- return RevisionSet(revs)
-
-def merge_props_to_revision_set(merge_props, pathid):
- """A converter which returns a RevisionSet instance containing the
- revisions from PATH as known to BRANCH_PROPS. BRANCH_PROPS is a
- dictionary of pathid -> revision set branch integration information
- (as returned by get_merge_props())."""
- if not merge_props.has_key(pathid):
- error('no integration info available for path "%s"' % pathid)
- return RevisionSet(merge_props[pathid])
-
-def dict_from_revlist_prop(propvalue):
- """Given a property value as a string containing per-source revision
- lists, return a dictionary whose key is a source path identifier
- and whose value is the revisions for that source."""
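- # Illustrative input (the paths here are made up): a property value like
- # "/branches/foo:1-10 /trunk:12,14-20" maps the pathid for /branches/foo
- # to "1-10" and the pathid for /trunk to "12,14-20".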
- prop = {}
-
- # Multiple sources are separated by any whitespace.
- for L in propvalue.split():
- # We use rsplit to play safe and allow colons in pathids.
- pathid_str, revs = rsplit(L.strip(), ":", 1)
-
- pathid = PathIdentifier.from_pathid(pathid_str)
-
- # cache the "external" form we saw
- pathid.external_form = pathid_str
-
- prop[pathid] = revs
- return prop
-
-def get_revlist_prop(url_or_dir, propname, rev=None):
- """Given a repository URL or working copy path and a property
- name, extract the values of the property which store per-source
- revision lists and return a dictionary whose key is a source path
- identifier, and whose value is the revisions for that source."""
-
- # Note that propget does not return an error if the property does
- # not exist, it simply does not output anything. So we do not need
- # to check for LaunchError here.
- args = '--strict "%s" "%s"' % (propname, url_or_dir)
- if rev:
- args = '-r %s %s' % (rev, args)
- out = launchsvn('propget %s' % args, split_lines=False)
-
- return dict_from_revlist_prop(out)
-
-def get_merge_props(dir):
- """Extract the merged revisions."""
- return get_revlist_prop(dir, opts["prop"])
-
-def get_block_props(dir):
- """Extract the blocked revisions."""
- return get_revlist_prop(dir, opts["block-prop"])
-
-def get_blocked_revs(dir, source_pathid):
- p = get_block_props(dir)
- if p.has_key(source_pathid):
- return RevisionSet(p[source_pathid])
- return RevisionSet("")
-
-def format_merge_props(props, sep=" "):
- """Formats the hash PROPS as a string suitable for use as a
- Subversion property value."""
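- # e.g. (illustrative): {"/branches/foo": "1-10", "/trunk": "12"} is
- # rendered as "/branches/foo:1-10 /trunk:12" with the default separator.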
- assert sep in ["\t", "\n", " "] # must be a whitespace
- props = props.items()
- props.sort()
- L = []
- for h, r in props:
- L.append("%s:%s" % (h, r))
- return sep.join(L)
-
-def _run_propset(dir, prop, value):
- """Set the property 'prop' of directory 'dir' to value 'value'. We go
- through a temporary file to not run into command line length limits."""
- try:
- fd, fname = tempfile.mkstemp()
- f = os.fdopen(fd, "wb")
- except AttributeError:
- # Fallback for Python <= 2.3 which does not have mkstemp (mktemp
- # suffers from race conditions. Not that we care...)
- fname = tempfile.mktemp()
- f = open(fname, "wb")
-
- try:
- f.write(value)
- f.close()
- report("property data written to temp file: %s" % value)
- svn_command('propset "%s" -F "%s" "%s"' % (prop, fname, dir))
- finally:
- os.remove(fname)
-
-def set_props(dir, name, props):
- props = format_merge_props(props)
- if props:
- _run_propset(dir, name, props)
- else:
- # Check if NAME exists on DIR before trying to delete it.
- # As of 1.6 propdel no longer supports deleting a
- # non-existent property.
- out = launchsvn('propget "%s" "%s"' % (name, dir))
- if out:
- svn_command('propdel "%s" "%s"' % (name, dir))
-
-def set_merge_props(dir, props):
- set_props(dir, opts["prop"], props)
-
-def set_block_props(dir, props):
- set_props(dir, opts["block-prop"], props)
-
-def set_blocked_revs(dir, source_pathid, revs):
- props = get_block_props(dir)
- if revs:
- props[source_pathid] = str(revs)
- elif props.has_key(source_pathid):
- del props[source_pathid]
- set_block_props(dir, props)
-
-def is_url(url):
- """Check if url looks like a valid url."""
- return re.search(r"^[a-zA-Z][-+\.\w]*://[^\s]+$", url) is not None and url[:4] != 'uuid'
-
-def check_url(url):
- """Similar to is_url, but actually invoke get_svninfo to find out"""
- return get_svninfo(url) != {}
-
-def is_pathid(pathid):
- return isinstance(pathid, PathIdentifier)
-
-def is_wc(dir):
- """Check if a directory is a working copy."""
- return os.path.isdir(os.path.join(dir, ".svn")) or \
- os.path.isdir(os.path.join(dir, "_svn"))
-
-_cache_svninfo = {}
-def get_svninfo(target):
- """Extract the subversion information for a target (through 'svn info').
- This function uses an internal cache to let clients query information
- many times."""
- if _cache_svninfo.has_key(target):
- return _cache_svninfo[target]
- info = {}
- for L in launchsvn('info "%s"' % target):
- L = L.strip()
- if not L:
- continue
- key, value = L.split(": ", 1)
- info[key] = value.strip()
- _cache_svninfo[target] = info
- return info
-
-def target_to_url(target):
- """Convert working copy path or repos URL to a repos URL."""
- if is_wc(target):
- info = get_svninfo(target)
- return info["URL"]
- return target
-
-_cache_reporoot = {}
-def get_repo_root(target):
- """Compute the root repos URL given a working-copy path, or a URL."""
- # Try using "svn info WCDIR". This works only on SVN clients >= 1.3
- if not is_url(target):
- try:
- info = get_svninfo(target)
- root = info["Repository Root"]
- _cache_reporoot[root] = None
- return root
- except KeyError:
- pass
- url = target_to_url(target)
- assert url[-1] != '/'
- else:
- url = target
-
- # Go through the cache of the repository roots. This avoids extra
- # server round-trips if we are asking the root of different URLs
- # in the same repository (the cache in get_svninfo() cannot detect
- # that of course and would issue a remote command).
- assert is_url(url)
- for r in _cache_reporoot:
- if url.startswith(r):
- return r
-
- # Try using "svn info URL". This works only on SVN clients >= 1.2
- try:
- info = get_svninfo(url)
- # info may be {}, in which case we'll see KeyError here
- root = info["Repository Root"]
- _cache_reporoot[root] = None
- return root
- except (KeyError, LaunchError):
- pass
-
- # Constrained to older svn clients, we are stuck with this ugly
- # trial-and-error implementation. It could be made faster with a
- # binary search.
- while url:
- temp = os.path.dirname(url)
- try:
- launchsvn('proplist "%s"' % temp)
- except LaunchError:
- _cache_reporoot[url] = None
- return rstrip(url, "/")
- url = temp
-
- error("svn repos root of %s not found" % target)
-
-class SvnLogParser:
- """
- Parse the "svn log", going through the XML output and using pulldom (which
- would even allow streaming the command output).
- """
- def __init__(self, xml):
- self._events = pulldom.parseString(xml)
- def __getitem__(self, idx):
- for event, node in self._events:
- if event == pulldom.START_ELEMENT and node.tagName == "logentry":
- self._events.expandNode(node)
- return self.SvnLogRevision(node)
- raise IndexError, "Could not find 'logentry' tag in xml"
-
- class SvnLogRevision:
- def __init__(self, xmlnode):
- self.n = xmlnode
- def revision(self):
- return int(self.n.getAttribute("revision"))
- def author(self):
- return self.n.getElementsByTagName("author")[0].firstChild.data
- def paths(self):
- return [self.SvnLogPath(n)
- for n in self.n.getElementsByTagName("path")]
-
- class SvnLogPath:
- def __init__(self, xmlnode):
- self.n = xmlnode
- def action(self):
- return self.n.getAttribute("action")
- def pathid(self):
- return self.n.firstChild.data
- def copyfrom_rev(self):
- try: return self.n.getAttribute("copyfrom-rev")
- except KeyError: return None
- def copyfrom_pathid(self):
- try: return self.n.getAttribute("copyfrom-path")
- except KeyError: return None
-
-def get_copyfrom(target):
- """Get copyfrom info for a given target (it represents the
- repository-relative path from where it was branched). NOTE:
- repos root has no copyfrom info. In this case None is returned.
-
- Returns the:
- - source file or directory from which the copy was made
- - revision from which that source was copied
- - revision in which the copy was committed
- """
- repos_path = PathIdentifier.from_target(target).repo_relative_path
- for chg in SvnLogParser(launchsvn('log -v --xml --stop-on-copy "%s"'
- % target, split_lines=False)):
- for p in chg.paths():
- if p.action() == 'A' and p.pathid() == repos_path:
- # These values will be None if the corresponding elements are
- # not found in the log.
- return p.copyfrom_pathid(), p.copyfrom_rev(), chg.revision()
- return None,None,None
-
-def get_latest_rev(url):
- """Get the latest revision of the repository of which URL is part."""
- try:
- info = get_svninfo(url)
- if not info.has_key("Revision"):
- error("Not a valid URL: %s" % url)
- return info["Revision"]
- except LaunchError:
- # Alternative method for latest revision checking (for svn < 1.2)
- report('checking latest revision of "%s"' % url)
- L = launchsvn('proplist --revprop -r HEAD "%s"' % opts["source-url"])[0]
- rev = re.search("revision (\d+)", L).group(1)
- report('latest revision of "%s" is %s' % (url, rev))
- return rev
-
-def get_created_rev(url):
- """Lookup the revision at which the path identified by the
- provided URL was first created."""
- oldest_rev = -1
- report('determining oldest revision for URL "%s"' % url)
- ### TODO: Refactor this to use a modified RevisionLog class.
- lines = None
- cmd = "log -r1:HEAD --stop-on-copy -q " + url
- try:
- lines = launchsvn(cmd + " --limit=1")
- except LaunchError:
- # Assume that --limit isn't supported by the installed 'svn'.
- lines = launchsvn(cmd)
- if lines and len(lines) > 1:
- i = lines[1].find(" ")
- if i != -1:
- oldest_rev = int(lines[1][1:i])
- if oldest_rev == -1:
- error('unable to determine oldest revision for URL "%s"' % url)
- return oldest_rev
-
-def get_commit_log(url, revnum):
- """Return the log message for a specific integer revision
- number."""
- out = launchsvn("log --incremental -r%d %s" % (revnum, url))
- return recode_stdout_to_file("".join(out[1:]))
-
-def construct_merged_log_message(url, revnums):
- """Return a commit log message containing all the commit messages
- in the specified revisions at the given URL. The separator used
- in this log message is determined by searching for the longest
- svnmerge separator existing in the commit log messages and
- extending it by one more separator. This results in a new commit
- log message that is clearer in describing merges that contain
- other merges. Trailing newlines are removed from the embedded
- log messages."""
- messages = ['']
- longest_sep = ''
- for r in revnums.sorted():
- message = get_commit_log(url, r)
- if message:
- message = re.sub(r'(\r\n|\r|\n)', "\n", message)
- message = rstrip(message, "\n") + "\n"
- messages.append(prefix_lines(LOG_LINE_PREFIX, message))
- for match in LOG_SEPARATOR_RE.findall(message):
- sep = match[1]
- if len(sep) > len(longest_sep):
- longest_sep = sep
-
- longest_sep += LOG_SEPARATOR + "\n"
- messages.append('')
- return longest_sep.join(messages)
-
-def get_default_source(branch_target, branch_props):
- """Return the default source for branch_target (given its branch_props).
- Error out if there is ambiguity."""
- if not branch_props:
- error("no integration info available")
-
- props = branch_props.copy()
- pathid = PathIdentifier.from_target(branch_target)
-
- # To make bidirectional merges easier, find the target's
- # repository local path so it can be removed from the list of
- # possible integration sources.
- if props.has_key(pathid):
- del props[pathid]
-
- if len(props) > 1:
- err_msg = "multiple sources found. "
- err_msg += "Explicit source argument (-S/--source) required.\n"
- err_msg += "The merge sources available are:"
- for prop in props:
- err_msg += "\n " + str(prop)
- error(err_msg)
-
- return props.keys()[0]
-
-def should_find_reflected(branch_dir):
- should_find_reflected = opts["bidirectional"]
-
- # If the source has integration info for the target, set find_reflected
- # even if --bidirectional wasn't specified
- if not should_find_reflected:
- source_props = get_merge_props(opts["source-url"])
- should_find_reflected = source_props.has_key(PathIdentifier.from_target(branch_dir))
-
- return should_find_reflected
-
-def analyze_revs(target_pathid, url, begin=1, end=None,
- find_reflected=False):
- """For the source of the merges in the source URL being merged into
- target_pathid, analyze the revisions in the interval begin-end (which
- defaults to 1-HEAD), to find out which revisions are changes in
- the url, which are changes elsewhere (so-called 'phantom'
- revisions), optionally which are reflected changes (to avoid
- conflicts that can occur when doing bidirectional merging between
- branches), and which revisions initialize merge tracking against other
- branches. Return a tuple of four RevisionSet's:
- (real_revs, phantom_revs, reflected_revs, initialized_revs).
-
- NOTE: To maximize speed, if "end" is not provided, the function is
- not able to find phantom revisions following the last real
- revision in the URL.
- """
-
- begin = str(begin)
- if end is None:
- end = "HEAD"
- else:
- end = str(end)
- if long(begin) > long(end):
- return RevisionSet(""), RevisionSet(""), \
- RevisionSet(""), RevisionSet("")
-
- logs[url] = RevisionLog(url, begin, end, find_reflected)
- revs = RevisionSet(logs[url].revs)
-
- if end == "HEAD":
- # If end is not provided, we do not know which is the latest revision
- # in the repository. So return the phantom revision set only up to
- # the latest known revision.
- end = str(list(revs)[-1])
-
- phantom_revs = RevisionSet("%s-%s" % (begin, end)) - revs
-
- if find_reflected:
- reflected_revs = logs[url].merge_metadata().changed_revs(target_pathid)
- reflected_revs += logs[url].block_metadata().changed_revs(target_pathid)
- else:
- reflected_revs = []
-
- initialized_revs = RevisionSet(logs[url].merge_metadata().initialized_revs())
- reflected_revs = RevisionSet(reflected_revs)
-
- return revs, phantom_revs, reflected_revs, initialized_revs
-
-def analyze_source_revs(branch_target, source_url, **kwargs):
- """For the given branch and source, extract the real and phantom
- source revisions."""
- branch_url = target_to_url(branch_target)
- branch_pathid = PathIdentifier.from_target(branch_target)
-
- # Extract the latest repository revision from the URL of the branch
- # directory (which is already cached at this point).
- end_rev = get_latest_rev(source_url)
-
- # Calculate the base of analysis. If there is a "1-XX" interval in the
- # merged_revs, we do not need to check those.
- base = 1
- r = opts["merged-revs"].normalized()
- if r and r[0][0] == 1:
- base = r[0][1] + 1
-
- # See if the user filtered the revision set. If so, we are not
- # interested in something outside that range.
- if opts["revision"]:
- revs = RevisionSet(opts["revision"]).sorted()
- if base < revs[0]:
- base = revs[0]
- if end_rev > revs[-1]:
- end_rev = revs[-1]
-
- return analyze_revs(branch_pathid, source_url, base, end_rev, **kwargs)
-
-def minimal_merge_intervals(revs, phantom_revs):
- """Produce the smallest number of intervals suitable for merging. revs
- is the RevisionSet which we want to merge, and phantom_revs are phantom
- revisions which can be used to concatenate intervals, thus minimizing the
- number of operations."""
- revnums = revs.normalized()
- ret = []
-
- cur = revnums.pop()
- while revnums:
- next = revnums.pop()
- assert next[1] < cur[0] # otherwise it is not ordered
- assert cur[0] - next[1] > 1 # otherwise it is not normalized
- for i in range(next[1]+1, cur[0]):
- if i not in phantom_revs:
- ret.append(cur)
- cur = next
- break
- else:
- cur = (next[0], cur[1])
-
- ret.append(cur)
- ret.reverse()
- return ret
-
-def display_revisions(revs, display_style, revisions_msg, source_url):
- """Show REVS as dictated by DISPLAY_STYLE, either numerically, in
- log format, or as diffs. When displaying revisions numerically,
- prefix output with REVISIONS_MSG when in verbose mode. Otherwise,
- request logs or diffs using SOURCE_URL."""
- if display_style == "revisions":
- if revs:
- report(revisions_msg)
- print revs
- elif display_style == "logs":
- for start,end in revs.normalized():
- svn_command('log --incremental -v -r %d:%d %s' % \
- (start, end, source_url))
- elif display_style in ("diffs", "summarize"):
- if display_style == 'summarize':
- summarize = '--summarize '
- else:
- summarize = ''
-
- for start, end in revs.normalized():
- print
- if start == end:
- print "%s: changes in revision %d follow" % (NAME, start)
- else:
- print "%s: changes in revisions %d-%d follow" % (NAME,
- start, end)
- print
-
- # Note: the starting revision number to 'svn diff' is
- # NOT inclusive so we have to subtract one from ${START}.
- svn_command("diff -r %d:%d %s %s" % (start - 1, end, summarize,
- source_url))
- else:
- assert False, "unhandled display style: %s" % display_style
-
-def action_init(target_dir, target_props):
- """Initialize for merges."""
- # Check that directory is ready for being modified
- check_dir_clean(target_dir)
-
- target_pathid = PathIdentifier.from_target(target_dir)
- source_pathid = opts['source-pathid']
- if source_pathid == target_pathid:
- error("cannot init integration source path '%s'\nIts path identifier does not "
- "differ from the path identifier of the current directory, '%s'."
- % (source_pathid, target_pathid))
-
- source_url = opts['source-url']
-
- # If the user hasn't specified the revisions to use, see if the
- # "source" is a copy from the current tree and if so, we can use
- # the version data obtained from it.
- revision_range = opts["revision"]
- if not revision_range:
- # If source was originally copied from target, and we are merging
- # changes from source to target (the copy target is the merge source,
- # and the copy source is the merge target), then we want to mark as
- # integrated up to the rev in which the copy was committed which
- # created the merge source:
- cf_source, cf_rev, copy_committed_in_rev = get_copyfrom(source_url)
-
- cf_pathid = None
- if cf_source:
- cf_url = get_repo_root(source_url) + cf_source
- if is_url(cf_url) and check_url(cf_url):
- cf_pathid = PathIdentifier.from_target(cf_url)
-
- if target_pathid == cf_pathid:
- report('the source "%s" was copied from "%s" in rev %s and committed in rev %s' %
- (source_url, target_dir, cf_rev, copy_committed_in_rev))
- revision_range = "1-" + str(copy_committed_in_rev)
-
- if not revision_range:
- # If the reverse is true: copy source is the merge source, and
- # the copy target is the merge target, then we want to mark as
- # integrated up to the specific rev of the merge target from
- # which the merge source was copied. (Longer discussion at:
- # http://subversion.tigris.org/issues/show_bug.cgi?id=2810 )
- cf_source, cf_rev, copy_committed_in_rev = get_copyfrom(target_dir)
-
- cf_pathid = None
- if cf_source:
- cf_url = get_repo_root(target_dir) + cf_source
- if is_url(cf_url) and check_url(cf_url):
- cf_pathid = PathIdentifier.from_target(cf_url)
-
- source_pathid = PathIdentifier.from_target(source_url)
- if source_pathid == cf_pathid:
-            report('the target "%s" was copied from the source "%s" in rev %s and committed in rev %s' %
- (target_dir, source_url, cf_rev, copy_committed_in_rev))
- revision_range = "1-" + cf_rev
-
- # When neither the merge source nor target is a copy of the other, and
- # the user did not specify a revision range, then choose a default which is
- # the current revision; saying, in effect, "everything has been merged, so
-    # mark as integrated up to the latest rev on the source URL."
- if not revision_range:
- revision_range = "1-" + get_latest_rev(source_url)
-
- revs = RevisionSet(revision_range)
-
- report('marking "%s" as already containing revisions "%s" of "%s"' %
- (target_dir, revs, source_url))
-
- revs = str(revs)
- # If the local svnmerge-integrated property already has an entry
- # for the source-pathid, simply error out.
- if not opts["force"] and target_props.has_key(source_pathid):
- error('Repository-relative path %s has already been initialized at %s\n'
- 'Use --force to re-initialize' % (source_pathid, target_dir))
- # set the pathid's external_form based on the user's options
- source_pathid.external_form = source_pathid.format(opts['location-type'])
-
- revs = str(revs)
- target_props[source_pathid] = revs
-
- # Set property
- set_merge_props(target_dir, target_props)
-
- # Write out commit message if desired
- if opts["commit-file"]:
- f = open(opts["commit-file"], "w")
- print >>f, 'Initialized merge tracking via "%s" with revisions "%s" from ' \
- % (NAME, revs)
- print >>f, '%s' % source_url
- f.close()
- report('wrote commit message to "%s"' % opts["commit-file"])
-
-def action_avail(branch_dir, branch_props):
- """Show commits available for merges."""
- source_revs, phantom_revs, reflected_revs, initialized_revs = \
- analyze_source_revs(branch_dir, opts["source-url"],
- find_reflected=
- should_find_reflected(branch_dir))
- report('skipping phantom revisions: %s' % phantom_revs)
- if reflected_revs:
- report('skipping reflected revisions: %s' % reflected_revs)
- report('skipping initialized revisions: %s' % initialized_revs)
-
- blocked_revs = get_blocked_revs(branch_dir, opts["source-pathid"])
- avail_revs = source_revs - opts["merged-revs"] - blocked_revs - \
- reflected_revs - initialized_revs
-
- # Compose the set of revisions to show
- revs = RevisionSet("")
- report_msg = "revisions available to be merged are:"
- if "avail" in opts["avail-showwhat"]:
- revs |= avail_revs
- if "blocked" in opts["avail-showwhat"]:
- revs |= blocked_revs
- report_msg = "revisions blocked are:"
-
- # Limit to revisions specified by -r (if any)
- if opts["revision"]:
- revs = revs & RevisionSet(opts["revision"])
-
- display_revisions(revs, opts["avail-display"],
- report_msg,
- opts["source-url"])
-
-def action_integrated(branch_dir, branch_props):
- """Show change sets already merged. This set of revisions is
- calculated from taking svnmerge-integrated property from the
- branch, and subtracting any revision older than the branch
- creation revision."""
- # Extract the integration info for the branch_dir
- branch_props = get_merge_props(branch_dir)
- revs = merge_props_to_revision_set(branch_props, opts["source-pathid"])
-
- # Lookup the oldest revision on the branch path.
- oldest_src_rev = get_created_rev(opts["source-url"])
-
- # Subtract any revisions which pre-date the branch.
- report("subtracting revisions which pre-date the source URL (%d)" %
- oldest_src_rev)
- revs = revs - RevisionSet(range(1, oldest_src_rev))
-
- # Limit to revisions specified by -r (if any)
- if opts["revision"]:
- revs = revs & RevisionSet(opts["revision"])
-
- display_revisions(revs, opts["integrated-display"],
- "revisions already integrated are:", opts["source-url"])
-
-def action_merge(branch_dir, branch_props):
- """Record merge meta data, and do the actual merge (if not
- requested otherwise via --record-only)."""
- # Check branch directory is ready for being modified
- check_dir_clean(branch_dir)
-
- source_revs, phantom_revs, reflected_revs, initialized_revs = \
- analyze_source_revs(branch_dir, opts["source-url"],
- find_reflected=
- should_find_reflected(branch_dir))
-
- if opts["revision"]:
- revs = RevisionSet(opts["revision"])
- else:
- revs = source_revs
-
- blocked_revs = get_blocked_revs(branch_dir, opts["source-pathid"])
- merged_revs = opts["merged-revs"]
-
- # Show what we're doing
- if opts["verbose"]: # just to avoid useless calculations
- if merged_revs & revs:
- report('"%s" already contains revisions %s' % (branch_dir,
- merged_revs & revs))
- if phantom_revs:
- report('memorizing phantom revision(s): %s' % phantom_revs)
- if reflected_revs:
- report('memorizing reflected revision(s): %s' % reflected_revs)
- if blocked_revs & revs:
- report('skipping blocked revisions(s): %s' % (blocked_revs & revs))
- if initialized_revs:
- report('skipping initialized revision(s): %s' % initialized_revs)
-
- # Compute final merge set.
- revs = revs - merged_revs - blocked_revs - reflected_revs - \
- phantom_revs - initialized_revs
- if not revs:
- report('no revisions to merge, exiting')
- return
-
- # When manually marking revisions as merged, we only update the
- # integration meta data, and don't perform an actual merge.
- record_only = opts["record-only"]
-
- if record_only:
- report('recording merge of revision(s) %s from "%s"' %
- (revs, opts["source-url"]))
- else:
- report('merging in revision(s) %s from "%s"' %
- (revs, opts["source-url"]))
-
- # Do the merge(s). Note: the starting revision number to 'svn merge'
- # is NOT inclusive so we have to subtract one from start.
- # We try to keep the number of merge operations as low as possible,
- # because it is faster and reduces the number of conflicts.
- old_block_props = get_block_props(branch_dir)
- merge_metadata = logs[opts["source-url"]].merge_metadata()
- block_metadata = logs[opts["source-url"]].block_metadata()
- for start,end in minimal_merge_intervals(revs, phantom_revs):
- if not record_only:
- # Preset merge/blocked properties to the source value at
- # the start rev to avoid spurious property conflicts
- set_merge_props(branch_dir, merge_metadata.get(start - 1))
- set_block_props(branch_dir, block_metadata.get(start - 1))
- # Do the merge
- svn_command("merge --force -r %d:%d %s %s" % \
- (start - 1, end, opts["source-url"], branch_dir))
- # TODO: to support graph merging, add logic to merge the property
- # meta-data manually
-
- # Update the set of merged revisions.
- merged_revs = merged_revs | revs | reflected_revs | phantom_revs | initialized_revs
- branch_props[opts["source-pathid"]] = str(merged_revs)
- set_merge_props(branch_dir, branch_props)
- # Reset the blocked revs
- set_block_props(branch_dir, old_block_props)
-
- # Write out commit message if desired
- if opts["commit-file"]:
- f = open(opts["commit-file"], "w")
- if record_only:
- print >>f, 'Recorded merge of revisions %s via %s from ' % \
- (revs, NAME)
- else:
- print >>f, 'Merged revisions %s via %s from ' % \
- (revs, NAME)
- print >>f, '%s' % opts["source-url"]
- if opts["commit-verbose"]:
- print >>f
- print >>f, construct_merged_log_message(opts["source-url"], revs),
-
- f.close()
- report('wrote commit message to "%s"' % opts["commit-file"])
-
-def action_block(branch_dir, branch_props):
- """Block revisions."""
- # Check branch directory is ready for being modified
- check_dir_clean(branch_dir)
-
- source_revs, phantom_revs, reflected_revs, initialized_revs = \
- analyze_source_revs(branch_dir, opts["source-url"])
- revs_to_block = source_revs - opts["merged-revs"]
-
- # Limit to revisions specified by -r (if any)
- if opts["revision"]:
- revs_to_block = RevisionSet(opts["revision"]) & revs_to_block
-
- if not revs_to_block:
- error('no available revisions to block')
-
- # Change blocked information
- blocked_revs = get_blocked_revs(branch_dir, opts["source-pathid"])
- blocked_revs = blocked_revs | revs_to_block
- set_blocked_revs(branch_dir, opts["source-pathid"], blocked_revs)
-
- # Write out commit message if desired
- if opts["commit-file"]:
- f = open(opts["commit-file"], "w")
- print >>f, 'Blocked revisions %s via %s' % (revs_to_block, NAME)
- if opts["commit-verbose"]:
- print >>f
- print >>f, construct_merged_log_message(opts["source-url"],
- revs_to_block),
-
- f.close()
- report('wrote commit message to "%s"' % opts["commit-file"])
-
-def action_unblock(branch_dir, branch_props):
- """Unblock revisions."""
- # Check branch directory is ready for being modified
- check_dir_clean(branch_dir)
-
- blocked_revs = get_blocked_revs(branch_dir, opts["source-pathid"])
- revs_to_unblock = blocked_revs
-
- # Limit to revisions specified by -r (if any)
- if opts["revision"]:
- revs_to_unblock = revs_to_unblock & RevisionSet(opts["revision"])
-
- if not revs_to_unblock:
- error('no available revisions to unblock')
-
- # Change blocked information
- blocked_revs = blocked_revs - revs_to_unblock
- set_blocked_revs(branch_dir, opts["source-pathid"], blocked_revs)
-
- # Write out commit message if desired
- if opts["commit-file"]:
- f = open(opts["commit-file"], "w")
- print >>f, 'Unblocked revisions %s via %s' % (revs_to_unblock, NAME)
- if opts["commit-verbose"]:
- print >>f
- print >>f, construct_merged_log_message(opts["source-url"],
- revs_to_unblock),
- f.close()
- report('wrote commit message to "%s"' % opts["commit-file"])
-
-def action_rollback(branch_dir, branch_props):
- """Rollback previously integrated revisions."""
-
- # Make sure the revision arguments are present
- if not opts["revision"]:
- error("The '-r' option is mandatory for rollback")
-
- # Check branch directory is ready for being modified
- check_dir_clean(branch_dir)
-
- # Extract the integration info for the branch_dir
- branch_props = get_merge_props(branch_dir)
- # Get the list of all revisions already merged into this source-pathid.
- merged_revs = merge_props_to_revision_set(branch_props,
- opts["source-pathid"])
-
- # At which revision was the src created?
- oldest_src_rev = get_created_rev(opts["source-url"])
- src_pre_exist_range = RevisionSet("1-%d" % oldest_src_rev)
-
- # Limit to revisions specified by -r (if any)
- revs = merged_revs & RevisionSet(opts["revision"])
-
- # make sure there's some revision to rollback
- if not revs:
- report("Nothing to rollback in revision range r%s" % opts["revision"])
- return
-
- # If even one specified revision lies outside the lifetime of the
- # merge source, error out.
- if revs & src_pre_exist_range:
- err_str = "Specified revision range falls out of the rollback range.\n"
- err_str += "%s was created at r%d" % (opts["source-pathid"],
- oldest_src_rev)
- error(err_str)
-
- record_only = opts["record-only"]
-
- if record_only:
- report('recording rollback of revision(s) %s from "%s"' %
- (revs, opts["source-url"]))
- else:
- report('rollback of revision(s) %s from "%s"' %
- (revs, opts["source-url"]))
-
- # Do the reverse merge(s). Note: the starting revision number
- # to 'svn merge' is NOT inclusive so we have to subtract one from start.
- # We try to keep the number of merge operations as low as possible,
- # because it is faster and reduces the number of conflicts.
- rollback_intervals = minimal_merge_intervals(revs, [])
- # rollback in the reverse order of merge
- rollback_intervals.reverse()
- for start, end in rollback_intervals:
- if not record_only:
- # Do the merge
- svn_command("merge --force -r %d:%d %s %s" % \
- (end, start - 1, opts["source-url"], branch_dir))
-
- # Write out commit message if desired
- # calculate the phantom revs first
- if opts["commit-file"]:
- f = open(opts["commit-file"], "w")
- if record_only:
- print >>f, 'Recorded rollback of revisions %s via %s from ' % \
- (revs , NAME)
- else:
- print >>f, 'Rolled back revisions %s via %s from ' % \
- (revs , NAME)
- print >>f, '%s' % opts["source-url"]
-
- f.close()
- report('wrote commit message to "%s"' % opts["commit-file"])
-
- # Update the set of merged revisions.
- merged_revs = merged_revs - revs
- branch_props[opts["source-pathid"]] = str(merged_revs)
- set_merge_props(branch_dir, branch_props)
-
-def action_uninit(branch_dir, branch_props):
- """Uninit SOURCE URL."""
- # Check branch directory is ready for being modified
- check_dir_clean(branch_dir)
-
- # If the source-pathid does not have an entry in the svnmerge-integrated
- # property, simply error out.
- if not branch_props.has_key(opts["source-pathid"]):
- error('Repository-relative path "%s" does not contain merge '
- 'tracking information for "%s"' \
- % (opts["source-pathid"], branch_dir))
-
- del branch_props[opts["source-pathid"]]
-
- # Set merge property with the selected source deleted
- set_merge_props(branch_dir, branch_props)
-
- # Set blocked revisions for the selected source to None
- set_blocked_revs(branch_dir, opts["source-pathid"], None)
-
- # Write out commit message if desired
- if opts["commit-file"]:
- f = open(opts["commit-file"], "w")
-        print >>f, 'Removed merge tracking via "%s" for ' % NAME
- print >>f, '%s' % opts["source-url"]
- f.close()
- report('wrote commit message to "%s"' % opts["commit-file"])
-
-###############################################################################
-# Command line parsing -- options and commands management
-###############################################################################
-
-class OptBase:
- def __init__(self, *args, **kwargs):
- self.help = kwargs["help"]
- del kwargs["help"]
- self.lflags = []
- self.sflags = []
- for a in args:
- if a.startswith("--"): self.lflags.append(a)
- elif a.startswith("-"): self.sflags.append(a)
- else:
- raise TypeError, "invalid flag name: %s" % a
- if kwargs.has_key("dest"):
- self.dest = kwargs["dest"]
- del kwargs["dest"]
- else:
- if not self.lflags:
- raise TypeError, "cannot deduce dest name without long options"
- self.dest = self.lflags[0][2:]
- if kwargs:
- raise TypeError, "invalid keyword arguments: %r" % kwargs.keys()
- def repr_flags(self):
- f = self.sflags + self.lflags
- r = f[0]
- for fl in f[1:]:
- r += " [%s]" % fl
- return r
-
-class Option(OptBase):
- def __init__(self, *args, **kwargs):
- self.default = kwargs.setdefault("default", 0)
- del kwargs["default"]
- self.value = kwargs.setdefault("value", None)
- del kwargs["value"]
- OptBase.__init__(self, *args, **kwargs)
- def apply(self, state, value):
- assert value == ""
- if self.value is not None:
- state[self.dest] = self.value
- else:
- state[self.dest] += 1
-
-class OptionArg(OptBase):
- def __init__(self, *args, **kwargs):
- self.default = kwargs["default"]
- del kwargs["default"]
- self.metavar = kwargs.setdefault("metavar", None)
- del kwargs["metavar"]
- OptBase.__init__(self, *args, **kwargs)
-
- if self.metavar is None:
- if self.dest is not None:
- self.metavar = self.dest.upper()
- else:
- self.metavar = "arg"
- if self.default:
- self.help += " (default: %s)" % self.default
- def apply(self, state, value):
- assert value is not None
- state[self.dest] = value
- def repr_flags(self):
- r = OptBase.repr_flags(self)
- return r + " " + self.metavar
-
-class CommandOpts:
- class Cmd:
- def __init__(self, *args):
- self.name, self.func, self.usage, self.help, self.opts = args
- def short_help(self):
- return self.help.split(".")[0]
- def __str__(self):
- return self.name
- def __call__(self, *args, **kwargs):
- return self.func(*args, **kwargs)
-
- def __init__(self, global_opts, common_opts, command_table, version=None):
- self.progname = NAME
- self.version = version.replace("%prog", self.progname)
- self.cwidth = console_width() - 2
- self.ctable = command_table.copy()
- self.gopts = global_opts[:]
- self.copts = common_opts[:]
- self._add_builtins()
- for k in self.ctable.keys():
- cmd = self.Cmd(k, *self.ctable[k])
- opts = []
- for o in cmd.opts:
- if isinstance(o, types.StringType) or \
- isinstance(o, types.UnicodeType):
- o = self._find_common(o)
- opts.append(o)
- cmd.opts = opts
- self.ctable[k] = cmd
-
- def _add_builtins(self):
- self.gopts.append(
- Option("-h", "--help", help="show help for this command and exit"))
- if self.version is not None:
- self.gopts.append(
- Option("-V", "--version", help="show version info and exit"))
- self.ctable["help"] = (self._cmd_help,
- "help [COMMAND]",
- "Display help for a specific command. If COMMAND is omitted, "
- "display brief command description.",
- [])
-
- def _cmd_help(self, cmd=None, *args):
- if args:
- self.error("wrong number of arguments", "help")
- if cmd is not None:
- cmd = self._command(cmd)
- self.print_command_help(cmd)
- else:
- self.print_command_list()
-
- def _paragraph(self, text, width=78):
- chunks = re.split("\s+", text.strip())
- chunks.reverse()
- lines = []
- while chunks:
- L = chunks.pop()
- while chunks and len(L) + len(chunks[-1]) + 1 <= width:
- L += " " + chunks.pop()
- lines.append(L)
- return lines
-
- def _paragraphs(self, text, *args, **kwargs):
- pars = text.split("\n\n")
- lines = self._paragraph(pars[0], *args, **kwargs)
- for p in pars[1:]:
- lines.append("")
- lines.extend(self._paragraph(p, *args, **kwargs))
- return lines
-
- def _print_wrapped(self, text, indent=0):
- text = self._paragraphs(text, self.cwidth - indent)
- print text.pop(0)
- for t in text:
- print " " * indent + t
-
- def _find_common(self, fl):
- for o in self.copts:
- if fl in o.lflags+o.sflags:
- return o
- assert False, fl
-
- def _compute_flags(self, opts, check_conflicts=True):
- back = {}
- sfl = ""
- lfl = []
- for o in opts:
- sapp = lapp = ""
- if isinstance(o, OptionArg):
- sapp, lapp = ":", "="
- for s in o.sflags:
- if check_conflicts and back.has_key(s):
- raise RuntimeError, "option conflict: %s" % s
- back[s] = o
- sfl += s[1:] + sapp
- for l in o.lflags:
- if check_conflicts and back.has_key(l):
- raise RuntimeError, "option conflict: %s" % l
- back[l] = o
- lfl.append(l[2:] + lapp)
- return sfl, lfl, back
-
- def _extract_command(self, args):
- """
- Try to extract the command name from the argument list. This is
- non-trivial because we want to allow command-specific options even
- before the command itself.
- """
- opts = self.gopts[:]
- for cmd in self.ctable.values():
- opts.extend(cmd.opts)
- sfl, lfl, _ = self._compute_flags(opts, check_conflicts=False)
-
- lopts,largs = getopt.getopt(args, sfl, lfl)
- if not largs:
- return None
- return self._command(largs[0])
-
- def _fancy_getopt(self, args, opts, state=None):
- if state is None:
- state= {}
- for o in opts:
- if not state.has_key(o.dest):
- state[o.dest] = o.default
-
- sfl, lfl, back = self._compute_flags(opts)
- try:
- lopts,args = getopt.gnu_getopt(args, sfl, lfl)
- except AttributeError:
- # Before Python 2.3, there was no gnu_getopt support.
- # So we can't parse intermixed positional arguments
- # and options.
- lopts,args = getopt.getopt(args, sfl, lfl)
-
- for o,v in lopts:
- back[o].apply(state, v)
- return state, args
-
- def _command(self, cmd):
- if not self.ctable.has_key(cmd):
- self.error("unknown command: '%s'" % cmd)
- return self.ctable[cmd]
-
- def parse(self, args):
- if not args:
- self.print_small_help()
- sys.exit(0)
-
- cmd = None
- try:
- cmd = self._extract_command(args)
- opts = self.gopts[:]
- if cmd:
- opts.extend(cmd.opts)
- args.remove(cmd.name)
- state, args = self._fancy_getopt(args, opts)
- except getopt.GetoptError, e:
- self.error(e, cmd)
-
- # Handle builtins
- if self.version is not None and state["version"]:
- self.print_version()
- sys.exit(0)
- if state["help"]: # special case for --help
- if cmd:
- self.print_command_help(cmd)
- sys.exit(0)
- cmd = self.ctable["help"]
- else:
- if cmd is None:
- self.error("command argument required")
- if str(cmd) == "help":
- cmd(*args)
- sys.exit(0)
- return cmd, args, state
-
- def error(self, s, cmd=None):
- print >>sys.stderr, "%s: %s" % (self.progname, s)
- if cmd is not None:
- self.print_command_help(cmd)
- else:
- self.print_small_help()
- sys.exit(1)
- def print_small_help(self):
- print "Type '%s help' for usage" % self.progname
- def print_usage_line(self):
- print "usage: %s <subcommand> [options...] [args...]\n" % self.progname
- def print_command_list(self):
- print "Available commands (use '%s help COMMAND' for more details):\n" \
- % self.progname
- cmds = self.ctable.keys()
- cmds.sort()
- indent = max(map(len, cmds))
- for c in cmds:
- h = self.ctable[c].short_help()
- print " %-*s " % (indent, c),
- self._print_wrapped(h, indent+6)
- def print_command_help(self, cmd):
- cmd = self.ctable[str(cmd)]
- print 'usage: %s %s\n' % (self.progname, cmd.usage)
- self._print_wrapped(cmd.help)
- def print_opts(opts, self=self):
- if not opts: return
- flags = [o.repr_flags() for o in opts]
- indent = max(map(len, flags))
- for f,o in zip(flags, opts):
- print " %-*s :" % (indent, f),
- self._print_wrapped(o.help, indent+5)
- print '\nCommand options:'
- print_opts(cmd.opts)
- print '\nGlobal options:'
- print_opts(self.gopts)
-
- def print_version(self):
- print self.version
-
-###############################################################################
-# Options and Commands description
-###############################################################################
-
-global_opts = [
- Option("-F", "--force",
- help="force operation even if the working copy is not clean, or "
- "there are pending updates"),
- Option("-n", "--dry-run",
- help="don't actually change anything, just pretend; "
- "implies --show-changes"),
- Option("-s", "--show-changes",
- help="show subversion commands that make changes"),
- Option("-v", "--verbose",
- help="verbose mode: output more information about progress"),
- OptionArg("-u", "--username",
- default=None,
- help="invoke subversion commands with the supplied username"),
- OptionArg("-p", "--password",
- default=None,
- help="invoke subversion commands with the supplied password"),
- OptionArg("-c", "--config-dir", metavar="DIR",
- default=None,
- help="cause subversion commands to consult runtime config directory DIR"),
-]
-
-common_opts = [
- Option("-b", "--bidirectional",
- value=True,
- default=False,
- help="remove reflected and initialized revisions from merge candidates. "
- "Not required but may be specified to speed things up slightly"),
- OptionArg("-f", "--commit-file", metavar="FILE",
- default="svnmerge-commit-message.txt",
- help="set the name of the file where the suggested log message "
- "is written to"),
- Option("-M", "--record-only",
- value=True,
- default=False,
- help="do not perform an actual merge of the changes, yet record "
- "that a merge happened"),
- OptionArg("-r", "--revision",
- metavar="REVLIST",
- default="",
- help="specify a revision list, consisting of revision numbers "
- 'and ranges separated by commas, e.g., "534,537-539,540"'),
- OptionArg("-S", "--source", "--head",
- default=None,
- help="specify a merge source for this branch. It can be either "
- "a working directory path, a full URL, or an unambiguous "
- "substring of one of the locations for which merge tracking was "
- "already initialized. Needed only to disambiguate in case of "
- "multiple merge sources"),
-]
-
-command_table = {
- "init": (action_init,
- "init [OPTION...] [SOURCE]",
- """Initialize merge tracking from SOURCE on the current working
- directory.
-
- If SOURCE is specified, all the revisions in SOURCE are marked as already
- merged; if this is not correct, you can use --revision to specify the
- exact list of already-merged revisions.
-
- If SOURCE is omitted, then it is computed from the "svn cp" history of the
- current working directory (searching back for the branch point); in this
- case, %s assumes that no revision has been integrated yet since
- the branch point (unless you teach it with --revision).""" % NAME,
- [
- "-f", "-r", # import common opts
- OptionArg("-L", "--location-type",
- dest="location-type",
- default="path",
- help="Use this type of location identifier in the new " +
- "Subversion properties; 'uuid', 'url', or 'path' " +
- "(default)"),
- ]),
-
- "avail": (action_avail,
- "avail [OPTION...] [PATH]",
- """Show unmerged revisions available for PATH as a revision list.
- If --revision is given, the revisions shown will be limited to those
- also specified in the option.
-
- When svnmerge is used to bidirectionally merge changes between a
- branch and its source, it is necessary to not merge the same changes
- forth and back: e.g., if you committed a merge of a certain
- revision of the branch into the source, you do not want that commit
- to appear as available to merged into the branch (as the code
- originated in the branch itself!). svnmerge will automatically
- exclude these so-called "reflected" revisions.""",
- [
- Option("-A", "--all",
- dest="avail-showwhat",
- value=["blocked", "avail"],
- default=["avail"],
- help="show both available and blocked revisions (aka ignore "
- "blocked revisions)"),
- "-b",
- Option("-B", "--blocked",
- dest="avail-showwhat",
- value=["blocked"],
- help="show the blocked revision list (see '%s block')" % NAME),
- Option("-d", "--diff",
- dest="avail-display",
- value="diffs",
- default="revisions",
- help="show corresponding diff instead of revision list"),
- Option("--summarize",
- dest="avail-display",
- value="summarize",
- help="show summarized diff instead of revision list"),
- Option("-l", "--log",
- dest="avail-display",
- value="logs",
- help="show corresponding log history instead of revision list"),
- "-r",
- "-S",
- ]),
-
- "integrated": (action_integrated,
- "integrated [OPTION...] [PATH]",
- """Show merged revisions available for PATH as a revision list.
- If --revision is given, the revisions shown will be limited to
- those also specified in the option.""",
- [
- Option("-d", "--diff",
- dest="integrated-display",
- value="diffs",
- default="revisions",
- help="show corresponding diff instead of revision list"),
- Option("-l", "--log",
- dest="integrated-display",
- value="logs",
- help="show corresponding log history instead of revision list"),
- "-r",
- "-S",
- ]),
-
- "rollback": (action_rollback,
- "rollback [OPTION...] [PATH]",
- """Rollback previously merged in revisions from PATH. The
- --revision option is mandatory, and specifies which revisions
- will be rolled back. Only the previously integrated merges
- will be rolled back.
-
- When manually rolling back changes, --record-only can be used to
- instruct %s that a manual rollback of a certain revision
- already happened, so that it can record it and offer that
- revision for merge henceforth.""" % (NAME),
- [
- "-f", "-r", "-S", "-M", # import common opts
- ]),
-
- "merge": (action_merge,
- "merge [OPTION...] [PATH]",
- """Merge in revisions into PATH from its source. If --revision is omitted,
- all the available revisions will be merged. In any case, already merged-in
- revisions will NOT be merged again.
-
- When svnmerge is used to bidirectionally merge changes between a
- branch and its source, it is necessary to not merge the same changes
- forth and back: e.g., if you committed a merge of a certain
- revision of the branch into the source, you do not want that commit
-    to appear as available to be merged into the branch (as the code
- originated in the branch itself!). svnmerge will automatically
- exclude these so-called "reflected" revisions.
-
- When manually merging changes across branches, --record-only can
- be used to instruct %s that a manual merge of a certain revision
- already happened, so that it can record it and not offer that
- revision for merge anymore. Conversely, when there are revisions
- which should not be merged, use '%s block'.""" % (NAME, NAME),
- [
- "-b", "-f", "-r", "-S", "-M", # import common opts
- ]),
-
- "block": (action_block,
- "block [OPTION...] [PATH]",
- """Block revisions within PATH so that they disappear from the available
- list. This is useful to hide revisions which will not be integrated.
- If --revision is omitted, it defaults to all the available revisions.
-
- Do not use this option to hide revisions that were manually merged
- into the branch. Instead, use '%s merge --record-only', which
- records that a merge happened (as opposed to a merge which should
- not happen).""" % NAME,
- [
- "-f", "-r", "-S", # import common opts
- ]),
-
- "unblock": (action_unblock,
- "unblock [OPTION...] [PATH]",
- """Revert the effect of '%s block'. If --revision is omitted, all the
- blocked revisions are unblocked""" % NAME,
- [
- "-f", "-r", "-S", # import common opts
- ]),
-
- "uninit": (action_uninit,
- "uninit [OPTION...] [PATH]",
- """Remove merge tracking information from PATH. It cleans any kind of merge
- tracking information (including the list of blocked revisions). If there
- are multiple sources, use --source to indicate which source you want to
- forget about.""",
- [
- "-f", "-S", # import common opts
- ]),
-}
-
-
-def main(args):
- global opts
-
- # Initialize default options
- opts = default_opts.copy()
- logs.clear()
-
- optsparser = CommandOpts(global_opts, common_opts, command_table,
- version="%%prog r%s\n modified: %s\n\n"
- "Copyright (C) 2004,2005 Awarix Inc.\n"
- "Copyright (C) 2005, Giovanni Bajo"
- % (__revision__, __date__))
-
- cmd, args, state = optsparser.parse(args)
- opts.update(state)
-
- source = opts.get("source", None)
- branch_dir = "."
-
- if str(cmd) == "init":
- if len(args) == 1:
- source = args[0]
- elif len(args) > 1:
- optsparser.error("wrong number of parameters", cmd)
- elif str(cmd) in command_table.keys():
- if len(args) == 1:
- branch_dir = args[0]
- elif len(args) > 1:
- optsparser.error("wrong number of parameters", cmd)
- else:
- assert False, "command not handled: %s" % cmd
-
- # Validate branch_dir
- if not is_wc(branch_dir):
- if str(cmd) == "avail":
- info = None
-            # Note that 'svn info' does not exit with an error when an
-            # invalid target is specified (as one might expect), so the
-            # try/except here is not strictly necessary; it is retained
-            # to make the intended handling explicit.
- try:
- info = get_svninfo(branch_dir)
- except LaunchError:
- pass
- # test that we definitely targeted a subversion directory,
- # mirroring the purpose of the earlier is_wc() call
- if info is None or not info.has_key("Node Kind") or info["Node Kind"] != "directory":
- error('"%s" is neither a valid URL, nor a working directory' % branch_dir)
- else:
- error('"%s" is not a subversion working directory' % branch_dir)
-
- # give out some hints as to potential pathids
- PathIdentifier.hint(branch_dir)
- if source: PathIdentifier.hint(source)
-
- # Extract the integration info for the branch_dir
- branch_props = get_merge_props(branch_dir)
-
- # Calculate source_url and source_path
- report("calculate source path for the branch")
- if not source:
- if str(cmd) == "init":
- cf_source, cf_rev, copy_committed_in_rev = get_copyfrom(branch_dir)
- if not cf_source:
- error('no copyfrom info available. '
- 'Explicit source argument (-S/--source) required.')
- opts["source-url"] = get_repo_root(branch_dir) + cf_source
- opts["source-pathid"] = PathIdentifier.from_target(opts["source-url"])
-
- if not opts["revision"]:
- opts["revision"] = "1-" + cf_rev
- else:
- opts["source-pathid"] = get_default_source(branch_dir, branch_props)
- opts["source-url"] = opts["source-pathid"].get_url()
-
- assert is_pathid(opts["source-pathid"])
- assert is_url(opts["source-url"])
- else:
- # The source was given as a command line argument and is stored in
- # SOURCE. Ensure that the specified source does not end in a /,
- # otherwise it's easy to have the same source path listed more
- # than once in the integrated version properties, with and without
- # trailing /'s.
- source = rstrip(source, "/")
- if not is_wc(source) and not is_url(source):
- # Check if it is a substring of a pathid recorded
- # within the branch properties.
- found = []
- for pathid in branch_props.keys():
- if pathid.match_substring(source):
- found.append(pathid)
- if len(found) == 1:
- # (assumes pathid is a repository-relative-path)
- source_pathid = found[0]
- source = source_pathid.get_url()
- else:
- error('"%s" is neither a valid URL, nor an unambiguous '
- 'substring of a repository path, nor a working directory'
- % source)
- else:
- source_pathid = PathIdentifier.from_target(source)
-
- source_pathid = PathIdentifier.from_target(source)
- if str(cmd) == "init" and \
- source_pathid == PathIdentifier.from_target("."):
- error("cannot init integration source path '%s'\n"
- "Its repository-relative path must differ from the "
- "repository-relative path of the current directory."
- % source_pathid)
- opts["source-pathid"] = source_pathid
- opts["source-url"] = target_to_url(source)
-
- # Sanity check source_url
- assert is_url(opts["source-url"])
- # SVN does not support non-normalized URL (and we should not
- # have created them)
- assert opts["source-url"].find("/..") < 0
-
- report('source is "%s"' % opts["source-url"])
-
- # Get previously merged revisions (except when command is init)
- if str(cmd) != "init":
- opts["merged-revs"] = merge_props_to_revision_set(branch_props,
- opts["source-pathid"])
-
- # Perform the action
- cmd(branch_dir, branch_props)
-
-
-if __name__ == "__main__":
- try:
- main(sys.argv[1:])
- except LaunchError, (ret, cmd, out):
- err_msg = "command execution failed (exit code: %d)\n" % ret
- err_msg += cmd + "\n"
- err_msg += "".join(out)
- error(err_msg)
- except KeyboardInterrupt:
- # Avoid traceback on CTRL+C
- print "aborted by user"
- sys.exit(1)
diff --git a/staging/subversion/svnserve b/staging/subversion/svnserve
deleted file mode 100755
index 670fee742..000000000
--- a/staging/subversion/svnserve
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/bin/bash
-
-. /etc/rc.conf
-. /etc/rc.d/functions
-. /etc/conf.d/svnserve
-
-PID=`pidof -o %PPID /usr/bin/svnserve`
-case "$1" in
- start)
- stat_busy "Starting svnserve"
- if [ -z "$PID" ]; then
- if [ -n "$SVNSERVE_USER" ]; then
- su -s '/bin/sh' $SVNSERVE_USER -c "/usr/bin/svnserve -d $SVNSERVE_ARGS" &
- else
- /usr/bin/svnserve -d $SVNSERVE_ARGS &
- fi
- fi
- if [ ! -z "$PID" -o $? -gt 0 ]; then
- stat_fail
- else
- add_daemon svnserve
- stat_done
- fi
- ;;
- stop)
- stat_busy "Stopping svnserve"
- [ ! -z "$PID" ] && kill $PID &> /dev/null
- if [ $? -gt 0 ]; then
- stat_fail
- else
- rm_daemon svnserve
- stat_done
- fi
- ;;
- restart)
- $0 stop
- sleep 1
- $0 start
- ;;
- *)
- echo "usage: $0 {start|stop|restart}"
-esac
diff --git a/staging/subversion/svnserve.conf b/staging/subversion/svnserve.conf
deleted file mode 100644
index 37fb7ea10..000000000
--- a/staging/subversion/svnserve.conf
+++ /dev/null
@@ -1,7 +0,0 @@
-#
-# Parameters to be passed to svnserve
-#
-#SVNSERVE_ARGS="-r /path/to/some/repos"
-SVNSERVE_ARGS=""
-
-#SVNSERVE_USER="svn"
diff --git a/staging/wireshark/PKGBUILD b/staging/wireshark/PKGBUILD
deleted file mode 100644
index a83355164..000000000
--- a/staging/wireshark/PKGBUILD
+++ /dev/null
@@ -1,54 +0,0 @@
-# $Id: PKGBUILD 121168 2011-04-29 06:39:53Z stephane $
-# Maintainer: Guillaume ALAUX <guillaume at alaux dot net>
-# Contributor: Florian Pritz <bluewind at jabber dot ccc dot de>
-pkgname=(wireshark-cli wireshark-gtk)
-pkgbase=wireshark
-pkgver=1.4.6
-pkgrel=2
-arch=('i686' 'x86_64')
-license=('GPL2')
-makedepends=('bison' 'flex' 'gtk2' 'krb5' 'libpcap' 'bash' 'gnutls' 'libcap')
-url="http://www.wireshark.org/"
-options=(!libtool)
-source=(http://www.wireshark.org/download/src/${pkgbase}-${pkgver}.tar.bz2)
-md5sums=('fd301004ebc5fac8e56c2f0d4ef6173f')
-
-build() {
- cd "${srcdir}/${pkgbase}-${pkgver}"
- export CFLAGS="-fno-unit-at-a-time ${CFLAGS}"
-
- ./configure --prefix=/usr --with-ssl --with-zlib=no
- make all
-}
-
-package_wireshark-cli() {
- pkgdesc="A free network protocol analyzer for Unix/Linux and Windows - CLI version"
- depends=('krb5' 'libpcap' 'bash' 'gnutls' 'libcap' 'glib2')
- install=wireshark.install
- conflicts=(wireshark)
-
- cd "${srcdir}/${pkgbase}-${pkgver}"
-
- make DESTDIR="${pkgdir}" install
-
-  # the wireshark group has gid 150
- chgrp 150 "${pkgdir}/usr/bin/dumpcap"
- chmod 754 "${pkgdir}/usr/bin/dumpcap"
- rm "$pkgdir/usr/bin/wireshark"
-}
-
-package_wireshark-gtk() {
- pkgdesc="A free network protocol analyzer for Unix/Linux and Windows - GTK frontend"
- depends=('gtk2' 'wireshark-cli')
- install=wireshark-gtk.install
- replaces=(wireshark)
- conflicts=(wireshark)
-
- cd "${srcdir}/${pkgbase}-${pkgver}"
-
- install -Dm755 .libs/wireshark "$pkgdir/usr/bin/wireshark"
- for d in 16 32 48; do
- install -Dm644 image/hi${d}-app-wireshark.png ${pkgdir}/usr/share/icons/hicolor/${d}x${d}/apps/wireshark.png
- done
- install -Dm644 wireshark.desktop ${pkgdir}/usr/share/applications/wireshark.desktop
-}
diff --git a/staging/wireshark/wireshark-gtk.install b/staging/wireshark/wireshark-gtk.install
deleted file mode 100644
index 6b1b64bdd..000000000
--- a/staging/wireshark/wireshark-gtk.install
+++ /dev/null
@@ -1,11 +0,0 @@
-post_install() {
- gtk-update-icon-cache -q -t -f usr/share/icons/hicolor
-}
-
-post_upgrade() {
- post_install
-}
-
-post_remove() {
- post_install
-}
diff --git a/staging/wireshark/wireshark.install b/staging/wireshark/wireshark.install
deleted file mode 100644
index 6585ba272..000000000
--- a/staging/wireshark/wireshark.install
+++ /dev/null
@@ -1,18 +0,0 @@
-post_install() {
- getent group wireshark >/dev/null 2>&1 || usr/sbin/groupadd -g 150 wireshark &>/dev/null
-
- setcap 'CAP_NET_RAW+eip CAP_NET_ADMIN+eip' usr/bin/dumpcap
-  echo "NOTE: To run wireshark as a normal user you have to add yourself to the wireshark group"
-}
-
-post_upgrade() {
- getent group wireshark >/dev/null 2>&1 || usr/sbin/groupadd -g 150 wireshark &>/dev/null
-
- setcap 'CAP_NET_RAW+eip CAP_NET_ADMIN+eip' usr/bin/dumpcap
-}
-
-post_remove() {
- if getent group wireshark >/dev/null 2>&1; then
- groupdel wireshark
- fi
-}
diff --git a/staging/xfprint/PKGBUILD b/staging/xfprint/PKGBUILD
deleted file mode 100644
index 1d622e190..000000000
--- a/staging/xfprint/PKGBUILD
+++ /dev/null
@@ -1,35 +0,0 @@
-# $Id: PKGBUILD 122851 2011-05-06 14:01:10Z stephane $
-# Maintainer: Tobias Kieslich <tobias funnychar archlinux.org>
-
-pkgname=xfprint
-pkgver=4.6.1
-pkgrel=5
-pkgdesc="A print dialog and a printer manager for Xfce"
-arch=(i686 x86_64)
-license=('GPL2')
-url="http://www.xfce.org/"
-groups=('xfce4')
-depends=("libxfcegui4>=$pkgver" "xfconf>=$pkgver" 'libcups>=1.3.10' 'a2ps' 'krb5'
- 'gnutls>=2.6.5' 'hicolor-icon-theme')
-makedepends=('pkgconfig' 'intltool')
-options=('!libtool')
-install=${pkgname}.install
-source=(http://archive.xfce.org/xfce/${pkgver}/src/${pkgname}-${pkgver}.tar.bz2
- xfprint-manager-fix.diff)
-md5sums=('d92fca97a42816085080baf07a99a62e'
- '69b3619a285e94d602a1d0ac08f4ca06')
-
-build() {
- cd ${srcdir}/${pkgname}-${pkgver}
- # fix xfprint-manager not showing cups printers
- # see http://bugs.archlinux.org/task/19965, http://bugs.gentoo.org/278047, http://bugzilla.xfce.org/show_bug.cgi?id=6089
- patch -Np0 -i ${srcdir}/xfprint-manager-fix.diff
- ./configure --prefix=/usr --sysconfdir=/etc --libexecdir=/usr/lib \
- --localstatedir=/var --disable-static
- make
-}
-
-package() {
- cd ${srcdir}/${pkgname}-${pkgver}
- make DESTDIR=${pkgdir} install
-}
diff --git a/staging/xfprint/xfprint-manager-fix.diff b/staging/xfprint/xfprint-manager-fix.diff
deleted file mode 100644
index 1bf0725f8..000000000
--- a/staging/xfprint/xfprint-manager-fix.diff
+++ /dev/null
@@ -1,29 +0,0 @@
---- xfprint-manager/main.c 2009-02-24 21:34:16.000000000 +0000
-+++ xfprint-manager/main.c.new 2010-07-15 21:30:51.098745852 +0000
-@@ -41,6 +41,7 @@
- #include <libxfprint/printer-list-window.h>
-
- #define CHANNEL "xfprint"
-+#define PROP_PRINTING_SYSTEM "/printing-system"
-
- extern void mainwin_setup (void);
-
-@@ -66,15 +67,15 @@
- channel = xfconf_channel_new (CHANNEL);
- if (channel) {
-
-- if (xfconf_channel_has_property (channel, "/XfPrint/system")) {
-- const gchar *system_name = xfconf_channel_get_string (channel, "/XfPrint/system", "none");
-+ if (xfconf_channel_has_property (channel, PROP_PRINTING_SYSTEM)) {
-+ const gchar *system_name = xfconf_channel_get_string (channel, PROP_PRINTING_SYSTEM, "none");
- if (g_ascii_strcasecmp (system_name, "none") != 0) {
- ps = printing_system_new (system_name);
- if (ps == NULL);
- g_warning ("Unable to load printing system module %s", system_name);
- }
- } else {
-- g_warning ("%s: XfPrint/system is not set", PACKAGE);
-+ g_warning ("%s: XfPrint/printing-system is not set", PACKAGE);
- }
- }
-
diff --git a/staging/xfprint/xfprint.install b/staging/xfprint/xfprint.install
deleted file mode 100644
index 21b79d2d4..000000000
--- a/staging/xfprint/xfprint.install
+++ /dev/null
@@ -1,11 +0,0 @@
-post_install() {
- gtk-update-icon-cache -q -t -f usr/share/icons/hicolor
-}
-
-post_upgrade() {
- post_install $1
-}
-
-post_remove() {
- gtk-update-icon-cache -q -t -f usr/share/icons/hicolor
-}