author    root <root@rshg047.dnsready.net>  2011-04-29 22:32:36 +0000
committer root <root@rshg047.dnsready.net>  2011-04-29 22:32:36 +0000
commit    693b5793b8c615601135bc04216a2ca0966087c5 (patch)
tree      1c1cf0bc5020b0226efff0b39c7847b3414efaa6
parent    19f42937be8f6619a85663cb71e24c05b7e8b6d2 (diff)
Fri Apr 29 22:32:36 UTC 2011
-rw-r--r--  community/denemo/PKGBUILD | 9
-rw-r--r--  community/gecko-mediaplayer/PKGBUILD | 12
-rw-r--r--  community/gnome-mplayer/PKGBUILD | 10
-rw-r--r--  community/gnome-mplayer/gnome-mplayer.install | 17
-rw-r--r--  community/pbzip2/PKGBUILD | 11
-rw-r--r--  community/yajl/PKGBUILD | 12
-rw-r--r--  extra/curl/PKGBUILD | 11
-rw-r--r--  extra/kismet/PKGBUILD | 18
-rw-r--r--  extra/llvm/PKGBUILD | 225
-rw-r--r--  extra/llvm/cindexer-clang-path.patch | 10
-rw-r--r--  extra/llvm/clang-plugin-loader-registry.patch | 11
-rw-r--r--  extra/llvm/clang-pure64.patch | 38
-rw-r--r--  extra/llvm/clang-toolchains-gcc-versions.patch | 12
-rw-r--r--  extra/llvm/enable-lto.patch | 36
-rw-r--r--  extra/openmpi/PKGBUILD | 4
-rw-r--r--  extra/python-cairo/PKGBUILD | 14
-rw-r--r--  libre/aufs2-libre/PKGBUILD | 17
-rw-r--r--  libre/aufs2-libre/aufs2-libre.install | 2
-rw-r--r--  libre/aufs2-libre/create-tarball.sh | 10
-rw-r--r--  libre/kernel26-libre-lts/PKGBUILD | 34
-rw-r--r--  libre/kernel26-libre-lts/config.i686 | 14
-rw-r--r--  libre/kernel26-libre-lts/config.x86_64 | 14
-rw-r--r--  staging/alpine/2.00-lpam.patch | 13
-rw-r--r--  staging/alpine/CVE-2008-5514.patch | 20
-rw-r--r--  staging/alpine/PKGBUILD | 44
-rw-r--r--  staging/cups/PKGBUILD | 110
-rwxr-xr-x  staging/cups/cups | 38
-rw-r--r--  staging/cups/cups-avahi.patch | 1089
-rw-r--r--  staging/cups/cups.install | 15
-rw-r--r--  staging/cups/cups.logrotate | 8
-rw-r--r--  staging/cups/cups.pam | 3
-rw-r--r--  staging/cvs/PKGBUILD | 39
-rw-r--r--  staging/cvs/cvs-1.11.23-cve-2010-3846.patch | 167
-rw-r--r--  staging/cvs/cvs-1.11.23-getline64.patch | 34
-rw-r--r--  staging/cvs/cvs.install | 20
-rw-r--r--  staging/krb5/CVE-2010-4022.patch | 19
-rw-r--r--  staging/krb5/CVE-2011-0281.0282.0283.patch | 126
-rw-r--r--  staging/krb5/CVE-2011-0284.patch | 13
-rw-r--r--  staging/krb5/CVE-2011-0285.patch | 39
-rw-r--r--  staging/krb5/PKGBUILD | 77
-rw-r--r--  staging/krb5/kadmind.rc | 40
-rw-r--r--  staging/krb5/krb5-kdc.rc | 40
-rw-r--r--  staging/mutt/PKGBUILD | 47
-rw-r--r--  staging/mutt/install | 8
-rw-r--r--  staging/neon/PKGBUILD | 29
-rw-r--r--  staging/openssh/PKGBUILD | 55
-rwxr-xr-x  staging/openssh/sshd | 48
-rw-r--r--  staging/openssh/sshd.confd | 4
-rw-r--r--  staging/openssh/sshd.pam | 10
-rw-r--r--  staging/subversion/PKGBUILD | 92
-rw-r--r--  staging/subversion/subversion.rpath.fix.patch | 10
-rw-r--r--  staging/subversion/subversion.suppress.deprecation.warnings.patch | 22
-rw-r--r--  staging/subversion/svn | 11
-rw-r--r--  staging/subversion/svnmerge.py | 2370
-rwxr-xr-x  staging/subversion/svnserve | 42
-rw-r--r--  staging/subversion/svnserve.conf | 7
-rw-r--r--  testing/blender/PKGBUILD | 84
-rw-r--r--  testing/gnome-desktop-sharp/PKGBUILD | 35
-rw-r--r--  testing/gnome-desktop-sharp/gnome-desktop-sharp-lib-target.patch | 9
-rw-r--r--  testing/gnome-desktop2/PKGBUILD | 32
60 files changed, 5266 insertions(+), 144 deletions(-)
diff --git a/community/denemo/PKGBUILD b/community/denemo/PKGBUILD
index 365ce5ef4..1be1aa213 100644
--- a/community/denemo/PKGBUILD
+++ b/community/denemo/PKGBUILD
@@ -1,11 +1,11 @@
-# $Id: PKGBUILD 38446 2011-01-27 15:07:46Z spupykin $
+# $Id: PKGBUILD 45779 2011-04-28 21:38:58Z spupykin $
# Maintainer: Sergej Pupykin <pupykin.s+arch@gmail.com>
# Contributor: Philipp Sandhaus <philipp.sandhaus@gmx.de>
# Contributor: Robert Emil Berge <filoktetes@linuxophic.org>
# Contributor: Gnud <ach.gnud@gmail.com>
pkgname=denemo
-pkgver=0.8.22
+pkgver=0.9.0
pkgrel=1
pkgdesc="A music score editor"
arch=('i686' 'x86_64')
@@ -14,12 +14,13 @@ license=('GPL')
depends=('aubio' 'portaudio' 'lilypond' 'gtk2' 'libxml2' 'guile' 'fftw' 'gtksourceview2'
'librsvg' 'fluidsynth')
options=('!libtool')
+backup=('etc/denemo/denemo.conf')
source=("http://ftp.gnu.org/gnu/${pkgname}/${pkgname}-${pkgver}.tar.gz")
-md5sums=('4645ae66d21fc62b362ae554b10c487e')
+md5sums=('7679622302e5fec9776501a5e2777888')
build() {
cd $srcdir/$pkgname-$pkgver
- ./configure --prefix=/usr
+ ./configure --prefix=/usr --sysconfdir=/etc
make
make DESTDIR=$pkgdir install
}
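
[Annotation] The new --sysconfdir=/etc and the backup=() entry above work together: denemo now reads its config from /etc, and pacman treats that file as user-owned. A sketch of the effect (standard pacman backup handling, not shown in this commit):

  backup=('etc/denemo/denemo.conf')
  # on upgrade, a locally edited /etc/denemo/denemo.conf is kept and the
  # new packaged version is written to denemo.conf.pacnew instead of clobbering it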
diff --git a/community/gecko-mediaplayer/PKGBUILD b/community/gecko-mediaplayer/PKGBUILD
index 728d3b057..4d013c6ef 100644
--- a/community/gecko-mediaplayer/PKGBUILD
+++ b/community/gecko-mediaplayer/PKGBUILD
@@ -1,12 +1,12 @@
-# $Id: PKGBUILD 43379 2011-03-25 11:39:12Z jelle $
+# $Id: PKGBUILD 45744 2011-04-28 09:33:53Z lfleischer $
# Maintainer: Lukas Fleischer <archlinux at cryptocrack dot de>
# Contributor: Allan McRae <mcrae_allan@hotmail.com>
# Contributor: fancris3 <fancris3 at aol.com>
# Contributor: Daniel J Griffiths <ghost1227@archlinux.us>
pkgname=gecko-mediaplayer
-pkgver=1.0.0
-pkgrel=3
+pkgver=1.0.3
+pkgrel=1
pkgdesc='Browser plugin that uses gnome-mplayer to play media in a web browser.'
arch=('i686' 'x86_64')
url='http://code.google.com/p/gecko-mediaplayer/'
@@ -14,14 +14,12 @@ license=('GPL')
depends=("gnome-mplayer>=${pkgver}" 'dbus-glib')
makedepends=('xulrunner' 'pkgconfig')
replaces=('mplayer-plugin')
-source=("http://gecko-mediaplayer.googlecode.com/files/${pkgname}-${pkgver}.tar.gz" "gecko-mediaplayer-xulrunner2.0.patch")
-md5sums=('80ccb671aea90153be9f9e6dc41b7eae'
- '36e4332f67d74605530c12a804966d4f')
+source=("http://gecko-mediaplayer.googlecode.com/files/${pkgname}-${pkgver}.tar.gz")
+md5sums=('4996b243ed720dc30f5dcc9bc253bf68')
build() {
cd "${srcdir}/${pkgname}-${pkgver}"
- patch -Np0 -i $srcdir/gecko-mediaplayer-xulrunner2.0.patch
./configure --prefix=/usr --sysconfdir=/etc --without-gconf
make
}
diff --git a/community/gnome-mplayer/PKGBUILD b/community/gnome-mplayer/PKGBUILD
index f75d0dd82..090043d00 100644
--- a/community/gnome-mplayer/PKGBUILD
+++ b/community/gnome-mplayer/PKGBUILD
@@ -1,20 +1,20 @@
-# $Id: PKGBUILD 41527 2011-03-07 14:09:14Z lfleischer $
+# $Id: PKGBUILD 45739 2011-04-28 09:17:53Z lfleischer $
# Maintainer: Lukas Fleischer <archlinux at cryptocrack dot de>
# Contributor: Allan McRae <mcrae_allan@hotmail.com>
# Contributor: Daniel J Griffiths <ghost1227@archlinux.us>
pkgname=gnome-mplayer
-pkgver=1.0.2
-pkgrel=2
+pkgver=1.0.3
+pkgrel=1
pkgdesc='A simple MPlayer GUI.'
arch=('i686' 'x86_64')
url='http://gnome-mplayer.googlecode.com/'
license=('GPL')
-depends=('mplayer' 'dbus-glib' 'libnotify')
+depends=('mplayer' 'dbus-glib' 'libnotify' 'gtk2' 'dconf' 'hicolor-icon-theme')
makedepends=('gnome-power-manager' 'nautilus')
install="${pkgname}.install"
source=("http://gnome-mplayer.googlecode.com/files/${pkgname}-${pkgver}.tar.gz")
-md5sums=('2cbba8838ecaa03a4c3a0190dcabfade')
+md5sums=('72a1c7d640a67eb2d60f2671108919c2')
build() {
cd "${srcdir}/${pkgname}-${pkgver}"
diff --git a/community/gnome-mplayer/gnome-mplayer.install b/community/gnome-mplayer/gnome-mplayer.install
index 6b1b64bdd..35db81614 100644
--- a/community/gnome-mplayer/gnome-mplayer.install
+++ b/community/gnome-mplayer/gnome-mplayer.install
@@ -1,11 +1,22 @@
post_install() {
- gtk-update-icon-cache -q -t -f usr/share/icons/hicolor
+ glib-compile-schemas usr/share/glib-2.0/schemas
+ gtk-update-icon-cache -q -t -f usr/share/icons/hicolor
+ update-desktop-database -q
+}
+
+pre_upgrade() {
+ pre_remove $1
}
post_upgrade() {
- post_install
+ post_install $1
+}
+
+pre_remove() {
+ glib-compile-schemas usr/share/glib-2.0/schemas
}
post_remove() {
- post_install
+ update-desktop-database -q
+ gtk-update-icon-cache -q -f -t usr/share/icons/hicolor
}
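
[Annotation] pacman passes package versions to these scriptlet functions as positional arguments, which is why the hunk above forwards $1. A minimal sketch of the calling convention (illustrative, not part of the commit):

  # pacman invokes, roughly:
  #   post_install <new-version>
  #   pre_upgrade  <new-version> <old-version>
  #   post_upgrade <new-version> <old-version>
  #   pre_remove   <old-version>
  post_upgrade() {
    post_install "$1"   # $1 = version being installed
  }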
diff --git a/community/pbzip2/PKGBUILD b/community/pbzip2/PKGBUILD
index 970938dd2..8a2e27ab2 100644
--- a/community/pbzip2/PKGBUILD
+++ b/community/pbzip2/PKGBUILD
@@ -1,9 +1,9 @@
-# $Id: PKGBUILD 43691 2011-03-29 08:17:25Z foutrelis $
+# $Id: PKGBUILD 45756 2011-04-28 15:32:42Z foutrelis $
# Contributor: Vasco Costa <vasco dot costa at geekslot dot com>
# Maintainer: Evangelos Foutras <foutrelis@gmail.com>
pkgname=pbzip2
-pkgver=1.1.3
+pkgver=1.1.4
pkgrel=1
pkgdesc="Parallel implementation of the bzip2 block-sorting file compressor"
arch=(i686 x86_64)
@@ -11,12 +11,17 @@ url="http://compression.ca/pbzip2/"
license=('BSD')
depends=('bzip2' 'gcc-libs')
source=(http://compression.ca/$pkgname/$pkgname-$pkgver.tar.gz)
-md5sums=('beb27a8055723f5f84364182d137ed1a')
+md5sums=('797e3ae5c6293a55e3e97fefb11cf494')
build() {
cd "$srcdir/$pkgname-$pkgver"
make
+}
+
+package() {
+ cd "$srcdir/$pkgname-$pkgver"
+
make PREFIX="$pkgdir/usr" install
# Fix location of man pages
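
[Annotation] The hunk above splits installation out of build() into a package() function. With makepkg's build/package split, only package() runs under fakeroot, and packaging can be redone without recompiling (makepkg --repackage). The pattern reduced to its shape:

  build() {
    cd "$srcdir/$pkgname-$pkgver"
    make                                # compile as a regular user
  }
  package() {
    cd "$srcdir/$pkgname-$pkgver"
    make PREFIX="$pkgdir/usr" install   # stage into $pkgdir under fakeroot
  }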
diff --git a/community/yajl/PKGBUILD b/community/yajl/PKGBUILD
index 425947720..829ffb7fa 100644
--- a/community/yajl/PKGBUILD
+++ b/community/yajl/PKGBUILD
@@ -4,24 +4,20 @@
# Contributor: Andrej Gelenberg <andrej.gelenberg@udo.edu>
pkgname=yajl
-pkgver=2.0.0
-pkgrel=2
+pkgver=2.0.1
+pkgrel=1
pkgdesc='Yet Another JSON Library.'
arch=('i686' 'x86_64')
url='http://lloyd.github.com/yajl/'
license=('ISC')
makedepends=('cmake')
-source=("$pkgname-$pkgver.tar.gz::https://github.com/lloyd/$pkgname/tarball/$pkgver"
- "handle-null-case.patch")
-md5sums=('ff3bb8531d1f3c48441a57220919c8e7'
- 'dd6ad7386f6d19f1d68357d8a2d48f3e')
+source=("$pkgname-$pkgver.tar.gz::https://github.com/lloyd/$pkgname/tarball/$pkgver")
+md5sums=('df6a751e7797b9c2182efd91b5d64017')
build() {
dirname=$(tar tf "$srcdir/$pkgname-$pkgver.tar.gz" | sed 1q);
cd "$dirname"
- patch -Np1 < "$srcdir/handle-null-case.patch"
-
cmake -DCMAKE_INSTALL_PREFIX=/usr .
make
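
[Annotation] GitHub's /tarball/<tag> endpoint names the extracted top-level directory after the repository and a commit hash, so it cannot be hard-coded; build() above discovers it by listing the archive. The trick in isolation:

  # the first entry in the archive listing is the unpredictable top-level
  # directory, e.g. lloyd-yajl-f4b2b1a (hash here is hypothetical)
  dirname=$(tar tf "$srcdir/$pkgname-$pkgver.tar.gz" | sed 1q)
  cd "$dirname"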
diff --git a/extra/curl/PKGBUILD b/extra/curl/PKGBUILD
index 06fe3ab42..2e1f2dd4e 100644
--- a/extra/curl/PKGBUILD
+++ b/extra/curl/PKGBUILD
@@ -1,12 +1,12 @@
-# $Id: PKGBUILD 110504 2011-02-19 17:12:53Z angvp $
+# $Id: PKGBUILD 120474 2011-04-24 01:23:51Z angvp $
# Maintainer: Angel Velasquez <angvp@archlinux.org>
# Contributor: Eric Belanger <eric@archlinux.org>
# Contributor: Lucien Immink <l.immink@student.fnt.hvu.nl>
# Contributor: Daniel J Griffiths <ghost1227@archlinux.us>
pkgname=curl
-pkgver=7.21.4
-pkgrel=2
+pkgver=7.21.6
+pkgrel=1
pkgdesc="An URL retrival utility and library"
arch=('i686' 'x86_64')
url="http://curl.haxx.se"
@@ -15,11 +15,12 @@ depends=('zlib' 'openssl' 'bash' 'ca-certificates')
options=('!libtool')
source=(http://curl.haxx.se/download/${pkgname}-${pkgver}.tar.bz2
curlbuild.h)
-md5sums=('e2dd20c88a7d01c745af9d788545ab2a'
- '751bd433ede935c8fae727377625a8ae')
[[ $CARCH == "x86_64" ]] && _curlbuild=curlbuild-64.h
[[ $CARCH == "i686" ]] && _curlbuild=curlbuild-32.h
+md5sums=('6611989a81ebd7b03a35adc3001ddee0'
+ '751bd433ede935c8fae727377625a8ae')
+
build() {
cd ${srcdir}/${pkgname}-${pkgver}
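
[Annotation] curl's curlbuild.h is generated at configure time and encodes architecture-dependent type sizes, so the package ships one pre-generated copy per architecture; the relocated lines above pick the right one from $CARCH. The same selection written as a case statement, a sketch:

  case "$CARCH" in
    x86_64) _curlbuild=curlbuild-64.h ;;   # 64-bit type sizes
    i686)   _curlbuild=curlbuild-32.h ;;
  esac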
diff --git a/extra/kismet/PKGBUILD b/extra/kismet/PKGBUILD
index b381469ad..3333a8e22 100644
--- a/extra/kismet/PKGBUILD
+++ b/extra/kismet/PKGBUILD
@@ -1,9 +1,9 @@
-# $Id: PKGBUILD 94735 2010-10-10 15:05:47Z guillaume $
+# $Id: PKGBUILD 119587 2011-04-12 15:18:27Z angvp $
# Contributer: Jason Chu <jason@archlinux.org>
# Maintainer: Juergen Hoetzel <jason@archlinux.org>
pkgname=kismet
-pkgver=2010_07_R1
+pkgver=2011_03_R2
_realver="${pkgver//_/-}"
pkgrel=1
pkgdesc="802.11b sniffing software"
@@ -15,24 +15,24 @@ url="http://www.kismetwireless.net/"
backup=('etc/kismet.conf' 'etc/kismet_drone.conf')
options=('!makeflags')
source=(http://www.kismetwireless.net/code/kismet-${pkgver//_/-}.tar.gz)
-md5sums=('85e59186eb529889118b5635f35cf57d')
+md5sums=('8bf077e8111e6dc8c12cadefdf40aadd')
build() {
cd "$srcdir/kismet-$_realver"
./configure --prefix=/usr \
--sysconfdir=/etc \
- --localstatedir=/var || return 1
+ --localstatedir=/var
- make dep || return 1
- make || return 1
- sed -i 's/prism2/wlanng/g' conf/kismet.conf || return 1
- make DESTDIR="$pkgdir" install || return 1
+ make dep
+ make
+ sed -i 's/prism2/wlanng/g' conf/kismet.conf
+ make DESTDIR="$pkgdir" install
chown root:root "$pkgdir"/usr/share/man/man{1,5}/*
install -D -m 644 "$srcdir/kismet-$_realver/README" "$pkgdir/usr/share/kismet/README"
# Fix the *.conf unexpanded ${prefix} problems
- sed -i 's%\${prefix}%/usr%' "$pkgdir"/etc/*.conf || return 1
+ sed -i 's%\${prefix}%/usr%' "$pkgdir"/etc/*.conf
}
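
[Annotation] The dropped '|| return 1' suffixes reflect that makepkg now aborts build() on the first failing command by itself (errexit-style error trapping; which pacman release introduced this is not stated here). Old idiom and replacement, side by side:

  make || return 1   # old: propagate failure manually
  make               # new: makepkg's error handling already stops the build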
diff --git a/extra/llvm/PKGBUILD b/extra/llvm/PKGBUILD
new file mode 100644
index 000000000..3587a7775
--- /dev/null
+++ b/extra/llvm/PKGBUILD
@@ -0,0 +1,225 @@
+# $Id: PKGBUILD 121057 2011-04-28 21:09:26Z heftig $
+# Maintainer: Evangelos Foutras <foutrelis@gmail.com>
+# Contributor: Jan "heftig" Steffens <jan.steffens@gmail.com>
+# Contributor: Sebastian Nowicki <sebnow@gmail.com>
+# Contributor: Devin Cofer <ranguvar{AT]archlinux[DOT}us>
+# Contributor: Tobias Kieslich <tobias@justdreams.de>
+# Contributor: Geoffroy Carrier <geoffroy.carrier@aur.archlinux.org>
+# Contributor: Tomas Lindquist Olsen <tomas@famolsen.dk>
+# Contributor: Roberto Alsina <ralsina@kde.org>
+# Contributor: Gerardo Exequiel Pozzi <vmlinuz386@yahoo.com.ar>
+
+pkgname=('llvm' 'llvm-ocaml' 'clang' 'clang-analyzer')
+pkgver=2.9
+_gcc_ver=4.6.0
+pkgrel=4
+arch=('i686' 'x86_64')
+url="http://llvm.org/"
+license=('custom:University of Illinois/NCSA Open Source License')
+makedepends=('gcc-libs' 'libffi' 'python2' 'ocaml' "gcc=$_gcc_ver")
+source=(http://llvm.org/releases/$pkgver/$pkgname-$pkgver.tgz
+ http://llvm.org/releases/$pkgver/clang-$pkgver.tgz
+ ftp://ftp.archlinux.org/other/community/clang/gcc-headers-4.5.2.tar.xz
+ clang-plugin-loader-registry.patch
+ cindexer-clang-path.patch
+ clang-toolchains-gcc-versions.patch
+ clang-pure64.patch
+ enable-lto.patch)
+md5sums=('793138412d2af2c7c7f54615f8943771'
+ '634de18d04b7a4ded19ec4c17d23cfca'
+ '70e23a3dc2b38ecb2bb4d2c48f47295d'
+ '02c23b4aaca3445b8bf39fddb2f9906e'
+ '87a7162dbe99e9ffce6c40bd09f5f4f0'
+ '8da236120a9a287a977b575b8b905c93'
+ '225ee6b531f8327f34f344a18cb4ec81'
+ '8f7582d7440e4a8342c3aea9ec714fb4')
+
+build() {
+ cd "$srcdir/$pkgname-$pkgver"
+
+ # At the present, clang must reside inside the LLVM source code tree to build
+ # See http://llvm.org/bugs/show_bug.cgi?id=4840
+ rm -rf tools/clang
+ cp -r "$srcdir/clang-$pkgver" tools/clang
+
+ # Fix symbolic links from OCaml bindings to LLVM libraries
+ sed -i 's:\$(PROJ_libdir):/usr/lib/llvm:' bindings/ocaml/Makefile.ocaml
+
+ # Fix installation directories, ./configure doesn't seem to set them right
+ sed -i -e 's:\$(PROJ_prefix)/etc/llvm:/etc/llvm:' \
+ -e 's:\$(PROJ_prefix)/lib:$(PROJ_prefix)/lib/llvm:' \
+ -e 's:\$(PROJ_prefix)/docs/llvm:$(PROJ_prefix)/share/doc/llvm:' \
+ Makefile.config.in
+
+ # Fix insecure rpath (http://bugs.archlinux.org/task/14017)
+ sed -i 's:$(RPATH) -Wl,$(\(ToolDir\|LibDir\|ExmplDir\))::g' Makefile.rules
+
+ # Get the correct list of symbols to export
+ # See http://lists.cs.uiuc.edu/pipermail/cfe-dev/2010-April/008559.html
+ patch -Np1 -i "$srcdir/clang-plugin-loader-registry.patch"
+
+ # Fix clang path in CIndexer.cpp (https://bugs.archlinux.org/task/22799)
+ patch -d tools/clang -Np0 -i "$srcdir/cindexer-clang-path.patch"
+
+ # Add GCC 4.6.0 to GccVersions (FS#23631)
+ patch -d tools/clang -Np1 -i "$srcdir/clang-toolchains-gcc-versions.patch"
+
+ if [[ $CARCH == x86_64 ]]; then
+ # Adjust lib paths
+ patch -d tools/clang -Np0 -i "$srcdir/clang-pure64.patch"
+ fi
+
+ # Make -flto work
+ # Use gold instead of default linker, and always use the plugin
+ patch -d tools/clang -Np0 -i "$srcdir/enable-lto.patch"
+
+ # Apply strip option to configure
+ _optimized_switch="enable"
+ [[ $(check_option strip) == n ]] && _optimized_switch="disable"
+
+ # Include location of libffi headers in CPPFLAGS
+ export CPPFLAGS="$CPPFLAGS $(pkg-config --cflags libffi)"
+
+ # TODO: Uncomment when clang works with GCC 4.6+
+ #_cxx_headers="/usr/include/c++/$_gcc_ver"
+ #if [[ ! -d $_cxx_headers ]]; then
+ # error "Couldn't find the C++ headers, PKGBUILD needs fixing!"
+ # return 1
+ #fi
+ _cxx_headers="/usr/include/c++/clang-$pkgver"
+
+ _32bit_headers=""
+ if [[ $CARCH == x86_64 ]]; then
+ # Important for multilib
+ _32bit_headers="32"
+ fi
+
+ ./configure \
+ --prefix=/usr \
+ --libdir=/usr/lib/llvm \
+ --sysconfdir=/etc \
+ --enable-shared \
+ --enable-libffi \
+ --enable-targets=all \
+ --disable-expensive-checks \
+ --disable-debug-runtime \
+ --disable-assertions \
+ --with-binutils-include=/usr/include \
+ --with-cxx-include-root=$_cxx_headers \
+ --with-cxx-include-arch=$CHOST \
+ --with-cxx-include-32bit-dir=$_32bit_headers \
+ --$_optimized_switch-optimized
+
+ make REQUIRES_RTTI=1
+}
+
+package_llvm() {
+ pkgdesc="Low Level Virtual Machine"
+ depends=('perl' 'libffi')
+
+ cd "$srcdir/$pkgname-$pkgver"
+
+ # We move the clang directory out of the tree so it won't get installed and
+ # then we bring it back in for the clang package
+ mv tools/clang "$srcdir"
+ # -j1 is due to race conditions during the installation of the OCaml bindings
+ make -j1 DESTDIR="$pkgdir" install
+ mv "$srcdir/clang" tools
+
+ # OCaml bindings go to a separate package
+ rm -rf "$srcdir"/{ocaml,ocamldoc}
+ mv "$pkgdir"/usr/{lib/ocaml,share/doc/llvm/ocamldoc} "$srcdir"
+
+ # Remove duplicate files installed by the OCaml bindings
+ rm "$pkgdir"/usr/{lib/llvm/libllvm*,share/doc/llvm/ocamldoc.tar.gz}
+
+ # Fix permissions of static libs
+ chmod -x "$pkgdir"/usr/lib/llvm/*.a
+
+ # Fix libdir in llvm-config (http://bugs.archlinux.org/task/14487)
+ sed -i 's:\(ABS_RUN_DIR/lib\):\1/llvm:' "$pkgdir/usr/bin/llvm-config"
+
+ # Get rid of example Hello transformation
+ rm "$pkgdir"/usr/lib/llvm/*LLVMHello.*
+
+ # Symlink the gold plugin where clang expects it
+ ln -s llvm/LLVMgold.so "$pkgdir/usr/lib/LLVMgold.so"
+
+ # Add ld.so.conf.d entry
+ install -d "$pkgdir/etc/ld.so.conf.d"
+ echo /usr/lib/llvm >"$pkgdir/etc/ld.so.conf.d/llvm.conf"
+
+ install -Dm644 LICENSE.TXT "$pkgdir/usr/share/licenses/$pkgname/LICENSE"
+}
+
+package_llvm-ocaml() {
+ pkgdesc="OCaml bindings for LLVM"
+ depends=("llvm=$pkgver-$pkgrel" 'ocaml')
+
+ cd "$srcdir/llvm-$pkgver"
+
+ install -d "$pkgdir"/{usr/lib,usr/share/doc/llvm}
+ cp -r "$srcdir/ocaml" "$pkgdir/usr/lib"
+ cp -r "$srcdir/ocamldoc" "$pkgdir/usr/share/doc/llvm"
+
+ # Remove execute bit from static libraries
+ chmod -x "$pkgdir"/usr/lib/ocaml/libllvm*.a
+
+ install -Dm644 LICENSE.TXT "$pkgdir/usr/share/licenses/llvm-ocaml/LICENSE"
+}
+
+package_clang() {
+ pkgdesc="C language family frontend for LLVM"
+ url="http://clang.llvm.org/"
+ # It looks like clang still needs GCC to assemble and link object files
+ # See http://old.nabble.com/%22clang--v%22-shows-a-GCC-call-td28378453.html
+ depends=("llvm=$pkgver-$pkgrel" "gcc=$_gcc_ver")
+
+ # Fix installation path for clang docs
+ sed -i 's:$(PROJ_prefix)/share/doc/llvm:$(PROJ_prefix)/share/doc/clang:' \
+ "$srcdir/llvm-$pkgver/Makefile.config"
+
+ cd "$srcdir/llvm-$pkgver/tools/clang"
+ make DESTDIR="$pkgdir" install
+
+ # Fix permissions of static libs
+ chmod -x "$pkgdir"/usr/lib/llvm/*.a
+
+ # Revert the path change in case we want to do a repackage later
+ sed -i 's:$(PROJ_prefix)/share/doc/clang:$(PROJ_prefix)/share/doc/llvm:' \
+ "$srcdir/llvm-$pkgver/Makefile.config"
+
+ # Install old libstdc++ headers. Contains combined headers from
+ # gcc 4.5.2-6-i686 and gcc-multilib-4.5.2-6-x86_64
+ install -d "$pkgdir/usr/include/c++"
+ cp -rd "$srcdir/gcc-headers-4.5.2" "$pkgdir/usr/include/c++/clang-$pkgver"
+
+ install -Dm644 LICENSE.TXT "$pkgdir/usr/share/licenses/clang/LICENSE"
+}
+
+package_clang-analyzer() {
+ pkgdesc="A source code analysis framework"
+ url="http://clang-analyzer.llvm.org/"
+ depends=("clang=$pkgver-$pkgrel" 'python2')
+
+ cd "$srcdir/llvm-$pkgver/tools/clang"
+
+ install -d "$pkgdir"/usr/{bin,lib/clang-analyzer}
+ for _tool in scan-{build,view}; do
+ cp -r tools/$_tool "$pkgdir/usr/lib/clang-analyzer/"
+ ln -s /usr/lib/clang-analyzer/$_tool/$_tool "$pkgdir/usr/bin/"
+ done
+
+ # Use Python 2
+ sed -i 's/env python$/\02/' \
+ "$pkgdir/usr/lib/clang-analyzer/scan-view/scan-view" \
+ "$pkgdir/usr/lib/clang-analyzer/scan-build/set-xcode-analyzer"
+
+ # Compile Python scripts
+ python2 -m compileall "$pkgdir/usr/lib/clang-analyzer"
+ python2 -O -m compileall "$pkgdir/usr/lib/clang-analyzer"
+
+ install -Dm644 LICENSE.TXT "$pkgdir/usr/share/licenses/clang-analyzer/LICENSE"
+}
+
+# vim:set ts=2 sw=2 et:
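
[Annotation] libffi's headers live in a versioned directory outside the default include path, so the build above exports pkg-config's answer into CPPFLAGS before running ./configure. Checked in isolation (the path shown is an example; it varies by libffi release):

  pkg-config --cflags libffi     # e.g. -I/usr/lib/libffi-3.0.9/include
  export CPPFLAGS="$CPPFLAGS $(pkg-config --cflags libffi)"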
diff --git a/extra/llvm/cindexer-clang-path.patch b/extra/llvm/cindexer-clang-path.patch
new file mode 100644
index 000000000..ddaab690e
--- /dev/null
+++ b/extra/llvm/cindexer-clang-path.patch
@@ -0,0 +1,10 @@
+--- tools/libclang/CIndexer.cpp.orig 2011-04-07 13:08:24.000000000 +0300
++++ tools/libclang/CIndexer.cpp 2011-04-07 13:11:52.224884642 +0300
+@@ -80,6 +80,7 @@ std::string CIndexer::getClangResourcesP
+
+ // We now have the CIndex directory, locate clang relative to it.
+ LibClangPath.eraseComponent();
++ LibClangPath.eraseComponent();
+ #endif
+
+ LibClangPath.appendComponent("clang");
diff --git a/extra/llvm/clang-plugin-loader-registry.patch b/extra/llvm/clang-plugin-loader-registry.patch
new file mode 100644
index 000000000..f46eb9fce
--- /dev/null
+++ b/extra/llvm/clang-plugin-loader-registry.patch
@@ -0,0 +1,11 @@
+diff -upr llvm-2.7.orig/autoconf/ExportMap.map llvm-2.7/autoconf/ExportMap.map
+--- llvm-2.7.orig/autoconf/ExportMap.map 2010-02-25 00:33:41.000000000 +0200
++++ llvm-2.7/autoconf/ExportMap.map 2010-05-10 14:14:22.000000000 +0300
+@@ -2,6 +2,7 @@
+ global: main;
+ __progname;
+ environ;
++ _ZN4llvm8RegistryIN5clang14FrontendActionENS_14RegistryTraitsIS2_EEE4HeadE;
+
+ local: *;
+ };
diff --git a/extra/llvm/clang-pure64.patch b/extra/llvm/clang-pure64.patch
new file mode 100644
index 000000000..da6178519
--- /dev/null
+++ b/extra/llvm/clang-pure64.patch
@@ -0,0 +1,38 @@
+Index: lib/Driver/Tools.cpp
+===================================================================
+--- lib/Driver/Tools.cpp (revision 123373)
++++ lib/Driver/Tools.cpp (working copy)
+@@ -3306,7 +3306,7 @@
+ else if (ToolChain.getArch() == llvm::Triple::arm)
+ CmdArgs.push_back("/lib/ld-linux.so.3");
+ else
+- CmdArgs.push_back("/lib64/ld-linux-x86-64.so.2");
++ CmdArgs.push_back("/lib/ld-linux-x86-64.so.2");
+ }
+
+ CmdArgs.push_back("-o");
+Index: lib/Driver/ToolChains.cpp
+===================================================================
+--- lib/Driver/ToolChains.cpp (revision 123373)
++++ lib/Driver/ToolChains.cpp (working copy)
+@@ -1317,18 +1317,10 @@
+ if (Arch == llvm::Triple::x86)
+ Suffix64 = "/64";
+
+- std::string Lib32 = "lib";
+-
+- bool Exists;
+- if (!llvm::sys::fs::exists("/lib32", Exists) && Exists)
+- Lib32 = "lib32";
+-
++ std::string Lib32 = "lib32";
+ std::string Lib64 = "lib";
+- bool Symlink;
+- if (!llvm::sys::fs::exists("/lib64", Exists) && Exists &&
+- (llvm::sys::fs::is_symlink("/lib64", Symlink) || !Symlink))
+- Lib64 = "lib64";
+
++ bool Exists;
+ std::string GccTriple = "";
+ if (Arch == llvm::Triple::arm) {
+ if (!llvm::sys::fs::exists("/usr/lib/gcc/arm-linux-gnueabi", Exists) &&
diff --git a/extra/llvm/clang-toolchains-gcc-versions.patch b/extra/llvm/clang-toolchains-gcc-versions.patch
new file mode 100644
index 000000000..2e527300d
--- /dev/null
+++ b/extra/llvm/clang-toolchains-gcc-versions.patch
@@ -0,0 +1,12 @@
+diff -upr clang-2.9.orig/lib/Driver/ToolChains.cpp clang-2.9/lib/Driver/ToolChains.cpp
+--- clang-2.9.orig/lib/Driver/ToolChains.cpp 2011-03-21 23:29:27.000000000 +0200
++++ clang-2.9/lib/Driver/ToolChains.cpp 2011-04-08 00:03:34.000000000 +0300
+@@ -1449,7 +1449,7 @@ Linux::Linux(const HostInfo &Host, const
+ GccTriple = "i586-suse-linux";
+ }
+
+- const char* GccVersions[] = {"4.5.2", "4.5.1", "4.5", "4.4.5", "4.4.4",
++ const char* GccVersions[] = {"4.6.0", "4.5.2", "4.5.1", "4.5", "4.4.5", "4.4.4",
+ "4.4.3", "4.4", "4.3.4", "4.3.3", "4.3.2",
+ "4.3", "4.2.4", "4.2.3", "4.2.2", "4.2.1",
+ "4.2"};
diff --git a/extra/llvm/enable-lto.patch b/extra/llvm/enable-lto.patch
new file mode 100644
index 000000000..40d93104a
--- /dev/null
+++ b/extra/llvm/enable-lto.patch
@@ -0,0 +1,36 @@
+Index: lib/Driver/ToolChains.cpp
+===================================================================
+--- lib/Driver/ToolChains.cpp (revision 123373)
++++ lib/Driver/ToolChains.cpp (working copy)
+@@ -1398,11 +1398,11 @@
+ Lib = Lib64;
+ }
+
+- llvm::sys::Path LinkerPath(Base + "/../../../../" + GccTriple + "/bin/ld");
++ llvm::sys::Path LinkerPath(Base + "/../../../../" + GccTriple + "/bin/ld.gold");
+ if (!llvm::sys::fs::exists(LinkerPath.str(), Exists) && Exists)
+ Linker = LinkerPath.str();
+ else
+- Linker = GetProgramPath("ld");
++ Linker = GetProgramPath("ld.gold");
+
+ LinuxDistro Distro = DetectLinuxDistro(Arch);
+
+Index: lib/Driver/Tools.cpp
+===================================================================
+--- lib/Driver/Tools.cpp (revision 123373)
++++ lib/Driver/Tools.cpp (working copy)
+@@ -3412,11 +3412,11 @@
+ }
+ }
+
+- if (Args.hasArg(options::OPT_use_gold_plugin)) {
++ // if (Args.hasArg(options::OPT_use_gold_plugin)) {
+ CmdArgs.push_back("-plugin");
+ std::string Plugin = ToolChain.getDriver().Dir + "/../lib/LLVMgold.so";
+ CmdArgs.push_back(Args.MakeArgString(Plugin));
+- }
++ // }
+
+ C.addCommand(new Command(JA, *this, ToolChain.Linker.c_str(), CmdArgs));
+ }
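
[Annotation] Taken together, the two hunks make clang link through gold unconditionally and always load the LLVMgold plugin, which is what lets -flto work out of the box. Roughly the resulting workflow (commands illustrative, not from this commit):

  clang -flto -c foo.c -o foo.o   # object file contains LLVM bitcode
  clang -flto foo.o -o foo        # driver runs ld.gold with -plugin .../LLVMgold.so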
diff --git a/extra/openmpi/PKGBUILD b/extra/openmpi/PKGBUILD
index f08d2a4e3..32d538665 100644
--- a/extra/openmpi/PKGBUILD
+++ b/extra/openmpi/PKGBUILD
@@ -1,8 +1,8 @@
-# $Id: PKGBUILD 115803 2011-03-21 16:10:23Z stephane $
+# $Id: PKGBUILD 121049 2011-04-28 16:03:46Z stephane $
# Maintainer: Stéphane Gaudreault <stephane@archlinux.org>
pkgname=openmpi
pkgver=1.5.3
-pkgrel=2
+pkgrel=3
pkgdesc="High performance message passing library (MPI)"
arch=('i686' 'x86_64')
url="http://www.open-mpi.org"
diff --git a/extra/python-cairo/PKGBUILD b/extra/python-cairo/PKGBUILD
index 864c6bb86..458bfca4d 100644
--- a/extra/python-cairo/PKGBUILD
+++ b/extra/python-cairo/PKGBUILD
@@ -1,20 +1,19 @@
-# $Id: PKGBUILD 110756 2011-02-22 02:50:48Z stephane $
+# $Id: PKGBUILD 121116 2011-04-29 03:02:02Z angvp $
+# Maintainer: Angel Velasquez <angvp@archlinux.org>
# Maintainer: Jan de Groot <jgc@archlinux.org>
pkgname=python-cairo
-pkgver=1.8.10
-pkgrel=2
+pkgver=1.10.0
+pkgrel=1
pkgdesc="Python bindings for the cairo graphics library"
arch=('i686' 'x86_64')
license=('LGPL3')
depends=('python' 'cairo')
makedepends=('pkg-config')
options=('!libtool')
-source=(http://cairographics.org/releases/pycairo-${pkgver}.tar.bz2
- pycairo-1.8.10-pypath.patch)
+source=(http://cairographics.org/releases/pycairo-${pkgver}.tar.bz2)
url="http://www.cairographics.org/pycairo"
-md5sums=('ddc544943d791e3c22ca8f019e10e1e3'
- '047cfe0a98cfa73a156cd70e70082325')
+md5sums=('e6fd3f2f1e6a72e0db0868c4985669c5')
build() {
cd "${srcdir}/pycairo-${pkgver}"
@@ -31,7 +30,6 @@ build() {
# Patch the unpacked version of waf:
pushd .waf3*
- patch -Np0 -i ../../pycairo-1.8.10-pypath.patch
popd
./waf configure --prefix=/usr
diff --git a/libre/aufs2-libre/PKGBUILD b/libre/aufs2-libre/PKGBUILD
index 7e092717b..bd177ef9c 100644
--- a/libre/aufs2-libre/PKGBUILD
+++ b/libre/aufs2-libre/PKGBUILD
@@ -2,22 +2,23 @@
# Maintainer: Paul Mattal <pjmattal@elys.com>
pkgname=aufs2-libre
-pkgver=2.6.37_20110124
-pkgrel=3
+pkgver=2.6.38_20110314
+pkgrel=4
#_kernver=${pkgver%_*}-LIBRE
-_kernver=2.6.37-LIBRE
+_kernver=2.6.38-LIBRE
pkgdesc="Another Unionfs Implementation for the Linux-Libre kernel"
arch=('i686' 'x86_64')
url="http://aufs.sourceforge.net/"
license=('GPL2')
-depends=('kernel26-libre>=2.6.37.3' 'kernel26-libre<2.6.38')
-makedepends=('kernel26-libre-headers>=2.6.37' 'kernel26-libre-headers<2.6.38')
+depends=('kernel26-libre>=2.6.38' 'kernel26-libre<2.6.39')
+makedepends=('kernel26-libre-headers>=2.6.38' 'kernel26-libre-headers<2.6.39')
replaces=('aufs' 'aufs2')
conflicts=('aufs2')
provides=("aufs2=$pkgver")
install=${pkgname}.install
source=("ftp://ftp.archlinux.org/other/aufs2/${pkgname%-libre}-${pkgver}.tar.gz")
-options=('!makeflags')
+options=('!makeflags' '!strip')
+md5sums=('60649fa47d3f499dfbb4b68a2afde47c')
build() {
cd ${srcdir}/${pkgname%-libre}-${pkgver}
@@ -61,7 +62,3 @@ package() {
# gzip -9 all modules to safe 100MB
find "$pkgdir" -name '*.ko' -exec gzip -9 {} \;
}
-
-md5sums=('fa64d7893490ef9c82310ebd5f730443')
-
-md5sums=('fa64d7893490ef9c82310ebd5f730443')
diff --git a/libre/aufs2-libre/aufs2-libre.install b/libre/aufs2-libre/aufs2-libre.install
index 4c95ad76e..883882a54 100644
--- a/libre/aufs2-libre/aufs2-libre.install
+++ b/libre/aufs2-libre/aufs2-libre.install
@@ -1,5 +1,5 @@
post_install() {
- KERNEL_VERSION=2.6.37-ARCH
+ KERNEL_VERSION=2.6.38-LIBRE
depmod $KERNEL_VERSION
}
diff --git a/libre/aufs2-libre/create-tarball.sh b/libre/aufs2-libre/create-tarball.sh
index a460a8b3b..6801f00bd 100644
--- a/libre/aufs2-libre/create-tarball.sh
+++ b/libre/aufs2-libre/create-tarball.sh
@@ -1,12 +1,12 @@
#!/bin/sh
-AUFS2VERSION="-37"
-KERNELVERSION=2.6.37
-GITSNAPSHOT=20110124
+AUFS2VERSION="-38"
+KERNELVERSION=2.6.38
+GITSNAPSHOT=20110314
# aufs2 (no -xx) for the latest -rc version.
git clone http://git.c3sl.ufpr.br/pub/scm/aufs/aufs2-standalone.git aufs2-standalone.git
cd aufs2-standalone.git
-git checkout origin/aufs2.1${AUFS2VERSION}
-#git checkout origin/aufs2.1
+#git checkout origin/aufs2.1${AUFS2VERSION}
+git checkout origin/aufs2.1
#*** apply "aufs2-base.patch" and "aufs2-standalone.patch" to your kernel source files.
cd ..
rm -rf aufs2-${KERNELVERSION}_${GITSNAPSHOT}
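
[Annotation] The checkout now follows the rolling aufs2.1 branch instead of the kernel-pinned aufs2.1-38 branch (the pinned line is kept, commented out, for reference). Listing what a snapshot could track, a sketch (output hypothetical):

  git ls-remote --heads http://git.c3sl.ufpr.br/pub/scm/aufs/aufs2-standalone.git
  # ...refs/heads/aufs2.1
  # ...refs/heads/aufs2.1-38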
diff --git a/libre/kernel26-libre-lts/PKGBUILD b/libre/kernel26-libre-lts/PKGBUILD
index a253ae483..55cde1f43 100644
--- a/libre/kernel26-libre-lts/PKGBUILD
+++ b/libre/kernel26-libre-lts/PKGBUILD
@@ -7,7 +7,7 @@ pkgname=('kernel26-libre-lts' 'kernel26-libre-lts-headers') # Build stock -lts k
_kernelname=${pkgname#kernel26-libre}
_basekernel=2.6.32
_preset=kernel26-lts.preset
-pkgver=${_basekernel}.36
+pkgver=${_basekernel}.39
pkgrel=1
arch=('i686' 'x86_64')
license=('GPL2')
@@ -18,7 +18,8 @@ source=(http://www.fsfla.org/svnwiki/selibre/linux-libre/download/releases/${pkg
config.i686 config.x86_64
# standard config files for mkinitcpio ramdisk
${_preset}
- ${pkgname}.install)
+ ${pkgname}.install
+ buildfix-gcc46.diff)
options=(!strip)
build() {
@@ -27,8 +28,10 @@ build() {
# fix 22343 udev crashes
patch -Np1 -i ${srcdir}/libata-alignment.patch
- cat ../config.$CARCH >./.config
+ # fix build with gcc46 - http://git.kernel.org/?p=linux/kernel/git/stable/linux-2.6.38.y.git;a=commitdiff;h=b1d670f1#patch1
+ patch -Np1 -i ${srcdir}/buildfix-gcc46.diff
+ cat ../config.$CARCH >./.config
if [ "${_kernelname}" != "" ]; then
sed -i "s|CONFIG_LOCALVERSION=.*|CONFIG_LOCALVERSION=\"${_kernelname}\"|g" ./.config
fi
@@ -118,7 +121,7 @@ package_kernel26-libre-lts-headers() {
${pkgdir}/usr/src/linux-${_kernver}/.config
mkdir -p ${pkgdir}/usr/src/linux-${_kernver}/include
- for i in acpi asm-{generic,x86} config linux math-emu media net pcmcia scsi sound trace video; do
+ for i in acpi asm-{generic,x86} config crypto drm linux math-emu media net pcmcia scsi sound trace video; do
cp -a include/$i ${pkgdir}/usr/src/linux-${_kernver}/include/
done
@@ -189,20 +192,7 @@ package_kernel26-libre-lts-headers() {
mkdir -p ${pkgdir}/usr/src/linux-${_kernver}/fs/xfs
mkdir -p ${pkgdir}/usr/src/linux-${_kernver}/mm
cp fs/xfs/xfs_sb.h ${pkgdir}/usr/src/linux-${_kernver}/fs/xfs/xfs_sb.h
- # add headers vor virtualbox
- # in reference to:
- # http://bugs.archlinux.org/task/14568
- cp -a include/drm $pkgdir/usr/src/linux-${_kernver}/include/
- # add headers for broadcom wl
- # in reference to:
- # http://bugs.archlinux.org/task/14568
- cp -a include/trace $pkgdir/usr/src/linux-${_kernver}/include/
- # add headers for crypto modules
- # in reference to:
- # http://bugs.archlinux.org/task/22081
- cp -a include/crypto $pkgdir/usr/src/linux-${_kernver}/include/
-
# copy in Kconfig files
for i in `find . -name "Kconfig*"`; do
mkdir -p ${pkgdir}/usr/src/linux-${_kernver}/`echo $i | sed 's|/Kconfig.*||'`
@@ -229,10 +219,10 @@ package_kernel26-libre-lts-headers() {
# remove unneeded architectures
rm -rf ${pkgdir}/usr/src/linux-${_kernver}/arch/{alpha,arm,arm26,avr32,blackfin,cris,frv,h8300,ia64,m32r,m68k,m68knommu,mips,microblaze,mn10300,parisc,powerpc,ppc,s390,sh,sh64,sparc,sparc64,um,v850,xtensa}
}
-
-md5sums=('44f553d98efbab305ec490334560b097'
+md5sums=('9502a2248b2024da4f30a5d52a90322d'
'30851deee235a7486bd408cbe8eb2d71'
- 'c0bd6a068a32636e52bf4d887bb2b722'
- 'a5a3af2b95f9a8b3c43d1a74fa5fd82c'
+ '3298f542840c52a8b8abc1b3795e455b'
+ 'cd1a62e81eb9e910779f3eb18a6e28c6'
'2cbfeb3e2a18d45f82f613e97fc23355'
- 'fbfc7a1af3208c7b729055d91e1c149a')
+ 'fbfc7a1af3208c7b729055d91e1c149a'
+ 'bb7410d7b740dfad0385a07b66e6621f')
diff --git a/libre/kernel26-libre-lts/config.i686 b/libre/kernel26-libre-lts/config.i686
index d2bba6bee..2865c58fa 100644
--- a/libre/kernel26-libre-lts/config.i686
+++ b/libre/kernel26-libre-lts/config.i686
@@ -1,7 +1,7 @@
#
# Automatically generated make config: don't edit
# Linux kernel version: 2.6.32
-# Fri Feb 18 17:22:19 2011
+# Mon Apr 18 12:31:05 2011
#
# CONFIG_64BIT is not set
CONFIG_X86_32=y
@@ -2308,9 +2308,9 @@ CONFIG_TOUCHSCREEN_TOUCHRIGHT=m
CONFIG_TOUCHSCREEN_TOUCHWIN=m
CONFIG_TOUCHSCREEN_UCB1400=m
CONFIG_TOUCHSCREEN_WM97XX=m
-# CONFIG_TOUCHSCREEN_WM9705 is not set
-# CONFIG_TOUCHSCREEN_WM9712 is not set
-# CONFIG_TOUCHSCREEN_WM9713 is not set
+CONFIG_TOUCHSCREEN_WM9705=y
+CONFIG_TOUCHSCREEN_WM9712=y
+CONFIG_TOUCHSCREEN_WM9713=y
CONFIG_TOUCHSCREEN_USB_COMPOSITE=m
CONFIG_TOUCHSCREEN_USB_EGALAX=y
CONFIG_TOUCHSCREEN_USB_PANJIT=y
@@ -2324,10 +2324,10 @@ CONFIG_TOUCHSCREEN_USB_IDEALTEK=y
CONFIG_TOUCHSCREEN_USB_GENERAL_TOUCH=y
CONFIG_TOUCHSCREEN_USB_GOTOP=y
CONFIG_TOUCHSCREEN_USB_JASTEC=y
-# CONFIG_TOUCHSCREEN_USB_E2I is not set
+CONFIG_TOUCHSCREEN_USB_E2I=y
CONFIG_TOUCHSCREEN_TOUCHIT213=m
CONFIG_TOUCHSCREEN_TSC2007=m
-# CONFIG_TOUCHSCREEN_PCAP is not set
+CONFIG_TOUCHSCREEN_PCAP=m
CONFIG_INPUT_MISC=y
CONFIG_INPUT_PCSPKR=m
CONFIG_INPUT_APANEL=m
@@ -4457,7 +4457,7 @@ CONFIG_DLM=m
# Kernel hacking
#
CONFIG_TRACE_IRQFLAGS_SUPPORT=y
-# CONFIG_PRINTK_TIME is not set
+CONFIG_PRINTK_TIME=y
CONFIG_ENABLE_WARN_DEPRECATED=y
# CONFIG_ENABLE_MUST_CHECK is not set
CONFIG_FRAME_WARN=1024
diff --git a/libre/kernel26-libre-lts/config.x86_64 b/libre/kernel26-libre-lts/config.x86_64
index 494bb3eff..30cc5aefa 100644
--- a/libre/kernel26-libre-lts/config.x86_64
+++ b/libre/kernel26-libre-lts/config.x86_64
@@ -1,7 +1,7 @@
#
# Automatically generated make config: don't edit
# Linux kernel version: 2.6.32
-# Mon Mar 21 12:20:44 2011
+# Mon Apr 18 14:25:56 2011
#
CONFIG_64BIT=y
# CONFIG_X86_32 is not set
@@ -2188,9 +2188,9 @@ CONFIG_TOUCHSCREEN_TOUCHRIGHT=m
CONFIG_TOUCHSCREEN_TOUCHWIN=m
CONFIG_TOUCHSCREEN_UCB1400=m
CONFIG_TOUCHSCREEN_WM97XX=m
-# CONFIG_TOUCHSCREEN_WM9705 is not set
-# CONFIG_TOUCHSCREEN_WM9712 is not set
-# CONFIG_TOUCHSCREEN_WM9713 is not set
+CONFIG_TOUCHSCREEN_WM9705=y
+CONFIG_TOUCHSCREEN_WM9712=y
+CONFIG_TOUCHSCREEN_WM9713=y
CONFIG_TOUCHSCREEN_USB_COMPOSITE=m
CONFIG_TOUCHSCREEN_USB_EGALAX=y
CONFIG_TOUCHSCREEN_USB_PANJIT=y
@@ -2204,10 +2204,10 @@ CONFIG_TOUCHSCREEN_USB_IDEALTEK=y
CONFIG_TOUCHSCREEN_USB_GENERAL_TOUCH=y
CONFIG_TOUCHSCREEN_USB_GOTOP=y
CONFIG_TOUCHSCREEN_USB_JASTEC=y
-# CONFIG_TOUCHSCREEN_USB_E2I is not set
+CONFIG_TOUCHSCREEN_USB_E2I=y
CONFIG_TOUCHSCREEN_TOUCHIT213=m
CONFIG_TOUCHSCREEN_TSC2007=m
-# CONFIG_TOUCHSCREEN_PCAP is not set
+CONFIG_TOUCHSCREEN_PCAP=m
CONFIG_INPUT_MISC=y
CONFIG_INPUT_PCSPKR=m
CONFIG_INPUT_APANEL=m
@@ -4270,7 +4270,7 @@ CONFIG_DLM=m
# Kernel hacking
#
CONFIG_TRACE_IRQFLAGS_SUPPORT=y
-# CONFIG_PRINTK_TIME is not set
+CONFIG_PRINTK_TIME=y
CONFIG_ENABLE_WARN_DEPRECATED=y
# CONFIG_ENABLE_MUST_CHECK is not set
CONFIG_FRAME_WARN=2048
diff --git a/staging/alpine/2.00-lpam.patch b/staging/alpine/2.00-lpam.patch
new file mode 100644
index 000000000..69e66d7ef
--- /dev/null
+++ b/staging/alpine/2.00-lpam.patch
@@ -0,0 +1,13 @@
+--- alpine-2.00.orig/alpine/Makefile.in
++++ alpine-2.00/alpine/Makefile.in
+@@ -169,7 +169,7 @@
+ LIBICONV = @LIBICONV@
+ LIBINTL = @LIBINTL@
+ LIBOBJS = @LIBOBJS@
+-LIBS = @LIBS@
++LIBS = @LIBS@ -lpam
+ LIBTOOL = @LIBTOOL@
+ LN = @LN@
+ LN_S = @LN_S@
+ LTLIBICONV = @LTLIBICONV@
+ LTLIBINTL = @LTLIBINTL@
diff --git a/staging/alpine/CVE-2008-5514.patch b/staging/alpine/CVE-2008-5514.patch
new file mode 100644
index 000000000..594bea0b2
--- /dev/null
+++ b/staging/alpine/CVE-2008-5514.patch
@@ -0,0 +1,20 @@
+--- alpine-2.00/imap/src/c-client/rfc822.c
++++ alpine-2.00/imap/src/c-client/rfc822.c
+@@ -1351,6 +1351,7 @@
+
+ static long rfc822_output_char (RFC822BUFFER *buf,int c)
+ {
++ if ((buf->cur == buf->end) && !rfc822_output_flush (buf)) return NIL;
+ *buf->cur++ = c; /* add character, soutr buffer if full */
+ return (buf->cur == buf->end) ? rfc822_output_flush (buf) : LONGT;
+ }
+@@ -1374,7 +1375,8 @@
+ len -= i;
+ }
+ /* soutr buffer now if full */
+- if (len && !rfc822_output_flush (buf)) return NIL;
++ if ((len || (buf->cur == buf->end)) && !rfc822_output_flush (buf))
++ return NIL;
+ }
+ return LONGT;
+ }
diff --git a/staging/alpine/PKGBUILD b/staging/alpine/PKGBUILD
new file mode 100644
index 000000000..6b2f9ee73
--- /dev/null
+++ b/staging/alpine/PKGBUILD
@@ -0,0 +1,44 @@
+# $Id: PKGBUILD 121113 2011-04-29 02:56:31Z eric $
+# Maintainer: Eric Bélanger <eric@archlinux.org>
+
+pkgname=alpine
+pkgver=2.00
+pkgrel=13
+_patchlevel=79
+pkgdesc="The Apache-licensed PINE (a tool for reading, sending, and managing electronic messages)"
+arch=('i686' 'x86_64')
+url="http://www.washington.edu/alpine/"
+license=('APACHE')
+depends=('libldap' 'krb5' 'gettext')
+optdepends=('aspell: for spell-checking support')
+provides=('pine')
+conflicts=('pine')
+replaces=('pine')
+options=('!makeflags')
+source=(ftp://ftp.cac.washington.edu/${pkgname}/${pkgname}.tar.bz2 \
+ http://staff.washington.edu/chappa/alpine/patches/alpine-${pkgver}/all_${_patchlevel}.patch.gz \
+ 2.00-lpam.patch CVE-2008-5514.patch)
+md5sums=('84e44cbf71ed674800a5d57eed9c1c52'
+ 'd7dffd121c9a1cac4c458c0ff71df1ce'
+ 'cd3911c16fc6a072e853c0ccfc35857c'
+ '1b52a54a656979116c09fb1d948a4325')
+sha1sums=('dcbd3c5419954f484ccf706feaba31ce48cdebc4'
+ 'd3acbf0e46c50feb2e822ef3bdc0a0f43c007294'
+ '1b39525f91ebd5a9de5a1e04f5554f6fa5f58ae3'
+ 'bc61d76a237ff42b00b3f60f2e6fc5c45e261dbb')
+
+build() {
+ cd "${srcdir}/${pkgname}-${pkgver}"
+ patch -p1 < ../all_${_patchlevel}.patch
+ patch -p1 < ../2.00-lpam.patch
+ patch -p1 < ../CVE-2008-5514.patch
+ ./configure --prefix=/usr --without-passfile --without-tcl \
+ --disable-shared --with-system-pinerc=/etc/alpine.d/pine.conf \
+ --with-system-fixed-pinerc=/etc/alpine.d/pine.conf.fixed
+ make
+}
+
+package() {
+ cd "${srcdir}/${pkgname}-${pkgver}"
+ make DESTDIR="${pkgdir}" install
+}
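
[Annotation] Patch order matters in the build() above: the cumulative upstream fix set (all_79) goes on first, then the local PAM linkage and CVE patches on top of it. Each can be sanity-checked before applying for real, a sketch:

  patch -p1 --dry-run < ../2.00-lpam.patch   # verify strip level and offsets without touching files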
diff --git a/staging/cups/PKGBUILD b/staging/cups/PKGBUILD
new file mode 100644
index 000000000..64ab5d109
--- /dev/null
+++ b/staging/cups/PKGBUILD
@@ -0,0 +1,110 @@
+# $Id: PKGBUILD 121078 2011-04-29 00:03:07Z stephane $
+# Maintainer: Andreas Radke <andyrtr@archlinux.org>
+
+pkgbase="cups"
+pkgname=('libcups' 'cups')
+pkgver=1.4.6
+pkgrel=2
+arch=('i686' 'x86_64')
+license=('GPL')
+url="http://www.cups.org/"
+makedepends=('libtiff>=3.9.2-2' 'libpng>=1.4.0' 'acl' 'openslp' 'pam' 'xdg-utils' 'krb5' 'gnutls>=2.8.3' 'poppler>=0.12.3'
+ 'xinetd' 'gzip' 'autoconf' 'php' 'libusb-compat' 'dbus-core' 'avahi' 'hicolor-icon-theme')
+source=(ftp://ftp.easysw.com/pub/cups/${pkgver}/cups-${pkgver}-source.tar.bz2
+ cups-avahi.patch
+ cups cups.logrotate cups.pam)
+#options=('!emptydirs')
+md5sums=('de8fb5a29c36554925c0c6a6e2c0dae1'
+ '8ebd390197501ffd709f0ee546937fd5'
+ '5c85b7d8d2ddd02c2c64955cebbf55ea'
+ 'f861b18f4446c43918c8643dcbbd7f6d'
+ '96f82c38f3f540b53f3e5144900acf17')
+
+# move client.conf man page for next update to the client pkg.
+
+build() {
+ cd ${srcdir}/${pkgbase}-${pkgver}
+ # Avahi support in the dnssd backend. patch from Fedora
+ patch -Np1 -i ${srcdir}/cups-avahi.patch || return 1
+
+ # Rebuild configure script for --enable-avahi.
+ aclocal -I config-scripts
+ autoconf -I config-scripts
+
+ ./configure --prefix=/usr --sysconfdir=/etc --localstatedir=/var \
+ --with-logdir=/var/log/cups -with-docdir=/usr/share/cups/doc \
+ --with-cups-user=daemon --with-cups-group=lp --enable-pam=yes \
+ --disable-ldap --libdir=/usr/lib --enable-raw-printing \
+ --enable-dbus --with-dbusdir=/etc/dbus-1 --enable-ssl=yes --enable-gnutls --disable-threads --enable-avahi\
+ --with-php=/usr/bin/php-cgi --with-pdftops=pdftops --with-optim="$CFLAGS"
+ make || return 1
+}
+
+package_libcups() {
+pkgdesc="The CUPS Printing System - client libraries and headers"
+depends=('gnutls>=2.8.3' 'libtiff>=3.9.2-2' 'libpng>=1.4.0' 'krb5' 'avahi')
+
+ cd ${srcdir}/${pkgbase}-${pkgver}
+ make BUILDROOT=${pkgdir} install-headers install-libs || return 1
+ # put this into the libs pkg to make other software find the libs(no pkg-config file included)
+ mkdir -p ${pkgdir}/usr/bin
+ install -m755 ${srcdir}/${pkgbase}-${pkgver}/cups-config ${pkgdir}/usr/bin/cups-config
+}
+
+package_cups() {
+pkgdesc="The CUPS Printing System - deamon package"
+install=cups.install
+backup=(etc/cups/cupsd.conf
+ etc/cups/mime.convs
+ etc/cups/mime.types
+ etc/cups/snmp.conf
+ etc/cups/printers.conf
+ etc/cups/classes.conf
+ etc/cups/client.conf
+ etc/cups/subscriptions.conf
+ etc/dbus-1/system.d/cups.conf
+ etc/logrotate.d/cups
+ etc/pam.d/cups
+ etc/xinetd.d/cups-lpd)
+depends=('acl' 'openslp' 'pam' "libcups>=${pkgver}" 'xdg-utils' 'poppler>=0.12.3' 'libusb-compat' 'dbus-core' 'hicolor-icon-theme')
+optdepends=('php: for included phpcups.so module')
+
+ cd ${srcdir}/${pkgbase}-${pkgver}
+ make BUILDROOT=${pkgdir} install-data install-exec || return 1
+
+ # this one we ship in the libcups pkg
+ rm -f ${pkgdir}/usr/bin/cups-config
+
+ # kill the sysv stuff
+ rm -rf ${pkgdir}/etc/rc*.d
+ rm -rf ${pkgdir}/etc/init.d
+ install -D -m755 ../cups ${pkgdir}/etc/rc.d/cups
+ install -D -m644 ../cups.logrotate ${pkgdir}/etc/logrotate.d/cups
+ install -D -m644 ../cups.pam ${pkgdir}/etc/pam.d/cups
+
+ # fix perms on /var/spool and /etc
+ chmod 755 ${pkgdir}/var/spool
+ chmod 755 ${pkgdir}/etc
+
+ # serial backend needs to run as root (http://bugs.archlinux.org/task/20396)
+ chmod 700 ${pkgdir}/usr/lib/cups/backend/serial
+
+ # install ssl directory where to store the certs, solves some samba issues
+ install -dm700 -g lp ${pkgdir}/etc/cups/ssl
+ install -dm511 -g lp ${pkgdir}/var/run/cups/certs
+
+ # install some more configuration files that will get filled by cupsd
+ touch ${pkgdir}/etc/cups/printers.conf
+ touch ${pkgdir}/etc/cups/classes.conf
+ touch ${pkgdir}/etc/cups/client.conf
+ echo "# see 'man client.conf'" >> ${pkgdir}/etc/cups/client.conf
+ echo "ServerName /var/run/cups/cups.sock # alternative: ServerName hostname-or-ip-address[:port] of a remote server" >> ${pkgdir}/etc/cups/client.conf
+ touch ${pkgdir}/etc/cups/subscriptions.conf
+ chgrp lp ${pkgdir}/etc/cups/{printers.conf,classes.conf,client.conf,subscriptions.conf}
+
+ # fix .desktop file
+ sed -i 's|^Exec=htmlview http://localhost:631/|Exec=xdg-open http://localhost:631/|g' ${pkgdir}/usr/share/applications/cups.desktop
+
+ # compress some driver files, adopted from Fedora
+ find ${pkgdir}/usr/share/cups/model -name "*.ppd" | xargs gzip -n9f
+}
diff --git a/staging/cups/cups b/staging/cups/cups
new file mode 100755
index 000000000..4afaf5a7c
--- /dev/null
+++ b/staging/cups/cups
@@ -0,0 +1,38 @@
+#!/bin/bash
+
+. /etc/rc.conf
+. /etc/rc.d/functions
+
+PID=`pidof -o %PPID /usr/sbin/cupsd`
+case "$1" in
+ start)
+ stat_busy "Starting CUPS Daemon"
+ [ -z "$PID" ] && /usr/sbin/cupsd
+ if [ $? -gt 0 ]; then
+ stat_fail
+ else
+ echo $(pidof -o %PPID -x /usr/sbin/cupsd) > /var/run/cups.pid
+ add_daemon cups
+ stat_done
+ fi
+ ;;
+ stop)
+ stat_busy "Stopping CUPS Daemon"
+ [ ! -z "$PID" ] && kill $PID &> /dev/null
+ if [ $? -gt 0 ]; then
+ stat_fail
+ else
+ rm /var/run/cups.pid
+ rm_daemon cups
+ stat_done
+ fi
+ ;;
+ restart)
+ $0 stop
+ sleep 1
+ $0 start
+ ;;
+ *)
+ echo "usage: $0 {start|stop|restart}"
+esac
+exit 0
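
[Annotation] The new rc script follows the stock Arch rc.d pattern: pidof guards against double starts and stale stops, a PID file is written for other tools, and add_daemon/rm_daemon maintain the state markers under /var/run/daemons that the boot scripts consult. Typical manual use:

  /etc/rc.d/cups start     # prints "Starting CUPS Daemon" via stat_busy/stat_done
  /etc/rc.d/cups restart   # stop, sleep 1, start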
diff --git a/staging/cups/cups-avahi.patch b/staging/cups/cups-avahi.patch
new file mode 100644
index 000000000..cf1056a75
--- /dev/null
+++ b/staging/cups/cups-avahi.patch
@@ -0,0 +1,1089 @@
+diff -up cups-1.4.5/backend/dnssd.c.avahi cups-1.4.5/backend/dnssd.c
+--- cups-1.4.5/backend/dnssd.c.avahi 2009-08-08 00:27:12.000000000 +0200
++++ cups-1.4.5/backend/dnssd.c 2010-11-12 13:13:31.000000000 +0100
+@@ -22,6 +22,7 @@
+ * exec_backend() - Execute the backend that corresponds to the
+ * resolved service name.
+ * get_device() - Create or update a device.
++* find_device()
+ * query_callback() - Process query data.
+ * sigterm_handler() - Handle termination signals...
+ * unquote() - Unquote a name string.
+@@ -33,7 +34,18 @@
+
+ #include "backend-private.h"
+ #include <cups/array.h>
+-#include <dns_sd.h>
++#ifdef HAVE_DNSSD
++# include <dns_sd.h>
++#endif /* HAVE_DNSSD */
++#ifdef HAVE_AVAHI
++# include <avahi-client/client.h>
++# include <avahi-client/lookup.h>
++# include <avahi-common/simple-watch.h>
++# include <avahi-common/domain.h>
++# include <avahi-common/error.h>
++# include <avahi-common/malloc.h>
++#define kDNSServiceMaxDomainName AVAHI_DOMAIN_NAME_MAX
++#endif /* HAVE_AVAHI */
+
+
+ /*
+@@ -52,7 +64,12 @@ typedef enum
+
+ typedef struct
+ {
++#ifdef HAVE_DNSSD
+ DNSServiceRef ref; /* Service reference for resolve */
++#endif /* HAVE_DNSSD */
++#ifdef HAVE_AVAHI
++ int resolved; /* Did we resolve the device? */
++#endif /* HAVE_AVAHI */
+ char *name, /* Service name */
+ *domain, /* Domain name */
+ *fullName, /* Full name */
+@@ -64,6 +81,20 @@ typedef struct
+ sent; /* Did we list the device? */
+ } cups_device_t;
+
++typedef struct
++{
++ char key[256];
++ char value[256];
++
++#ifdef HAVE_DNSSD
++ const uint8_t *data;
++ const uint8_t *datanext;
++ const uint8_t *dataend;
++#else /* HAVE_AVAHI */
++ AvahiStringList *txt;
++#endif /* HAVE_DNSSD */
++} cups_txt_records_t;
++
+
+ /*
+ * Local globals...
+@@ -77,6 +108,7 @@ static int job_canceled = 0;
+ * Local functions...
+ */
+
++#ifdef HAVE_DNSSD
+ static void browse_callback(DNSServiceRef sdRef,
+ DNSServiceFlags flags,
+ uint32_t interfaceIndex,
+@@ -92,12 +124,6 @@ static void browse_local_callback(DNSSe
+ const char *regtype,
+ const char *replyDomain,
+ void *context);
+-static int compare_devices(cups_device_t *a, cups_device_t *b);
+-static void exec_backend(char **argv);
+-static cups_device_t *get_device(cups_array_t *devices,
+- const char *serviceName,
+- const char *regtype,
+- const char *replyDomain);
+ static void query_callback(DNSServiceRef sdRef,
+ DNSServiceFlags flags,
+ uint32_t interfaceIndex,
+@@ -106,9 +132,111 @@ static void query_callback(DNSServiceRe
+ uint16_t rrclass, uint16_t rdlen,
+ const void *rdata, uint32_t ttl,
+ void *context);
++#endif /* HAVE_DNSSD */
++#ifdef HAVE_AVAHI
++static void avahi_client_callback (AvahiClient *client,
++ AvahiClientState state,
++ void *context);
++static void avahi_browse_callback (AvahiServiceBrowser *browser,
++ AvahiIfIndex interface,
++ AvahiProtocol protocol,
++ AvahiBrowserEvent event,
++ const char *serviceName,
++ const char *regtype,
++ const char *replyDomain,
++ AvahiLookupResultFlags flags,
++ void *context);
++#endif /* HAVE_AVAHI */
++
++static cups_device_t * find_device (cups_array_t *devices,
++ cups_txt_records_t *txt,
++ cups_device_t *dkey);
++static int compare_devices(cups_device_t *a, cups_device_t *b);
++static void exec_backend(char **argv);
++static cups_device_t *get_device(cups_array_t *devices,
++ const char *serviceName,
++ const char *regtype,
++ const char *replyDomain);
+ static void sigterm_handler(int sig);
+ static void unquote(char *dst, const char *src, size_t dstsize);
+
++#ifdef HAVE_AVAHI
++static AvahiSimplePoll *simple_poll = NULL;
++static int avahi_got_callback;
++#endif /* HAVE_AVAHI */
++
++
++/*
++ * cups_txt_records_t access functions
++ */
++static cups_txt_records_t *
++next_txt_record (cups_txt_records_t *txt)
++{
++#ifdef HAVE_DNSSD
++ txt->data = txt->datanext;
++#else /* HAVE_AVAHI */
++ txt->txt = avahi_string_list_get_next (txt->txt);
++ if (txt->txt == NULL)
++ return NULL;
++#endif /* HAVE_DNSSD */
++
++ return txt;
++}
++
++static int
++parse_txt_record_pair (cups_txt_records_t *txt)
++{
++#ifdef HAVE_DNSSD
++ uint8_t datalen;
++ uint8_t *data = txt->data;
++ char *ptr;
++
++ /*
++ * Read a key/value pair starting with an 8-bit length. Since the
++ * length is 8 bits and the size of the key/value buffers is 256, we
++ * don't need to check for overflow...
++ */
++
++ datalen = *data++;
++ if (!datalen || (data + datalen) >= txt->dataend)
++ return NULL;
++ txt->datanext = data + datalen;
++
++ for (ptr = txt->key; data < txt->datanext && *data != '='; data ++)
++ *ptr++ = *data;
++ *ptr = '\0';
++
++ if (data < txt->datanext && *data == '=')
++ {
++ data++;
++
++ if (data < datanext)
++ memcpy (txt->value, data, txt->datanext - data);
++ value[txt->datanext - data] = '\0';
++ }
++ else
++ return 1;
++#else /* HAVE_AVAHI */
++ char *key, *value;
++ size_t len;
++ avahi_string_list_get_pair (txt->txt, &key, &value, &len);
++ if (len > sizeof (txt->value) - 1)
++ len = sizeof (txt->value) - 1;
++
++ memcpy (txt->value, value, len);
++ txt->value[len] = '\0';
++ len = strlen (key);
++ if (len > sizeof (txt->key) - 1)
++ len = sizeof (txt->key) - 1;
++
++ memcpy (txt->key, key, len);
++ txt->key[len] = '\0';
++ avahi_free (key);
++ avahi_free (value);
++#endif /* HAVE_AVAHI */
++
++ return 0;
++}
+
+ /*
+ * 'main()' - Browse for printers.
+@@ -119,6 +247,13 @@ main(int argc, /* I - Number of comm
+ char *argv[]) /* I - Command-line arguments */
+ {
+ const char *name; /* Backend name */
++ cups_array_t *devices; /* Device array */
++ cups_device_t *device; /* Current device */
++ char uriName[1024]; /* Unquoted fullName for URI */
++#ifdef HAVE_DNSSD
++ int fd; /* Main file descriptor */
++ fd_set input; /* Input set for select() */
++ struct timeval timeout; /* Timeout for select() */
+ DNSServiceRef main_ref, /* Main service reference */
+ fax_ipp_ref, /* IPP fax service reference */
+ ipp_ref, /* IPP service reference */
+@@ -130,12 +265,11 @@ main(int argc, /* I - Number of comm
+ pdl_datastream_ref, /* AppSocket service reference */
+ printer_ref, /* LPD service reference */
+ riousbprint_ref; /* Remote IO service reference */
+- int fd; /* Main file descriptor */
+- fd_set input; /* Input set for select() */
+- struct timeval timeout; /* Timeout for select() */
+- cups_array_t *devices; /* Device array */
+- cups_device_t *device; /* Current device */
+- char uriName[1024]; /* Unquoted fullName for URI */
++#endif /* HAVE_DNSSD */
++#ifdef HAVE_AVAHI
++ AvahiClient *client;
++ int error;
++#endif /* HAVE_AVAHI */
+ #if defined(HAVE_SIGACTION) && !defined(HAVE_SIGSET)
+ struct sigaction action; /* Actions for POSIX signals */
+ #endif /* HAVE_SIGACTION && !HAVE_SIGSET */
+@@ -194,6 +328,49 @@ main(int argc, /* I - Number of comm
+ * Browse for different kinds of printers...
+ */
+
++#ifdef HAVE_AVAHI
++ if ((simple_poll = avahi_simple_poll_new ()) == NULL)
++ {
++ perror ("ERROR: Unable to create avahi simple poll object");
++ return (1);
++ }
++
++ client = avahi_client_new (avahi_simple_poll_get (simple_poll),
++ 0, avahi_client_callback, NULL, &error);
++ if (!client)
++ {
++ perror ("ERROR: Unable to create avahi client");
++ return (1);
++ }
++
++ avahi_service_browser_new (client, AVAHI_IF_UNSPEC,
++ AVAHI_PROTO_UNSPEC,
++ "_fax-ipp._tcp", NULL, 0,
++ avahi_browse_callback, devices);
++ avahi_service_browser_new (client, AVAHI_IF_UNSPEC,
++ AVAHI_PROTO_UNSPEC,
++ "_ipp._tcp", NULL, 0,
++ avahi_browse_callback, devices);
++ avahi_service_browser_new (client, AVAHI_IF_UNSPEC,
++ AVAHI_PROTO_UNSPEC,
++ "_ipp-tls._tcp", NULL, 0,
++ avahi_browse_callback, devices);
++ avahi_service_browser_new (client, AVAHI_IF_UNSPEC,
++ AVAHI_PROTO_UNSPEC,
++ "_pdl-datastream._tcp",
++ NULL, 0,
++ avahi_browse_callback,
++ devices);
++ avahi_service_browser_new (client, AVAHI_IF_UNSPEC,
++ AVAHI_PROTO_UNSPEC,
++ "_printer._tcp", NULL, 0,
++ avahi_browse_callback, devices);
++ avahi_service_browser_new (client, AVAHI_IF_UNSPEC,
++ AVAHI_PROTO_UNSPEC,
++ "_riousbprint._tcp", NULL, 0,
++ avahi_browse_callback, devices);
++#endif /* HAVE_AVAHI */
++#ifdef HAVE_DNSSD
+ if (DNSServiceCreateConnection(&main_ref) != kDNSServiceErr_NoError)
+ {
+ perror("ERROR: Unable to create service connection");
+@@ -245,6 +422,7 @@ main(int argc, /* I - Number of comm
+ riousbprint_ref = main_ref;
+ DNSServiceBrowse(&riousbprint_ref, kDNSServiceFlagsShareConnection, 0,
+ "_riousbprint._tcp", NULL, browse_callback, devices);
++#endif /* HAVE_DNSSD */
+
+ /*
+ * Loop until we are killed...
+@@ -252,6 +430,9 @@ main(int argc, /* I - Number of comm
+
+ while (!job_canceled)
+ {
++ int announce = 0;
++
++#ifdef HAVE_DNSSD
+ FD_ZERO(&input);
+ FD_SET(fd, &input);
+
+@@ -271,11 +452,35 @@ main(int argc, /* I - Number of comm
+ }
+ else
+ {
++ announce = 1;
++ }
++#else /* HAVE_AVAHI */
++ int r;
++ avahi_got_callback = 0;
++ r = avahi_simple_poll_iterate (simple_poll, 1);
++ if (r != 0 && r != EINTR)
++ {
++ /*
++ * We've been told to exit the loop. Perhaps the connection to
++ * avahi failed.
++ */
++
++ break;
++ }
++
++ if (avahi_got_callback)
++ announce = 1;
++#endif /* HAVE_DNSSD */
++
++ if (announce)
++ {
+ /*
+ * Announce any devices we've found...
+ */
+
++#ifdef HAVE_DNSSD
+ DNSServiceErrorType status; /* DNS query status */
++#endif /* HAVE_DNSSD */
+ cups_device_t *best; /* Best matching device */
+ char device_uri[1024]; /* Device URI */
+ int count; /* Number of queries */
+@@ -285,6 +490,7 @@ main(int argc, /* I - Number of comm
+ best = NULL, count = 0;
+ device;
+ device = (cups_device_t *)cupsArrayNext(devices))
++#ifdef HAVE_DNSSD
+ if (!device->ref && !device->sent)
+ {
+ /*
+@@ -313,14 +519,23 @@ main(int argc, /* I - Number of comm
+ count ++;
+ }
+ }
+- else if (!device->sent)
++ else
++#endif /* HAVE_DNSSD */
++#ifdef HAVE_AVAHI
++ if (!device->resolved)
++ continue;
++ else
++#endif /* HAVE_AVAHI */
++ if (!device->sent)
+ {
++#ifdef HAVE_DNSSD
+ /*
+ * Got the TXT records, now report the device...
+ */
+
+ DNSServiceRefDeallocate(device->ref);
+ device->ref = 0;
++#endif /* HAVE_DNSSD */
+
+ if (!best)
+ best = device;
+@@ -372,6 +587,7 @@ main(int argc, /* I - Number of comm
+ * 'browse_callback()' - Browse devices.
+ */
+
++#ifdef HAVE_DNSSD
+ static void
+ browse_callback(
+ DNSServiceRef sdRef, /* I - Service reference */
+@@ -405,12 +621,14 @@ browse_callback(
+
+ get_device((cups_array_t *)context, serviceName, regtype, replyDomain);
+ }
++#endif /* HAVE_DNSSD */
+
+
+ /*
+ * 'browse_local_callback()' - Browse local devices.
+ */
+
++#ifdef HAVE_DNSSD
+ static void
+ browse_local_callback(
+ DNSServiceRef sdRef, /* I - Service reference */
+@@ -456,6 +674,7 @@ browse_local_callback(
+ device->fullName);
+ device->sent = 1;
+ }
++#endif /* HAVE_DNSSD */
+
+
+ /*
+@@ -528,6 +747,32 @@ exec_backend(char **argv) /* I - Comman
+ exit(CUPS_BACKEND_STOP);
+ }
+
++static int
++device_type (const char *regtype)
++{
++#ifdef HAVE_AVAHI
++ if (!strcmp(regtype, "_ipp._tcp") ||
++ !strcmp(regtype, "_ipp-tls._tcp"))
++ return (CUPS_DEVICE_IPP);
++ else if (!strcmp(regtype, "_fax-ipp._tcp"))
++ return (CUPS_DEVICE_FAX_IPP);
++ else if (!strcmp(regtype, "_printer._tcp"))
++ return (CUPS_DEVICE_PDL_DATASTREAM);
++#else
++ if (!strcmp(regtype, "_ipp._tcp.") ||
++ !strcmp(regtype, "_ipp-tls._tcp."))
++ return (CUPS_DEVICE_IPP);
++ else if (!strcmp(regtype, "_fax-ipp._tcp."))
++ return (CUPS_DEVICE_FAX_IPP);
++ else if (!strcmp(regtype, "_printer._tcp."))
++ return (CUPS_DEVICE_PRINTER);
++ else if (!strcmp(regtype, "_pdl-datastream._tcp."))
++ return (CUPS_DEVICE_PDL_DATASTREAM);
++#endif /* HAVE_AVAHI */
++
++ return (CUPS_DEVICE_RIOUSBPRINT);
++}
++
+
+ /*
+ * 'get_device()' - Create or update a device.
+@@ -550,18 +795,7 @@ get_device(cups_array_t *devices, /* I -
+ */
+
+ key.name = (char *)serviceName;
+-
+- if (!strcmp(regtype, "_ipp._tcp.") ||
+- !strcmp(regtype, "_ipp-tls._tcp."))
+- key.type = CUPS_DEVICE_IPP;
+- else if (!strcmp(regtype, "_fax-ipp._tcp."))
+- key.type = CUPS_DEVICE_FAX_IPP;
+- else if (!strcmp(regtype, "_printer._tcp."))
+- key.type = CUPS_DEVICE_PRINTER;
+- else if (!strcmp(regtype, "_pdl-datastream._tcp."))
+- key.type = CUPS_DEVICE_PDL_DATASTREAM;
+- else
+- key.type = CUPS_DEVICE_RIOUSBPRINT;
++ key.type = device_type (regtype);
+
+ for (device = cupsArrayFind(devices, &key);
+ device;
+@@ -581,8 +815,14 @@ get_device(cups_array_t *devices, /* I -
+ free(device->domain);
+ device->domain = strdup(replyDomain);
+
++#ifdef HAVE_DNSSD
+ DNSServiceConstructFullName(fullName, device->name, regtype,
+ replyDomain);
++#else /* HAVE_AVAHI */
++ avahi_service_name_join (fullName, kDNSServiceMaxDomainName,
++ serviceName, regtype, replyDomain);
++#endif /* HAVE_DNSSD */
++
+ free(device->fullName);
+ device->fullName = strdup(fullName);
+ }
+@@ -602,6 +842,9 @@ get_device(cups_array_t *devices, /* I -
+ device->domain = strdup(replyDomain);
+ device->type = key.type;
+ device->priority = 50;
++#ifdef HAVE_AVAHI
++ device->resolved = 0;
++#endif /* HAVE_AVAHI */
+
+ cupsArrayAdd(devices, device);
+
+@@ -609,7 +852,13 @@ get_device(cups_array_t *devices, /* I -
+ * Set the "full name" of this service, which is used for queries...
+ */
+
++#ifdef HAVE_DNSSD
+ DNSServiceConstructFullName(fullName, serviceName, regtype, replyDomain);
++#else /* HAVE_AVAHI */
++ avahi_service_name_join (fullName, kDNSServiceMaxDomainName,
++ serviceName, regtype, replyDomain);
++#endif /* HAVE_DNSSD */
++
+ device->fullName = strdup(fullName);
+
+ return (device);
+@@ -620,6 +869,7 @@ get_device(cups_array_t *devices, /* I -
+ * 'query_callback()' - Process query data.
+ */
+
++#ifdef HAVE_DNSSD
+ static void
+ query_callback(
+ DNSServiceRef sdRef, /* I - Service reference */
+@@ -639,7 +889,7 @@ query_callback(
+ *ptr; /* Pointer into string */
+ cups_device_t dkey, /* Search key */
+ *device; /* Device */
+-
++ cups_txt_records_t txt;
+
+ fprintf(stderr, "DEBUG2: query_callback(sdRef=%p, flags=%x, "
+ "interfaceIndex=%d, errorCode=%d, fullName=\"%s\", "
+@@ -673,84 +923,212 @@ query_callback(
+ if ((ptr = strstr(name, "._")) != NULL)
+ *ptr = '\0';
+
+- if (strstr(fullName, "_ipp._tcp.") ||
+- strstr(fullName, "_ipp-tls._tcp."))
+- dkey.type = CUPS_DEVICE_IPP;
+- else if (strstr(fullName, "_fax-ipp._tcp."))
+- dkey.type = CUPS_DEVICE_FAX_IPP;
+- else if (strstr(fullName, "_printer._tcp."))
+- dkey.type = CUPS_DEVICE_PRINTER;
+- else if (strstr(fullName, "_pdl-datastream._tcp."))
+- dkey.type = CUPS_DEVICE_PDL_DATASTREAM;
++ dkey.type = device_type (fullName);
++
++ txt.data = rdata;
++ txt.dataend = rdata + rdlen;
++ device = find_device ((cups_array_t *) context, &txt, &dkey);
++ if (!device)
++ fprintf(stderr, "DEBUG: Ignoring TXT record for \"%s\"...\n", fullName);
++}
++#endif /* HAVE_DNSSD */
++
++#ifdef HAVE_AVAHI
++static void
++avahi_client_callback(AvahiClient *client,
++ AvahiClientState state,
++ void *context)
++{
++ /*
++ * If the connection drops, quit.
++ */
++
++ if (state == AVAHI_CLIENT_FAILURE)
++ {
++ fprintf (stderr, "ERROR: Avahi connection failed\n");
++ avahi_simple_poll_quit (simple_poll);
++ }
++}
++
++static void
++avahi_query_callback(AvahiServiceResolver *resolver,
++ AvahiIfIndex interface,
++ AvahiProtocol protocol,
++ AvahiResolverEvent event,
++ const char *name,
++ const char *type,
++ const char *domain,
++ const char *host_name,
++ const AvahiAddress *address,
++ uint16_t port,
++ AvahiStringList *txt,
++ AvahiLookupResultFlags flags,
++ void *context)
++{
++ AvahiClient *client;
++ cups_device_t key,
++ *device;
++ char uqname[1024],
++ *ptr;
++ cups_txt_records_t txtr;
++
++ client = avahi_service_resolver_get_client (resolver);
++ if (event != AVAHI_RESOLVER_FOUND)
++ {
++ if (event == AVAHI_RESOLVER_FAILURE)
++ {
++ fprintf (stderr, "ERROR: %s\n",
++ avahi_strerror (avahi_client_errno (client)));
++ }
++
++ avahi_service_resolver_free (resolver);
++ return;
++ }
++
++ /*
++ * Set search key for device.
++ */
++
++ key.name = uqname;
++ unquote (uqname, name, sizeof (uqname));
++ if ((ptr = strstr(name, "._")) != NULL)
++ *ptr = '\0';
++
++ key.domain = (char *) domain;
++ key.type = device_type (type);
++
++ /*
++   * Find the device and the TXT information.
++ */
++
++ txtr.txt = txt;
++ device = find_device ((cups_array_t *) context, &txtr, &key);
++ if (device)
++ {
++ /*
++ * Let the main loop know to announce the device.
++ */
++
++ device->resolved = 1;
++ avahi_got_callback = 1;
++ }
+ else
+- dkey.type = CUPS_DEVICE_RIOUSBPRINT;
++ fprintf (stderr, "DEBUG: Ignoring TXT record for \"%s\"...\n", name);
++
++ avahi_service_resolver_free (resolver);
++}
++
++static void
++avahi_browse_callback(AvahiServiceBrowser *browser,
++ AvahiIfIndex interface,
++ AvahiProtocol protocol,
++ AvahiBrowserEvent event,
++ const char *name,
++ const char *type,
++ const char *domain,
++ AvahiLookupResultFlags flags,
++ void *context)
++{
++ AvahiClient *client = avahi_service_browser_get_client (browser);
++
++ switch (event)
++ {
++ case AVAHI_BROWSER_FAILURE:
++ fprintf (stderr, "ERROR: %s\n",
++ avahi_strerror (avahi_client_errno (client)));
++ avahi_simple_poll_quit (simple_poll);
++ return;
++
++ case AVAHI_BROWSER_NEW:
++ /*
++ * This object is new on the network.
++ */
++
++ if (flags & AVAHI_LOOKUP_RESULT_LOCAL)
++ {
++ /*
++ * This comes from the local machine so ignore it.
++ */
++
++ fprintf (stderr, "DEBUG: ignoring local service %s\n", name);
++ }
++ else
++ {
++ /*
++ * Create a device entry for it if it doesn't yet exist.
++ */
++
++ get_device ((cups_array_t *)context, name, type, domain);
++
++ /*
++ * Now look for a TXT entry.
++ */
++
++ if (avahi_service_resolver_new (client, interface, protocol,
++ name, type, domain,
++ AVAHI_PROTO_UNSPEC, 0,
++ avahi_query_callback, context) == NULL)
++ {
++ fprintf (stderr, "ERROR: failed to resolve service %s: %s\n",
++ name, avahi_strerror (avahi_client_errno (client)));
++ }
++ }
++
++ break;
+
+- for (device = cupsArrayFind(devices, &dkey);
++ case AVAHI_BROWSER_REMOVE:
++ case AVAHI_BROWSER_ALL_FOR_NOW:
++ case AVAHI_BROWSER_CACHE_EXHAUSTED:
++ break;
++ }
++}
++#endif /* HAVE_AVAHI */
++
++static cups_device_t *
++find_device (cups_array_t *devices,
++ cups_txt_records_t *txt,
++ cups_device_t *dkey)
++{
++ cups_device_t *device;
++ char *ptr;
++
++ for (device = cupsArrayFind(devices, dkey);
+ device;
+ device = cupsArrayNext(devices))
+ {
+- if (strcasecmp(device->name, dkey.name) ||
+- strcasecmp(device->domain, dkey.domain))
++ if (strcasecmp(device->name, dkey->name) ||
++ strcasecmp(device->domain, dkey->domain))
+ {
+ device = NULL;
+ break;
+ }
+- else if (device->type == dkey.type)
++ else if (device->type == dkey->type)
+ {
+ /*
+ * Found it, pull out the priority and make and model from the TXT
+ * record and save it...
+ */
+
+- const uint8_t *data, /* Pointer into data */
+- *datanext, /* Next key/value pair */
+- *dataend; /* End of entire TXT record */
+- uint8_t datalen; /* Length of current key/value pair */
+- char key[256], /* Key string */
+- value[256], /* Value string */
+- make_and_model[512],
++ char make_and_model[512],
+ /* Manufacturer and model */
+ model[256], /* Model */
+- device_id[2048];/* 1284 device ID */
+-
++ device_id[2048]; /* 1284 device ID */
+
+ device_id[0] = '\0';
+ make_and_model[0] = '\0';
+
+ strcpy(model, "Unknown");
+
+- for (data = rdata, dataend = data + rdlen;
+- data < dataend;
+- data = datanext)
++ for (;;)
+ {
+- /*
+- * Read a key/value pair starting with an 8-bit length. Since the
+- * length is 8 bits and the size of the key/value buffers is 256, we
+- * don't need to check for overflow...
+- */
+-
+- datalen = *data++;
+-
+- if (!datalen || (data + datalen) >= dataend)
+- break;
+-
+- datanext = data + datalen;
++ char *key;
++ char *value;
+
+- for (ptr = key; data < datanext && *data != '='; data ++)
+- *ptr++ = *data;
+- *ptr = '\0';
+-
+- if (data < datanext && *data == '=')
+- {
+- data ++;
+-
+- if (data < datanext)
+- memcpy(value, data, datanext - data);
+- value[datanext - data] = '\0';
+- }
+- else
+- continue;
++ if (parse_txt_record_pair (txt))
++ goto next;
+
++ key = txt->key;
++ value = txt->value;
+ if (!strncasecmp(key, "usb_", 4))
+ {
+ /*
+@@ -805,6 +1183,10 @@ query_callback(
+ if (device->type == CUPS_DEVICE_PRINTER)
+ device->sent = 1;
+ }
++
++ next:
++ if (next_txt_record (txt) == NULL)
++ break;
+ }
+
+ if (device->device_id)
+@@ -854,11 +1236,9 @@ query_callback(
+ }
+ }
+
+- if (!device)
+- fprintf(stderr, "DEBUG: Ignoring TXT record for \"%s\"...\n", fullName);
++ return device;
+ }
+
+-
+ /*
+ * 'sigterm_handler()' - Handle termination signals...
+ */
+diff -up cups-1.4.5/config.h.in.avahi cups-1.4.5/config.h.in
+--- cups-1.4.5/config.h.in.avahi 2010-08-13 06:11:46.000000000 +0200
++++ cups-1.4.5/config.h.in 2010-11-12 13:13:31.000000000 +0100
+@@ -344,6 +344,13 @@
+
+
+ /*
++ * Do we have Avahi for DNS Service Discovery?
++ */
++
++#undef HAVE_AVAHI
++
++
++/*
+ * Do we have <sys/ioctl.h>?
+ */
+
+diff -up cups-1.4.5/config-scripts/cups-dnssd.m4.avahi cups-1.4.5/config-scripts/cups-dnssd.m4
+--- cups-1.4.5/config-scripts/cups-dnssd.m4.avahi 2009-08-29 00:54:34.000000000 +0200
++++ cups-1.4.5/config-scripts/cups-dnssd.m4 2010-11-12 13:13:31.000000000 +0100
+@@ -27,6 +27,21 @@ AC_ARG_WITH(dnssd-includes, [ --with-dn
+ DNSSDLIBS=""
+ DNSSD_BACKEND=""
+
++AC_ARG_ENABLE(avahi, [ --enable-avahi turn on DNS Service Discovery support, default=no],
++ [if test x$enable_avahi = xyes; then
++ AC_MSG_CHECKING(for Avahi)
++ if $PKGCONFIG --exists avahi-client; then
++ AC_MSG_RESULT(yes)
++ CFLAGS="$CFLAGS `$PKGCONFIG --cflags avahi-client`"
++ DNSSDLIBS="`$PKGCONFIG --libs avahi-client`"
++ DNSSD_BACKEND="dnssd"
++ AC_DEFINE(HAVE_AVAHI)
++ enable_dnssd=no
++ else
++ AC_MSG_RESULT(no)
++ fi
++ fi])
++
+ if test x$enable_dnssd != xno; then
+ AC_CHECK_HEADER(dns_sd.h, [
+ case "$uname" in
+diff -up cups-1.4.5/cups/http-support.c.avahi cups-1.4.5/cups/http-support.c
+--- cups-1.4.5/cups/http-support.c.avahi 2010-10-02 00:40:38.000000000 +0200
++++ cups-1.4.5/cups/http-support.c 2010-11-12 13:28:45.000000000 +0100
+@@ -55,6 +55,11 @@
+ # include <dns_sd.h>
+ # include <poll.h>
+ #endif /* HAVE_DNSSD */
++#ifdef HAVE_AVAHI
++# include <avahi-client/client.h>
++# include <avahi-client/lookup.h>
++# include <avahi-common/simple-watch.h>
++#endif /* HAVE_AVAHI */
+
+
+ /*
+@@ -121,6 +126,24 @@ static void resolve_callback(DNSService
+ void *context);
+ #endif /* HAVE_DNSSD */
+
++#ifdef HAVE_AVAHI
++static void avahi_resolve_uri_client_cb(AvahiClient *client,
++ AvahiClientState state,
++ void *simple_poll);
++static void avahi_resolve_uri_resolver_cb(AvahiServiceResolver *resolver,
++ AvahiIfIndex interface,
++ AvahiProtocol protocol,
++ AvahiResolverEvent event,
++ const char *name,
++ const char *type,
++ const char *domain,
++ const char *host_name,
++ const AvahiAddress *address,
++ uint16_t port,
++ AvahiStringList *txt,
++ AvahiLookupResultFlags flags,
++ void *context);
++#endif /* HAVE_AVAHI */
+
+ /*
+ * 'httpAssembleURI()' - Assemble a uniform resource identifier from its
+@@ -1351,16 +1374,27 @@ _httpResolveURI(
+
+ if (strstr(hostname, "._tcp"))
+ {
++#if defined(HAVE_DNSSD) || defined(HAVE_AVAHI)
++ char *regtype, /* Pointer to type in hostname */
++ *domain; /* Pointer to domain in hostname */
+ #ifdef HAVE_DNSSD
+ DNSServiceRef ref, /* DNS-SD master service reference */
+ domainref, /* DNS-SD service reference for domain */
+ localref; /* DNS-SD service reference for .local */
+ int domainsent = 0, /* Send the domain resolve? */
+ offline = 0; /* offline-report state set? */
+- char *regtype, /* Pointer to type in hostname */
+- *domain; /* Pointer to domain in hostname */
+ _http_uribuf_t uribuf; /* URI buffer */
+ struct pollfd polldata; /* Polling data */
++#else /* HAVE_AVAHI */
++ AvahiSimplePoll *simple_poll;
++ AvahiClient *client;
++ int error;
++ struct
++ {
++ AvahiSimplePoll *poll;
++ _http_uribuf_t uribuf;
++ } user_data;
++#endif /* HAVE_DNSSD */
+
+
+ if (logit)
+@@ -1398,8 +1432,13 @@ _httpResolveURI(
+ if (domain)
+ *domain++ = '\0';
+
++#ifdef HAVE_DNSSD
+ uribuf.buffer = resolved_uri;
+ uribuf.bufsize = resolved_size;
++#else
++ user_data.uribuf.buffer = resolved_uri;
++ user_data.uribuf.bufsize = resolved_size;
++#endif
+
+ resolved_uri[0] = '\0';
+
+@@ -1414,6 +1453,7 @@ _httpResolveURI(
+
+ uri = NULL;
+
++#ifdef HAVE_DNSSD
+ if (DNSServiceCreateConnection(&ref) == kDNSServiceErr_NoError)
+ {
+ localref = ref;
+@@ -1500,6 +1540,36 @@ _httpResolveURI(
+
+ DNSServiceRefDeallocate(ref);
+ }
++#else /* HAVE_AVAHI */
++ if ((simple_poll = avahi_simple_poll_new ()) != NULL)
++ {
++ if ((client = avahi_client_new (avahi_simple_poll_get (simple_poll),
++ 0, avahi_resolve_uri_client_cb,
++ &simple_poll, &error)) != NULL)
++ {
++ user_data.poll = simple_poll;
++ if (avahi_service_resolver_new (client, AVAHI_IF_UNSPEC,
++ AVAHI_PROTO_UNSPEC, hostname,
++ regtype, domain, AVAHI_PROTO_UNSPEC, 0,
++ avahi_resolve_uri_resolver_cb,
++ &user_data) != NULL)
++ {
++ avahi_simple_poll_loop (simple_poll);
++
++ /*
++ * Collect the result.
++ */
++
++ if (resolved_uri[0])
++ uri = resolved_uri;
++ }
++
++ avahi_client_free (client);
++ }
++
++ avahi_simple_poll_free (simple_poll);
++ }
++#endif /* HAVE_DNSSD */
+
+ if (logit)
+ {
+@@ -1511,13 +1581,13 @@ _httpResolveURI(
+ fputs("STATE: -connecting-to-device,offline-report\n", stderr);
+ }
+
+-#else
++#else /* HAVE_DNSSD || HAVE_AVAHI */
+ /*
+ * No DNS-SD support...
+ */
+
+ uri = NULL;
+-#endif /* HAVE_DNSSD */
++#endif /* HAVE_DNSSD || HAVE_AVAHI */
+
+ if (logit && !uri)
+ _cupsLangPuts(stderr, _("Unable to find printer!\n"));
+@@ -1722,6 +1792,105 @@ resolve_callback(
+ }
+ #endif /* HAVE_DNSSD */
+
++#ifdef HAVE_AVAHI
++static void
++avahi_resolve_uri_client_cb (AvahiClient *client,
++ AvahiClientState state,
++ void *simple_poll)
++{
++ DEBUG_printf(("avahi_resolve_uri_client_callback(client=%p, state=%d, "
++ "simple_poll=%p)\n", client, state, simple_poll));
++
++ /*
++ * If the connection drops, quit.
++ */
++
++ if (state == AVAHI_CLIENT_FAILURE)
++ avahi_simple_poll_quit (simple_poll);
++}
++
++static void
++avahi_resolve_uri_resolver_cb (AvahiServiceResolver *resolver,
++ AvahiIfIndex interface,
++ AvahiProtocol protocol,
++ AvahiResolverEvent event,
++ const char *name,
++ const char *type,
++ const char *domain,
++ const char *host_name,
++ const AvahiAddress *address,
++ uint16_t port,
++ AvahiStringList *txt,
++ AvahiLookupResultFlags flags,
++ void *context)
++{
++ const char *scheme; /* URI scheme */
++ char rp[256]; /* Remote printer */
++ AvahiStringList *pair;
++ char *value;
++ size_t valueLen = 0;
++ char addr[AVAHI_ADDRESS_STR_MAX];
++ struct
++ {
++ AvahiSimplePoll *poll;
++ _http_uribuf_t uribuf;
++ } *poll_uribuf = context;
++
++ DEBUG_printf(("avahi_resolve_uri_resolver_callback(resolver=%p, "
++ "interface=%d, protocol=%d, event=%d, name=\"%s\", "
++ "type=\"%s\", domain=\"%s\", host_name=\"%s\", address=%p, "
++ "port=%d, txt=%p, flags=%d, context=%p)\n",
++ resolver, interface, protocol, event, name, type, domain,
++ host_name, address, port, txt, flags, context));
++
++ if (event != AVAHI_RESOLVER_FOUND)
++ {
++ avahi_service_resolver_free (resolver);
++ avahi_simple_poll_quit (poll_uribuf->poll);
++ return;
++ }
++
++ /*
++ * Figure out the scheme from the full name...
++ */
++
++ if (strstr(type, "_ipp."))
++ scheme = "ipp";
++ else if (strstr(type, "_printer."))
++ scheme = "lpd";
++ else if (strstr(type, "_pdl-datastream."))
++ scheme = "socket";
++ else
++ scheme = "riousbprint";
++
++ /*
++   * Extract the "rp" (remote printer) key from the TXT record...
++ */
++
++ if ((pair = avahi_string_list_find (txt, "rp")) != NULL)
++ {
++ avahi_string_list_get_pair (pair, NULL, &value, &valueLen);
++ rp[0] = '/';
++ memcpy (rp + 1, value, valueLen);
++ rp[valueLen + 1] = '\0';
++ }
++ else
++ rp[0] = '\0';
++
++ /*
++ * Assemble the final device URI...
++ */
++
++ avahi_address_snprint (addr, AVAHI_ADDRESS_STR_MAX, address);
++ httpAssembleURI(HTTP_URI_CODING_ALL, poll_uribuf->uribuf.buffer,
++ poll_uribuf->uribuf.bufsize, scheme, NULL,
++ addr, port, rp);
++ DEBUG_printf(("avahi_resolve_uri_resolver_callback: Resolved URI is \"%s\"\n",
++ poll_uribuf->uribuf.buffer));
++ avahi_simple_poll_quit (poll_uribuf->poll);
++}
++#endif /* HAVE_AVAHI */
++
+
+ /*
+ * End of "$Id: http-support.c 9322 2010-10-01 22:40:38Z mike $".
diff --git a/staging/cups/cups.install b/staging/cups/cups.install
new file mode 100644
index 000000000..e92e17ed3
--- /dev/null
+++ b/staging/cups/cups.install
@@ -0,0 +1,15 @@
+post_install() {
+ xdg-icon-resource forceupdate --theme hicolor 2> /dev/null
+ echo ">> If you use an HTTPS connection to CUPS, the first time you access"
+  echo ">> the web interface, it may take a very long time before it comes up."
+ echo ">> This is because the first request triggers the generation of the CUPS"
+ echo ">> SSL certificates which can be a very time-consuming job."
+}
+
+post_upgrade() {
+ xdg-icon-resource forceupdate --theme hicolor 2> /dev/null
+}
+
+post_remove() {
+ xdg-icon-resource forceupdate --theme hicolor 2> /dev/null
+}
diff --git a/staging/cups/cups.logrotate b/staging/cups/cups.logrotate
new file mode 100644
index 000000000..9c49bbdaf
--- /dev/null
+++ b/staging/cups/cups.logrotate
@@ -0,0 +1,8 @@
+/var/log/cups/*log {
+ missingok
+ notifempty
+ delaycompress
+ postrotate
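+    # SIGHUP makes cupsd reload and reopen its log files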
+ /bin/kill -HUP `cat /var/run/cups.pid 2>/dev/null` 2>/dev/null || true
+ endscript
+}
diff --git a/staging/cups/cups.pam b/staging/cups/cups.pam
new file mode 100644
index 000000000..53724d1f8
--- /dev/null
+++ b/staging/cups/cups.pam
@@ -0,0 +1,3 @@
+auth required pam_unix.so
+account required pam_unix.so
+session required pam_unix.so
diff --git a/staging/cvs/PKGBUILD b/staging/cvs/PKGBUILD
new file mode 100644
index 000000000..76f71a2a1
--- /dev/null
+++ b/staging/cvs/PKGBUILD
@@ -0,0 +1,39 @@
+# $Id: PKGBUILD 121072 2011-04-28 22:54:58Z stephane $
+# Contributor: dorphell <dorphell@archlinux.org>
+
+pkgname=cvs
+pkgver=1.11.23
+pkgrel=7
+pkgdesc="Concurrent Versions System - a source control system"
+arch=(i686 x86_64)
+url="http://cvs.nongnu.org"
+license=('GPL')
+depends=('krb5')
+optdepends=('openssh: for using cvs over ssh' 'inetutils: for using cvs over rsh')
+install=cvs.install
+source=(ftp://ftp.gnu.org/non-gnu/cvs/source/stable/${pkgver}/${pkgname}-${pkgver}.tar.bz2
+ cvs-1.11.23-getline64.patch
+ cvs-1.11.23-cve-2010-3846.patch)
+sha256sums=('400f51b59d85116e79b844f2d5dbbad4759442a789b401a94aa5052c3d7a4aa9'
+ '9126d7992ace943980ad8a10d5a09aeb6f1eeeb9b921fc796fe31de7b1c220cf'
+ 'c6506d0a5efc7b0cab6415f26e070ec214fb9781fac8d295506f4d0825431a8f')
+
+build() {
+ cd "${srcdir}/${pkgname}-${pkgver}"
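+  # unset so configure does not hard-code the build user's editor into cvs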
+ unset EDITOR VISUAL
+
+ patch -Np1 -i ../cvs-1.11.23-getline64.patch
+
+  # CVE-2010-3846, see https://www.redhat.com/security/data/cve/CVE-2010-3846.html
+ patch -Np1 -i ../cvs-1.11.23-cve-2010-3846.patch
+
+ ./configure --prefix=/usr
+ make
+}
+
+package() {
+ cd "${srcdir}/${pkgname}-${pkgver}"
+
+ make DESTDIR="${pkgdir}" install
+ rm "${pkgdir}"/usr/share/info/dir
+}
diff --git a/staging/cvs/cvs-1.11.23-cve-2010-3846.patch b/staging/cvs/cvs-1.11.23-cve-2010-3846.patch
new file mode 100644
index 000000000..e1560cef8
--- /dev/null
+++ b/staging/cvs/cvs-1.11.23-cve-2010-3846.patch
@@ -0,0 +1,167 @@
+From b122edcb68ff05bb6eb22f6e50423e7f1050841b Mon Sep 17 00:00:00 2001
+From: Larry Jones <lawrence.jones@siemens.com>
+Date: Thu, 21 Oct 2010 10:08:16 +0200
+Subject: [PATCH] Fix for CVE-2010-3846
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
+
+A malformed RCS revision (a delete past the end of the input file, or
+overlapping deleted regions) corrupts the computed size of the output
+file image. This leads to a write past the allocated memory, opening a
+hidden memory corruption in the CVS server.
+
+Signed-off-by: Petr Písař <ppisar@redhat.com>
+---
+ src/rcs.c | 52 +++++++++++++++++++++++++++++-----------------------
+ 1 files changed, 29 insertions(+), 23 deletions(-)
+
+diff --git a/src/rcs.c b/src/rcs.c
+index 7d0d078..2f88f85 100644
+--- a/src/rcs.c
++++ b/src/rcs.c
+@@ -7128,7 +7128,7 @@ apply_rcs_changes (orig_lines, diffbuf, difflen, name, addvers, delvers)
+ struct deltafrag *dfhead;
+ struct deltafrag **dftail;
+ struct deltafrag *df;
+- unsigned long numlines, lastmodline, offset;
++ unsigned long numlines, offset;
+ struct linevector lines;
+ int err;
+
+@@ -7202,12 +7202,12 @@ apply_rcs_changes (orig_lines, diffbuf, difflen, name, addvers, delvers)
+
+ /* New temp data structure to hold new org before
+ copy back into original structure. */
+- lines.nlines = lines.lines_alloced = numlines;
++ lines.lines_alloced = numlines;
+ lines.vector = xmalloc (numlines * sizeof *lines.vector);
+
+ /* We changed the list order to first to last -- so the
+ list never gets larger than the size numlines. */
+- lastmodline = 0;
++ lines.nlines = 0;
+
+ /* offset created when adding/removing lines
+ between new and original structure */
+@@ -7216,25 +7216,24 @@ apply_rcs_changes (orig_lines, diffbuf, difflen, name, addvers, delvers)
+ for (df = dfhead; df != NULL; )
+ {
+ unsigned int ln;
+- unsigned long deltaend;
++ unsigned long newpos = df->pos - offset;
+
+- if (df->pos > orig_lines->nlines)
++ if (newpos < lines.nlines || newpos > numlines)
+ err = 1;
+
+ /* On error, just free the rest of the list. */
+ if (!err)
+ {
+- /* Here we need to get to the line where the next insert will
++ /* Here we need to get to the line where the next change will
+ begin, which is DF->pos in ORIG_LINES. We will fill up to
+ DF->pos - OFFSET in LINES with original items. */
+- for (deltaend = df->pos - offset;
+- lastmodline < deltaend;
+- lastmodline++)
++ while (lines.nlines < newpos)
+ {
+ /* we need to copy from the orig structure into new one */
+- lines.vector[lastmodline] =
+- orig_lines->vector[lastmodline + offset];
+- lines.vector[lastmodline]->refcount++;
++ lines.vector[lines.nlines] =
++ orig_lines->vector[lines.nlines + offset];
++ lines.vector[lines.nlines]->refcount++;
++ lines.nlines++;
+ }
+
+ switch (df->type)
+@@ -7246,7 +7245,12 @@ apply_rcs_changes (orig_lines, diffbuf, difflen, name, addvers, delvers)
+ struct line *q;
+ int nextline_newline;
+ size_t nextline_len;
+-
++
++ if (newpos + df->nlines > numlines)
++ {
++ err = 1;
++ break;
++ }
+ textend = df->new_lines + df->len;
+ nextline_newline = 0;
+ nextline_text = df->new_lines;
+@@ -7271,8 +7275,7 @@ apply_rcs_changes (orig_lines, diffbuf, difflen, name, addvers, delvers)
+ q->has_newline = nextline_newline;
+ q->refcount = 1;
+ memcpy (q->text, nextline_text, nextline_len);
+- lines.vector[lastmodline++] = q;
+- offset--;
++ lines.vector[lines.nlines++] = q;
+
+ nextline_text = (char *)p + 1;
+ nextline_newline = 0;
+@@ -7286,11 +7289,11 @@ apply_rcs_changes (orig_lines, diffbuf, difflen, name, addvers, delvers)
+ q->has_newline = nextline_newline;
+ q->refcount = 1;
+ memcpy (q->text, nextline_text, nextline_len);
+- lines.vector[lastmodline++] = q;
++ lines.vector[lines.nlines++] = q;
+
+ /* For each line we add the offset between the #'s
+ decreases. */
+- offset--;
++ offset -= df->nlines;
+ break;
+ }
+
+@@ -7301,7 +7304,9 @@ apply_rcs_changes (orig_lines, diffbuf, difflen, name, addvers, delvers)
+ if (df->pos + df->nlines > orig_lines->nlines)
+ err = 1;
+ else if (delvers)
++ {
+ for (ln = df->pos; ln < df->pos + df->nlines; ++ln)
++ {
+ if (orig_lines->vector[ln]->refcount > 1)
+ /* Annotate needs this but, since the original
+ * vector is disposed of before returning from
+@@ -7309,6 +7314,8 @@ apply_rcs_changes (orig_lines, diffbuf, difflen, name, addvers, delvers)
+ * there are multiple references.
+ */
+ orig_lines->vector[ln]->vers = delvers;
++ }
++ }
+ break;
+ }
+ }
+@@ -7328,21 +7335,20 @@ apply_rcs_changes (orig_lines, diffbuf, difflen, name, addvers, delvers)
+ else
+ {
+ /* add the rest of the remaining lines to the data vector */
+- for (; lastmodline < numlines; lastmodline++)
++ while (lines.nlines < numlines)
+ {
+ /* we need to copy from the orig structure into new one */
+- lines.vector[lastmodline] = orig_lines->vector[lastmodline
++ lines.vector[lines.nlines] = orig_lines->vector[lines.nlines
+ + offset];
+- lines.vector[lastmodline]->refcount++;
++ lines.vector[lines.nlines]->refcount++;
++ lines.nlines++;
+ }
+
+ /* Move the lines vector to the original structure for output,
+ * first deleting the old.
+ */
+ linevector_free (orig_lines);
+- orig_lines->vector = lines.vector;
+- orig_lines->lines_alloced = numlines;
+- orig_lines->nlines = lines.nlines;
++ *orig_lines = lines;
+ }
+
+ return !err;
+--
+1.7.2.3
+
diff --git a/staging/cvs/cvs-1.11.23-getline64.patch b/staging/cvs/cvs-1.11.23-getline64.patch
new file mode 100644
index 000000000..99942e058
--- /dev/null
+++ b/staging/cvs/cvs-1.11.23-getline64.patch
@@ -0,0 +1,34 @@
+--- cvs-1.11.23/lib/getline.c 2005-04-04 22:46:05.000000000 +0200
++++ cvs-1.11.23/lib/getline.c.old 2008-06-03 19:06:25.000000000 +0200
+@@ -154,7 +154,7 @@
+ return ret;
+ }
+
+-int
++ssize_t
+ getline (lineptr, n, stream)
+ char **lineptr;
+ size_t *n;
+@@ -163,7 +163,7 @@
+ return getstr (lineptr, n, stream, '\n', 0, GETLINE_NO_LIMIT);
+ }
+
+-int
++ssize_t
+ getline_safe (lineptr, n, stream, limit)
+ char **lineptr;
+ size_t *n;
+--- cvs-1.11.23/lib/getline.h 2005-04-04 22:46:05.000000000 +0200
++++ cvs-1.11.23/lib/getline.h.old 2008-06-03 19:06:27.000000000 +0200
+@@ -11,9 +11,9 @@
+
+ #define GETLINE_NO_LIMIT -1
+
+-int
++ssize_t
+ getline __PROTO ((char **_lineptr, size_t *_n, FILE *_stream));
+-int
++ssize_t
+ getline_safe __PROTO ((char **_lineptr, size_t *_n, FILE *_stream,
+ int limit));
+ int
diff --git a/staging/cvs/cvs.install b/staging/cvs/cvs.install
new file mode 100644
index 000000000..f1cdd1f3e
--- /dev/null
+++ b/staging/cvs/cvs.install
@@ -0,0 +1,20 @@
+infodir=/usr/share/info
+filelist=(cvs.info cvs-info-1 cvs-info-2 cvsclient.info)
+
+post_install() {
+ for file in ${filelist[@]}; do
+ install-info $infodir/$file $infodir/dir 2> /dev/null
+ done
+}
+
+post_upgrade() {
+ post_install $1
+}
+
+pre_remove() {
+ for file in ${filelist[@]}; do
+ install-info --delete $infodir/$file $infodir/dir 2> /dev/null
+ done
+}
+
+# vim:set ts=2 sw=2 et:
diff --git a/staging/krb5/CVE-2010-4022.patch b/staging/krb5/CVE-2010-4022.patch
new file mode 100644
index 000000000..30ebf9638
--- /dev/null
+++ b/staging/krb5/CVE-2010-4022.patch
@@ -0,0 +1,19 @@
+diff -up krb5/src/slave/kpropd.c krb5/src/slave/kpropd.c
+--- krb5/src/slave/kpropd.c 2010-12-17 11:14:26.000000000 -0500
++++ krb5/src/slave/kpropd.c 2010-12-17 11:41:19.000000000 -0500
+@@ -404,11 +404,11 @@ retry:
+ }
+
+ close(s);
+- if (iproprole == IPROP_SLAVE)
++ if (iproprole == IPROP_SLAVE) {
+ close(finet);
+-
+- if ((ret = WEXITSTATUS(status)) != 0)
+- return (ret);
++ if ((ret = WEXITSTATUS(status)) != 0)
++ return (ret);
++ }
+ }
+ if (iproprole == IPROP_SLAVE)
+ break;
diff --git a/staging/krb5/CVE-2011-0281.0282.0283.patch b/staging/krb5/CVE-2011-0281.0282.0283.patch
new file mode 100644
index 000000000..e4623e910
--- /dev/null
+++ b/staging/krb5/CVE-2011-0281.0282.0283.patch
@@ -0,0 +1,126 @@
+diff --git a/src/kdc/dispatch.c b/src/kdc/dispatch.c
+index 63ff3b3..b4a90bb 100644
+--- a/src/kdc/dispatch.c
++++ b/src/kdc/dispatch.c
+@@ -115,7 +115,8 @@ dispatch(void *cb, struct sockaddr *local_saddr, const krb5_fulladdr *from,
+ kdc_insert_lookaside(pkt, *response);
+ #endif
+
+- if (is_tcp == 0 && (*response)->length > max_dgram_reply_size) {
++ if (is_tcp == 0 && *response != NULL &&
++ (*response)->length > max_dgram_reply_size) {
+ too_big_for_udp:
+ krb5_free_data(kdc_context, *response);
+ retval = make_too_big_error(response);
+diff --git a/src/plugins/kdb/ldap/libkdb_ldap/kdb_ldap.h b/src/plugins/kdb/ldap/libkdb_ldap/kdb_ldap.h
+index d677bb2..a356907 100644
+--- a/src/plugins/kdb/ldap/libkdb_ldap/kdb_ldap.h
++++ b/src/plugins/kdb/ldap/libkdb_ldap/kdb_ldap.h
+@@ -102,14 +102,18 @@ extern void prepend_err_str (krb5_context ctx, const char *s, krb5_error_code er
+ #define LDAP_SEARCH(base, scope, filter, attrs) LDAP_SEARCH_1(base, scope, filter, attrs, CHECK_STATUS)
+
+ #define LDAP_SEARCH_1(base, scope, filter, attrs, status_check) \
+- do { \
+- st = ldap_search_ext_s(ld, base, scope, filter, attrs, 0, NULL, NULL, &timelimit, LDAP_NO_LIMIT, &result); \
+- if (translate_ldap_error(st, OP_SEARCH) == KRB5_KDB_ACCESS_ERROR) { \
+- tempst = krb5_ldap_rebind(ldap_context, &ldap_server_handle); \
+- if (ldap_server_handle) \
+- ld = ldap_server_handle->ldap_handle; \
+- } \
+- }while (translate_ldap_error(st, OP_SEARCH) == KRB5_KDB_ACCESS_ERROR && tempst == 0); \
++ tempst = 0; \
++ st = ldap_search_ext_s(ld, base, scope, filter, attrs, 0, NULL, \
++ NULL, &timelimit, LDAP_NO_LIMIT, &result); \
++ if (translate_ldap_error(st, OP_SEARCH) == KRB5_KDB_ACCESS_ERROR) { \
++ tempst = krb5_ldap_rebind(ldap_context, &ldap_server_handle); \
++ if (ldap_server_handle) \
++ ld = ldap_server_handle->ldap_handle; \
++ if (tempst == 0) \
++ st = ldap_search_ext_s(ld, base, scope, filter, attrs, 0, \
++ NULL, NULL, &timelimit, \
++ LDAP_NO_LIMIT, &result); \
++ } \
+ \
+ if (status_check != IGNORE_STATUS) { \
+ if (tempst != 0) { \
+diff --git a/src/plugins/kdb/ldap/libkdb_ldap/kdb_ldap_conn.c b/src/plugins/kdb/ldap/libkdb_ldap/kdb_ldap_conn.c
+index 82b0333..84e80ee 100644
+--- a/src/plugins/kdb/ldap/libkdb_ldap/kdb_ldap_conn.c
++++ b/src/plugins/kdb/ldap/libkdb_ldap/kdb_ldap_conn.c
+@@ -302,6 +302,7 @@ krb5_ldap_rebind(krb5_ldap_context *ldap_context,
+ {
+ krb5_ldap_server_handle *handle = *ldap_server_handle;
+
++ ldap_unbind_ext_s(handle->ldap_handle, NULL, NULL);
+ if ((ldap_initialize(&handle->ldap_handle, handle->server_info->server_name) != LDAP_SUCCESS)
+ || (krb5_ldap_bind(ldap_context, handle) != LDAP_SUCCESS))
+ return krb5_ldap_request_next_handle_from_pool(ldap_context, ldap_server_handle);
+diff --git a/src/plugins/kdb/ldap/libkdb_ldap/ldap_misc.c b/src/plugins/kdb/ldap/libkdb_ldap/ldap_misc.c
+index 86fa4d1..0f49c86 100644
+--- a/src/plugins/kdb/ldap/libkdb_ldap/ldap_misc.c
++++ b/src/plugins/kdb/ldap/libkdb_ldap/ldap_misc.c
+@@ -487,12 +487,11 @@ is_principal_in_realm(krb5_ldap_context *ldap_context,
+ * portion, then the first portion of the principal name SHOULD be
+ * "krbtgt". All this check is done in the immediate block.
+ */
+- if (searchfor->length == 2)
+- if ((strncasecmp(searchfor->data[0].data, "krbtgt",
+- FIND_MAX(searchfor->data[0].length, strlen("krbtgt"))) == 0) &&
+- (strncasecmp(searchfor->data[1].data, defrealm,
+- FIND_MAX(searchfor->data[1].length, defrealmlen)) == 0))
++ if (searchfor->length == 2) {
++ if (data_eq_string(searchfor->data[0], "krbtgt") &&
++ data_eq_string(searchfor->data[1], defrealm))
+ return 0;
++ }
+
+ /* first check the length, if they are not equal, then they are not same */
+ if (strlen(defrealm) != searchfor->realm.length)
+diff --git a/src/plugins/kdb/ldap/libkdb_ldap/ldap_principal2.c b/src/plugins/kdb/ldap/libkdb_ldap/ldap_principal2.c
+index 140db1a..552e39a 100644
+--- a/src/plugins/kdb/ldap/libkdb_ldap/ldap_principal2.c
++++ b/src/plugins/kdb/ldap/libkdb_ldap/ldap_principal2.c
+@@ -78,10 +78,10 @@ krb5_error_code
+ krb5_ldap_get_principal(krb5_context context, krb5_const_principal searchfor,
+ unsigned int flags, krb5_db_entry **entry_ptr)
+ {
+- char *user=NULL, *filter=NULL, **subtree=NULL;
++ char *user=NULL, *filter=NULL, *filtuser=NULL;
+ unsigned int tree=0, ntrees=1, princlen=0;
+ krb5_error_code tempst=0, st=0;
+- char **values=NULL, *cname=NULL;
++ char **values=NULL, **subtree=NULL, *cname=NULL;
+ LDAP *ld=NULL;
+ LDAPMessage *result=NULL, *ent=NULL;
+ krb5_ldap_context *ldap_context=NULL;
+@@ -115,12 +115,18 @@ krb5_ldap_get_principal(krb5_context context, krb5_const_principal searchfor,
+ if ((st=krb5_ldap_unparse_principal_name(user)) != 0)
+ goto cleanup;
+
+- princlen = strlen(FILTER) + strlen(user) + 2 + 1; /* 2 for closing brackets */
++ filtuser = ldap_filter_correct(user);
++ if (filtuser == NULL) {
++ st = ENOMEM;
++ goto cleanup;
++ }
++
++ princlen = strlen(FILTER) + strlen(filtuser) + 2 + 1; /* 2 for closing brackets */
+ if ((filter = malloc(princlen)) == NULL) {
+ st = ENOMEM;
+ goto cleanup;
+ }
+- snprintf(filter, princlen, FILTER"%s))", user);
++ snprintf(filter, princlen, FILTER"%s))", filtuser);
+
+ if ((st = krb5_get_subtree_info(ldap_context, &subtree, &ntrees)) != 0)
+ goto cleanup;
+@@ -207,6 +213,9 @@ cleanup:
+ if (user)
+ free(user);
+
++ if (filtuser)
++ free(filtuser);
++
+ if (cname)
+ free(cname);
+
diff --git a/staging/krb5/CVE-2011-0284.patch b/staging/krb5/CVE-2011-0284.patch
new file mode 100644
index 000000000..c97727568
--- /dev/null
+++ b/staging/krb5/CVE-2011-0284.patch
@@ -0,0 +1,13 @@
+diff --git a/src/kdc/do_as_req.c b/src/kdc/do_as_req.c
+index 46b5fa1..464cb6e 100644
+--- a/src/kdc/do_as_req.c
++++ b/src/kdc/do_as_req.c
+@@ -741,6 +741,8 @@ prepare_error_as (struct kdc_request_state *rstate, krb5_kdc_req *request,
+ pad->contents = td[size]->data;
+ pad->length = td[size]->length;
+ pa[size] = pad;
++ td[size]->data = NULL;
++ td[size]->length = 0;
+ }
+ krb5_free_typed_data(kdc_context, td);
+ }
diff --git a/staging/krb5/CVE-2011-0285.patch b/staging/krb5/CVE-2011-0285.patch
new file mode 100644
index 000000000..61039113f
--- /dev/null
+++ b/staging/krb5/CVE-2011-0285.patch
@@ -0,0 +1,39 @@
+diff --git a/src/kadmin/server/schpw.c b/src/kadmin/server/schpw.c
+index 1124445..0056885 100644
+--- a/src/kadmin/server/schpw.c
++++ b/src/kadmin/server/schpw.c
+@@ -52,6 +52,7 @@ process_chpw_request(context, server_handle, realm, keytab,
+
+ ret = 0;
+ rep->length = 0;
++ rep->data = NULL;
+
+ auth_context = NULL;
+ changepw = NULL;
+@@ -76,8 +77,13 @@ process_chpw_request(context, server_handle, realm, keytab,
+ plen = (*ptr++ & 0xff);
+ plen = (plen<<8) | (*ptr++ & 0xff);
+
+- if (plen != req->length)
+- return(KRB5KRB_AP_ERR_MODIFIED);
++ if (plen != req->length) {
++ ret = KRB5KRB_AP_ERR_MODIFIED;
++ numresult = KRB5_KPASSWD_MALFORMED;
++ strlcpy(strresult, "Request length was inconsistent",
++ sizeof(strresult));
++ goto chpwfail;
++ }
+
+ /* verify version number */
+
+@@ -531,6 +537,10 @@ cleanup:
+ if (local_kaddrs != NULL)
+ krb5_free_addresses(server_handle->context, local_kaddrs);
+
++ if ((*response)->data == NULL) {
++ free(*response);
++ *response = NULL;
++ }
+ krb5_kt_close(server_handle->context, kt);
+
+ return ret;
diff --git a/staging/krb5/PKGBUILD b/staging/krb5/PKGBUILD
new file mode 100644
index 000000000..ace8000d4
--- /dev/null
+++ b/staging/krb5/PKGBUILD
@@ -0,0 +1,77 @@
+# $Id: PKGBUILD 121067 2011-04-28 21:24:40Z stephane $
+# Maintainer: Stéphane Gaudreault <stephane@archlinux.org>
+
+pkgname=krb5
+pkgver=1.9
+pkgrel=2
+pkgdesc="The Kerberos network authentication system"
+arch=('i686' 'x86_64')
+url="http://web.mit.edu/kerberos/"
+license=('custom')
+depends=('e2fsprogs' 'libldap' 'keyutils')
+makedepends=('perl')
+provides=('heimdal')
+replaces=('heimdal')
+conflicts=('heimdal')
+backup=('etc/krb5/krb5.conf' 'etc/krb5/kdc.conf')
+source=(http://web.mit.edu/kerberos/dist/${pkgname}/${pkgver}/${pkgname}-${pkgver}-signed.tar
+ kadmind.rc
+ krb5-kdc.rc
+ CVE-2010-4022.patch
+ CVE-2011-0281.0282.0283.patch
+ CVE-2011-0284.patch
+ CVE-2011-0285.patch)
+sha1sums=('a7ad1b4ed37bff4b9087f6c4561b2b222208d779'
+ '640e3046c6558313d2be81cf2252afc8622892b0'
+ '77d2312ecd8bf12a6e72cc8fd871a8ac93b23393'
+ '79ece8b1c140deb2c01bfb64af575636b9bc7704'
+ 'fb2486168ce128cb1a2866bd0df8cd7c4bcd7824'
+ '1c72390c5d629eee592e5cb0c2b600b376e2fdc5'
+ 'b6ae716616ecd5e92f32ec8203a1ab51b5726184')
+options=('!emptydirs')
+
+build() {
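+  # upstream's -signed.tar wraps the real source tarball together with its
+  # .asc signature, so unpack the inner tarball first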
+ tar zxvf ${pkgname}-${pkgver}.tar.gz
+ cd "${srcdir}/${pkgname}-${pkgver}/src"
+
+ patch -Np2 -i ../../CVE-2010-4022.patch
+ patch -Np2 -i ../../CVE-2011-0281.0282.0283.patch
+ patch -Np2 -i ../../CVE-2011-0284.patch
+ patch -Np2 -i ../../CVE-2011-0285.patch
+
+ export CFLAGS+=" -fPIC -fno-strict-aliasing -fstack-protector-all"
+ export CPPFLAGS+=" -I/usr/include/et"
+ ./configure --prefix=/usr \
+ --sysconfdir=/etc/krb5 \
+ --mandir=/usr/share/man \
+ --localstatedir=/var/lib \
+ --enable-shared \
+ --with-system-et \
+ --with-system-ss \
+ --disable-rpath \
+ --without-tcl \
+ --enable-dns-for-realm \
+ --with-ldap
+
+ make
+}
+
+check() {
+ # We can't do this in the build directory.
+ cd "${srcdir}/${pkgname}-${pkgver}"
+ make -C src check
+}
+
+package() {
+ cd "${srcdir}/${pkgname}-${pkgver}/src"
+ make DESTDIR="${pkgdir}" EXAMPLEDIR="/usr/share/doc/${pkgname}/examples" install
+
+ install -D -m 644 config-files/kdc.conf "${pkgdir}"/etc/krb5/kdc.conf
+ install -D -m 644 config-files/krb5.conf "${pkgdir}"/etc/krb5/krb5.conf
+
+ install -d -m 755 "${pkgdir}"/etc/rc.d
+ install -m 755 ../../krb5-kdc.rc "${pkgdir}"/etc/rc.d
+ install -m 755 ../../kadmind.rc "${pkgdir}"/etc/rc.d
+
+ install -Dm644 "${srcdir}"/${pkgname}-${pkgver}/NOTICE "${pkgdir}"/usr/share/licenses/${pkgname}/LICENSE
+}
diff --git a/staging/krb5/kadmind.rc b/staging/krb5/kadmind.rc
new file mode 100644
index 000000000..45835e35b
--- /dev/null
+++ b/staging/krb5/kadmind.rc
@@ -0,0 +1,40 @@
+#!/bin/bash
+
+# general config
+. /etc/rc.conf
+. /etc/rc.d/functions
+
+PID=`pidof -o %PPID /usr/sbin/kadmind`
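+# -o %PPID keeps pidof from matching the shell running this script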
+case "$1" in
+ start)
+ stat_busy "Starting Kerberos Admin Daemon"
+ if [ -z "$PID" ]; then
+ /usr/sbin/kadmind
+ fi
+ if [ ! -z "$PID" -o $? -gt 0 ]; then
+ stat_fail
+ else
+ add_daemon kadmind
+ stat_done
+ fi
+ ;;
+ stop)
+ stat_busy "Stopping Kerberos Admin Daemon"
+ [ ! -z "$PID" ] && kill $PID &> /dev/null
+ if [ $? -gt 0 ]; then
+ stat_fail
+ else
+ rm_daemon kadmind
+ stat_done
+ fi
+ ;;
+ restart)
+ $0 stop
+ sleep 1
+ $0 start
+ ;;
+ *)
+ echo "usage: $0 {start|stop|restart}"
+ ;;
+esac
+exit 0
diff --git a/staging/krb5/krb5-kdc.rc b/staging/krb5/krb5-kdc.rc
new file mode 100644
index 000000000..05a03411e
--- /dev/null
+++ b/staging/krb5/krb5-kdc.rc
@@ -0,0 +1,40 @@
+#!/bin/bash
+
+# general config
+. /etc/rc.conf
+. /etc/rc.d/functions
+
+PID=`pidof -o %PPID /usr/sbin/krb5kdc`
+case "$1" in
+ start)
+ stat_busy "Starting Kerberos Authentication"
+ if [ -z "$PID" ]; then
+ /usr/sbin/krb5kdc
+ fi
+ if [ ! -z "$PID" -o $? -gt 0 ]; then
+ stat_fail
+ else
+ add_daemon krb5-kdc
+ stat_done
+ fi
+ ;;
+ stop)
+ stat_busy "Stopping Kerberos Authentication"
+ [ ! -z "$PID" ] && kill $PID &> /dev/null
+ if [ $? -gt 0 ]; then
+ stat_fail
+ else
+ rm_daemon krb5-kdc
+ stat_done
+ fi
+ ;;
+ restart)
+ $0 stop
+ sleep 1
+ $0 start
+ ;;
+ *)
+ echo "usage: $0 {start|stop|restart}"
+ ;;
+esac
+exit 0
diff --git a/staging/mutt/PKGBUILD b/staging/mutt/PKGBUILD
new file mode 100644
index 000000000..9a940857c
--- /dev/null
+++ b/staging/mutt/PKGBUILD
@@ -0,0 +1,47 @@
+# $Id: PKGBUILD 121107 2011-04-29 00:34:05Z stephane $
+# Contributor: tobias [tobias [at] archlinux.org]
+# Maintainer: Gaetan Bisson <bisson@archlinux.org>
+
+pkgname=mutt
+pkgver=1.5.21
+pkgrel=5
+pkgdesc='Small but very powerful text-based mail client'
+url='http://www.mutt.org/'
+license=('GPL')
+backup=('etc/Muttrc')
+arch=('i686' 'x86_64')
+depends=('gpgme' 'ncurses' 'openssl' 'libsasl' 'gdbm' 'libidn' 'mime-types' 'krb5')
+source=("ftp://ftp.mutt.org/mutt/devel/${pkgname}-${pkgver}.tar.gz")
+sha1sums=('a8475f2618ce5d5d33bff85c0affdf21ab1d76b9')
+
+install=install
+
+build() {
+ cd "${srcdir}/${pkgname}-${pkgver}"
+ ./configure \
+ --prefix=/usr \
+ --sysconfdir=/etc \
+ --enable-gpgme \
+ --enable-pop \
+ --enable-imap \
+ --enable-smtp \
+ --enable-hcache \
+ --with-curses=/usr \
+ --with-regex \
+ --with-gss=/usr \
+ --with-ssl=/usr \
+ --with-sasl \
+ --with-idn \
+
+ make
+}
+
+package() {
+ cd "${srcdir}/${pkgname}-${pkgver}"
+ make DESTDIR="${pkgdir}" install
+
+ rm "${pkgdir}"/usr/bin/{flea,muttbug}
+ rm "${pkgdir}"/usr/share/man/man1/{flea,muttbug}.1
+ rm "${pkgdir}"/etc/mime.types{,.dist}
+ install -Dm644 contrib/gpg.rc "${pkgdir}"/etc/Muttrc.gpg.dist
+}
diff --git a/staging/mutt/install b/staging/mutt/install
new file mode 100644
index 000000000..d65675c06
--- /dev/null
+++ b/staging/mutt/install
@@ -0,0 +1,8 @@
+post_install() {
+ cat <<EOF
+
+==> For GPG support, add the following to your muttrc:
+==> source /etc/Muttrc.gpg.dist
+
+EOF
+}
diff --git a/staging/neon/PKGBUILD b/staging/neon/PKGBUILD
new file mode 100644
index 000000000..c0fe650cf
--- /dev/null
+++ b/staging/neon/PKGBUILD
@@ -0,0 +1,29 @@
+# $Id: PKGBUILD 121086 2011-04-29 00:26:05Z stephane $
+# Contributor: Tom Newsom <Jeepster@gmx.co.uk>
+# Maintainer: Juergen Hoetzel <juergen@archlinux.org>
+
+# KEEP LIBTOOL FILES!
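+# (presumably so that dependent builds which still read neon's .la files keep working)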
+pkgname=neon
+pkgver=0.29.3
+pkgrel=3
+pkgdesc="HTTP and WebDAV client library with a C interface"
+arch=('i686' 'x86_64')
+license=('GPL' 'LGPL')
+depends=('krb5' 'expat' 'ca-certificates')
+url="http://www.webdav.org/neon/"
+source=("http://www.webdav.org/neon/${pkgname}-${pkgver}.tar.gz")
+md5sums=('ba1015b59c112d44d7797b62fe7bee51')
+options=('libtool')
+
+build() {
+ cd ${srcdir}/${pkgname}-${pkgver}
+ ./configure --prefix=/usr \
+ --with-expat --enable-shared --disable-static \
+ --with-ssl=openssl --with-ca-bundle=/etc/ssl/certs/ca-certificates.crt
+ make
+}
+
+package() {
+ cd ${srcdir}/${pkgname}-${pkgver}
+ make DESTDIR=${pkgdir} install
+}
diff --git a/staging/openssh/PKGBUILD b/staging/openssh/PKGBUILD
new file mode 100644
index 000000000..42443afc2
--- /dev/null
+++ b/staging/openssh/PKGBUILD
@@ -0,0 +1,55 @@
+# $Id: PKGBUILD 121075 2011-04-28 23:41:59Z stephane $
+# Maintainer: Aaron Griffin <aaron@archlinux.org>
+# Contributor: judd <jvinet@zeroflux.org>
+
+pkgname=openssh
+pkgver=5.8p1
+pkgrel=2
+pkgdesc='Free version of the SSH connectivity tools'
+arch=('i686' 'x86_64')
+license=('custom:BSD')
+url='http://www.openssh.org/portable.html'
+backup=('etc/ssh/ssh_config' 'etc/ssh/sshd_config' 'etc/pam.d/sshd' 'etc/conf.d/sshd')
+depends=('tcp_wrappers' 'krb5' 'libedit')
+source=("ftp://ftp.openbsd.org/pub/OpenBSD/OpenSSH/portable/${pkgname}-${pkgver}.tar.gz"
+ 'sshd.confd'
+ 'sshd.pam'
+ 'sshd')
+sha1sums=('adebb2faa9aba2a3a3c8b401b2b19677ab53f0de'
+ 'ec102deb69cad7d14f406289d2fc11fee6eddbdd'
+ '660092c57bde28bed82078f74011f95fc51c2293'
+ '6b7f8ebf0c1cc37137a7d9a53447ac8a0ee6a2b5')
+
+build() {
+ cd "${srcdir}/${pkgname}-${pkgver}"
+
+ ./configure --prefix=/usr --libexecdir=/usr/lib/ssh \
+ --sysconfdir=/etc/ssh --with-tcp-wrappers --with-privsep-user=nobody \
+ --with-md5-passwords --with-pam --with-mantype=man --mandir=/usr/share/man \
+ --with-xauth=/usr/bin/xauth --with-kerberos5=/usr --with-ssl-engine \
+ --with-libedit=/usr/lib --disable-strip # stripping is done by makepkg
+ make
+}
+
+package() {
+ cd "${srcdir}/${pkgname}-${pkgver}"
+ make DESTDIR="${pkgdir}" install
+
+ install -Dm755 ../sshd "${pkgdir}"/etc/rc.d/sshd
+ install -Dm644 ../sshd.pam "${pkgdir}"/etc/pam.d/sshd
+ install -Dm644 ../sshd.confd "${pkgdir}"/etc/conf.d/sshd
+ install -Dm644 LICENCE "${pkgdir}/usr/share/licenses/${pkgname}/LICENCE"
+
+ rm "${pkgdir}"/usr/share/man/man1/slogin.1
+ ln -sf ssh.1.gz "${pkgdir}"/usr/share/man/man1/slogin.1.gz
+
+ # additional contrib scripts that we like
+ install -Dm755 contrib/findssl.sh "${pkgdir}"/usr/bin/findssl.sh
+ install -Dm755 contrib/ssh-copy-id "${pkgdir}"/usr/bin/ssh-copy-id
+ install -Dm644 contrib/ssh-copy-id.1 "${pkgdir}"/usr/share/man/man1/ssh-copy-id.1
+
+ # PAM is a common, standard feature to have
+ sed -i -e '/^#ChallengeResponseAuthentication yes$/c ChallengeResponseAuthentication no' \
+ -e '/^#UsePAM no$/c UsePAM yes' \
+ "$pkgdir"/etc/ssh/sshd_config
+}
diff --git a/staging/openssh/sshd b/staging/openssh/sshd
new file mode 100755
index 000000000..2ee1091f0
--- /dev/null
+++ b/staging/openssh/sshd
@@ -0,0 +1,48 @@
+#!/bin/bash
+
+. /etc/rc.conf
+. /etc/rc.d/functions
+. /etc/conf.d/sshd
+
+PIDFILE=/var/run/sshd.pid
+PID=$(cat $PIDFILE 2>/dev/null)
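+# discard a stale PID file if the recorded PID is no longer a running sshd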
+if ! readlink -q /proc/$PID/exe | grep -q '^/usr/sbin/sshd'; then
+ PID=
+ rm $PIDFILE 2>/dev/null
+fi
+
+case "$1" in
+ start)
+ stat_busy "Starting Secure Shell Daemon"
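+    # generate any missing host keys before the first start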
+ [ -f /etc/ssh/ssh_host_key ] || { /usr/bin/ssh-keygen -t rsa1 -N "" -f /etc/ssh/ssh_host_key >/dev/null; }
+ [ -f /etc/ssh/ssh_host_rsa_key ] || { /usr/bin/ssh-keygen -t rsa -N "" -f /etc/ssh/ssh_host_rsa_key >/dev/null; }
+ [ -f /etc/ssh/ssh_host_dsa_key ] || { /usr/bin/ssh-keygen -t dsa -N "" -f /etc/ssh/ssh_host_dsa_key >/dev/null; }
+ [ -f /etc/ssh/ssh_host_ecdsa_key ] || { /usr/bin/ssh-keygen -t ecdsa -N "" -f /etc/ssh/ssh_host_ecdsa_key >/dev/null; }
+ [ -d /var/empty ] || mkdir -p /var/empty
+ [ -z "$PID" ] && /usr/sbin/sshd $SSHD_ARGS
+ if [ $? -gt 0 ]; then
+ stat_fail
+ else
+ add_daemon sshd
+ stat_done
+ fi
+ ;;
+ stop)
+ stat_busy "Stopping Secure Shell Daemon"
+ [ ! -z "$PID" ] && kill $PID &> /dev/null
+ if [ $? -gt 0 ]; then
+ stat_fail
+ else
+ rm_daemon sshd
+ stat_done
+ fi
+ ;;
+ restart)
+ $0 stop
+ sleep 1
+ $0 start
+ ;;
+ *)
+ echo "usage: $0 {start|stop|restart}"
+esac
+exit 0
diff --git a/staging/openssh/sshd.confd b/staging/openssh/sshd.confd
new file mode 100644
index 000000000..5ce7c0079
--- /dev/null
+++ b/staging/openssh/sshd.confd
@@ -0,0 +1,4 @@
+#
+# Parameters to be passed to sshd
+#
+SSHD_ARGS=""
diff --git a/staging/openssh/sshd.pam b/staging/openssh/sshd.pam
new file mode 100644
index 000000000..ae028ceb5
--- /dev/null
+++ b/staging/openssh/sshd.pam
@@ -0,0 +1,10 @@
+#%PAM-1.0
+#auth required pam_securetty.so #Disable remote root
+auth required pam_unix.so
+auth required pam_env.so
+account required pam_nologin.so
+account required pam_unix.so
+account required pam_time.so
+password required pam_unix.so
+session required pam_unix_session.so
+session required pam_limits.so
diff --git a/staging/subversion/PKGBUILD b/staging/subversion/PKGBUILD
new file mode 100644
index 000000000..0f9c0a70a
--- /dev/null
+++ b/staging/subversion/PKGBUILD
@@ -0,0 +1,92 @@
+# $Id: PKGBUILD 121110 2011-04-29 01:29:30Z stephane $
+# Maintainer: Paul Mattal <paul@archlinux.org>
+# Contributor: Jason Chu <jason@archlinux.org>
+
+pkgname=subversion
+pkgver=1.6.15
+pkgrel=2
+pkgdesc="Replacement for CVS, another versioning system (SVN)"
+arch=('i686' 'x86_64')
+license=('apache' 'bsd')
+depends=('neon' 'apr-util')
+makedepends=('krb5' 'apache' 'python2' 'perl' 'swig' 'ruby' 'java-runtime'
+ 'autoconf' 'sqlite3' 'db' 'e2fsprogs' 'libgnome-keyring' 'kdelibs')
+source=(http://subversion.tigris.org/downloads/$pkgname-$pkgver.tar.bz2
+ svnserve svn svnserve.conf svnmerge.py
+ subversion.rpath.fix.patch
+ subversion.suppress.deprecation.warnings.patch)
+
+backup=('etc/xinetd.d/svn' 'etc/conf.d/svnserve')
+url="http://subversion.apache.org/"
+provides=('svn')
+options=('!makeflags' '!libtool')
+optdepends=('libgnome-keyring' 'kdeutils-kwallet' 'bash-completion: for svn bash completion')
+
+build() {
+ cd ${srcdir}/${pkgname}-${pkgver}
+
+ export PYTHON=/usr/bin/python2
+
+ # apply patches
+ patch -p0 < $srcdir/subversion.rpath.fix.patch
+ patch -p1 -i $srcdir/subversion.suppress.deprecation.warnings.patch
+
+ # configure
+ autoreconf
+ ./configure --prefix=/usr --with-apr=/usr --with-apr-util=/usr \
+ --with-zlib=/usr --with-neon=/usr --with-apxs \
+ --with-sqlite=/usr --with-berkeley-db=:/usr/include/:/usr/lib:db-5.1 \
+ --enable-javahl --with-gnome-keyring --with-kwallet
+
+ # build
+ (make external-all && make LT_LDFLAGS="-L$Fdestdir/usr/lib" local-all )
+}
+
+package() {
+ cd ${srcdir}/${pkgname}-${pkgver}
+
+ # install
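+  # make the just-installed libsvn_* libraries visible to the binding builds below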
+ export LD_LIBRARY_PATH=${pkgdir}/usr/lib:$LD_LIBRARY_PATH
+ make DESTDIR=${pkgdir} install
+
+ make DESTDIR=${pkgdir} swig-py
+ make install-swig-py DESTDIR=${pkgdir}
+
+ install -d ${pkgdir}/usr/lib/python2.7
+ mv ${pkgdir}/usr/lib/svn-python/ ${pkgdir}/usr/lib/python2.7/site-packages
+
+ install -d ${pkgdir}/usr/share/subversion
+ install -d -m 755 tools/hook-scripts ${pkgdir}/usr/share/subversion/
+ rm -f ${pkgdir}/usr/share/subversion/hook-scripts/*.in
+
+ make DESTDIR=${pkgdir} swig-pl
+ make install-swig-pl DESTDIR=${pkgdir} INSTALLDIRS=vendor
+ rm -f ${pkgdir}/usr/lib/perl5/vendor_perl/auto/SVN/_Core/.packlist
+ rm -rf ${pkgdir}/usr/lib/perl5/core_perl
+
+ make DESTDIR=${pkgdir} swig-rb
+ make install-swig-rb DESTDIR=${pkgdir}
+
+ make DESTDIR=${pkgdir} javahl
+ make DESTDIR=${pkgdir} install-javahl
+
+ install -d ${pkgdir}/etc/{rc.d,xinetd.d,conf.d}
+
+ install -m 755 ${srcdir}/svnserve ${pkgdir}/etc/rc.d
+ install -m 644 ${srcdir}/svn ${pkgdir}/etc/xinetd.d
+ install -m 644 ${srcdir}/svnserve.conf ${pkgdir}/etc/conf.d/svnserve
+ install -m 755 ${srcdir}/svnmerge.py ${pkgdir}/usr/bin/svnmerge
+ install -D -m 644 ${srcdir}/subversion-$pkgver/COPYING \
+ ${pkgdir}/usr/share/licenses/$pkgname/LICENSE
+
+ # bash completion
+ install -Dm 644 ${srcdir}/${pkgname}-${pkgver}/tools/client-side/bash_completion \
+ ${pkgdir}/etc/bash_completion.d/subversion
+}
+md5sums=('113fca1d9e4aa389d7dc2b210010fa69'
+ 'a2b029e8385007ffb99b437b30521c90'
+ 'a0db6dd43af33952739b6ec089852630'
+ 'c459e299192552f61578f3438abf0664'
+ 'a6371baeda7e224504629ecdda2749b4'
+ '6b4340ba9d8845cd8497e013ae01be3f'
+ '1166f3b7413d7e7450299b3525680bbe')
diff --git a/staging/subversion/subversion.rpath.fix.patch b/staging/subversion/subversion.rpath.fix.patch
new file mode 100644
index 000000000..ba6ee9e4e
--- /dev/null
+++ b/staging/subversion/subversion.rpath.fix.patch
@@ -0,0 +1,10 @@
+--- Makefile.in.orig 2009-02-16 14:10:48.000000000 -0200
++++ Makefile.in 2009-06-04 00:56:29.000000000 -0300
+@@ -678,6 +678,7 @@
+
+ $(SWIG_PL_DIR)/native/Makefile: $(SWIG_PL_DIR)/native/Makefile.PL
+ cd $(SWIG_PL_DIR)/native; $(PERL) Makefile.PL
++ cd $(SWIG_PL_DIR)/native; sed -i 's|LD_RUN_PATH|DIE_RPATH_DIE|g' Makefile{,.{client,delta,fs,ra,repos,wc}}
+
+ swig-pl_DEPS = autogen-swig-pl libsvn_swig_perl \
+ $(SWIG_PL_DIR)/native/Makefile
diff --git a/staging/subversion/subversion.suppress.deprecation.warnings.patch b/staging/subversion/subversion.suppress.deprecation.warnings.patch
new file mode 100644
index 000000000..94ce89b18
--- /dev/null
+++ b/staging/subversion/subversion.suppress.deprecation.warnings.patch
@@ -0,0 +1,22 @@
+diff -urN subversion-1.6.9/subversion/bindings/swig/python/svn/core.py subversion-1.6.9-fixed/subversion/bindings/swig/python/svn/core.py
+--- subversion-1.6.9/subversion/bindings/swig/python/svn/core.py 2009-02-13 11:22:26.000000000 -0500
++++ subversion-1.6.9-fixed/subversion/bindings/swig/python/svn/core.py 2010-02-08 07:46:29.000000000 -0500
+@@ -19,6 +19,7 @@
+ from libsvn.core import *
+ import libsvn.core as _libsvncore
+ import atexit as _atexit
++import warnings
+
+ class SubversionException(Exception):
+ def __init__(self, message=None, apr_err=None, child=None,
+@@ -44,7 +45,9 @@
+ Exception.__init__(self, *args)
+
+ self.apr_err = apr_err
+- self.message = message
++ with warnings.catch_warnings():
++ warnings.simplefilter("ignore", DeprecationWarning)
++ self.message = message
+ self.child = child
+ self.file = file
+ self.line = line
diff --git a/staging/subversion/svn b/staging/subversion/svn
new file mode 100644
index 000000000..8988aaf63
--- /dev/null
+++ b/staging/subversion/svn
@@ -0,0 +1,11 @@
+service svn
+{
+ flags = REUSE
+ socket_type = stream
+ wait = no
+ user = root
+ server = /usr/bin/svnserve
+ server_args = -i
+ log_on_failure += USERID
+ disable = yes
+}
diff --git a/staging/subversion/svnmerge.py b/staging/subversion/svnmerge.py
new file mode 100644
index 000000000..d8931648f
--- /dev/null
+++ b/staging/subversion/svnmerge.py
@@ -0,0 +1,2370 @@
+#!/usr/bin/env python2
+# -*- coding: utf-8 -*-
+# Copyright (c) 2005, Giovanni Bajo
+# Copyright (c) 2004-2005, Awarix, Inc.
+# All rights reserved.
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
+#
+# Author: Archie Cobbs <archie at awarix dot com>
+# Rewritten in Python by: Giovanni Bajo <rasky at develer dot com>
+#
+# Acknowledgments:
+# John Belmonte <john at neggie dot net> - metadata and usability
+# improvements
+# Blair Zajac <blair at orcaware dot com> - random improvements
+# Raman Gupta <rocketraman at fastmail dot fm> - bidirectional and transitive
+# merging support
+# Dustin J. Mitchell <dustin at zmanda dot com> - support for multiple
+# location identifier formats
+#
+# $HeadURL$
+# $LastChangedDate$
+# $LastChangedBy$
+# $LastChangedRevision$
+#
+# Requisites:
+# svnmerge.py has been tested with all SVN major versions since 1.1 (both
+# client and server). It is unknown if it works with previous versions.
+#
+# Differences from svnmerge.sh:
+# - More portable: tested as working on FreeBSD and OS/2.
+# - Add double-verbose mode, which shows every svn command executed (-v -v).
+# - "svnmerge avail" now only shows commits in source, not also commits in
+# other parts of the repository.
+# - Add "svnmerge block" to flag some revisions as blocked, so that
+# they will not show up anymore in the available list. Added also
+# the complementary "svnmerge unblock".
+# - "svnmerge avail" has grown two new options:
+# -B to display a list of the blocked revisions
+# -A to display both the blocked and the available revisions.
+# - Improved generated commit message to make it machine parsable even when
+# merging commits which are themselves merges.
+# - Add --force option to skip working copy check
+# - Add --record-only option to "svnmerge merge" to avoid performing
+# an actual merge, yet record that a merge happened.
+# - Can use a variety of location-identifier formats
+#
+# TODO:
+# - Add "svnmerge avail -R": show logs in reverse order
+#
+# Information for Hackers:
+#
+# Identifiers for branches:
+# A branch is identified in three ways within this source:
+# - as a working copy (variable name usually includes 'dir')
+# - as a fully qualified URL
+# - as a path identifier (an opaque string indicating a particular path
+# in a particular repository; variable name includes 'pathid')
+# A "target" is generally user-specified, and may be a working copy or
+# a URL.
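+#
+# Illustrative example (not from the original header): a hypothetical branch
+# could be named as the working copy "./1.5.x", as the URL
+# "http://svn.example.org/repo/branches/1.5.x", or as the pathid
+# "/branches/1.5.x".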
+
+import sys, os, getopt, re, types, tempfile, time, locale
+from bisect import bisect
+from xml.dom import pulldom
+
+NAME = "svnmerge"
+if not hasattr(sys, "version_info") or sys.version_info < (2, 0):
+    # error() is only defined further below, so report the failure directly
+    sys.stderr.write("%s: requires Python 2.0 or newer\n" % NAME)
+    sys.exit(1)
+
+# Set up the separator used to separate individual log messages from
+# each revision merged into the target location. Also, create a
+# regular expression that will find this same separator in already
+# committed log messages, so that the separator used for this run of
+# svnmerge.py will have one more LOG_SEPARATOR appended to the longest
+# separator found in all the commits.
+LOG_SEPARATOR = 8 * '.'
+LOG_SEPARATOR_RE = re.compile('^((%s)+)' % re.escape(LOG_SEPARATOR),
+ re.MULTILINE)
+
+# Each line of the embedded log messages will be prefixed by LOG_LINE_PREFIX.
+LOG_LINE_PREFIX = 2 * ' '
+
+# Set python to the default locale as per environment settings, same as svn
+# TODO we should really parse config and if log-encoding is specified, set
+# the locale to match that encoding
+locale.setlocale(locale.LC_ALL, '')
+
+# We want the svn output (such as svn info) to be non-localized
+# Using LC_MESSAGES should not affect localized output of svn log, for example
+if os.environ.has_key("LC_ALL"):
+ del os.environ["LC_ALL"]
+os.environ["LC_MESSAGES"] = "C"
+
+###############################################################################
+# Support for older Python versions
+###############################################################################
+
+# True/False constants are Python 2.2+
+try:
+ True, False
+except NameError:
+ True, False = 1, 0
+
+def lstrip(s, ch):
+ """Replacement for str.lstrip (support for arbitrary chars to strip was
+ added in Python 2.2.2)."""
+ i = 0
+ try:
+ while s[i] == ch:
+ i = i+1
+ return s[i:]
+ except IndexError:
+ return ""
+
+def rstrip(s, ch):
+ """Replacement for str.rstrip (support for arbitrary chars to strip was
+ added in Python 2.2.2)."""
+ try:
+ if s[-1] != ch:
+ return s
+ i = -2
+ while s[i] == ch:
+ i = i-1
+ return s[:i+1]
+ except IndexError:
+ return ""
+
+def strip(s, ch):
+ """Replacement for str.strip (support for arbitrary chars to strip was
+ added in Python 2.2.2)."""
+ return lstrip(rstrip(s, ch), ch)
+
+def rsplit(s, sep, maxsplits=0):
+ """Like str.rsplit, which is Python 2.4+ only."""
+ L = s.split(sep)
+ if not 0 < maxsplits <= len(L):
+ return L
+ return [sep.join(L[0:-maxsplits])] + L[-maxsplits:]
+
+###############################################################################
+
+def kwextract(s):
+ """Extract info from a svn keyword string."""
+ try:
+ return strip(s, "$").strip().split(": ")[1]
+ except IndexError:
+ return "<unknown>"
+
+__revision__ = kwextract('$Rev$')
+__date__ = kwextract('$Date$')
+
+# Additional options, not (yet?) mapped to command line flags
+default_opts = {
+ "svn": "svn",
+ "prop": NAME + "-integrated",
+ "block-prop": NAME + "-blocked",
+ "commit-verbose": True,
+ "verbose": 0,
+}
+logs = {}
+
+def console_width():
+ """Get the width of the console screen (if any)."""
+ try:
+ return int(os.environ["COLUMNS"])
+ except (KeyError, ValueError):
+ pass
+
+ try:
+ # Call the Windows API (requires ctypes library)
+ from ctypes import windll, create_string_buffer
+ h = windll.kernel32.GetStdHandle(-11)
+ csbi = create_string_buffer(22)
+ res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi)
+ if res:
+ import struct
+ (bufx, bufy,
+ curx, cury, wattr,
+ left, top, right, bottom,
+ maxx, maxy) = struct.unpack("hhhhHhhhhhh", csbi.raw)
+ return right - left + 1
+ except ImportError:
+ pass
+
+ # Parse the output of stty -a
+ if os.isatty(1):
+ out = os.popen("stty -a").read()
+ m = re.search(r"columns (\d+);", out)
+ if m:
+ return int(m.group(1))
+
+ # sensible default
+ return 80
+
+def error(s):
+ """Subroutine to output an error and bail."""
+ print >> sys.stderr, "%s: %s" % (NAME, s)
+ sys.exit(1)
+
+def report(s):
+ """Subroutine to output progress message, unless in quiet mode."""
+ if opts["verbose"]:
+ print "%s: %s" % (NAME, s)
+
+def prefix_lines(prefix, lines):
+ """Given a string representing one or more lines of text, insert the
+ specified prefix at the beginning of each line, and return the result.
+ The input must be terminated by a newline."""
+ assert lines[-1] == "\n"
+ return prefix + lines[:-1].replace("\n", "\n"+prefix) + "\n"
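+# For example, prefix_lines("> ", "one\ntwo\n") returns "> one\n> two\n".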
+
+def recode_stdout_to_file(s):
+ if locale.getdefaultlocale()[1] is None or not hasattr(sys.stdout, "encoding") \
+ or sys.stdout.encoding is None:
+ return s
+ u = s.decode(sys.stdout.encoding)
+ return u.encode(locale.getdefaultlocale()[1])
+
+class LaunchError(Exception):
+ """Signal a failure in execution of an external command. Parameters are the
+ exit code of the process, the original command line, and the output of the
+ command."""
+
+try:
+ """Launch a sub-process. Return its output (both stdout and stderr),
+ optionally split by lines (if split_lines is True). Raise a LaunchError
+ exception if the exit code of the process is non-zero (failure).
+
+ This function has two implementations, one based on subprocess (preferred),
+ and one based on popen (for compatibility).
+ """
+ import subprocess
+ import shlex
+
+ def launch(cmd, split_lines=True):
+ # With Python 2.4 or higher, on some platforms we get much faster
+ # performance from the subprocess module (where python doesn't try
+ # to close an exorbitant number of file descriptors).
+ stdout = ""
+ stderr = ""
+ try:
+ if os.name == 'nt':
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE, \
+ close_fds=False, stderr=subprocess.PIPE)
+ else:
+ # Use shlex to break up the parameters intelligently,
+ # respecting quotes. shlex can't handle unicode.
+ args = shlex.split(cmd.encode('ascii'))
+ p = subprocess.Popen(args, stdout=subprocess.PIPE, \
+ close_fds=False, stderr=subprocess.PIPE)
+ stdoutAndErr = p.communicate()
+ stdout = stdoutAndErr[0]
+ stderr = stdoutAndErr[1]
+ except OSError, inst:
+ # Using 1 as failure code; should get actual number somehow? For
+ # examples see svnmerge_test.py's TestCase_launch.test_failure and
+ # TestCase_launch.test_failurecode.
+ raise LaunchError(1, cmd, stdout + " " + stderr + ": " + str(inst))
+
+ if p.returncode == 0:
+ if split_lines:
+ # Setting keepends=True for compatibility with previous logic
+ # (where file.readlines() preserves newlines)
+ return stdout.splitlines(True)
+ else:
+ return stdout
+ else:
+ raise LaunchError(p.returncode, cmd, stdout + stderr)
+except ImportError:
+ # support versions of python before 2.4 (slower on some systems)
+ def launch(cmd, split_lines=True):
+ if os.name not in ['nt', 'os2']:
+ import popen2
+ p = popen2.Popen4(cmd)
+ p.tochild.close()
+ if split_lines:
+ out = p.fromchild.readlines()
+ else:
+ out = p.fromchild.read()
+ ret = p.wait()
+ if ret == 0:
+ ret = None
+ else:
+ ret >>= 8
+ else:
+ i,k = os.popen4(cmd)
+ i.close()
+ if split_lines:
+ out = k.readlines()
+ else:
+ out = k.read()
+ ret = k.close()
+
+ if ret is None:
+ return out
+ raise LaunchError(ret, cmd, out)
+
+def launchsvn(s, show=False, pretend=False, **kwargs):
+ """Launch SVN and grab its output."""
+ username = password = configdir = ""
+ if opts.get("username", None):
+ username = "--username=" + opts["username"]
+ if opts.get("password", None):
+ password = "--password=" + opts["password"]
+ if opts.get("config-dir", None):
+ configdir = "--config-dir=" + opts["config-dir"]
+ cmd = ' '.join(filter(None, [opts["svn"], "--non-interactive",
+ username, password, configdir, s]))
+ if show or opts["verbose"] >= 2:
+ print cmd
+ if pretend:
+ return None
+ return launch(cmd, **kwargs)
+
+def svn_command(s):
+ """Do (or pretend to do) an SVN command."""
+ out = launchsvn(s, show=opts["show-changes"] or opts["dry-run"],
+ pretend=opts["dry-run"],
+ split_lines=False)
+ if not opts["dry-run"]:
+ print out
+
+def check_dir_clean(dir):
+ """Check the current status of dir for local mods."""
+ if opts["force"]:
+ report('skipping status check because of --force')
+ return
+ report('checking status of "%s"' % dir)
+
+ # Checking with -q does not show unversioned files or external
+ # directories, though it does print a debug message for external
+ # directories after a blank line. So, practically, only the first
+ # line matters: if it's non-empty there is a modification.
+ out = launchsvn("status -q %s" % dir)
+ if out and out[0].strip():
+ error('"%s" has local modifications; it must be clean' % dir)
+
+class PathIdentifier:
+ """Abstraction for a path identifier, so that we can start talking
+ about it before we know the form that it takes in the properties (its
+ external_form). Objects are referenced in the class variable 'locobjs',
+ keyed by all known forms."""
+
+ # a map of UUID (or None) to repository root URL.
+ repo_hints = {}
+
+ # a map from any known string form to the corresponding PathIdentifier
+ locobjs = {}
+
+ def __init__(self, repo_relative_path, uuid=None, url=None, external_form=None):
+ self.repo_relative_path = repo_relative_path
+ self.uuid = uuid
+ self.url = url
+ self.external_form = external_form
+
+ def __repr__(self):
+ return "<PathIdentifier " + ', '.join('%s=%r' % i for i in self.__dict__.items()) + '>'
+
+ def __str__(self):
+ """Return a printable string representation"""
+ if self.external_form:
+ return self.external_form
+ if self.url:
+ return self.format('url')
+ if self.uuid:
+ return self.format('uuid')
+ return self.format('path')
+
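+ # A path identifier string may take any of three forms, e.g.:
+ #   http://host/repo/branches/foo          (a full URL)
+ #   uuid://<repository-uuid>/branches/foo  (UUID-qualified)
+ #   /branches/foo                          (repository-relative path)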
+ def from_pathid(pathid_str):
+ """convert pathid_str to a PathIdentifier"""
+ if not PathIdentifier.locobjs.has_key(pathid_str):
+ if is_url(pathid_str):
+ # we can determine every form; PathIdentifier.hint knows how to do that
+ PathIdentifier.hint(pathid_str)
+ elif pathid_str[:7] == 'uuid://':
+ mo = re.match('uuid://([^/]*)(.*)', pathid_str)
+ if not mo:
+ error("Invalid path identifier '%s'" % pathid_str)
+ uuid, repo_relative_path = mo.groups()
+ pathid = PathIdentifier(repo_relative_path, uuid=uuid)
+ # we can cache this by uuid:// pathid and by repo-relative path
+ PathIdentifier.locobjs[pathid_str] = PathIdentifier.locobjs[repo_relative_path] = pathid
+ elif pathid_str and pathid_str[0] == '/':
+ # strip any trailing slashes
+ pathid_str = pathid_str.rstrip('/')
+ pathid = PathIdentifier(repo_relative_path=pathid_str)
+ # we can only cache this by repo-relative path
+ PathIdentifier.locobjs[pathid_str] = pathid
+ else:
+ error("Invalid path identifier '%s'" % pathid_str)
+ return PathIdentifier.locobjs[pathid_str]
+ from_pathid = staticmethod(from_pathid)
+
+ def from_target(target):
+ """Convert a target (either a working copy path or an URL) into a
+ path identifier."""
+ # prime the cache first if we don't know about this target yet
+ if not PathIdentifier.locobjs.has_key(target):
+ PathIdentifier.hint(target)
+
+ try:
+ return PathIdentifier.locobjs[target]
+ except KeyError:
+ error("Could not recognize path identifier '%s'" % target)
+ from_target = staticmethod(from_target)
+
+ def hint(target):
+ """Cache some information about target, as it may be referenced by
+ repo-relative path in subversion properties; the cache can help to
+ expand such a relative path to a full path identifier."""
+ if PathIdentifier.locobjs.has_key(target): return
+ if not is_url(target) and not is_wc(target): return
+
+ url = target_to_url(target)
+
+ root = get_repo_root(url)
+ assert root[-1] != "/"
+ assert url[:len(root)] == root, "url=%r, root=%r" % (url, root)
+ repo_relative_path = url[len(root):]
+
+ try:
+ uuid = get_svninfo(target)['Repository UUID']
+ uuid_pathid = 'uuid://%s%s' % (uuid, repo_relative_path)
+ except KeyError:
+ uuid = None
+ uuid_pathid = None
+
+ locobj = PathIdentifier.locobjs.get(url) or \
+ (uuid_pathid and PathIdentifier.locobjs.get(uuid_pathid))
+ if not locobj:
+ locobj = PathIdentifier(repo_relative_path, uuid=uuid, url=url)
+
+ PathIdentifier.repo_hints[uuid] = root # (uuid may be None)
+
+ PathIdentifier.locobjs[target] = locobj
+ PathIdentifier.locobjs[url] = locobj
+ if uuid_pathid:
+ PathIdentifier.locobjs[uuid_pathid] = locobj
+ if not PathIdentifier.locobjs.has_key(repo_relative_path):
+ PathIdentifier.locobjs[repo_relative_path] = locobj
+ hint = staticmethod(hint)
+
+ def format(self, fmt):
+ if fmt == 'path':
+ return self.repo_relative_path
+ elif fmt == 'uuid':
+ return "uuid://%s%s" % (self.uuid, self.repo_relative_path)
+ elif fmt == 'url':
+ return self.url
+ else:
+ error("Unkonwn path type '%s'" % fmt)
+
+ def match_substring(self, str):
+ """Test whether str is a substring of any representation of this
+ PathIdentifier."""
+ if self.repo_relative_path.find(str) >= 0:
+ return True
+
+ if self.uuid:
+ if ("uuid://%s%s" % (self.uuid, self.repo_relative_path)).find(str) >= 0:
+ return True
+
+ if self.url:
+ if (self.url + self.repo_relative_path).find(str) >= 0:
+ return True
+
+ return False
+
+ def get_url(self):
+ """Convert a pathid into a URL. If this is not possible, error out."""
+ if self.url:
+ return self.url
+ # if we have a uuid and happen to know the URL for it, use that
+ elif self.uuid and PathIdentifier.repo_hints.has_key(self.uuid):
+ self.url = PathIdentifier.repo_hints[self.uuid] + self.repo_relative_path
+ PathIdentifier.locobjs[self.url] = self
+ return self.url
+ # if we've only seen one rep, use that (a guess, but an educated one)
+ elif not self.uuid and len(PathIdentifier.repo_hints) == 1:
+ uuid, root = PathIdentifier.repo_hints.items()[0]
+ if uuid:
+ self.uuid = uuid
+ PathIdentifier.locobjs['uuid://%s%s' % (uuid, self.repo_relative_path)] = self
+ self.url = root + self.repo_relative_path
+ PathIdentifier.locobjs[self.url] = self
+ report("Guessing that '%s' refers to '%s'" % (self, self.url))
+ return self.url
+ else:
+ error("Cannot determine URL for '%s'; " % self +
+ "Explicit source argument (-S/--source) required.\n")
+
+class RevisionLog:
+ """
+ A log of the revisions which affected a given URL between two
+ revisions.
+ """
+
+ def __init__(self, url, begin, end, find_propchanges=False):
+ """
+ Create a new RevisionLog object, which stores, in self.revs, a list
+ of the revisions which affected the specified URL between begin and
+ end. If find_propchanges is True, self.propchange_revs will contain a
+ list of the revisions which changed properties directly on the
+ specified URL. URL must be the URL for a directory in the repository.
+ """
+ self.url = url
+
+ # Setup the log options (--quiet, so we don't show log messages)
+ log_opts = '--xml --quiet -r%s:%s "%s"' % (begin, end, url)
+ if find_propchanges:
+ # The --verbose flag lets us grab merge tracking information
+ # by looking at propchanges
+ log_opts = "--verbose " + log_opts
+
+ # Read the log to look for revision numbers and merge-tracking info
+ self.revs = []
+ self.propchange_revs = []
+ repos_pathid = PathIdentifier.from_target(url)
+ for chg in SvnLogParser(launchsvn("log %s" % log_opts,
+ split_lines=False)):
+ self.revs.append(chg.revision())
+ for p in chg.paths():
+ if p.action() == 'M' and p.pathid() == repos_pathid.repo_relative_path:
+ self.propchange_revs.append(chg.revision())
+
+ # Save the range of the log
+ self.begin = int(begin)
+ if end == "HEAD":
+ # If end is not provided, we do not know which is the latest
+ # revision in the repository. So we set 'end' to the latest
+ # known revision.
+ self.end = self.revs[-1]
+ else:
+ self.end = int(end)
+
+ self._merges = None
+ self._blocks = None
+
+ def merge_metadata(self):
+ """
+ Return a VersionedProperty object, with a cached view of the merge
+ metadata in the range of this log.
+ """
+
+ # Load merge metadata if necessary
+ if not self._merges:
+ self._merges = VersionedProperty(self.url, opts["prop"])
+ self._merges.load(self)
+
+ return self._merges
+
+ def block_metadata(self):
+ if not self._blocks:
+ self._blocks = VersionedProperty(self.url, opts["block-prop"])
+ self._blocks.load(self)
+
+ return self._blocks
+
+
+class VersionedProperty:
+ """
+ A read-only, cached view of a versioned property.
+
+ self.revs contains a list of the revisions in which the property changes.
+ self.values stores the new values at each corresponding revision. If the
+ value of the property is unknown, it is set to None.
+
+ Initially, we set self.revs to [0] and self.values to [None]. This
+ indicates that, as of revision zero, we know nothing about the value of
+ the property.
+
+ Later, if you run self.load(log), we cache the value of this property over
+ the entire range of the log by noting each revision in which the property
+ was changed. At the end of the range of the log, we invalidate the
+ cache by adding the value None for any revisions which fall outside
+ the range of the log.
+
+ Once self.revs and self.values are filled, we can find the value of the
+ property at any arbitrary revision using a binary search on self.revs.
+ Once we find the last revision during which the property was changed,
+ we can look up the associated value in self.values. (If that value
+ is None, it was not cached and we have to do a full propget.)
+
+ An example: We know that the 'svnmerge' property was added in r10, and
+ changed in r21. We gathered log info up until r40.
+
+ revs = [0, 10, 21, 40]
+ values = [None, "val1", "val2", None]
+
+ What these values say:
+ - From r0 to r9, we know nothing about the property.
+ - In r10, the property was set to "val1". This property stayed the same
+ until r21, when it was changed to "val2".
+ - We don't know what happened after r40.
+ """
+
+ def __init__(self, url, name):
+ """View the history of a versioned property at URL with name"""
+ self.url = url
+ self.name = name
+
+ # We know nothing about the value of the property. Setup revs
+ # and values to indicate as such.
+ self.revs = [0]
+ self.values = [None]
+
+ # We don't have any revisions cached
+ self._initial_value = None
+ self._changed_revs = []
+ self._changed_values = []
+
+ def load(self, log):
+ """
+ Load the history of property changes from the specified
+ RevisionLog object.
+ """
+
+ # Get the property value before the range of the log
+ if log.begin > 1:
+ self.revs.append(log.begin-1)
+ try:
+ self._initial_value = self.raw_get(log.begin-1)
+ except LaunchError:
+ # The specified URL might not exist before the
+ # range of the log. If so, we can safely assume
+ # that the property was empty at that time.
+ self._initial_value = { }
+ self.values.append(self._initial_value)
+ else:
+ self._initial_value = { }
+ self.values[0] = self._initial_value
+
+ # Cache the property values in the log range
+ old_value = self._initial_value
+ for rev in log.propchange_revs:
+ new_value = self.raw_get(rev)
+ if new_value != old_value:
+ self._changed_revs.append(rev)
+ self._changed_values.append(new_value)
+ self.revs.append(rev)
+ self.values.append(new_value)
+ old_value = new_value
+
+ # Indicate that we know nothing about the value of the property
+ # after the range of the log.
+ if log.revs:
+ self.revs.append(log.end+1)
+ self.values.append(None)
+
+ def raw_get(self, rev=None):
+ """
+ Get the property at revision REV. If rev is not specified, get
+ the property at revision HEAD.
+ """
+ return get_revlist_prop(self.url, self.name, rev)
+
+ def get(self, rev=None):
+ """
+ Get the property at revision REV. If rev is not specified, get
+ the property at revision HEAD.
+ """
+
+ if rev is not None:
+
+ # Find the index using a binary search
+ i = bisect(self.revs, rev) - 1
+
+ # Return the value of the property, if it was cached
+ if self.values[i] is not None:
+ return self.values[i]
+
+ # Get the current value of the property
+ return self.raw_get(rev)
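+ # For example, with revs = [0, 10, 21, 40] and
+ # values = [None, "val1", "val2", None] (as in the class docstring),
+ # get(15) finds index 1 via bisect and returns the cached "val1",
+ # while get(50) lands on a cached None and falls back to raw_get(50).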
+
+ def changed_revs(self, key=None):
+ """
+ Get a list of the revisions in which the specified dictionary
+ key was changed in this property. If key is not specified,
+ return a list of revisions in which any key was changed.
+ """
+ if key is None:
+ return self._changed_revs
+ else:
+ changed_revs = []
+ old_val = self._initial_value
+ for rev, val in zip(self._changed_revs, self._changed_values):
+ if val.get(key) != old_val.get(key):
+ changed_revs.append(rev)
+ old_val = val
+ return changed_revs
+
+ def initialized_revs(self):
+ """
+ Get a list of the revisions in which keys were added or
+ removed in this property.
+ """
+ initialized_revs = []
+ old_len = len(self._initial_value)
+ for rev, val in zip(self._changed_revs, self._changed_values):
+ if len(val) != old_len:
+ initialized_revs.append(rev)
+ old_len = len(val)
+ return initialized_revs
+
+class RevisionSet:
+ """
+ A set of revisions, held in dictionary form for easy manipulation. If we
+ were to rewrite this script for Python 2.3+, we would subclass this from
+ set (or UserSet). As this class does not include branch
+ information, it's assumed that one instance will be used per
+ branch.
+ """
+ def __init__(self, parm):
+ """Constructs a RevisionSet from a string in property form, or from
+ a dictionary whose keys are the revisions. Raises ValueError if the
+ input string is invalid."""
+
+ self._revs = {}
+
+ revision_range_split_re = re.compile('[-:]')
+
+ if isinstance(parm, types.DictType):
+ self._revs = parm.copy()
+ elif isinstance(parm, types.ListType):
+ for R in parm:
+ self._revs[int(R)] = 1
+ else:
+ parm = parm.strip()
+ if parm:
+ for R in parm.split(","):
+ rev_or_revs = re.split(revision_range_split_re, R)
+ if len(rev_or_revs) == 1:
+ self._revs[int(rev_or_revs[0])] = 1
+ elif len(rev_or_revs) == 2:
+ for rev in range(int(rev_or_revs[0]),
+ int(rev_or_revs[1])+1):
+ self._revs[rev] = 1
+ else:
+ raise ValueError, 'Ill-formatted revision range: ' + R
+
+ def sorted(self):
+ revnums = self._revs.keys()
+ revnums.sort()
+ return revnums
+
+ def normalized(self):
+ """Returns a normalized version of the revision set, which is an
+ ordered list of couples (start,end), with the minimum number of
+ intervals."""
+ revnums = self.sorted()
+ revnums.reverse()
+ ret = []
+ while revnums:
+ s = e = revnums.pop()
+ while revnums and revnums[-1] in (e, e+1):
+ e = revnums.pop()
+ ret.append((s, e))
+ return ret
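+ # For example, RevisionSet("3-5,7").normalized() == [(3, 5), (7, 7)].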
+
+ def __str__(self):
+ """Convert the revision set to a string, using its normalized form."""
+ L = []
+ for s,e in self.normalized():
+ if s == e:
+ L.append(str(s))
+ else:
+ L.append(str(s) + "-" + str(e))
+ return ",".join(L)
+
+ def __contains__(self, rev):
+ return self._revs.has_key(rev)
+
+ def __sub__(self, rs):
+ """Compute subtraction as in sets."""
+ revs = {}
+ for r in self._revs.keys():
+ if r not in rs:
+ revs[r] = 1
+ return RevisionSet(revs)
+
+ def __and__(self, rs):
+ """Compute intersections as in sets."""
+ revs = {}
+ for r in self._revs.keys():
+ if r in rs:
+ revs[r] = 1
+ return RevisionSet(revs)
+
+ def __nonzero__(self):
+ return len(self._revs) != 0
+
+ def __len__(self):
+ """Return the number of revisions in the set."""
+ return len(self._revs)
+
+ def __iter__(self):
+ return iter(self.sorted())
+
+ def __or__(self, rs):
+ """Compute set union."""
+ revs = self._revs.copy()
+ revs.update(rs._revs)
+ return RevisionSet(revs)
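+ # The usual set algebra holds; for example:
+ #   str(RevisionSet("1-5") - RevisionSet("3"))   == "1-2,4-5"
+ #   str(RevisionSet("1-3") | RevisionSet("3-6")) == "1-6"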
+
+def merge_props_to_revision_set(merge_props, pathid):
+ """A converter which returns a RevisionSet instance containing the
+ revisions from PATH as known to BRANCH_PROPS. BRANCH_PROPS is a
+ dictionary of pathid -> revision set branch integration information
+ (as returned by get_merge_props())."""
+ if not merge_props.has_key(pathid):
+ error('no integration info available for path "%s"' % pathid)
+ return RevisionSet(merge_props[pathid])
+
+def dict_from_revlist_prop(propvalue):
+ """Given a property value as a string containing per-source revision
+ lists, return a dictionary whose key is a source path identifier
+ and whose value is the revisions for that source."""
+ prop = {}
+
+ # Multiple sources are separated by any whitespace.
+ for L in propvalue.split():
+ # We use rsplit to play safe and allow colons in pathids.
+ pathid_str, revs = rsplit(L.strip(), ":", 1)
+
+ pathid = PathIdentifier.from_pathid(pathid_str)
+
+ # cache the "external" form we saw
+ pathid.external_form = pathid_str
+
+ prop[pathid] = revs
+ return prop
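+# For example, the property value "/branches/foo:1-10,12 /branches/bar:1-30"
+# yields a dictionary mapping the pathids for /branches/foo and /branches/bar
+# to the revision strings "1-10,12" and "1-30".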
+
+def get_revlist_prop(url_or_dir, propname, rev=None):
+ """Given a repository URL or working copy path and a property
+ name, extract the values of the property which store per-source
+ revision lists and return a dictionary whose key is a source path
+ identifier, and whose value is the revisions for that source."""
+
+ # Note that propget does not return an error if the property does
+ # not exist, it simply does not output anything. So we do not need
+ # to check for LaunchError here.
+ args = '--strict "%s" "%s"' % (propname, url_or_dir)
+ if rev:
+ args = '-r %s %s' % (rev, args)
+ out = launchsvn('propget %s' % args, split_lines=False)
+
+ return dict_from_revlist_prop(out)
+
+def get_merge_props(dir):
+ """Extract the merged revisions."""
+ return get_revlist_prop(dir, opts["prop"])
+
+def get_block_props(dir):
+ """Extract the blocked revisions."""
+ return get_revlist_prop(dir, opts["block-prop"])
+
+def get_blocked_revs(dir, source_pathid):
+ p = get_block_props(dir)
+ if p.has_key(source_pathid):
+ return RevisionSet(p[source_pathid])
+ return RevisionSet("")
+
+def format_merge_props(props, sep=" "):
+ """Formats the hash PROPS as a string suitable for use as a
+ Subversion property value."""
+ assert sep in ["\t", "\n", " "] # must be a whitespace
+ props = props.items()
+ props.sort()
+ L = []
+ for h, r in props:
+ L.append("%s:%s" % (h, r))
+ return sep.join(L)
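+# For example, an entry whose pathid renders as "/branches/foo" with
+# revision string "1-10" contributes "/branches/foo:1-10"; multiple
+# entries are joined with sep (a single space by default).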
+
+def _run_propset(dir, prop, value):
+ """Set the property 'prop' of directory 'dir' to value 'value'. We go
+ through a temporary file to not run into command line length limits."""
+ try:
+ fd, fname = tempfile.mkstemp()
+ f = os.fdopen(fd, "wb")
+ except AttributeError:
+ # Fallback for Python <= 2.3 which does not have mkstemp (mktemp
+ # suffers from race conditions. Not that we care...)
+ fname = tempfile.mktemp()
+ f = open(fname, "wb")
+
+ try:
+ f.write(value)
+ f.close()
+ report("property data written to temp file: %s" % value)
+ svn_command('propset "%s" -F "%s" "%s"' % (prop, fname, dir))
+ finally:
+ os.remove(fname)
+
+def set_props(dir, name, props):
+ props = format_merge_props(props)
+ if props:
+ _run_propset(dir, name, props)
+ else:
+ # Check if NAME exists on DIR before trying to delete it.
+ # As of 1.6 propdel no longer supports deleting a
+ # non-existent property.
+ out = launchsvn('propget "%s" "%s"' % (name, dir))
+ if out:
+ svn_command('propdel "%s" "%s"' % (name, dir))
+
+def set_merge_props(dir, props):
+ set_props(dir, opts["prop"], props)
+
+def set_block_props(dir, props):
+ set_props(dir, opts["block-prop"], props)
+
+def set_blocked_revs(dir, source_pathid, revs):
+ props = get_block_props(dir)
+ if revs:
+ props[source_pathid] = str(revs)
+ elif props.has_key(source_pathid):
+ del props[source_pathid]
+ set_block_props(dir, props)
+
+def is_url(url):
+ """Check if url looks like a valid url."""
+ return re.search(r"^[a-zA-Z][-+\.\w]*://[^\s]+$", url) is not None and url[:4] != 'uuid'
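+# For example, is_url("http://host/repo") and is_url("svn+ssh://host/repo")
+# are true, while is_url("/branches/foo") and is_url("uuid://xyz/foo") are
+# not (the latter is excluded explicitly, so uuid:// pathids are not URLs).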
+
+def check_url(url):
+ """Similar to is_url, but actually invoke get_svninfo to find out"""
+ return get_svninfo(url) != {}
+
+def is_pathid(pathid):
+ return isinstance(pathid, PathIdentifier)
+
+def is_wc(dir):
+ """Check if a directory is a working copy."""
+ return os.path.isdir(os.path.join(dir, ".svn")) or \
+ os.path.isdir(os.path.join(dir, "_svn"))
+
+_cache_svninfo = {}
+def get_svninfo(target):
+ """Extract the subversion information for a target (through 'svn info').
+ This function uses an internal cache to let clients query information
+ many times."""
+ if _cache_svninfo.has_key(target):
+ return _cache_svninfo[target]
+ info = {}
+ for L in launchsvn('info "%s"' % target):
+ L = L.strip()
+ if not L:
+ continue
+ key, value = L.split(": ", 1)
+ info[key] = value.strip()
+ _cache_svninfo[target] = info
+ return info
+
+def target_to_url(target):
+ """Convert working copy path or repos URL to a repos URL."""
+ if is_wc(target):
+ info = get_svninfo(target)
+ return info["URL"]
+ return target
+
+_cache_reporoot = {}
+def get_repo_root(target):
+ """Compute the root repos URL given a working-copy path, or a URL."""
+ # Try using "svn info WCDIR". This works only on SVN clients >= 1.3
+ if not is_url(target):
+ try:
+ info = get_svninfo(target)
+ root = info["Repository Root"]
+ _cache_reporoot[root] = None
+ return root
+ except KeyError:
+ pass
+ url = target_to_url(target)
+ assert url[-1] != '/'
+ else:
+ url = target
+
+ # Go through the cache of the repository roots. This avoids extra
+ # server round-trips when we ask for the root of different URLs in
+ # the same repository (the cache in get_svninfo() cannot detect
+ # that, of course, and would issue a remote command).
+ assert is_url(url)
+ for r in _cache_reporoot:
+ if url.startswith(r):
+ return r
+
+ # Try using "svn info URL". This works only on SVN clients >= 1.2
+ try:
+ info = get_svninfo(url)
+ # info may be {}, in which case we'll see KeyError here
+ root = info["Repository Root"]
+ _cache_reporoot[root] = None
+ return root
+ except (KeyError, LaunchError):
+ pass
+
+ # Constrained to older svn clients, we are stuck with this ugly
+ # trial-and-error implementation. It could be made faster with a
+ # binary search.
+ while url:
+ temp = os.path.dirname(url)
+ try:
+ launchsvn('proplist "%s"' % temp)
+ except LaunchError:
+ _cache_reporoot[url] = None
+ return rstrip(url, "/")
+ url = temp
+
+ error("svn repos root of %s not found" % target)
+
+class SvnLogParser:
+ """
+ Parse the "svn log", going through the XML output and using pulldom (which
+ would even allow streaming the command output).
+ """
+ def __init__(self, xml):
+ self._events = pulldom.parseString(xml)
+ def __getitem__(self, idx):
+ for event, node in self._events:
+ if event == pulldom.START_ELEMENT and node.tagName == "logentry":
+ self._events.expandNode(node)
+ return self.SvnLogRevision(node)
+ raise IndexError, "Could not find 'logentry' tag in xml"
+
+ class SvnLogRevision:
+ def __init__(self, xmlnode):
+ self.n = xmlnode
+ def revision(self):
+ return int(self.n.getAttribute("revision"))
+ def author(self):
+ return self.n.getElementsByTagName("author")[0].firstChild.data
+ def paths(self):
+ return [self.SvnLogPath(n)
+ for n in self.n.getElementsByTagName("path")]
+
+ class SvnLogPath:
+ def __init__(self, xmlnode):
+ self.n = xmlnode
+ def action(self):
+ return self.n.getAttribute("action")
+ def pathid(self):
+ return self.n.firstChild.data
+ def copyfrom_rev(self):
+ try: return self.n.getAttribute("copyfrom-rev")
+ except KeyError: return None
+ def copyfrom_pathid(self):
+ try: return self.n.getAttribute("copyfrom-path")
+ except KeyError: return None
+
+def get_copyfrom(target):
+ """Get copyfrom info for a given target (it represents the
+ repository-relative path from where it was branched). NOTE:
+ repos root has no copyfrom info. In this case None is returned.
+
+ Returns the:
+ - source file or directory from which the copy was made
+ - revision from which that source was copied
+ - revision in which the copy was committed
+ """
+ repos_path = PathIdentifier.from_target(target).repo_relative_path
+ for chg in SvnLogParser(launchsvn('log -v --xml --stop-on-copy "%s"'
+ % target, split_lines=False)):
+ for p in chg.paths():
+ if p.action() == 'A' and p.pathid() == repos_path:
+ # These values will be None if the corresponding elements are
+ # not found in the log.
+ return p.copyfrom_pathid(), p.copyfrom_rev(), chg.revision()
+ return None,None,None
+
+def get_latest_rev(url):
+ """Get the latest revision of the repository of which URL is part."""
+ try:
+ info = get_svninfo(url)
+ if not info.has_key("Revision"):
+ error("Not a valid URL: %s" % url)
+ return info["Revision"]
+ except LaunchError:
+ # Alternative method for latest revision checking (for svn < 1.2)
+ report('checking latest revision of "%s"' % url)
+ L = launchsvn('proplist --revprop -r HEAD "%s"' % opts["source-url"])[0]
+ rev = re.search("revision (\d+)", L).group(1)
+ report('latest revision of "%s" is %s' % (url, rev))
+ return rev
+
+def get_created_rev(url):
+ """Lookup the revision at which the path identified by the
+ provided URL was first created."""
+ oldest_rev = -1
+ report('determining oldest revision for URL "%s"' % url)
+ ### TODO: Refactor this to use a modified RevisionLog class.
+ lines = None
+ cmd = "log -r1:HEAD --stop-on-copy -q " + url
+ try:
+ lines = launchsvn(cmd + " --limit=1")
+ except LaunchError:
+ # Assume that --limit isn't supported by the installed 'svn'.
+ lines = launchsvn(cmd)
+ if lines and len(lines) > 1:
+ i = lines[1].find(" ")
+ if i != -1:
+ oldest_rev = int(lines[1][1:i])
+ if oldest_rev == -1:
+ error('unable to determine oldest revision for URL "%s"' % url)
+ return oldest_rev
+
+def get_commit_log(url, revnum):
+ """Return the log message for a specific integer revision
+ number."""
+ out = launchsvn("log --incremental -r%d %s" % (revnum, url))
+ return recode_stdout_to_file("".join(out[1:]))
+
+def construct_merged_log_message(url, revnums):
+ """Return a commit log message containing all the commit messages
+ in the specified revisions at the given URL. The separator used
+ in this log message is determined by searching for the longest
+ svnmerge separator existing in the commit log messages and
+ extending it by one more separator. This results in a new commit
+ log message that is clearer in describing merges that contain
+ other merges. Trailing newlines are removed from the embedded
+ log messages."""
+ messages = ['']
+ longest_sep = ''
+ for r in revnums.sorted():
+ message = get_commit_log(url, r)
+ if message:
+ message = re.sub(r'(\r\n|\r|\n)', "\n", message)
+ message = rstrip(message, "\n") + "\n"
+ messages.append(prefix_lines(LOG_LINE_PREFIX, message))
+ for match in LOG_SEPARATOR_RE.findall(message):
+ # findall() returns (full_run, last_unit) tuples; take the full
+ # run so the longest existing separator is actually found.
+ sep = match[0]
+ if len(sep) > len(longest_sep):
+ longest_sep = sep
+
+ longest_sep += LOG_SEPARATOR + "\n"
+ messages.append('')
+ return longest_sep.join(messages)
+
+def get_default_source(branch_target, branch_props):
+ """Return the default source for branch_target (given its branch_props).
+ Error out if there is ambiguity."""
+ if not branch_props:
+ error("no integration info available")
+
+ props = branch_props.copy()
+ pathid = PathIdentifier.from_target(branch_target)
+
+ # To make bidirectional merges easier, find the target's
+ # repository local path so it can be removed from the list of
+ # possible integration sources.
+ if props.has_key(pathid):
+ del props[pathid]
+
+ if len(props) > 1:
+ err_msg = "multiple sources found. "
+ err_msg += "Explicit source argument (-S/--source) required.\n"
+ err_msg += "The merge sources available are:"
+ for prop in props:
+ err_msg += "\n " + str(prop)
+ error(err_msg)
+
+ return props.keys()[0]
+
+def should_find_reflected(branch_dir):
+ should_find_reflected = opts["bidirectional"]
+
+ # If the source has integration info for the target, set find_reflected
+ # even if --bidirectional wasn't specified
+ if not should_find_reflected:
+ source_props = get_merge_props(opts["source-url"])
+ should_find_reflected = source_props.has_key(PathIdentifier.from_target(branch_dir))
+
+ return should_find_reflected
+
+def analyze_revs(target_pathid, url, begin=1, end=None,
+ find_reflected=False):
+ """For the source of the merges in the source URL being merged into
+ target_pathid, analyze the revisions in the interval begin-end (which
+ defaults to 1-HEAD), to find out which revisions are changes in
+ the url, which are changes elsewhere (so-called 'phantom'
+ revisions), optionally which are reflected changes (to avoid
+ conflicts that can occur when doing bidirectional merging between
+ branches), and which revisions initialize merge tracking against other
+ branches. Return a tuple of four RevisionSet's:
+ (real_revs, phantom_revs, reflected_revs, initialized_revs).
+
+ NOTE: To maximize speed, if "end" is not provided, the function is
+ not able to find phantom revisions following the last real
+ revision in the URL.
+ """
+
+ begin = str(begin)
+ if end is None:
+ end = "HEAD"
+ else:
+ end = str(end)
+ if long(begin) > long(end):
+ return RevisionSet(""), RevisionSet(""), \
+ RevisionSet(""), RevisionSet("")
+
+ logs[url] = RevisionLog(url, begin, end, find_reflected)
+ revs = RevisionSet(logs[url].revs)
+
+ if end == "HEAD":
+ # If end is not provided, we do not know which is the latest revision
+ # in the repository. So return the phantom revision set only up to
+ # the latest known revision.
+ end = str(list(revs)[-1])
+
+ phantom_revs = RevisionSet("%s-%s" % (begin, end)) - revs
+
+ if find_reflected:
+ reflected_revs = logs[url].merge_metadata().changed_revs(target_pathid)
+ reflected_revs += logs[url].block_metadata().changed_revs(target_pathid)
+ else:
+ reflected_revs = []
+
+ initialized_revs = RevisionSet(logs[url].merge_metadata().initialized_revs())
+ reflected_revs = RevisionSet(reflected_revs)
+
+ return revs, phantom_revs, reflected_revs, initialized_revs
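+# For example, if the log of url between r1 and r100 shows changes only at
+# r10 and r20, then r1-9, r11-19 and r21-100 are phantom revisions: merging
+# them is a no-op for this url, so they can be recorded without merging.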
+
+def analyze_source_revs(branch_target, source_url, **kwargs):
+ """For the given branch and source, extract the real and phantom
+ source revisions."""
+ branch_url = target_to_url(branch_target)
+ branch_pathid = PathIdentifier.from_target(branch_target)
+
+ # Extract the latest repository revision from the URL of the branch
+ # directory (which is already cached at this point).
+ end_rev = get_latest_rev(source_url)
+
+ # Calculate the base of analysis. If there is a "1-XX" interval in the
+ # merged_revs, we do not need to check those.
+ base = 1
+ r = opts["merged-revs"].normalized()
+ if r and r[0][0] == 1:
+ base = r[0][1] + 1
+
+ # See if the user filtered the revision set. If so, we are not
+ # interested in something outside that range.
+ if opts["revision"]:
+ revs = RevisionSet(opts["revision"]).sorted()
+ if base < revs[0]:
+ base = revs[0]
+ if end_rev > revs[-1]:
+ end_rev = revs[-1]
+
+ return analyze_revs(branch_pathid, source_url, base, end_rev, **kwargs)
+
+def minimal_merge_intervals(revs, phantom_revs):
+ """Produce the smallest number of intervals suitable for merging. revs
+ is the RevisionSet which we want to merge, and phantom_revs are phantom
+ revisions which can be used to concatenate intervals, thus minimizing the
+ number of operations."""
+ revnums = revs.normalized()
+ ret = []
+
+ cur = revnums.pop()
+ while revnums:
+ next = revnums.pop()
+ assert next[1] < cur[0] # otherwise it is not ordered
+ assert cur[0] - next[1] > 1 # otherwise it is not normalized
+ for i in range(next[1]+1, cur[0]):
+ if i not in phantom_revs:
+ ret.append(cur)
+ cur = next
+ break
+ else:
+ cur = (next[0], cur[1])
+
+ ret.append(cur)
+ ret.reverse()
+ return ret
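+# For example, merging revisions 1-3 and 5-6 where r4 is a phantom revision
+# collapses to the single interval [(1, 6)], i.e. one "svn merge -r0:6".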
+
+def display_revisions(revs, display_style, revisions_msg, source_url):
+ """Show REVS as dictated by DISPLAY_STYLE, either numerically, in
+ log format, or as diffs. When displaying revisions numerically,
+ prefix output with REVISIONS_MSG when in verbose mode. Otherwise,
+ request logs or diffs using SOURCE_URL."""
+ if display_style == "revisions":
+ if revs:
+ report(revisions_msg)
+ print revs
+ elif display_style == "logs":
+ for start,end in revs.normalized():
+ svn_command('log --incremental -v -r %d:%d %s' % \
+ (start, end, source_url))
+ elif display_style in ("diffs", "summarize"):
+ if display_style == 'summarize':
+ summarize = '--summarize '
+ else:
+ summarize = ''
+
+ for start, end in revs.normalized():
+ print
+ if start == end:
+ print "%s: changes in revision %d follow" % (NAME, start)
+ else:
+ print "%s: changes in revisions %d-%d follow" % (NAME,
+ start, end)
+ print
+
+ # Note: the starting revision number to 'svn diff' is
+ # NOT inclusive so we have to subtract one from ${START}.
+ svn_command("diff -r %d:%d %s %s" % (start - 1, end, summarize,
+ source_url))
+ else:
+ assert False, "unhandled display style: %s" % display_style
+
+def action_init(target_dir, target_props):
+ """Initialize for merges."""
+ # Check that directory is ready for being modified
+ check_dir_clean(target_dir)
+
+ target_pathid = PathIdentifier.from_target(target_dir)
+ source_pathid = opts['source-pathid']
+ if source_pathid == target_pathid:
+ error("cannot init integration source path '%s'\nIts path identifier does not "
+ "differ from the path identifier of the current directory, '%s'."
+ % (source_pathid, target_pathid))
+
+ source_url = opts['source-url']
+
+ # If the user hasn't specified the revisions to use, see if the
+ # "source" is a copy from the current tree and if so, we can use
+ # the version data obtained from it.
+ revision_range = opts["revision"]
+ if not revision_range:
+ # If source was originally copied from target, and we are merging
+ # changes from source to target (the copy target is the merge source,
+ # and the copy source is the merge target), then we want to mark as
+ # integrated up to the rev in which the copy was committed which
+ # created the merge source:
+ cf_source, cf_rev, copy_committed_in_rev = get_copyfrom(source_url)
+
+ cf_pathid = None
+ if cf_source:
+ cf_url = get_repo_root(source_url) + cf_source
+ if is_url(cf_url) and check_url(cf_url):
+ cf_pathid = PathIdentifier.from_target(cf_url)
+
+ if target_pathid == cf_pathid:
+ report('the source "%s" was copied from "%s" in rev %s and committed in rev %s' %
+ (source_url, target_dir, cf_rev, copy_committed_in_rev))
+ revision_range = "1-" + str(copy_committed_in_rev)
+
+ if not revision_range:
+ # If the reverse is true: copy source is the merge source, and
+ # the copy target is the merge target, then we want to mark as
+ # integrated up to the specific rev of the merge target from
+ # which the merge source was copied. (Longer discussion at:
+ # http://subversion.tigris.org/issues/show_bug.cgi?id=2810 )
+ cf_source, cf_rev, copy_committed_in_rev = get_copyfrom(target_dir)
+
+ cf_pathid = None
+ if cf_source:
+ cf_url = get_repo_root(target_dir) + cf_source
+ if is_url(cf_url) and check_url(cf_url):
+ cf_pathid = PathIdentifier.from_target(cf_url)
+
+ source_pathid = PathIdentifier.from_target(source_url)
+ if source_pathid == cf_pathid:
+ report('the target "%s" was copied from the source "%s" in rev %s and committed in rev %s' %
+ (target_dir, source_url, cf_rev, copy_committed_in_rev))
+ revision_range = "1-" + cf_rev
+
+ # When neither the merge source nor the target is a copy of the other,
+ # and the user did not specify a revision range, default to the current
+ # revision; saying, in effect, "everything has been merged, so mark as
+ # integrated up to the latest rev on the source URL".
+ if not revision_range:
+ revision_range = "1-" + get_latest_rev(source_url)
+
+ revs = RevisionSet(revision_range)
+
+ report('marking "%s" as already containing revisions "%s" of "%s"' %
+ (target_dir, revs, source_url))
+
+ revs = str(revs)
+ # If the local svnmerge-integrated property already has an entry
+ # for the source-pathid, simply error out.
+ if not opts["force"] and target_props.has_key(source_pathid):
+ error('Repository-relative path %s has already been initialized at %s\n'
+ 'Use --force to re-initialize' % (source_pathid, target_dir))
+ # set the pathid's external_form based on the user's options
+ source_pathid.external_form = source_pathid.format(opts['location-type'])
+
+ target_props[source_pathid] = revs
+
+ # Set property
+ set_merge_props(target_dir, target_props)
+
+ # Write out commit message if desired
+ if opts["commit-file"]:
+ f = open(opts["commit-file"], "w")
+ print >>f, 'Initialized merge tracking via "%s" with revisions "%s" from ' \
+ % (NAME, revs)
+ print >>f, '%s' % source_url
+ f.close()
+ report('wrote commit message to "%s"' % opts["commit-file"])
+
+def action_avail(branch_dir, branch_props):
+ """Show commits available for merges."""
+ source_revs, phantom_revs, reflected_revs, initialized_revs = \
+ analyze_source_revs(branch_dir, opts["source-url"],
+ find_reflected=
+ should_find_reflected(branch_dir))
+ report('skipping phantom revisions: %s' % phantom_revs)
+ if reflected_revs:
+ report('skipping reflected revisions: %s' % reflected_revs)
+ report('skipping initialized revisions: %s' % initialized_revs)
+
+ blocked_revs = get_blocked_revs(branch_dir, opts["source-pathid"])
+ avail_revs = source_revs - opts["merged-revs"] - blocked_revs - \
+ reflected_revs - initialized_revs
+
+ # Compose the set of revisions to show
+ revs = RevisionSet("")
+ report_msg = "revisions available to be merged are:"
+ if "avail" in opts["avail-showwhat"]:
+ revs |= avail_revs
+ if "blocked" in opts["avail-showwhat"]:
+ revs |= blocked_revs
+ report_msg = "revisions blocked are:"
+
+ # Limit to revisions specified by -r (if any)
+ if opts["revision"]:
+ revs = revs & RevisionSet(opts["revision"])
+
+ display_revisions(revs, opts["avail-display"],
+ report_msg,
+ opts["source-url"])
+
+def action_integrated(branch_dir, branch_props):
+ """Show change sets already merged. This set of revisions is
+ calculated from taking svnmerge-integrated property from the
+ branch, and subtracting any revision older than the branch
+ creation revision."""
+ # Extract the integration info for the branch_dir
+ branch_props = get_merge_props(branch_dir)
+ revs = merge_props_to_revision_set(branch_props, opts["source-pathid"])
+
+ # Lookup the oldest revision on the branch path.
+ oldest_src_rev = get_created_rev(opts["source-url"])
+
+ # Subtract any revisions which pre-date the branch.
+ report("subtracting revisions which pre-date the source URL (%d)" %
+ oldest_src_rev)
+ revs = revs - RevisionSet(range(1, oldest_src_rev))
+
+ # Limit to revisions specified by -r (if any)
+ if opts["revision"]:
+ revs = revs & RevisionSet(opts["revision"])
+
+ display_revisions(revs, opts["integrated-display"],
+ "revisions already integrated are:", opts["source-url"])
+
+def action_merge(branch_dir, branch_props):
+ """Record merge meta data, and do the actual merge (if not
+ requested otherwise via --record-only)."""
+ # Check branch directory is ready for being modified
+ check_dir_clean(branch_dir)
+
+ source_revs, phantom_revs, reflected_revs, initialized_revs = \
+ analyze_source_revs(branch_dir, opts["source-url"],
+ find_reflected=
+ should_find_reflected(branch_dir))
+
+ if opts["revision"]:
+ revs = RevisionSet(opts["revision"])
+ else:
+ revs = source_revs
+
+ blocked_revs = get_blocked_revs(branch_dir, opts["source-pathid"])
+ merged_revs = opts["merged-revs"]
+
+ # Show what we're doing
+ if opts["verbose"]: # just to avoid useless calculations
+ if merged_revs & revs:
+ report('"%s" already contains revisions %s' % (branch_dir,
+ merged_revs & revs))
+ if phantom_revs:
+ report('memorizing phantom revision(s): %s' % phantom_revs)
+ if reflected_revs:
+ report('memorizing reflected revision(s): %s' % reflected_revs)
+ if blocked_revs & revs:
+ report('skipping blocked revision(s): %s' % (blocked_revs & revs))
+ if initialized_revs:
+ report('skipping initialized revision(s): %s' % initialized_revs)
+
+ # Compute final merge set.
+ revs = revs - merged_revs - blocked_revs - reflected_revs - \
+ phantom_revs - initialized_revs
+ if not revs:
+ report('no revisions to merge, exiting')
+ return
+
+ # When manually marking revisions as merged, we only update the
+ # integration meta data, and don't perform an actual merge.
+ record_only = opts["record-only"]
+
+ if record_only:
+ report('recording merge of revision(s) %s from "%s"' %
+ (revs, opts["source-url"]))
+ else:
+ report('merging in revision(s) %s from "%s"' %
+ (revs, opts["source-url"]))
+
+ # Do the merge(s). Note: the starting revision number to 'svn merge'
+ # is NOT inclusive so we have to subtract one from start.
+ # We try to keep the number of merge operations as low as possible,
+ # because it is faster and reduces the number of conflicts.
+ old_block_props = get_block_props(branch_dir)
+ merge_metadata = logs[opts["source-url"]].merge_metadata()
+ block_metadata = logs[opts["source-url"]].block_metadata()
+ for start,end in minimal_merge_intervals(revs, phantom_revs):
+ if not record_only:
+ # Preset merge/blocked properties to the source value at
+ # the start rev to avoid spurious property conflicts
+ set_merge_props(branch_dir, merge_metadata.get(start - 1))
+ set_block_props(branch_dir, block_metadata.get(start - 1))
+ # Do the merge
+ svn_command("merge --force -r %d:%d %s %s" % \
+ (start - 1, end, opts["source-url"], branch_dir))
+ # TODO: to support graph merging, add logic to merge the property
+ # meta-data manually
+
+ # Update the set of merged revisions.
+ merged_revs = merged_revs | revs | reflected_revs | phantom_revs | initialized_revs
+ branch_props[opts["source-pathid"]] = str(merged_revs)
+ set_merge_props(branch_dir, branch_props)
+ # Reset the blocked revs
+ set_block_props(branch_dir, old_block_props)
+
+ # Write out commit message if desired
+ if opts["commit-file"]:
+ f = open(opts["commit-file"], "w")
+ if record_only:
+ print >>f, 'Recorded merge of revisions %s via %s from ' % \
+ (revs, NAME)
+ else:
+ print >>f, 'Merged revisions %s via %s from ' % \
+ (revs, NAME)
+ print >>f, '%s' % opts["source-url"]
+ if opts["commit-verbose"]:
+ print >>f
+ print >>f, construct_merged_log_message(opts["source-url"], revs),
+
+ f.close()
+ report('wrote commit message to "%s"' % opts["commit-file"])
+
+def action_block(branch_dir, branch_props):
+ """Block revisions."""
+ # Check branch directory is ready for being modified
+ check_dir_clean(branch_dir)
+
+ source_revs, phantom_revs, reflected_revs, initialized_revs = \
+ analyze_source_revs(branch_dir, opts["source-url"])
+ revs_to_block = source_revs - opts["merged-revs"]
+
+ # Limit to revisions specified by -r (if any)
+ if opts["revision"]:
+ revs_to_block = RevisionSet(opts["revision"]) & revs_to_block
+
+ if not revs_to_block:
+ error('no available revisions to block')
+
+ # Change blocked information
+ blocked_revs = get_blocked_revs(branch_dir, opts["source-pathid"])
+ blocked_revs = blocked_revs | revs_to_block
+ set_blocked_revs(branch_dir, opts["source-pathid"], blocked_revs)
+
+ # Write out commit message if desired
+ if opts["commit-file"]:
+ f = open(opts["commit-file"], "w")
+ print >>f, 'Blocked revisions %s via %s' % (revs_to_block, NAME)
+ if opts["commit-verbose"]:
+ print >>f
+ print >>f, construct_merged_log_message(opts["source-url"],
+ revs_to_block),
+
+ f.close()
+ report('wrote commit message to "%s"' % opts["commit-file"])
+
+def action_unblock(branch_dir, branch_props):
+ """Unblock revisions."""
+ # Check branch directory is ready for being modified
+ check_dir_clean(branch_dir)
+
+ blocked_revs = get_blocked_revs(branch_dir, opts["source-pathid"])
+ revs_to_unblock = blocked_revs
+
+ # Limit to revisions specified by -r (if any)
+ if opts["revision"]:
+ revs_to_unblock = revs_to_unblock & RevisionSet(opts["revision"])
+
+ if not revs_to_unblock:
+ error('no available revisions to unblock')
+
+ # Change blocked information
+ blocked_revs = blocked_revs - revs_to_unblock
+ set_blocked_revs(branch_dir, opts["source-pathid"], blocked_revs)
+
+ # Write out commit message if desired
+ if opts["commit-file"]:
+ f = open(opts["commit-file"], "w")
+ print >>f, 'Unblocked revisions %s via %s' % (revs_to_unblock, NAME)
+ if opts["commit-verbose"]:
+ print >>f
+ print >>f, construct_merged_log_message(opts["source-url"],
+ revs_to_unblock),
+ f.close()
+ report('wrote commit message to "%s"' % opts["commit-file"])
+
+def action_rollback(branch_dir, branch_props):
+ """Rollback previously integrated revisions."""
+
+ # Make sure the revision arguments are present
+ if not opts["revision"]:
+ error("The '-r' option is mandatory for rollback")
+
+ # Check branch directory is ready for being modified
+ check_dir_clean(branch_dir)
+
+ # Extract the integration info for the branch_dir
+ branch_props = get_merge_props(branch_dir)
+ # Get the list of all revisions already merged into this source-pathid.
+ merged_revs = merge_props_to_revision_set(branch_props,
+ opts["source-pathid"])
+
+ # At which revision was the src created?
+ oldest_src_rev = get_created_rev(opts["source-url"])
+ src_pre_exist_range = RevisionSet("1-%d" % oldest_src_rev)
+
+ # Limit to revisions specified by -r (if any)
+ revs = merged_revs & RevisionSet(opts["revision"])
+
+ # make sure there's some revision to rollback
+ if not revs:
+ report("Nothing to rollback in revision range r%s" % opts["revision"])
+ return
+
+ # If even one specified revision lies outside the lifetime of the
+ # merge source, error out.
+ if revs & src_pre_exist_range:
+ err_str = "Specified revision range falls out of the rollback range.\n"
+ err_str += "%s was created at r%d" % (opts["source-pathid"],
+ oldest_src_rev)
+ error(err_str)
+
+ record_only = opts["record-only"]
+
+ if record_only:
+ report('recording rollback of revision(s) %s from "%s"' %
+ (revs, opts["source-url"]))
+ else:
+ report('rollback of revision(s) %s from "%s"' %
+ (revs, opts["source-url"]))
+
+ # Do the reverse merge(s). Note: the starting revision number
+ # to 'svn merge' is NOT inclusive so we have to subtract one from start.
+ # We try to keep the number of merge operations as low as possible,
+ # because it is faster and reduces the number of conflicts.
+ rollback_intervals = minimal_merge_intervals(revs, [])
+ # rollback in the reverse order of merge
+ rollback_intervals.reverse()
+ for start, end in rollback_intervals:
+ if not record_only:
+ # Do the merge
+ svn_command("merge --force -r %d:%d %s %s" % \
+ (end, start - 1, opts["source-url"], branch_dir))
+
+ # Write out commit message if desired
+ # calculate the phantom revs first
+ if opts["commit-file"]:
+ f = open(opts["commit-file"], "w")
+ if record_only:
+ print >>f, 'Recorded rollback of revisions %s via %s from ' % \
+ (revs , NAME)
+ else:
+ print >>f, 'Rolled back revisions %s via %s from ' % \
+ (revs , NAME)
+ print >>f, '%s' % opts["source-url"]
+
+ f.close()
+ report('wrote commit message to "%s"' % opts["commit-file"])
+
+ # Update the set of merged revisions.
+ merged_revs = merged_revs - revs
+ branch_props[opts["source-pathid"]] = str(merged_revs)
+ set_merge_props(branch_dir, branch_props)
+
+def action_uninit(branch_dir, branch_props):
+ """Uninit SOURCE URL."""
+ # Check branch directory is ready for being modified
+ check_dir_clean(branch_dir)
+
+ # If the source-pathid does not have an entry in the svnmerge-integrated
+ # property, simply error out.
+ if not branch_props.has_key(opts["source-pathid"]):
+ error('Repository-relative path "%s" does not contain merge '
+ 'tracking information for "%s"' \
+ % (opts["source-pathid"], branch_dir))
+
+ del branch_props[opts["source-pathid"]]
+
+ # Set merge property with the selected source deleted
+ set_merge_props(branch_dir, branch_props)
+
+ # Set blocked revisions for the selected source to None
+ set_blocked_revs(branch_dir, opts["source-pathid"], None)
+
+ # Write out commit message if desired
+ if opts["commit-file"]:
+ f = open(opts["commit-file"], "w")
+ print >>f, 'Removed merge tracking via "%s" for ' % NAME
+ print >>f, '%s' % opts["source-url"]
+ f.close()
+ report('wrote commit message to "%s"' % opts["commit-file"])
+
+###############################################################################
+# Command line parsing -- options and commands management
+###############################################################################
+
+class OptBase:
+ def __init__(self, *args, **kwargs):
+ self.help = kwargs["help"]
+ del kwargs["help"]
+ self.lflags = []
+ self.sflags = []
+ for a in args:
+ if a.startswith("--"): self.lflags.append(a)
+ elif a.startswith("-"): self.sflags.append(a)
+ else:
+ raise TypeError, "invalid flag name: %s" % a
+ if kwargs.has_key("dest"):
+ self.dest = kwargs["dest"]
+ del kwargs["dest"]
+ else:
+ if not self.lflags:
+ raise TypeError, "cannot deduce dest name without long options"
+ self.dest = self.lflags[0][2:]
+ if kwargs:
+ raise TypeError, "invalid keyword arguments: %r" % kwargs.keys()
+ def repr_flags(self):
+ f = self.sflags + self.lflags
+ r = f[0]
+ for fl in f[1:]:
+ r += " [%s]" % fl
+ return r
+
+class Option(OptBase):
+ def __init__(self, *args, **kwargs):
+ self.default = kwargs.setdefault("default", 0)
+ del kwargs["default"]
+ self.value = kwargs.setdefault("value", None)
+ del kwargs["value"]
+ OptBase.__init__(self, *args, **kwargs)
+ def apply(self, state, value):
+ assert value == ""
+ if self.value is not None:
+ state[self.dest] = self.value
+ else:
+ state[self.dest] += 1
+
+class OptionArg(OptBase):
+ def __init__(self, *args, **kwargs):
+ self.default = kwargs["default"]
+ del kwargs["default"]
+ self.metavar = kwargs.setdefault("metavar", None)
+ del kwargs["metavar"]
+ OptBase.__init__(self, *args, **kwargs)
+
+ if self.metavar is None:
+ if self.dest is not None:
+ self.metavar = self.dest.upper()
+ else:
+ self.metavar = "arg"
+ if self.default:
+ self.help += " (default: %s)" % self.default
+ def apply(self, state, value):
+ assert value is not None
+ state[self.dest] = value
+ def repr_flags(self):
+ r = OptBase.repr_flags(self)
+ return r + " " + self.metavar
+
+class CommandOpts:
+ class Cmd:
+ def __init__(self, *args):
+ self.name, self.func, self.usage, self.help, self.opts = args
+ def short_help(self):
+ return self.help.split(".")[0]
+ def __str__(self):
+ return self.name
+ def __call__(self, *args, **kwargs):
+ return self.func(*args, **kwargs)
+
+ def __init__(self, global_opts, common_opts, command_table, version=None):
+ self.progname = NAME
+ # version may be None (the default); guard before substituting.
+ if version is not None:
+ version = version.replace("%prog", self.progname)
+ self.version = version
+ self.cwidth = console_width() - 2
+ self.ctable = command_table.copy()
+ self.gopts = global_opts[:]
+ self.copts = common_opts[:]
+ self._add_builtins()
+ for k in self.ctable.keys():
+ cmd = self.Cmd(k, *self.ctable[k])
+ opts = []
+ for o in cmd.opts:
+ if isinstance(o, types.StringType) or \
+ isinstance(o, types.UnicodeType):
+ o = self._find_common(o)
+ opts.append(o)
+ cmd.opts = opts
+ self.ctable[k] = cmd
+
+ def _add_builtins(self):
+ self.gopts.append(
+ Option("-h", "--help", help="show help for this command and exit"))
+ if self.version is not None:
+ self.gopts.append(
+ Option("-V", "--version", help="show version info and exit"))
+ self.ctable["help"] = (self._cmd_help,
+ "help [COMMAND]",
+ "Display help for a specific command. If COMMAND is omitted, "
+ "display brief command description.",
+ [])
+
+ def _cmd_help(self, cmd=None, *args):
+ if args:
+ self.error("wrong number of arguments", "help")
+ if cmd is not None:
+ cmd = self._command(cmd)
+ self.print_command_help(cmd)
+ else:
+ self.print_command_list()
+
+ def _paragraph(self, text, width=78):
+ chunks = re.split("\s+", text.strip())
+ chunks.reverse()
+ lines = []
+ while chunks:
+ L = chunks.pop()
+ while chunks and len(L) + len(chunks[-1]) + 1 <= width:
+ L += " " + chunks.pop()
+ lines.append(L)
+ return lines
+
+ def _paragraphs(self, text, *args, **kwargs):
+ pars = text.split("\n\n")
+ lines = self._paragraph(pars[0], *args, **kwargs)
+ for p in pars[1:]:
+ lines.append("")
+ lines.extend(self._paragraph(p, *args, **kwargs))
+ return lines
+
+ def _print_wrapped(self, text, indent=0):
+ text = self._paragraphs(text, self.cwidth - indent)
+ print text.pop(0)
+ for t in text:
+ print " " * indent + t
+
+ def _find_common(self, fl):
+ for o in self.copts:
+ if fl in o.lflags+o.sflags:
+ return o
+ assert False, fl
+
+ def _compute_flags(self, opts, check_conflicts=True):
+ back = {}
+ sfl = ""
+ lfl = []
+ for o in opts:
+ sapp = lapp = ""
+ if isinstance(o, OptionArg):
+ sapp, lapp = ":", "="
+ for s in o.sflags:
+ if check_conflicts and back.has_key(s):
+ raise RuntimeError, "option conflict: %s" % s
+ back[s] = o
+ sfl += s[1:] + sapp
+ for l in o.lflags:
+ if check_conflicts and back.has_key(l):
+ raise RuntimeError, "option conflict: %s" % l
+ back[l] = o
+ lfl.append(l[2:] + lapp)
+ return sfl, lfl, back
+
+ def _extract_command(self, args):
+ """
+ Try to extract the command name from the argument list. This is
+ non-trivial because we want to allow command-specific options even
+ before the command itself.
+ """
+ opts = self.gopts[:]
+ for cmd in self.ctable.values():
+ opts.extend(cmd.opts)
+ sfl, lfl, _ = self._compute_flags(opts, check_conflicts=False)
+
+ lopts,largs = getopt.getopt(args, sfl, lfl)
+ if not largs:
+ return None
+ return self._command(largs[0])
+
+ def _fancy_getopt(self, args, opts, state=None):
+ if state is None:
+ state = {}
+ for o in opts:
+ if not state.has_key(o.dest):
+ state[o.dest] = o.default
+
+ sfl, lfl, back = self._compute_flags(opts)
+ try:
+ lopts,args = getopt.gnu_getopt(args, sfl, lfl)
+ except AttributeError:
+ # Before Python 2.3, there was no gnu_getopt support.
+ # So we can't parse intermixed positional arguments
+ # and options.
+ lopts,args = getopt.getopt(args, sfl, lfl)
+
+ for o,v in lopts:
+ back[o].apply(state, v)
+ return state, args
+
+ def _command(self, cmd):
+ if not self.ctable.has_key(cmd):
+ self.error("unknown command: '%s'" % cmd)
+ return self.ctable[cmd]
+
+ def parse(self, args):
+ if not args:
+ self.print_small_help()
+ sys.exit(0)
+
+ cmd = None
+ try:
+ cmd = self._extract_command(args)
+ opts = self.gopts[:]
+ if cmd:
+ opts.extend(cmd.opts)
+ args.remove(cmd.name)
+ state, args = self._fancy_getopt(args, opts)
+ except getopt.GetoptError, e:
+ self.error(e, cmd)
+
+ # Handle builtins
+ if self.version is not None and state["version"]:
+ self.print_version()
+ sys.exit(0)
+ if state["help"]: # special case for --help
+ if cmd:
+ self.print_command_help(cmd)
+ sys.exit(0)
+ cmd = self.ctable["help"]
+ else:
+ if cmd is None:
+ self.error("command argument required")
+ if str(cmd) == "help":
+ cmd(*args)
+ sys.exit(0)
+ return cmd, args, state
+
+ def error(self, s, cmd=None):
+ print >>sys.stderr, "%s: %s" % (self.progname, s)
+ if cmd is not None:
+ self.print_command_help(cmd)
+ else:
+ self.print_small_help()
+ sys.exit(1)
+ def print_small_help(self):
+ print "Type '%s help' for usage" % self.progname
+ def print_usage_line(self):
+ print "usage: %s <subcommand> [options...] [args...]\n" % self.progname
+ def print_command_list(self):
+ print "Available commands (use '%s help COMMAND' for more details):\n" \
+ % self.progname
+ cmds = self.ctable.keys()
+ cmds.sort()
+ indent = max(map(len, cmds))
+ for c in cmds:
+ h = self.ctable[c].short_help()
+ print " %-*s " % (indent, c),
+ self._print_wrapped(h, indent+6)
+ def print_command_help(self, cmd):
+ cmd = self.ctable[str(cmd)]
+ print 'usage: %s %s\n' % (self.progname, cmd.usage)
+ self._print_wrapped(cmd.help)
+ def print_opts(opts, self=self):
+ if not opts: return
+ flags = [o.repr_flags() for o in opts]
+ indent = max(map(len, flags))
+ for f,o in zip(flags, opts):
+ print " %-*s :" % (indent, f),
+ self._print_wrapped(o.help, indent+5)
+ print '\nCommand options:'
+ print_opts(cmd.opts)
+ print '\nGlobal options:'
+ print_opts(self.gopts)
+
+ def print_version(self):
+ print self.version
+
+###############################################################################
+# Options and Commands description
+###############################################################################
+
+global_opts = [
+ Option("-F", "--force",
+ help="force operation even if the working copy is not clean, or "
+ "there are pending updates"),
+ Option("-n", "--dry-run",
+ help="don't actually change anything, just pretend; "
+ "implies --show-changes"),
+ Option("-s", "--show-changes",
+ help="show subversion commands that make changes"),
+ Option("-v", "--verbose",
+ help="verbose mode: output more information about progress"),
+ OptionArg("-u", "--username",
+ default=None,
+ help="invoke subversion commands with the supplied username"),
+ OptionArg("-p", "--password",
+ default=None,
+ help="invoke subversion commands with the supplied password"),
+ OptionArg("-c", "--config-dir", metavar="DIR",
+ default=None,
+ help="cause subversion commands to consult runtime config directory DIR"),
+]
+
+common_opts = [
+ Option("-b", "--bidirectional",
+ value=True,
+ default=False,
+ help="remove reflected and initialized revisions from merge candidates. "
+ "Not required but may be specified to speed things up slightly"),
+ OptionArg("-f", "--commit-file", metavar="FILE",
+ default="svnmerge-commit-message.txt",
+ help="set the name of the file where the suggested log message "
+ "is written to"),
+ Option("-M", "--record-only",
+ value=True,
+ default=False,
+ help="do not perform an actual merge of the changes, yet record "
+ "that a merge happened"),
+ OptionArg("-r", "--revision",
+ metavar="REVLIST",
+ default="",
+ help="specify a revision list, consisting of revision numbers "
+ 'and ranges separated by commas, e.g., "534,537-539,540"'),
+ OptionArg("-S", "--source", "--head",
+ default=None,
+ help="specify a merge source for this branch. It can be either "
+ "a working directory path, a full URL, or an unambiguous "
+ "substring of one of the locations for which merge tracking was "
+ "already initialized. Needed only to disambiguate in case of "
+ "multiple merge sources"),
+]
+
+command_table = {
+ "init": (action_init,
+ "init [OPTION...] [SOURCE]",
+ """Initialize merge tracking from SOURCE on the current working
+ directory.
+
+ If SOURCE is specified, all the revisions in SOURCE are marked as already
+ merged; if this is not correct, you can use --revision to specify the
+ exact list of already-merged revisions.
+
+ If SOURCE is omitted, then it is computed from the "svn cp" history of the
+ current working directory (searching back for the branch point); in this
+ case, %s assumes that no revision has been integrated yet since
+ the branch point (unless you teach it with --revision).""" % NAME,
+ [
+ "-f", "-r", # import common opts
+ OptionArg("-L", "--location-type",
+ dest="location-type",
+ default="path",
+ help="Use this type of location identifier in the new " +
+ "Subversion properties; 'uuid', 'url', or 'path' " +
+ "(default)"),
+ ]),
+
+ "avail": (action_avail,
+ "avail [OPTION...] [PATH]",
+ """Show unmerged revisions available for PATH as a revision list.
+ If --revision is given, the revisions shown will be limited to those
+ also specified in the option.
+
+ When svnmerge is used to merge changes bidirectionally between a
+ branch and its source, the same changes must not be merged back
+ and forth: e.g., if you committed a merge of a certain revision
+ of the branch into the source, you do not want that commit to
+ appear as available to be merged into the branch (as the code
+ originated in the branch itself!). svnmerge will automatically
+ exclude these so-called "reflected" revisions.""",
+ [
+ Option("-A", "--all",
+ dest="avail-showwhat",
+ value=["blocked", "avail"],
+ default=["avail"],
+ help="show both available and blocked revisions (aka ignore "
+ "blocked revisions)"),
+ "-b",
+ Option("-B", "--blocked",
+ dest="avail-showwhat",
+ value=["blocked"],
+ help="show the blocked revision list (see '%s block')" % NAME),
+ Option("-d", "--diff",
+ dest="avail-display",
+ value="diffs",
+ default="revisions",
+ help="show corresponding diff instead of revision list"),
+ Option("--summarize",
+ dest="avail-display",
+ value="summarize",
+ help="show summarized diff instead of revision list"),
+ Option("-l", "--log",
+ dest="avail-display",
+ value="logs",
+ help="show corresponding log history instead of revision list"),
+ "-r",
+ "-S",
+ ]),
+
+ "integrated": (action_integrated,
+ "integrated [OPTION...] [PATH]",
+ """Show merged revisions available for PATH as a revision list.
+ If --revision is given, the revisions shown will be limited to
+ those also specified in the option.""",
+ [
+ Option("-d", "--diff",
+ dest="integrated-display",
+ value="diffs",
+ default="revisions",
+ help="show corresponding diff instead of revision list"),
+ Option("-l", "--log",
+ dest="integrated-display",
+ value="logs",
+ help="show corresponding log history instead of revision list"),
+ "-r",
+ "-S",
+ ]),
+
+ "rollback": (action_rollback,
+ "rollback [OPTION...] [PATH]",
+ """Rollback previously merged in revisions from PATH. The
+ --revision option is mandatory, and specifies which revisions
+ will be rolled back. Only the previously integrated merges
+ will be rolled back.
+
+ When manually rolling back changes, --record-only can be used to
+ instruct %s that a manual rollback of a certain revision
+ already happened, so that it can record it and offer that
+ revision for merge henceforth.""" % (NAME),
+ [
+ "-f", "-r", "-S", "-M", # import common opts
+ ]),
+
+ "merge": (action_merge,
+ "merge [OPTION...] [PATH]",
+ """Merge in revisions into PATH from its source. If --revision is omitted,
+ all the available revisions will be merged. In any case, already merged-in
+ revisions will NOT be merged again.
+
+ When svnmerge is used to bidirectionally merge changes between a
+ branch and its source, it is necessary to not merge the same changes
+ forth and back: e.g., if you committed a merge of a certain
+ revision of the branch into the source, you do not want that commit
+ to appear as available to merged into the branch (as the code
+ originated in the branch itself!). svnmerge will automatically
+ exclude these so-called "reflected" revisions.
+
+ When manually merging changes across branches, --record-only can
+ be used to instruct %s that a manual merge of a certain revision
+ already happened, so that it can record it and not offer that
+ revision for merge anymore. Conversely, when there are revisions
+ which should not be merged, use '%s block'.""" % (NAME, NAME),
+ [
+ "-b", "-f", "-r", "-S", "-M", # import common opts
+ ]),
+
+ "block": (action_block,
+ "block [OPTION...] [PATH]",
+ """Block revisions within PATH so that they disappear from the available
+ list. This is useful to hide revisions which will not be integrated.
+ If --revision is omitted, it defaults to all the available revisions.
+
+ Do not use this command to hide revisions that were manually merged
+ into the branch. Instead, use '%s merge --record-only', which
+ records that a merge happened (as opposed to a merge which should
+ not happen).""" % NAME,
+ [
+ "-f", "-r", "-S", # import common opts
+ ]),
+
+ "unblock": (action_unblock,
+ "unblock [OPTION...] [PATH]",
+ """Revert the effect of '%s block'. If --revision is omitted, all the
+ blocked revisions are unblocked.""" % NAME,
+ [
+ "-f", "-r", "-S", # import common opts
+ ]),
+
+ "uninit": (action_uninit,
+ "uninit [OPTION...] [PATH]",
+ """Remove merge tracking information from PATH. It cleans any kind of merge
+ tracking information (including the list of blocked revisions). If there
+ are multiple sources, use --source to indicate which source you want to
+ forget about.""",
+ [
+ "-f", "-S", # import common opts
+ ]),
+}
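+
+# A minimal workflow sketch (illustrative only: the revision numbers and
+# the "svnmerge.py" invocation below are hypothetical examples):
+#
+#   cd /path/to/branch/working/copy
+#   svnmerge.py init                   # record the branch point
+#   svn commit -F svnmerge-commit-message.txt
+#   svnmerge.py avail                  # list unmerged revisions
+#   svnmerge.py merge -r 534,537-539   # merge a specific revision list
+#   svn commit -F svnmerge-commit-message.txt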
+
+
+def main(args):
+ global opts
+
+ # Initialize default options
+ opts = default_opts.copy()
+ logs.clear()
+
+ optsparser = CommandOpts(global_opts, common_opts, command_table,
+ version="%%prog r%s\n modified: %s\n\n"
+ "Copyright (C) 2004,2005 Awarix Inc.\n"
+ "Copyright (C) 2005, Giovanni Bajo"
+ % (__revision__, __date__))
+
+ cmd, args, state = optsparser.parse(args)
+ opts.update(state)
+
+ source = opts.get("source", None)
+ branch_dir = "."
+
+ if str(cmd) == "init":
+ if len(args) == 1:
+ source = args[0]
+ elif len(args) > 1:
+ optsparser.error("wrong number of parameters", cmd)
+ elif str(cmd) in command_table.keys():
+ if len(args) == 1:
+ branch_dir = args[0]
+ elif len(args) > 1:
+ optsparser.error("wrong number of parameters", cmd)
+ else:
+ assert False, "command not handled: %s" % cmd
+
+ # Validate branch_dir
+ if not is_wc(branch_dir):
+ if str(cmd) == "avail":
+ info = None
+ # Note that "svn info" does not exit with an error when given
+ # an invalid target (as one might expect), so the try/except
+ # is not strictly necessary; it is kept to make the intended
+ # error handling explicit.
+ try:
+ info = get_svninfo(branch_dir)
+ except LaunchError:
+ pass
+ # test that we definitely targeted a subversion directory,
+ # mirroring the purpose of the earlier is_wc() call
+ if info is None or not info.has_key("Node Kind") or info["Node Kind"] != "directory":
+ error('"%s" is neither a valid URL, nor a working directory' % branch_dir)
+ else:
+ error('"%s" is not a subversion working directory' % branch_dir)
+
+ # give out some hints as to potential pathids
+ PathIdentifier.hint(branch_dir)
+ if source: PathIdentifier.hint(source)
+
+ # Extract the integration info for the branch_dir
+ branch_props = get_merge_props(branch_dir)
+
+ # Calculate source_url and source_path
+ report("calculate source path for the branch")
+ if not source:
+ if str(cmd) == "init":
+ cf_source, cf_rev, copy_committed_in_rev = get_copyfrom(branch_dir)
+ if not cf_source:
+ error('no copyfrom info available. '
+ 'Explicit source argument (-S/--source) required.')
+ opts["source-url"] = get_repo_root(branch_dir) + cf_source
+ opts["source-pathid"] = PathIdentifier.from_target(opts["source-url"])
+
+ if not opts["revision"]:
+ opts["revision"] = "1-" + cf_rev
+ else:
+ opts["source-pathid"] = get_default_source(branch_dir, branch_props)
+ opts["source-url"] = opts["source-pathid"].get_url()
+
+ assert is_pathid(opts["source-pathid"])
+ assert is_url(opts["source-url"])
+ else:
+ # The source was given as a command line argument and is stored in
+ # SOURCE. Ensure that the specified source does not end in a /,
+ # otherwise it's easy to have the same source path listed more
+ # than once in the integrated version properties, with and without
+ # trailing /'s.
+ source = rstrip(source, "/")
+ if not is_wc(source) and not is_url(source):
+ # Check if it is a substring of a pathid recorded
+ # within the branch properties.
+ found = []
+ for pathid in branch_props.keys():
+ if pathid.match_substring(source):
+ found.append(pathid)
+ if len(found) == 1:
+ # (assumes pathid is a repository-relative-path)
+ source_pathid = found[0]
+ source = source_pathid.get_url()
+ else:
+ error('"%s" is neither a valid URL, nor an unambiguous '
+ 'substring of a repository path, nor a working directory'
+ % source)
+ else:
+ source_pathid = PathIdentifier.from_target(source)
+
+ if str(cmd) == "init" and \
+ source_pathid == PathIdentifier.from_target("."):
+ error("cannot init integration source path '%s'\n"
+ "Its repository-relative path must differ from the "
+ "repository-relative path of the current directory."
+ % source_pathid)
+ opts["source-pathid"] = source_pathid
+ opts["source-url"] = target_to_url(source)
+
+ # Sanity check source_url
+ assert is_url(opts["source-url"])
+ # SVN does not support non-normalized URL (and we should not
+ # have created them)
+ assert opts["source-url"].find("/..") < 0
+
+ report('source is "%s"' % opts["source-url"])
+
+ # Get previously merged revisions (except when command is init)
+ if str(cmd) != "init":
+ opts["merged-revs"] = merge_props_to_revision_set(branch_props,
+ opts["source-pathid"])
+
+ # Perform the action
+ cmd(branch_dir, branch_props)
+
+
+if __name__ == "__main__":
+ try:
+ main(sys.argv[1:])
+ except LaunchError, (ret, cmd, out):
+ err_msg = "command execution failed (exit code: %d)\n" % ret
+ err_msg += cmd + "\n"
+ err_msg += "".join(out)
+ error(err_msg)
+ except KeyboardInterrupt:
+ # Avoid traceback on CTRL+C
+ print "aborted by user"
+ sys.exit(1)
diff --git a/staging/subversion/svnserve b/staging/subversion/svnserve
new file mode 100755
index 000000000..670fee742
--- /dev/null
+++ b/staging/subversion/svnserve
@@ -0,0 +1,42 @@
+#!/bin/bash
+
+. /etc/rc.conf
+. /etc/rc.d/functions
+. /etc/conf.d/svnserve
+
+PID=`pidof -o %PPID /usr/bin/svnserve`
+case "$1" in
+ start)
+ stat_busy "Starting svnserve"
+ if [ -z "$PID" ]; then
+ if [ -n "$SVNSERVE_USER" ]; then
+ su -s '/bin/sh' $SVNSERVE_USER -c "/usr/bin/svnserve -d $SVNSERVE_ARGS" &
+ else
+ /usr/bin/svnserve -d $SVNSERVE_ARGS &
+ fi
+ fi
+ if [ ! -z "$PID" -o $? -gt 0 ]; then
+ stat_fail
+ else
+ add_daemon svnserve
+ stat_done
+ fi
+ ;;
+ stop)
+ stat_busy "Stopping svnserve"
+ [ ! -z "$PID" ] && kill $PID &> /dev/null
+ if [ $? -gt 0 ]; then
+ stat_fail
+ else
+ rm_daemon svnserve
+ stat_done
+ fi
+ ;;
+ restart)
+ $0 stop
+ sleep 1
+ $0 start
+ ;;
+ *)
+ echo "usage: $0 {start|stop|restart}"
+esac
diff --git a/staging/subversion/svnserve.conf b/staging/subversion/svnserve.conf
new file mode 100644
index 000000000..37fb7ea10
--- /dev/null
+++ b/staging/subversion/svnserve.conf
@@ -0,0 +1,7 @@
+#
+# Parameters to be passed to svnserve
+#
+#SVNSERVE_ARGS="-r /path/to/some/repos"
+SVNSERVE_ARGS=""
+
+#SVNSERVE_USER="svn"
diff --git a/testing/blender/PKGBUILD b/testing/blender/PKGBUILD
index 152dc77bf..68688bc51 100644
--- a/testing/blender/PKGBUILD
+++ b/testing/blender/PKGBUILD
@@ -1,18 +1,20 @@
-# $Id: PKGBUILD 119990 2011-04-18 02:49:47Z eric $
+# $Id: PKGBUILD 121081 2011-04-29 00:16:23Z eric $
# Contributor: John Sowiak <john@archlinux.org>
# Maintainer: tobias <tobias@archlinux.org>
# Apparently, the blender guys refuse to release source tarballs for
# intermediate releases that deal mainly with binaries but incorporate tiny
-# minor changes from cvs. Since I'm sick and tired of the urges of users that
-# look for release numbers only we make a messy PKGBUILD that can checkout cvs
-# tags if necessary.
-#_cvs=true
-_cvs=false
+# minor changes from svn. Since I'm sick and tired of the urges of users who
+# only look at release numbers, we keep a messy PKGBUILD that can check out
+# an svn release if necessary.
+
+_svn=true
+#_svn=false
pkgname=blender
-pkgver=2.57
-pkgrel=1
+pkgver=2.57b
+pkgrel=2
+epoch=1
pkgdesc="A fully integrated 3D graphics creation suite"
arch=('i686' 'x86_64')
license=('GPL')
@@ -20,30 +22,36 @@ url="http://www.blender.org"
depends=('libjpeg' 'libpng' 'openjpeg' 'libtiff' 'openexr' 'sdl' 'python' 'desktop-file-utils' \
'shared-mime-info' 'hicolor-icon-theme' 'xdg-utils' 'gettext' 'libxi' 'libxmu' 'mesa' \
'freetype2' 'openal' 'libsndfile' 'libsamplerate' 'ffmpeg')
-makedepends=('cmake' 'cvs')
+makedepends=('cmake') # 'subversion'
install=blender.install
-if [ ! $_cvs -o $_cvs = false ]; then
- makedepends=('cmake')
+if [ $_svn = false ]; then
source=(http://download.blender.org/source/$pkgname-$pkgver.tar.gz)
md5sums=('e6cb5523323a71c070051e6fe6efe497')
+else
+ source=(ftp://ftp.archlinux.org/other/${pkgname}/$pkgname-$pkgver.tar.xz)
+ md5sums=('59e344d4ec48750a86e4ef079c3acbfc')
fi
-build() {
- if [ $_cvs = true ]; then
- cd "$srcdir/"
- #cvs -d:pserver:anonymous@cvs.blender.org:/cvsroot/bf-blender login
- cvs -z3 -d:pserver:anonymous@cvs.blender.org:/cvsroot/bf-blender checkout $pkgname
- _cvsrel=$pkgname-$(echo $pkgver | sed 's/\./-/')-release
- cvs -d:pserver:anonymous@cvs.blender.org:/cvsroot/bf-blender update -r $_cvsrel
- find $pkgname/{bin,release/scripts} -name 'CVS' -exec rm -rf {} \; 2> /dev/null
- cd $pkgname
- else
- cd "$srcdir/$pkgname-$pkgver"
- fi
+# source PKGBUILD && mksource
+mksource() {
+ _svnver=36339
+ _svntrunk="https://svn.blender.org/svnroot/bf-blender/trunk/blender"
+ _svnmod="$pkgname-$pkgver"
+ mkdir ${pkgname}-$pkgver
+ pushd ${pkgname}-$pkgver
+ svn co $_svntrunk --config-dir ./ -r $_svnver $_svnmod
+ find . -depth -type d -name .svn -exec rm -rf {} \;
+ tar -cJf ../${pkgname}-$pkgver.tar.xz ${pkgname}-$pkgver/*
+ popd
+}
+build() {
+ cd "$srcdir/$pkgname-$pkgver"
mkdir build
cd build
+ [[ $CARCH == i686 ]] && ENABLESSE2="-DSUPPORT_SSE2_BUILD:BOOL=OFF"
+
cmake .. \
-DCMAKE_INSTALL_PREFIX:PATH=/usr \
-DCMAKE_BUILD_TYPE:STRING=Release \
@@ -55,27 +63,27 @@ build() {
-DPYTHON_VERSION:STRING=3.2 \
-DPYTHON_LIBPATH:STRING=/usr/lib \
-DPYTHON_LIBRARY:STRING=python3.2mu \
- -DPYTHON_INCLUDE_DIRS:STRING=/usr/include/python3.2mu
+ -DPYTHON_INCLUDE_DIRS:STRING=/usr/include/python3.2mu \
+ $ENABLESSE2
make $MAKEFLAGS
+
+ cp -rf "$srcdir"/${pkgname}-$pkgver/release/plugins/* \
+ "$srcdir"/${pkgname}-$pkgver/source/blender/blenpluginapi/
+ cd "$srcdir"/${pkgname}-$pkgver/source/blender/blenpluginapi
+ chmod 755 bmake
+ make
}
package() {
- if [ $_cvs = true ]; then
- cd "$srcdir/$pkgname"
- else
- cd "$srcdir/$pkgname-$pkgver"
- fi
-
- cd build
+ cd "$srcdir/$pkgname-$pkgver/build"
make DESTDIR="${pkgdir}" install
python -m compileall "${pkgdir}/usr/share/blender"
- for i in 128 16 192 22 32 48 64 96 ; do
- install -D -m644 ../release/freedesktop/icons/${i}x${i}/blender.png \
- "${pkgdir}/usr/share/icons/hicolor/${i}x${i}/apps/blender.png"
- done
-
- install -D -m644 ../release/freedesktop/icons/scalable/blender.svg \
- "${pkgdir}/usr/share/icons/hicolor/scalable/apps/blender.svg"
+# install plugins
+ install -d -m755 "$pkgdir"/usr/share/blender/${pkgver%[a-z]}/plugins/{sequence,texture}
+ cp "$srcdir"/${pkgname}-$pkgver/source/blender/blenpluginapi/sequence/*.so \
+ "$pkgdir"/usr/share/blender/${pkgver%[a-z]}/plugins/sequence/
+ cp "$srcdir"/${pkgname}-$pkgver/source/blender/blenpluginapi/texture/*.so \
+ "$pkgdir"/usr/share/blender/${pkgver%[a-z]}/plugins/texture/
}
diff --git a/testing/gnome-desktop-sharp/PKGBUILD b/testing/gnome-desktop-sharp/PKGBUILD
new file mode 100644
index 000000000..0ca1779fb
--- /dev/null
+++ b/testing/gnome-desktop-sharp/PKGBUILD
@@ -0,0 +1,35 @@
+# $Id: PKGBUILD 121043 2011-04-28 09:15:35Z heftig $
+# Maintainer: Jan de Groot <jgc@archlinux.org>
+
+pkgname=gnome-desktop-sharp
+pkgver=2.26.0
+pkgrel=6
+pkgdesc="GNOME desktop bindings for C#"
+arch=('i686' 'x86_64')
+license=(LGPL)
+url="http://gtk-sharp.sourceforge.net"
+depends=('gnome-sharp' 'gnome-desktop2' 'vte' 'librsvg' 'libwnck' 'gtkhtml' 'gtksourceview2')
+makedepends=('monodoc' 'libgnomeprintui')
+optdepends=('libgnomeprintui')
+options=('!libtool')
+source=(http://ftp.gnome.org/pub/gnome/sources/$pkgname/${pkgver%.*}/$pkgname-$pkgver.tar.bz2
+ gnome-desktop-sharp-lib-target.patch)
+md5sums=('4bc990900bb318b2ba0b0e7998bb47d1'
+ '980cd3adf5e745f4caee0a172a51dcc3')
+
+build() {
+ # get rid of those .wapi errors; thanks to brice
+ export MONO_SHARED_DIR="$srcdir/wapi"
+ mkdir -p "$MONO_SHARED_DIR"
+
+ cd "$srcdir/$pkgname-$pkgver"
+ patch -Np1 -i $srcdir/gnome-desktop-sharp-lib-target.patch
+ ./configure --prefix=/usr --sysconfdir=/etc
+ make
+}
+
+package() {
+ cd "$srcdir/$pkgname-$pkgver"
+ make GACUTIL="/usr/bin/gacutil /root ${pkgdir}/usr/lib" \
+ DESTDIR="$pkgdir" install
+}
diff --git a/testing/gnome-desktop-sharp/gnome-desktop-sharp-lib-target.patch b/testing/gnome-desktop-sharp/gnome-desktop-sharp-lib-target.patch
new file mode 100644
index 000000000..3f0ea4933
--- /dev/null
+++ b/testing/gnome-desktop-sharp/gnome-desktop-sharp-lib-target.patch
@@ -0,0 +1,9 @@
+--- gnome-desktop-sharp-2.26.0/gnomedesktop/gnomedesktop-sharp.dll.config.in.false 2009-02-23 19:01:27.000000000 +0100
++++ gnome-desktop-sharp-2.26.0/gnomedesktop/gnomedesktop-sharp.dll.config.in 2010-02-11 15:59:36.000000000 +0100
+@@ -3,5 +3,5 @@
+ <dllmap dll="libgobject-2.0-0.dll" target="libgobject-2.0@LIB_PREFIX@.0@LIB_SUFFIX@"/>
+ <dllmap dll="libgthread-2.0-0.dll" target="libgthread-2.0@LIB_PREFIX@.0@LIB_SUFFIX@"/>
+ <dllmap dll="libgtk-win32-2.0-0.dll" target="libgtk-x11-2.0@LIB_PREFIX@.0@LIB_SUFFIX@"/>
+- <dllmap dll="gnome-desktop-2" target="libgnome-desktop-2@LIB_PREFIX@.11@LIB_SUFFIX@"/>
++ <dllmap dll="gnome-desktop-2" target="libgnome-desktop-2@LIB_PREFIX@.17@LIB_SUFFIX@"/>
+ </configuration>
diff --git a/testing/gnome-desktop2/PKGBUILD b/testing/gnome-desktop2/PKGBUILD
new file mode 100644
index 000000000..3829d54f7
--- /dev/null
+++ b/testing/gnome-desktop2/PKGBUILD
@@ -0,0 +1,32 @@
+# $Id: PKGBUILD 121039 2011-04-28 07:31:32Z heftig $
+# Maintainer: Jan "heftig" Steffens <jan.steffens@gmail.com>
+# Contributor: Jan de Groot <jan@archlinux.org>
+
+_pkgname=gnome-desktop
+pkgname=${_pkgname}2
+pkgver=2.32.1
+pkgrel=1
+pkgdesc="The GNOME Desktop"
+arch=(i686 x86_64)
+license=(GPL LGPL)
+depends=(gconf gtk2 startup-notification)
+makedepends=(gnome-doc-utils intltool)
+url="http://www.gnome.org"
+options=(!libtool !emptydirs)
+source=(http://ftp.gnome.org/pub/gnome/sources/$_pkgname/${pkgver%.*}/$_pkgname-$pkgver.tar.bz2)
+sha256sums=('55cbecf67efe1fa1e57ac966520a7c46d799c8ba3c652a1219f60cafccb3739d')
+
+build() {
+ cd "$srcdir/$_pkgname-$pkgver"
+ ./configure --prefix=/usr --sysconfdir=/etc \
+ --localstatedir=/var --disable-static \
+ --with-gnome-distributor="Archlinux" \
+ --disable-scrollkeeper \
+ --disable-gnome-about --disable-desktop-docs
+ make
+}
+
+package() {
+ cd "$srcdir/$_pkgname-$pkgver"
+ make DESTDIR="$pkgdir" install
+}