author     root <root@rshg047.dnsready.net>    2011-06-02 22:47:10 +0000
committer  root <root@rshg047.dnsready.net>    2011-06-02 22:47:10 +0000
commit     2d8c4c44185a682290ccde4d23132ae3acf01678 (patch)
tree       c23dfddada868c72eb548f8159b7fd0b1179e02c
parent     e7b9c9697e6a50c3b9e78941fa95ba11c716d238 (diff)

Thu Jun 2 22:47:10 UTC 2011

88 files changed, 8073 insertions, 318 deletions
diff --git a/community/balsa/PKGBUILD b/community/balsa/PKGBUILD
index 1f69914b0..5ca276e96 100644
--- a/community/balsa/PKGBUILD
+++ b/community/balsa/PKGBUILD
@@ -1,16 +1,16 @@
-# $Id: PKGBUILD 48198 2011-05-29 16:56:14Z bfanella $
+# $Id: PKGBUILD 48556 2011-06-02 02:30:07Z bfanella $
 # Maintainer : Ionut Biru <ibiru@archlinux.org>
 # Maintainer: Brad Fanella <bradfanella@archlinux.us>
 # Contributor: Roman Kyrylych <roman@archlinux.org>
 
 pkgname=balsa
 pkgver=2.4.9
-pkgrel=3
+pkgrel=4
 pkgdesc="An e-mail client for GNOME"
 arch=('i686' 'x86_64')
 license=('GPL')
 url='http://pawsa.fedorapeople.org/balsa/'
-depends=('libgssglue' 'gmime' 'libwebkit' 'libesmtp' 'libnotify' 'gpgme' 'gtksourceview2' 'gtkspell' 'gnome-icon-theme')
+depends=('gmime' 'libwebkit' 'libesmtp' 'libnotify' 'gpgme' 'gtksourceview2' 'gtkspell' 'gnome-icon-theme')
 makedepends=('perlxml' 'gnome-doc-utils' 'intltool' 'namcap')
 install=balsa.install
 source=(http://pawsa.fedorapeople.org/${pkgname}/${pkgname}-${pkgver}.tar.bz2)
diff --git a/community/cegui/PKGBUILD b/community/cegui/PKGBUILD
index 9b5c83ac2..c84ee1806 100644
--- a/community/cegui/PKGBUILD
+++ b/community/cegui/PKGBUILD
@@ -1,39 +1,32 @@
-# $Id: PKGBUILD 37913 2011-01-19 01:10:11Z lcarlier $
-# Maintainer:
+# $Id: PKGBUILD 48542 2011-06-01 17:05:37Z stephane $
+# Maintainer: Sven-Hendrik Haase <sh@lutzhaase.com>
 # Contributor: Juergen Hoetzel <juergen@archlinux.org>
 # Contributor: William Rea <sillywilly@gmail.com>,
 # Contributor: Bjorn Lindeijer <bjorn@lindeijer.nl>
 
 pkgname=cegui
 pkgver=0.7.5
-pkgrel=1
+pkgrel=4
 pkgdesc="A free library providing windowing and widgets for graphics APIs/engines"
-arch=(i686 x86_64)
+arch=('i686' 'x86_64')
 url="http://crayzedsgui.sourceforge.net"
 #options=('!libtool')
 license=("MIT")
 depends=('pcre' 'glew' 'expat' 'freetype2' 'libxml2' 'devil' 'freeglut' 'lua' 'silly')
-makedepends=('python2')
+makedepends=('python2' 'doxygen')
 source=(http://downloads.sourceforge.net/crayzedsgui/CEGUI-$pkgver.tar.gz)
 md5sums=('38c79d1fdfaaa10f481c99a2ac479516')
 
 build() {
   cd $srcdir/CEGUI-${pkgver}
-
-# old configure from cegui-0.6.X, to remove later!
-#  ./configure --prefix=/usr \
-#              --sysconfdir=/etc \
-#              --enable-devil \
-#              --enable-silly \
-#              --disable-xerces-c \
-#              --with-default-image-codec=SILLYImageCodec \
-#              --enable-lua
+
+  sed -i '1i#include <cstddef>' cegui/include/CEGUIString.h
 
   ./configure --prefix=/usr \
               --sysconfdir=/etc \
              --disable-xerces-c \
              --enable-null-renderer
-make
+  make
 }
 
 package() {
@@ -41,6 +34,10 @@ package() {
   make DESTDIR=${pkgdir} install
 
-  install -D -m644 doc/COPYING ${pkgdir}/usr/share/licenses/$pkgname/LICENSE
+  #build docs
+  cd doc/doxygen && doxygen
+  cd .. && make DESTDIR=${pkgdir} install-html
+
+  install -Dm644 COPYING ${pkgdir}/usr/share/licenses/$pkgname/LICENSE
 }
diff --git a/community/clementine/PKGBUILD b/community/clementine/PKGBUILD
index 4d0965bed..c9e9763d4 100644
--- a/community/clementine/PKGBUILD
+++ b/community/clementine/PKGBUILD
@@ -1,11 +1,11 @@
-# $Id: PKGBUILD 46607 2011-05-09 23:12:17Z stephane $
+# $Id: PKGBUILD 48537 2011-06-01 16:56:54Z stephane $
 #Maintainer: Stéphane Gaudreault <stephane@archlinux.org>
 #Contributor: BlackEagle < ike DOT devolder AT gmail DOT com >
 #Contributor: Dany Martineau <dany.luc.martineau@gmail.com>
 
 pkgname=clementine
 pkgver=0.7.1
-pkgrel=3
+pkgrel=5
 pkgdesc="A music player and library organizer"
 url="http://www.clementine-player.org/"
 license=('GPL')
diff --git a/community/csfml/PKGBUILD b/community/csfml/PKGBUILD
new file mode 100644
index 000000000..d0920b18d
--- /dev/null
+++ b/community/csfml/PKGBUILD
@@ -0,0 +1,60 @@
+# $Id: PKGBUILD 47932 2011-05-26 06:40:32Z svenstaro $
+# Maintainer: Sven-Hendrik Haase <sh@lutzhaase.com>
+
+pkgname=csfml
+
+_git=true
+
+if [[ "${_git}" = "true" ]]; then
+  pkgver=1.99.git20110526
+fi
+
+pkgrel=1
+pkgdesc='C bindings for sfml'
+arch=('i686' 'x86_64')
+url='http://www.sfml-dev.org/'
+license=('zlib')
+depends=('sfml')
+makedepends=('git' 'cmake' 'doxygen')
+
+_gitroot='https://github.com/LaurentGomila/SFML.git'
+_gitname='SFML'
+
+build() {
+  cd "$srcdir"
+  msg "Connecting to GIT server...."
+
+  if [ -d $_gitname ] ; then
+    cd $_gitname && git pull origin
+    msg "The local files are updated."
+  else
+    git clone $_gitroot
+    cd $_gitname
+  fi
+
+  msg "GIT checkout done or server timeout"
+  msg "Starting make..."
+
+  rm -rf "$srcdir/$_gitname-build"
+  cp -r "$srcdir/$_gitname" "$srcdir/$_gitname-build"
+  cd "$srcdir/$_gitname-build"
+
+  cd bindings/c/
+  mkdir build && cd build
+  cmake -DCMAKE_INSTALL_PREFIX=/usr .. \
+    -DBUILD_DOC=true
+  make
+  make doc
+}
+
+package() {
+  cd "$srcdir/$_gitname-build/bindings/c/build/"
+
+  make DESTDIR="$pkgdir/" install
+
+  install -Dm644 ../license.txt \
+    ${pkgdir}/usr/share/licenses/${pkgname}/LICENSE
+
+  make clean
+}
diff --git a/community/frogatto/PKGBUILD b/community/frogatto/PKGBUILD
index fb3b6553d..434ddd9fb 100644
--- a/community/frogatto/PKGBUILD
+++ b/community/frogatto/PKGBUILD
@@ -1,11 +1,11 @@
-# $Id: PKGBUILD 47675 2011-05-24 19:05:16Z svenstaro $
+# $Id: PKGBUILD 47897 2011-05-26 00:34:36Z svenstaro $
 # Maintainer: Sven-Hendrik Haase <sh@lutzhaase.com>
 # Contributor: Tom Wambold <tom5760@gmail.com>
 
 pkgname='frogatto'
 arch=('i686' 'x86_64')
 pkgver=1.1
-pkgrel=2
+pkgrel=3
 pkgdesc="An old-school 2d platformer game, starring a certain quixotic frog"
 url="http://www.frogatto.com"
 license=('GPL')
diff --git a/community/gambas3/PKGBUILD b/community/gambas3/PKGBUILD
index 5a4b5206d..45dee93f7 100644
--- a/community/gambas3/PKGBUILD
+++ b/community/gambas3/PKGBUILD
@@ -12,7 +12,7 @@ pkgname=('gambas3-runtime' 'gambas3-devel' 'gambas3-ide' 'gambas3-script' 'gamba
          'gambas3-gb-report' 'gambas3-gb-sdl' 'gambas3-gb-sdl-sound' 'gambas3-gb-settings'
          'gambas3-gb-signal' 'gambas3-gb-v4l' 'gambas3-gb-vb' 'gambas3-gb-xml' 'gambas3-gb-xml-rpc'
          'gambas3-gb-xml-xslt' 'gambas3-gb-web')
 pkgver=2.99.1
-pkgrel=2
+pkgrel=3
 pkgdesc="A free development environment based on a Basic interpreter."
arch=('i686' 'x86_64') url="http://gambas.sourceforge.net/" diff --git a/community/gnome-packagekit/PKGBUILD b/community/gnome-packagekit/PKGBUILD index 20c933ec7..390f39a73 100644 --- a/community/gnome-packagekit/PKGBUILD +++ b/community/gnome-packagekit/PKGBUILD @@ -1,8 +1,8 @@ -# $Id: PKGBUILD 48001 2011-05-26 23:17:43Z jconder $ +# $Id: PKGBUILD 48532 2011-06-01 14:10:40Z jconder $ # Maintainer: Jonathan Conder <jonno.conder@gmail.com> pkgname=gnome-packagekit -pkgver=3.0.2 +pkgver=3.0.3 pkgrel=1 pkgdesc='Collection of graphical tools for PackageKit to be used in the GNOME desktop' arch=('i686' 'x86_64') @@ -10,13 +10,13 @@ url='http://www.packagekit.org/' license=('GPL') depends=('dconf' 'desktop-file-utils' 'gnome-menus' 'gtk3' 'hicolor-icon-theme' 'libnotify' 'packagekit') -makedepends=('gconf' 'gtk-doc' 'gnome-common' 'intltool' 'libcanberra' 'upower') +makedepends=('gconf' 'gtk-doc' 'intltool' 'libcanberra' 'upower') optdepends=('gnome-settings-daemon-updates: update and message notifications') options=('!emptydirs' '!libtool') install="$pkgname.install" -source=("http://ftp.gnome.org/pub/gnome/sources/$pkgname/${pkgver%.*}/$pkgname-$pkgver.tar.bz2" +source=("http://ftp.gnome.org/pub/gnome/sources/$pkgname/${pkgver%.*}/$pkgname-$pkgver.tar.xz" 'arch.patch') -sha256sums=('faaae52d839a66d0bcb7d7cf97ef62d11bccf558e6aa9fdb0dbc8d61dbaf714f' +sha256sums=('3042bb008548f70a98ff4d90da86ad7ddbdd2ec03c37daaf87b8e6341a03d211' '9c809ac3c8bbf870442e7bc4123c70b144930a287b28626be1b8bae95edf71ac') build() { @@ -26,8 +26,7 @@ build() { sed -i 's@python@python2@' 'python/enum-convertor.py' export PYTHON=/usr/bin/python2 - # TODO: remove gnome-common dependency and switch back to configure - gnome-autogen.sh --prefix=/usr \ + ./configure --prefix=/usr \ --sysconfdir=/etc \ --disable-gtk-doc \ --disable-schemas-compile \ diff --git a/community/gource/PKGBUILD b/community/gource/PKGBUILD index eaceceace..2c357a634 100644 --- a/community/gource/PKGBUILD +++ b/community/gource/PKGBUILD @@ -1,10 +1,10 @@ -# $Id: PKGBUILD 47420 2011-05-23 01:24:33Z svenstaro $ +# $Id: PKGBUILD 47914 2011-05-26 01:27:25Z svenstaro $ # Maintainer: Sven-Hendrik Haase <sh@lutzhaase.com> # Contributor: Jose Valecillos <valecillosjg (at) gmail (dot) com> # Contributor: Olivier Ramonat <olivier at ramonat dot fr> pkgname=gource pkgver=0.34 -pkgrel=1 +pkgrel=2 pkgdesc="software version control visualization" license=(GPL3) arch=(i686 x86_64) diff --git a/community/lightspark/PKGBUILD b/community/lightspark/PKGBUILD index 328e44a6a..d583b36bb 100644 --- a/community/lightspark/PKGBUILD +++ b/community/lightspark/PKGBUILD @@ -3,7 +3,7 @@ pkgname=lightspark pkgver=0.4.8.1 -pkgrel=1 +pkgrel=2 pkgdesc='An alternative Flash Player for Linux.' 
 arch=('i686' 'x86_64')
 url='http://lightspark.sourceforge.net'
diff --git a/community/performous/PKGBUILD b/community/performous/PKGBUILD
index 16518d9c9..01aab85d1 100644
--- a/community/performous/PKGBUILD
+++ b/community/performous/PKGBUILD
@@ -1,10 +1,10 @@
-# $Id: PKGBUILD 47641 2011-05-24 14:35:06Z tdziedzic $
+# $Id: PKGBUILD 47952 2011-05-26 11:21:56Z lcarlier $
 # Maintainer : Laurent Carlier <lordheavym@gmail.com>
 # Contributor: Christoph Zeiler <archNOSPAM_at_moonblade.dot.org>
 
 pkgname=performous
 pkgver=0.6.1
-pkgrel=5
+pkgrel=6
 pkgdesc='A free game like "Singstar", "Rockband" or "Stepmania"'
 arch=('i686' 'x86_64')
 url="http://performous.org/"
@@ -24,7 +24,7 @@ build() {
   # fix config loading with libxml++
   export LDFLAGS=${LDFLAGS/-Wl,--as-needed/}
 
-  # fix to built against boost 1.46
+  # fix to built against boost 1.46, upstream (git) now support v3
   export CXXFLAGS="${CXXFLAGS} -DBOOST_FILESYSTEM_VERSION=2"
 
   cmake -DCMAKE_BUILD_TYPE=Release \
diff --git a/community/perl-digest-md5/PKGBUILD b/community/perl-digest-md5/PKGBUILD
index e813d9ea1..bb6d0bbd5 100644
--- a/community/perl-digest-md5/PKGBUILD
+++ b/community/perl-digest-md5/PKGBUILD
@@ -1,10 +1,10 @@
-# $Id: PKGBUILD 35628 2010-12-21 23:03:54Z tdziedzic $
+# $Id: PKGBUILD 48551 2011-06-01 19:52:59Z spupykin $
 # Maintainer: Sergej Pupykin <pupykin.s+arch@gmail.com>
 # Contributor: Ashok `ScriptDevil` Gautham <ScriptDevil@gmail.com>
 
 pkgname=perl-digest-md5
-pkgver=2.40
-pkgrel=2
+pkgver=2.51
+pkgrel=1
 pkgdesc="Digest::MD5::Perl - Perl implementation of Ron Rivests MD5 Algorithm"
 arch=('i686' 'x86_64')
 url="http://search.cpan.org/dist/Digest-MD5"
@@ -12,16 +12,18 @@ license=('GPL' 'PerlArtistic')
 depends=('perl>=5.10.0')
 options=(!emptydirs)
 source=(http://search.cpan.org/CPAN/authors/id/G/GA/GAAS/Digest-MD5-$pkgver.tar.gz)
-md5sums=('97051183c4ff7012bdeaf55881164f4b')
+md5sums=('73967d50b9d19990a1d609fe2b1e36c3')
 
 build() {
   cd "$srcdir/Digest-MD5-$pkgver"
 
-  # install module in vendor directories.
   PERL_MM_USE_DEFAULT=1 perl Makefile.PL INSTALLDIRS=vendor
   make
-  make install DESTDIR="$pkgdir/"
+}
+package() {
+  cd "$srcdir/Digest-MD5-$pkgver"
+  make install DESTDIR="$pkgdir/"
   # remove perllocal.pod and .packlist
   find "$pkgdir" -name perllocal.pod -delete
   find "$pkgdir" -name .packlist -delete
diff --git a/community/rawtherapee/PKGBUILD b/community/rawtherapee/PKGBUILD
index 63813738d..1de4c8f15 100644
--- a/community/rawtherapee/PKGBUILD
+++ b/community/rawtherapee/PKGBUILD
@@ -1,17 +1,17 @@
-# $Id: PKGBUILD 36068 2010-12-26 11:58:33Z stativ $
+# $Id: PKGBUILD 48519 2011-06-01 11:08:54Z stativ $
 # Maintainer: Lukas Jirkovsky <l.jirkovsky AT gmail.com>
 # Contributor: Bogdan Szczurek <thebodzio(at)gmail.com>
 # Contributor: Vaclav Kramar <vaclav.kramar@tiscali.cz>
 # Contributor: Archie <mymaud@gmail.com>
 
 pkgname=rawtherapee
-pkgver=3.0a1_742
+pkgver=3.1m5
 pkgrel=1
 pkgdesc="RAW photo editor"
 arch=('i686' 'x86_64')
 url="http://www.rawtherapee.com/"
 license=('GPL')
-depends=('libsigc++' 'gtkmm' 'libiptcdata' 'lcms')
+depends=('bzip2' 'libsigc++' 'gtkmm' 'libiptcdata' 'lcms2' 'desktop-file-utils')
 makedepends=('mercurial' 'cmake')
 install=rawtherapee.install
 source=(rawtherapee.desktop rawtherapee.png)
@@ -20,8 +20,8 @@ md5sums=('83f14b57b27f066705b0aec7ad56c53d'
 
 _root="https://rawtherapee.googlecode.com/hg/"
 _repo="rawtherapee"
-_branch="branch_3.0"
-_changeset="71045bfba97e" # changeset $pkgver
+#_branch="branch_3.0"
+#_changeset="71045bfba97e" # changeset $pkgver
 
 build() {
   cd "$srcdir"
@@ -30,10 +30,10 @@ build() {
   if [ -d $_repo ] ; then
     cd $_repo
     hg pull
-    hg up -r $_changeset
+    hg up "Dev-$pkgver"
     msg "The local files are updated."
   else
-    hg clone -b $_branch -u $_changeset $_root $_repo
+    hg clone -u "Dev-$pkgver" $_root $_repo
   fi
 
   msg "Mercurial checkout done or server timeout"
@@ -45,7 +45,10 @@ build() {
   mkdir "$srcdir/$_repo-build"
   cd "$srcdir/$_repo-build"
 
-  cmake -DCMAKE_INSTALL_PREFIX=/usr ../$_repo
+  cmake -DCMAKE_INSTALL_PREFIX=/usr \
+    -DCMAKE_BUILD_TYPE=Release \
+    -DWITH_MYFILE_MMAP=OFF \
+    ../$_repo
 
   make
 }
diff --git a/community/rawtherapee/rawtherapee.install b/community/rawtherapee/rawtherapee.install
index ccf3323ae..6162b34dd 100644
--- a/community/rawtherapee/rawtherapee.install
+++ b/community/rawtherapee/rawtherapee.install
@@ -1,24 +1,21 @@
-post_upgrade() {
-
-if [ ${2%%_*} == "3.0a1" ]; then
-  TMP=${2##*_}
-  TMP=${TMP%%-*}
+post_install() {
+  echo "update desktop mime database..."
+  update-desktop-database -q
+  update-mime-database usr/share/mime 1>&2 > /dev/null
+}
 
-  if [ $TMP -lt 316 ]; then
-    echo "Profile extension has changed from .pp2 to .pp3"
-    echo -e "since rawtherapee-3.0a1_54-1\n"
-    echo "If you used rawtherapee-3.0a1_54-1 or any earlier 3.0 version"
-    echo "you have to change the extension manually.\n"
-    echo "NOTE: pp2 profiles from rawtherapee 2.4 and older"
-    echo "are not compatible."
-  fi
+post_upgrade() {
+  post_install $1
 
-  if [ $TMP -lt 589 ]; then
+  if [ ${2%%_*} == "3.0a1" ]; then
     echo "There were lots of changes to the behavior of the majority of tools."
     echo "This may cause your files to look differently than before."
   fi
-fi
+}
 
+post_remove() {
+  post_install $1
 }
 
 # vim:set ts=2 sw=2 et:
diff --git a/community/root/PKGBUILD b/community/root/PKGBUILD
index 2345ec8c1..4f46232cf 100644
--- a/community/root/PKGBUILD
+++ b/community/root/PKGBUILD
@@ -3,7 +3,7 @@
 pkgname=root
 pkgver=5.28.00d
-pkgrel=1
+pkgrel=2
 pkgdesc='C++ data analysis framework and interpreter from CERN.'
 arch=('i686' 'x86_64')
 url='http://root.cern.ch'
diff --git a/community/rss-glx/PKGBUILD b/community/rss-glx/PKGBUILD
index 84479622c..37c440cf4 100644
--- a/community/rss-glx/PKGBUILD
+++ b/community/rss-glx/PKGBUILD
@@ -1,11 +1,11 @@
-# $Id: PKGBUILD 20029 2010-07-01 19:03:57Z ibiru $
+# $Id: PKGBUILD 48037 2011-05-27 16:21:40Z spupykin $
 # Maintainer : Ionut Biru <ibiru@archlinux.org>
 # Contributor: Corrado 'bardo' Primier <corrado.primier@mail.polimi.it>
 # Contributor: Tate "Tatey" Johnson <tatey86@tpg.com.au>
 
 pkgname=rss-glx
 pkgver=0.9.1
-pkgrel=5
+pkgrel=6
 pkgdesc="The Really Slick Screensavers port to GLX"
 arch=('i686' 'x86_64')
 url="http://rss-glx.sourceforge.net/"
@@ -33,7 +33,7 @@ build() {
   make CFLAGS="$CFLAGS -I/usr/include/ImageMagick"
   make DESTDIR=${pkgdir} install
 
-#FS#18300
+  # FS#18300
   install -d ${pkgdir}/usr/lib/xscreensaver/
   list=$(ls ${pkgdir}/usr/bin --ignore rss-glx_install.pl)
   for i in $list; do
@@ -43,5 +43,3 @@ build() {
   install -d ${pkgdir}/usr/share/applications/screensavers
   install -m644 ${srcdir}/${pkgname}-desktops/*.desktop ${pkgdir}/usr/share/applications/screensavers
 }
-
-# vim:set ts=2 sw=2 et:
diff --git a/community/rss-glx/rss-glx.install b/community/rss-glx/rss-glx.install
index 3ffe7103c..758642618 100644
--- a/community/rss-glx/rss-glx.install
+++ b/community/rss-glx/rss-glx.install
@@ -10,4 +10,3 @@ EOM
 post_remove() {
   update-desktop-database -q
 }
-
diff --git a/community/sfml/PKGBUILD b/community/sfml/PKGBUILD
index 00f1d39c9..806e373d4 100644
--- a/community/sfml/PKGBUILD
+++ b/community/sfml/PKGBUILD
@@ -1,96 +1,60 @@
-# $Id: PKGBUILD 23743 2010-08-16 10:49:18Z svenstaro $
+# $Id: PKGBUILD 47923 2011-05-26 03:12:35Z svenstaro $
 # Maintainer: Sven-Hendrik Haase <sh@lutzhaase.com>
-# Contributor: scj <scj archlinux us>
+# Contributor: Ondrej Martinak <omartinak@gmail.com>
 
 pkgname=sfml
-pkgver=1.6
-pkgrel=3
-pkgdesc="A simple, fast, cross-platform and object-oriented multimedia API."
-arch=('i686' 'x86_64')
-url="http://www.sfml-dev.org"
-license=('custom:zlib')
-depends=('libxrandr' 'libsndfile' 'openal' 'glew' 'libjpeg' 'libpng' 'soil' 'zlib' 'freetype2')
-optdepends=('qt: build qt-sample' 'wxgtk: build wxwidgets-sample')
-install=sfml.install
-source=(http://downloads.sourceforge.net/${pkgname}/SFML-${pkgver}-sdk-linux-64.tar.gz
-        use-system-libs.patch)
-md5sums=('7a9b3a1ef6d14cd25090e440ccdbb3a8'
-         '505ea908fb6e4b9359061d8d55373963')
-build() {
-  cd "${srcdir}/SFML-${pkgver}"
+_git=true
 
-  # apply patch to use system libs in favor of included ones (fixes many problems)
-  patch -Np1 < ../use-system-libs.patch
+if [[ "${_git}" = "true" ]]; then
+  pkgver=1.99.git20110526
+fi
 
-  # compile sfml
-  make
+pkgrel=1
+pkgdesc='A simple, fast, cross-platform, and object-oriented multimedia API'
+arch=('i686' 'x86_64')
+url='http://www.sfml-dev.org/'
+license=('zlib')
+depends=('libsndfile' 'libxrandr' 'libjpeg' 'openal' 'glew' 'freetype2')
+makedepends=('git' 'mesa' 'cmake' 'doxygen')
 
-  # prepare samples
-  sed -e '/export LDFLAGS/d' -i samples/Makefile
+_gitroot='https://github.com/LaurentGomila/SFML.git'
+_gitname='SFML'
 
-  #check optional dependencies
-  if [ ! -e "/usr/bin/wx-config" ]; then
-    sed -e 's/wxwidgets-sample //' -i samples/Makefile
-  fi
-  if [ ! -e "/usr/include/QtGui" ]; then
-    sed -e 's/qt-sample //' -i samples/Makefile
+build() {
+  cd "$srcdir"
+  msg "Connecting to GIT server...."
+
+  if [ -d $_gitname ] ; then
+    cd $_gitname && git pull origin
+    msg "The local files are updated."
+  else
+    git clone $_gitroot
+    cd $_gitname
   fi
 
-  # fix some samples
-  sed -e 's|qt4/||g' -i samples/qt/Makefile
-  sed -e '/#include <iostream>/a\#include <stdlib.h>' -i \
-    samples/sockets/Sockets.cpp \
-    samples/voip/VoIP.cpp
+  msg "GIT checkout done or server timeout"
+  msg "Starting make..."
 
-  # fix the library softlinks for samples
-  cd lib
-  for lib in *; do
-    ln -sf $lib ${lib/.${pkgver}/}
-  done
-  cd ..
+  rm -rf "$srcdir/$_gitname-build"
+  cp -r "$srcdir/$_gitname" "$srcdir/$_gitname-build"
+  cd "$srcdir/$_gitname-build"
 
-  # compile samples
-  LDFLAGS="-L${srcdir}/SFML-${pkgver}/lib" make sfml-samples
+  mkdir build && cd build
+  cmake -DCMAKE_INSTALL_PREFIX=/usr .. \
+    -DBUILD_DOC=true \
+    -DBUILD_EXAMPLES=true
+  make
+  make doc
 }
 
 package() {
-  cd "${srcdir}/SFML-${pkgver}"
+  cd "$srcdir/$_gitname-build/build"
+  make DESTDIR="$pkgdir/" install
 
-  # prepare some dirs
-  mkdir -p ${pkgdir}/usr/lib \
-    ${pkgdir}/usr/include \
-    ${pkgdir}/usr/share/sfml \
-    ${pkgdir}/usr/share/doc \
-    ${pkgdir}/opt/SFML
+  install -Dm644 ../license.txt \
+    ${pkgdir}/usr/share/licenses/${pkgname}/LICENSE
 
-  # install it
-  sed '/export DESTDIR/d' -i src/SFML/Makefile
-  make DESTDIR="${pkgdir}/usr" install
-
-  # fix the library softlinks - again
-  cd ${pkgdir}/usr/lib
-  rm *.so
-  for lib in *; do
-    ln -s $lib ${lib/.${pkgver}/}
-  done
-  cd "${srcdir}/SFML-${pkgver}"
-
-  # install samples
-  cp -r samples ${pkgdir}/opt/SFML/samples
-
-  # install docs
-  cp -r doc ${pkgdir}/usr/share/doc/sfml
-
-  # handy symlinks
-  cd ${pkgdir}/usr/share/sfml
-  ln -s ../../../opt/SFML/samples samples
-  ln -s ../doc/sfml docs
-
-  # install license
-  install -Dm 644 ${srcdir}/SFML-${pkgver}/license.txt "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE"
-
-  # clean up
-  find "${pkgdir}/opt/SFML/samples" -name "*.o" -delete
+  make clean
 }
-# vim:set ts=2 sw=2 et:
+
diff --git a/community/spring/PKGBUILD b/community/spring/PKGBUILD
index 5430aace6..38ce61979 100644
--- a/community/spring/PKGBUILD
+++ b/community/spring/PKGBUILD
@@ -1,11 +1,11 @@
-# $Id: PKGBUILD 47736 2011-05-25 04:13:58Z svenstaro $
+# $Id: PKGBUILD 47908 2011-05-26 01:20:52Z svenstaro $
 # Maintainer: Sven-Hendrik Haase <sh@lutzhaase.com>
 # Contributor: Arkham <arkham at archlinux dot us>
 # Contributor: Christoph Zeiler <archNOSPAM_at_moonblade.dot.org>
 
 pkgname=spring
 pkgver=0.82.7.1
-pkgrel=3
+pkgrel=4
 pkgdesc='A free 3D real-time-strategy (RTS) game engine'
 arch=('i686' 'x86_64')
 url="http://springrts.com/"
diff --git a/community/supertux/PKGBUILD b/community/supertux/PKGBUILD
index 48159914c..dbcd690af 100644
--- a/community/supertux/PKGBUILD
+++ b/community/supertux/PKGBUILD
@@ -5,7 +5,7 @@
 pkgname=supertux
 pkgver=0.3.3
-pkgrel=2
+pkgrel=3
 pkgdesc="A classic 2D jump'n run sidescroller game in a style similar to the original SuperMario games"
 arch=('i686' 'x86_64')
 url="http://super-tux.sourceforge.net/"
@@ -16,12 +16,15 @@ source=(http://download.berlios.de/supertux/${pkgname}-${pkgver}.tar.bz2)
 md5sums=('f3f803e629ee51a9de0b366a036e393d')
 
 build() {
-  cd ${srcdir}/${pkgname}-${pkgver}
-  cmake -D CMAKE_INSTALL_PREFIX=/usr -D INSTALL_SUBDIR_BIN=bin .
-  make || return 1
+  cd ${srcdir}/${pkgname}-${pkgver}
+
+  sed -i '1i#include <cstddef>' src/supertux/screen_manager.hpp
+  cmake -D CMAKE_INSTALL_PREFIX=/usr -D INSTALL_SUBDIR_BIN=bin .
+  make
 }
 
 package() {
-  cd ${srcdir}/${pkgname}-${pkgver}
-  make DESTDIR=${pkgdir} install || return 1
+  cd ${srcdir}/${pkgname}-${pkgver}
+
+  make DESTDIR=${pkgdir} install
 }
diff --git a/community/vdrift/PKGBUILD b/community/vdrift/PKGBUILD
index 39dae2cf6..41dab8d90 100644
--- a/community/vdrift/PKGBUILD
+++ b/community/vdrift/PKGBUILD
@@ -1,11 +1,11 @@
-# $Id: PKGBUILD 27647 2010-09-26 17:36:38Z svenstaro $
+# $Id: PKGBUILD 47904 2011-05-26 00:55:30Z svenstaro $
 # Maintainer: Sven-Hendrik Haase <sh@lutzhaase.com>
 # Contributor: Anton Bazhenov <anton.bazhenov at gmail>
 # Contributor: Lone_Wolf lonewolf@xs4all.nl
 
 pkgname=vdrift
 pkgver=2010.06.30
-pkgrel=2
+pkgrel=3
 pkgdesc="An open source driving simulation made with drift racing in mind"
 arch=('i686' 'x86_64')
 url="http://vdrift.net/"
diff --git a/community/widelands/PKGBUILD b/community/widelands/PKGBUILD
index e308f27bd..e2a882999 100644
--- a/community/widelands/PKGBUILD
+++ b/community/widelands/PKGBUILD
@@ -1,4 +1,4 @@
-# $Id: PKGBUILD 45158 2011-04-18 00:22:21Z svenstaro $
+# $Id: PKGBUILD 47900 2011-05-26 00:39:20Z svenstaro $
 # Maintainer: Sven-Hendrik Haase <sh@lutzhaase.com>
 # Contributor: Arkham <arkham at archlinux dot us>
 # Contributor: Christoph Zeiler <rabyte*gmail>
@@ -6,7 +6,7 @@
 pkgname=widelands
 pkgver=16
 _realver=build16
-pkgrel=1
+pkgrel=2
 pkgdesc="A realtime strategy game with emphasis on economy and transport"
 arch=('i686' 'x86_64')
 url="http://widelands.org/"
diff --git a/core/dnsutils/PKGBUILD b/core/dnsutils/PKGBUILD
index 823df71de..12829b5d1 100644
--- a/core/dnsutils/PKGBUILD
+++ b/core/dnsutils/PKGBUILD
@@ -1,4 +1,4 @@
-# $Id: PKGBUILD 122965 2011-05-07 14:46:05Z bisson $
+# $Id: PKGBUILD 125626 2011-05-27 10:41:33Z bisson $
 # Maintainer: Gaetan Bisson <bisson@archlinux.org>
 # Contributor: kevin <kevin@archlinux.org>
 # Contributor: mario <mario_vazq@hotmail.com>
@@ -6,8 +6,8 @@
 pkgname=dnsutils
 
 # Use a period and not a hyphen before the patch level for proper versioning.
-pkgver=9.8.0.P1
-_pkgver=9.8.0-P1
+pkgver=9.8.0.P2
+_pkgver=9.8.0-P2
 pkgrel=1
 
 pkgdesc='Various DNS utilities - dig host nslookup nsupdate'
@@ -19,21 +19,30 @@ replaces=('bind-tools' 'host')
 options=('!makeflags')
 source=("http://ftp.isc.org/isc/bind9/${_pkgver}/bind-${_pkgver}.tar.gz"
         'tools-only.patch')
-sha1sums=('aa8f308f218e437ac4bad616e0ae83a9b9c40c29'
+sha1sums=('b3492ad11cfbf2939d9b0fb62c52c204de58679b'
          'ac88c89e25c26d73095a3dd85e1ca1070d184ded')
 
 build() {
   cd "${srcdir}/bind-${_pkgver}"
-  patch -p1 < ../tools-only.patch
+
+  patch -p1 -i ../tools-only.patch
 
   export STD_CDEFINES='-DDIG_SIGCHASE'
+
   ./configure \
-    --prefix=/usr --sysconfdir=/etc --localstatedir=/var \
-    --with-openssl=yes --with-idn=yes --disable-linux-caps
+    --prefix=/usr \
+    --sysconfdir=/etc \
+    --localstatedir=/var \
+    --with-openssl=yes \
+    --with-idn=yes \
+    --disable-linux-caps \
+
   make
 }
 
 package() {
   cd "${srcdir}/bind-${_pkgver}/bin"
+
   make DESTDIR="${pkgdir}" install
+
   install -Dm644 ../COPYRIGHT "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE"
 }
diff --git a/core/less/PKGBUILD b/core/less/PKGBUILD
index 707787bbb..c26ddfb00 100644
--- a/core/less/PKGBUILD
+++ b/core/less/PKGBUILD
@@ -1,10 +1,10 @@
-# $Id: PKGBUILD 119700 2011-04-13 21:50:08Z allan $
+# $Id: PKGBUILD 125524 2011-05-26 14:15:10Z stephane $
 # Maintainer: Allan McRae <allan@archlinux.org>
 # Contributor: judd <jvinet@zeroflux.org>
 
 pkgname=less
 pkgver=443
-pkgrel=1
+pkgrel=2
 pkgdesc="A terminal based program for viewing text files"
 license=('GPL3')
 arch=('i686' 'x86_64')
@@ -17,14 +17,14 @@ md5sums=('47db098fb3cdaf847b3c4be05ee954fc')
 
 build() {
   cd "${srcdir}/${pkgname}-${pkgver}"
-  ./configure --prefix=/usr --with-regex=pcre
+  ./configure --prefix=/usr --sysconfdir=/etc --with-regex=pcre
   make
 }
 
 package() {
   cd "${srcdir}/${pkgname}-${pkgver}"
-  make prefix=${pkgdir}/usr install
-  install -dm755 ${pkgdir}/bin
-  mv ${pkgdir}/usr/bin/${pkgname} ${pkgdir}/bin
+  make prefix="${pkgdir}"/usr install
+  install -dm755 "${pkgdir}"/bin
+  mv "${pkgdir}"/usr/bin/${pkgname} "${pkgdir}"/bin
 }
diff --git a/core/pkg-config/PKGBUILD b/core/pkg-config/PKGBUILD
index b432af63e..95ed134c6 100644
--- a/core/pkg-config/PKGBUILD
+++ b/core/pkg-config/PKGBUILD
@@ -1,10 +1,10 @@
-# $Id: PKGBUILD 102636 2010-12-09 12:15:21Z stephane $
+# $Id: PKGBUILD 124806 2011-05-24 14:39:31Z stephane $
 # Maintainer: Allan McRae <allan@archlinux.org>
 # Committer: Judd Vinet <jvinet@zeroflux.org>
 
 pkgname=pkg-config
-pkgver=0.25
-pkgrel=3
+pkgver=0.26
+pkgrel=1
 pkgdesc="A system for managing library compile/link flags"
 arch=('i686' 'x86_64')
 url="http://pkgconfig.freedesktop.org/wiki/"
@@ -14,20 +14,22 @@ depends=('glibc' 'popt' 'glib2')
 provides=("pkgconfig=${pkgver}")
 conflicts=('pkgconfig')
 replaces=('pkgconfig')
-source=(http://pkgconfig.freedesktop.org/releases/${pkgname}-${pkgver}.tar.gz
-        autoconf-2.66.patch)
-md5sums=('a3270bab3f4b69b7dc6dbdacbcae9745'
-         '0fbfc1a5088f792bbeec7b5090e449f1')
+source=(http://pkgconfig.freedesktop.org/releases/${pkgname}-${pkgver}.tar.gz)
+md5sums=('47525c26a9ba7ba14bf85e01509a7234')
 
 build() {
   cd "${srcdir}/${pkgname}-${pkgver}"
-  patch -Np1 -i $srcdir/autoconf-2.66.patch
 
-  # Use system popt and glib
-  ./configure --prefix=/usr --with-installed-glib --with-installed-popt
+  # Use system popt
+  ./configure --prefix=/usr --with-installed-popt
 
   make
 }
 
+check() {
+  cd "${srcdir}/${pkgname}-${pkgver}"
+  make check
+}
+
 package() {
   cd "${srcdir}/${pkgname}-${pkgver}"
   make DESTDIR=${pkgdir} install
diff --git a/core/sudo/PKGBUILD b/core/sudo/PKGBUILD
index add4a025c..fee69d2c1 100644
--- a/core/sudo/PKGBUILD
+++ b/core/sudo/PKGBUILD
@@ -1,9 +1,9 @@
-# $Id: PKGBUILD 120368 2011-04-22 22:01:28Z allan $
+# $Id: PKGBUILD 124889 2011-05-24 21:47:33Z eric $
 # Maintainer: Allan McRae <allan@archlinux.org>
 # Contributor: Tom Newsom <Jeepster@gmx.co.uk>
 
 pkgname=sudo
-_ver=1.8.1p1
+_ver=1.8.1p2
 pkgver=${_ver/[a-z]/.${_ver//[0-9.]/}}
 pkgrel=1
 pkgdesc="Give certain users the ability to run some commands as root"
@@ -12,14 +12,14 @@ url="http://www.sudo.ws/sudo/"
 license=('custom')
 depends=('glibc' 'pam')
 backup=('etc/sudoers' 'etc/pam.d/sudo')
+options=('!libtool' '!makeflags')
 source=(ftp://ftp.sudo.ws/pub/sudo/$pkgname-$_ver.tar.gz
         sudo.pam)
-options=('!libtool' '!makeflags')
-md5sums=('318337804d976c9419cf3004b707945c'
+md5sums=('e8330f0e63b0ecb2e12b5c76922818cc'
         '4e7ad4ec8f2fe6a40e12bcb2c0b256e3')
 
 build() {
-  cd $srcdir/$pkgname-$_ver
+  cd "$srcdir/$pkgname-$_ver"
 
   ./configure --prefix=/usr --with-pam --libexecdir=/usr/lib \
     --with-env-editor --with-all-insults --with-logfac=auth
@@ -27,11 +27,11 @@ build() {
 }
 
 package() {
-  cd $srcdir/$pkgname-$_ver
-  install -dm755 $pkgdir/var/lib
+  cd "$srcdir/$pkgname-$_ver"
+  install -dm755 "$pkgdir/var/lib"
 
-  make DESTDIR=$pkgdir install
-  install -Dm644 $srcdir/sudo.pam $pkgdir/etc/pam.d/sudo
+  make DESTDIR="$pkgdir" install
+  install -Dm644 "$srcdir/sudo.pam" "$pkgdir/etc/pam.d/sudo"
 
-  install -Dm644 doc/LICENSE $pkgdir/usr/share/licenses/sudo/LICENSE
+  install -Dm644 doc/LICENSE "$pkgdir/usr/share/licenses/sudo/LICENSE"
 }
diff --git a/extra/bind/PKGBUILD b/extra/bind/PKGBUILD
index 5af689c41..8df2eb03d 100644
--- a/extra/bind/PKGBUILD
+++ b/extra/bind/PKGBUILD
@@ -1,4 +1,4 @@
-# $Id: PKGBUILD 124551 2011-05-22 22:52:52Z bisson $
+# $Id: PKGBUILD 125623 2011-05-27 08:34:32Z bisson $
 # Maintainer: Gaetan Bisson <bisson@archlinux.org>
 # Contributor: judd <jvinet@zeroflux.org>
 # Contributor: Mario Vazquez <mario_vazq@hotmail.com>
@@ -6,9 +6,9 @@
 pkgname=bind
 
 # Use a period and not a hyphen before the patch level for proper versioning.
-pkgver=9.8.0.P1
-_pkgver=9.8.0-P1
-pkgrel=3
+pkgver=9.8.0.P2
+_pkgver=9.8.0-P2
+pkgrel=1
 
 pkgdesc='Berkeley Internet Name Daemon (BIND) is the reference implementation of the Domain Name System (DNS) protocols'
 arch=('i686' 'x86_64')
@@ -30,10 +30,10 @@ source=("http://ftp.isc.org/isc/bind9/${_pkgver}/${pkgname}-${_pkgver}.tar.gz"
         'named.logrotate'
         'localhost.zone'
         '127.0.0.zone')
-sha1sums=('aa8f308f218e437ac4bad616e0ae83a9b9c40c29'
+sha1sums=('b3492ad11cfbf2939d9b0fb62c52c204de58679b'
          'ee52947062c1582858187716b776afa3613608fb'
-         '2f737f4e81186447ac2ef370fa8dcea0b3abec31'
-         '5277cf4d6fbc5728c55b51c77c9347d28393fb7c'
+         'b433ba99f23e3db305f8ce293fe4ce6d1b8d4cfb'
+         '17444f9d759a4bde7688bdaa304dac57a138e4c1'
          '46232e9db243c6c05e170a1781d7a7f413be5d03'
          '5ca7a5f2a132548a090a045a2df3acea6b35d9eb'
          '7848edbfb9a848843f57c11c02b0289eefd42d00'
@@ -46,8 +46,8 @@ install=install
 build() {
   cd "${srcdir}/${pkgname}-${_pkgver}"
 
-  patch -p1 -i "${srcdir}"/so_bsdcompat.patch
-  patch -p1 -i "${srcdir}"/notools.patch
+  patch -p1 -i ../so_bsdcompat.patch
+  patch -p1 -i ../notools.patch
 
   ./configure \
     --prefix=/usr \
diff --git a/extra/bind/notools.patch b/extra/bind/notools.patch
index 2d16fdcd5..0ad083a01 100644
--- a/extra/bind/notools.patch
+++ b/extra/bind/notools.patch
@@ -1,5 +1,6 @@
---- bind-9.7.0/bin/Makefile.in.orig	2010-03-14 21:19:23.000000000 -0400
-+++ bind-9.7.0/bin/Makefile.in	2010-03-14 21:19:37.000000000 -0400
+diff -aur old/bin/Makefile.in new/bin/Makefile.in
+--- old/bin/Makefile.in	2011-05-27 10:12:59.392416094 +0200
++++ new/bin/Makefile.in	2011-05-27 10:13:12.372521984 +0200
 @@ -19,7 +19,7 @@
  VPATH =		@srcdir@
  top_srcdir =	@top_srcdir@
diff --git a/extra/bind/so_bsdcompat.patch b/extra/bind/so_bsdcompat.patch
index cae2b835c..4d7031fcc 100644
--- a/extra/bind/so_bsdcompat.patch
+++ b/extra/bind/so_bsdcompat.patch
@@ -1,13 +1,12 @@
 diff -aur old/lib/isc/unix/socket.c new/lib/isc/unix/socket.c
---- old/lib/isc/unix/socket.c	2010-03-12 04:25:20.000000000 +0100
-+++ new/lib/isc/unix/socket.c	2011-01-22 21:07:52.410000038 +0100
-@@ -681,6 +681,8 @@
- 	isc_sockstatscounter_fdwatchrecvfail
- };
+--- old/lib/isc/unix/socket.c	2011-02-18 05:01:16.000000000 +0100
++++ new/lib/isc/unix/socket.c	2011-05-27 10:12:01.685269374 +0200
+@@ -712,6 +712,8 @@
+ }
+ #endif
 
 +#undef SO_BSDCOMPAT
 +
  static void
-manager_log(isc__socketmgr_t *sockmgr,
-            isc_logcategory_t *category, isc_logmodule_t *module, int level,
-Only in new/lib/isc/unix: socket.c.orig
+socket_log(isc__socket_t *sock, isc_sockaddr_t *address,
+           isc_logcategory_t *category, isc_logmodule_t *module, int level,
diff --git a/extra/enblend-enfuse/PKGBUILD b/extra/enblend-enfuse/PKGBUILD
index 76fc7290d..219240f8c 100644
--- a/extra/enblend-enfuse/PKGBUILD
+++ b/extra/enblend-enfuse/PKGBUILD
@@ -1,10 +1,10 @@
-# $Id: PKGBUILD 64417 2010-01-20 07:24:28Z eric $
+# $Id: PKGBUILD 126103 2011-06-01 14:26:46Z stephane $
 # Maintainer: tobias <tobias@archlinux.org>
 # Contributor: Dominik Ryba <domryba@post.pl>
 
 pkgname=enblend-enfuse
 pkgver=4.0
-pkgrel=1
+pkgrel=2
 pkgdesc="intelligent blend tool for overlapping picture"
 arch=("i686" "x86_64")
 license=('GPL')
@@ -21,10 +21,14 @@ sha1sums=('34c3a5ce11c6ef0ef520d8a15a3cb6a94a567033' 'eae6cf48ea082865130302d0b4
 
 build() {
   cd "${srcdir}/${pkgname}-${pkgver}-753b534c819d"
-  patch -p1 < ../libpng-1.4.patch || return 1
-  ./configure --prefix=/usr || return 1
-  make || return 1
-  make DESTDIR="${pkgdir}" install || return 1
-  install -D -m644 doc/enblend.info "${pkgdir}/usr/share/info/enblend.info" || return 1
-  install -D -m644 doc/enfuse.info "${pkgdir}/usr/share/info/enfuse.info" || return 1
+  patch -Np1 -i ../libpng-1.4.patch
+  ./configure --prefix=/usr
+  make
+}
+
+package() {
+  cd "${srcdir}/${pkgname}-${pkgver}-753b534c819d"
+  make DESTDIR="${pkgdir}" install
+  install -D -m644 doc/enblend.info "${pkgdir}/usr/share/info/enblend.info"
+  install -D -m644 doc/enfuse.info "${pkgdir}/usr/share/info/enfuse.info"
 }
diff --git a/extra/glew/PKGBUILD b/extra/glew/PKGBUILD
index baee8c467..6270e8c8f 100644
--- a/extra/glew/PKGBUILD
+++ b/extra/glew/PKGBUILD
@@ -1,9 +1,9 @@
-# $Id: PKGBUILD 108690 2011-02-02 16:13:23Z stephane $
+# $Id: PKGBUILD 125318 2011-05-25 20:28:27Z stephane $
 # Maintainer: Stéphane Gaudreault <stephane@archlinux.org>
 # Contributor: SleepyDog
 
 pkgname=glew
-pkgver=1.5.8
+pkgver=1.6.0
 pkgrel=1
 pkgdesc="The OpenGL Extension Wrangler Library"
 arch=('i686' 'x86_64')
@@ -11,7 +11,7 @@ url="http://glew.sourceforge.net"
 license=('BSD' 'MIT' 'GPL')
 depends=('libxmu' 'libxi' 'mesa')
 source=(http://downloads.sourceforge.net/${pkgname}/${pkgname}-${pkgver}.tgz)
-sha1sums=('450946935faa20ac4950cb42ff025be2c1f7c22e')
+sha1sums=('ed555d15d0f01239b262c4cf803e97d60d8a18b6')
 
 build() {
   cd "${srcdir}/${pkgname}-${pkgver}"
@@ -21,7 +21,9 @@ build() {
 package() {
   cd "${srcdir}/${pkgname}-${pkgver}"
-  make GLEW_DEST="${pkgdir}/usr" install
+  make GLEW_DEST="${pkgdir}/usr" install.all
   install -D -m644 LICENSE.txt "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE"
-  chmod 0755 "${pkgdir}/usr/lib/libGLEW.so.${pkgver}"
+
+  rm "${pkgdir}"/usr/lib/{libGLEW,libGLEWmx}.a
+  chmod 0755 "${pkgdir}"/usr/lib/libGLEW*.so.${pkgver}
 }
diff --git a/extra/hugin/PKGBUILD b/extra/hugin/PKGBUILD
index 736aeb71b..d09e302dd 100644
--- a/extra/hugin/PKGBUILD
+++ b/extra/hugin/PKGBUILD
@@ -1,11 +1,11 @@
-# $Id: PKGBUILD 124912 2011-05-25 06:37:46Z eric $
+# $Id: PKGBUILD 125347 2011-05-26 02:15:23Z eric $
 # Maintainer: Tobias Kieslich <tobias@archlinux.org>
 # Contributor: Giovanni Scafora <giovanni@archlinux.org>
 # Contributor: Dominik Ryba <domryba@post.pl>
 
 pkgname=hugin
 pkgver=2010.4.0
-pkgrel=3
+pkgrel=4
 pkgdesc="A frontend to the panorama-tools"
 arch=('i686' 'x86_64')
 url="http://hugin.sourceforge.net/"
diff --git a/extra/koffice/PKGBUILD b/extra/koffice/PKGBUILD
index 66e08bc5c..1884dd92c 100644
--- a/extra/koffice/PKGBUILD
+++ b/extra/koffice/PKGBUILD
@@ -1,5 +1,6 @@
-# $Id: PKGBUILD 115172 2011-03-17 10:46:47Z andrea $
+# $Id: PKGBUILD 125678 2011-05-28 00:58:39Z andrea $
 # Maintainer: Ronald van Haren <ronald.archlinux.org>
+# Contributor: Andrea Scarpino <andrea@archlinux.org>
 # Contributor: BlackEagle < ike DOT devolder AT gmail DOT com >
 
 pkgbase=koffice
@@ -36,23 +37,25 @@ pkgname=(
 #	'koffice-kdgantt' # still an empty package
 )
 pkgver=2.3.3
-pkgrel=3
+pkgrel=4
 arch=('i686' 'x86_64')
 url='http://koffice.kde.org'
 license=('GPL' 'LGPL' 'FDL')
-makedepends=('pkg-config' 'cmake' 'automoc4' 'boost' 'eigen' \
-             'kdelibs' 'gsl' 'qca' 'lcms' 'glew' 'qimageblitz' \
-             'kdepimlibs' 'pstoedit' 'poppler-qt' 'libwpd' 'libwpg' 'openexr' 'opengtl' 'kdegraphics-libs' 'sqlite3' 'docbook-xml' 'docbook-xsl')
+makedepends=('pkg-config' 'cmake' 'automoc4' 'boost' 'eigen' 'gsl' 'lcms'
+             'glew' 'qimageblitz' 'kdepimlibs' 'pstoedit' 'poppler-qt' 'libwpd'
+             'libwpg' 'opengtl' 'kdegraphics-libs')
 groups=('koffice')
-source=("ftp://ftp.kde.org/pub/kde/stable/${pkgbase}-${pkgver}/${pkgbase}-${pkgver}.tar.bz2"
-        "kde4-koffice-libwpg02.patch")
+source=("http://download.kde.org/stable/${pkgbase}-${pkgver}/${pkgbase}-${pkgver}.tar.bz2"
+        'kde4-koffice-libwpg02.patch' 'gcc46.patch')
 sha256sums=('31ba0d98c0d29c7b8ab97efdeb6c618b82177b2b0ec85da088178254da43c099'
-            '69106deb4081d71b5bd8f2e4f5af67ca689e4ce9f2bb49c11dbce5fb3409d612')
+            '69106deb4081d71b5bd8f2e4f5af67ca689e4ce9f2bb49c11dbce5fb3409d612'
+            'e095c0b2bbedf41da6535a68b2275464dafd3f194566028d0135322f596e4739')
 
 build() {
   cd "${srcdir}/${pkgbase}-${pkgver}"
-  patch -Np1 -i "${srcdir}/kde4-koffice-libwpg02.patch"
+  patch -p1 -i "${srcdir}/kde4-koffice-libwpg02.patch"
+  patch -p1 -i "${srcdir}"/gcc46.patch
 
   cd "${srcdir}"
   mkdir build
diff --git a/extra/koffice/gcc46.patch b/extra/koffice/gcc46.patch
new file mode 100644
index 000000000..dcf8a4f85
--- /dev/null
+++ b/extra/koffice/gcc46.patch
@@ -0,0 +1,23 @@
+diff -up koffice-2.3.2/krita/plugins/formats/exr/exr_converter.cc.gcc46 koffice-2.3.2/krita/plugins/formats/exr/exr_converter.cc
+--- koffice-2.3.2/krita/plugins/formats/exr/exr_converter.cc.gcc46	2011-02-17 06:13:40.000000000 -0600
++++ koffice-2.3.2/krita/plugins/formats/exr/exr_converter.cc	2011-02-19 21:15:56.597142885 -0600
+@@ -80,6 +80,9 @@ ImageType imfTypeToKisType(Imf::PixelTyp
+         return IT_FLOAT16;
+     case Imf::FLOAT:
+         return IT_FLOAT32;
++    default:
++        // shouldn't reach here
++        return IT_UNSUPPORTED;
+     }
+ }
+
+@@ -93,6 +96,9 @@ const KoColorSpace* kisTypeToColorSpace(
+     case IT_UNKNOWN:
+     case IT_UNSUPPORTED:
+         return 0;
++    default:
++        // shouldn't reach here
++        return 0;
+     }
+ }
+
diff --git a/extra/libass/PKGBUILD b/extra/libass/PKGBUILD
index 7de827fb1..44277f446 100644
--- a/extra/libass/PKGBUILD
+++ b/extra/libass/PKGBUILD
@@ -1,9 +1,9 @@
-# $Id: PKGBUILD 87470 2010-08-13 22:22:30Z giovanni $
+# $Id: PKGBUILD 126098 2011-06-01 12:39:10Z giovanni $
 # Maintainer: Giovanni Scafora <giovanni@archlinux.org>
 # Contributor: G_Syme <demichan(at)mail(dot)upb(dot)de>
 
 pkgname=libass
-pkgver=0.9.11
+pkgver=0.9.12
 pkgrel=1
 pkgdesc="A portable library for SSA/ASS subtitles rendering"
 arch=('i686' 'x86_64')
@@ -12,13 +12,18 @@ license=('GPL')
 depends=('enca' 'fontconfig' 'libpng')
 makedepends=('pkgconfig')
 options=(!libtool)
-source=(http://libass.googlecode.com/files/${pkgname}-${pkgver}.tar.bz2)
-md5sums=('f9042884397002ba40aa89dc7d34f59f')
+source=("http://libass.googlecode.com/files/${pkgname}-${pkgver}.tar.xz")
+md5sums=('fcef4b048ca2655a14073d692551cd1f')
 
 build() {
   cd "${srcdir}/${pkgname}-${pkgver}"
   ./configure --prefix=/usr
-  make || return 1
-  make DESTDIR="${pkgdir}" install || return 1
+  make
+}
+
+package() {
+  cd "${srcdir}/${pkgname}-${pkgver}"
+
+  make DESTDIR="${pkgdir}" install
 }
diff --git a/extra/libssh/PKGBUILD b/extra/libssh/PKGBUILD
index e6acc5d23..34d929086 100644
--- a/extra/libssh/PKGBUILD
+++ b/extra/libssh/PKGBUILD
@@ -1,10 +1,10 @@
-# $Id: PKGBUILD 107427 2011-01-24 20:24:15Z andrea $
+# $Id: PKGBUILD 126134 2011-06-01 18:06:44Z andrea $
 # Maintainer: Andrea Scarpino <andrea@archlinux.org>
 # Contributor: ice-man <icemanf@gmail.com>
 # Contributor: sergeantspoon <sergeantspoon@archlinux.us>
 
 pkgname=libssh
-pkgver=0.4.8
+pkgver=0.5.0
 pkgrel=1
 pkgdesc="Library for accessing ssh client services through C libraries"
 url="http://www.libssh.org/"
@@ -12,11 +12,11 @@ license=('LGPL')
 arch=('i686' 'x86_64')
 depends=('openssl')
 makedepends=('cmake' 'doxygen')
-source=("http://www.libssh.org/files/0.4/${pkgname}-${pkgver}.tar.gz")
-md5sums=('d97b3feea1abe047ca8cf86d06e4b789')
+source=("http://www.libssh.org/files/0.5/${pkgname}-${pkgver}.tar.gz")
+md5sums=('9b37f45751c0ae7ba66099c1fb136946')
 
 build() {
-  cd ${srcdir}
+  cd "${srcdir}"
   mkdir build
   cd build
   cmake ../${pkgname}-${pkgver} \
@@ -26,6 +26,6 @@ build() {
 }
 
 package(){
-  cd ${srcdir}/build
-  make DESTDIR=${pkgdir} install
+  cd "${srcdir}"/build
+  make DESTDIR="${pkgdir}" install
 }
diff --git a/extra/mysql/PKGBUILD b/extra/mysql/PKGBUILD
index 269829024..639fd702c 100644
--- a/extra/mysql/PKGBUILD
+++ b/extra/mysql/PKGBUILD
@@ -1,11 +1,11 @@
-# $Id: PKGBUILD 122912 2011-05-07 10:07:11Z andrea $
+# $Id: PKGBUILD 126056 2011-06-01 08:38:57Z andrea $
 # Maintainer: Andrea Scarpino <andrea@archlinux.org>
 # Contributor: Douglas Soares de Andrade <douglas@archlinux.org>
 # Contributor: judd <jvinet@zeroflux.org>
 
 pkgbase=mysql
 pkgname=('libmysqlclient' 'mysql-clients' 'mysql')
-pkgver=5.5.12
+pkgver=5.5.13
 pkgrel=1
 arch=('i686' 'x86_64')
 license=('GPL')
@@ -15,7 +15,7 @@ options=('!libtool')
 source=("http://ftp.gwdg.de/pub/misc/mysql/Downloads/MySQL-5.5/${pkgbase}-${pkgver}.tar.gz"
         'mysqld'
         'my.cnf')
-md5sums=('53d31a0b24f3eb3176185090eff129b9'
+md5sums=('f0e519e90ee7c00fceb0730edf859d7b'
         '2234207625baa29b2ff7d7b4f088abce'
         '1c949c0dbea5206af0db14942d9927b6')
 
@@ -31,8 +31,13 @@ build() {
   cmake ../${pkgbase}-${pkgver} \
     -DCMAKE_BUILD_TYPE=Release \
     -DCMAKE_INSTALL_PREFIX=/usr \
-    -DMYSQL_DATADIR=/var/lib/mysql \
+    -DMANUFACTURER="Arch Linux" \
     -DSYSCONFDIR=/etc/mysql \
+    -DMYSQL_DATADIR=/var/lib/mysql \
+    -DMYSQL_UNIX_ADDR=/var/run/mysqld/mysqld.sock \
+    -DDEFAULT_CHARSET=utf8 \
+    -DDEFAULT_COLLATION=utf8_general_ci \
+    -DENABLED_LOCAL_INFILE=ON \
     -DINSTALL_INFODIR=share/mysql/docs \
     -DINSTALL_MANDIR=share/man \
     -DINSTALL_PLUGINDIR=/usr/lib/mysql/plugin \
@@ -41,23 +46,21 @@ build() {
     -DINSTALL_DOCREADMEDIR=share/mysql \
     -DINSTALL_SUPPORTFILESDIR=share/mysql \
     -DINSTALL_MYSQLSHAREDIR=share/mysql \
+    -DINSTALL_DOCDIR=share/mysql/docs \
    -DINSTALL_SHAREDIR=share/mysql \
    -DWITH_READLINE=ON \
    -DWITH_ZLIB=system \
    -DWITH_SSL=system \
    -DWITH_LIBWRAP=ON \
-    -DDEFAULT_CHARSET=utf8 \
-    -DDEFAULT_COLLATION=utf8_general_ci \
+    -DWITH_MYSQLD_LDFLAGS="${LDFLAGS}" \
    -DWITH_EXTRA_CHARSETS=complex \
    -DWITH_EMBEDDED_SERVER=ON \
-    -DMYSQL_UNIX_ADDR=/var/run/mysqld/mysqld.sock \
-    -DENABLED_LOCAL_INFILE=ON \
+    -DWITH_INNOBASE_STORAGE_ENGINE=1 \
    -DWITH_PARTITION_STORAGE_ENGINE=1 \
    -DWITHOUT_EXAMPLE_STORAGE_ENGINE=1 \
    -DWITHOUT_ARCHIVE_STORAGE_ENGINE=1 \
    -DWITHOUT_BLACKHOLE_STORAGE_ENGINE=1 \
-    -DWITHOUT_FEDERATED_STORAGE_ENGINE=1 \
-    -DWITH_INNOBASE_STORAGE_ENGINE=1
+    -DWITHOUT_FEDERATED_STORAGE_ENGINE=1
 
   make
 }
@@ -71,11 +74,11 @@ package_libmysqlclient(){
     make -C ${dir} DESTDIR="${pkgdir}" install
   done
 
-  install -d "${pkgdir}/usr/bin"
-  install -m755 scripts/mysql_config "${pkgdir}/usr/bin/"
-  install -d "${pkgdir}/usr/share/man/man1"
+  install -d "${pkgdir}"/usr/bin
+  install -m755 scripts/mysql_config "${pkgdir}"/usr/bin/
+  install -d "${pkgdir}"/usr/share/man/man1
   for man in mysql_config mysql_client_test_embedded mysqltest_embedded; do
-    install -m644 "${srcdir}/${pkgbase}-${pkgver}/man/$man.1" "${pkgdir}/usr/share/man/man1/$man.1"
+    install -m644 "${srcdir}"/${pkgbase}-${pkgver}/man/$man.1 "${pkgdir}"/usr/share/man/man1/$man.1
  done
 }
 
@@ -87,9 +90,9 @@ package_mysql-clients(){
   make -C client DESTDIR="${pkgdir}" install
 
   # install man pages
-  install -d "${pkgdir}/usr/share/man/man1"
+  install -d "${pkgdir}"/usr/share/man/man1
   for man in mysql mysqladmin mysqlcheck mysqldump mysqlimport mysqlshow mysqlslap; do
-    install -m644 "${srcdir}/${pkgbase}-${pkgver}/man/$man.1" "${pkgdir}/usr/share/man/man1/$man.1"
+    install -m644 "${srcdir}"/${pkgbase}-${pkgver}/man/$man.1 "${pkgdir}"/usr/share/man/man1/$man.1
   done
 
   # provided by mysql
@@ -104,28 +107,24 @@ package_mysql(){
   optdepends=('perl-dbi' 'perl-dbd-mysql')
 
   cd "${srcdir}"/build
-  make DESTDIR=${pkgdir} install
+  make DESTDIR="${pkgdir}" install
 
-  install -Dm644 ${srcdir}/my.cnf ${pkgdir}/etc/mysql/my.cnf
-  install -Dm755 ${srcdir}/mysqld ${pkgdir}/etc/rc.d/mysqld
+  install -Dm644 "${srcdir}"/my.cnf "${pkgdir}"/etc/mysql/my.cnf
+  install -Dm755 "${srcdir}"/mysqld "${pkgdir}"/etc/rc.d/mysqld
 
   # provided by libmysqlclient
-  rm ${pkgdir}/usr/bin/{mysql_config,mysql_client_test_embedded,mysqltest_embedded}
-  rm ${pkgdir}/usr/lib/libmysql*
-  rm -r ${pkgdir}/usr/include/
-  rm ${pkgdir}/usr/share/man/man1/{mysql_config,mysql_client_test_embedded,mysqltest_embedded}.1
+  rm "${pkgdir}"/usr/bin/{mysql_config,mysql_client_test_embedded,mysqltest_embedded}
+  rm "${pkgdir}"/usr/lib/libmysql*
+  rm -r "${pkgdir}"/usr/include/
+  rm "${pkgdir}"/usr/share/man/man1/{mysql_config,mysql_client_test_embedded,mysqltest_embedded}.1
 
   # provided by mysql-clients
-  rm ${pkgdir}/usr/bin/{mysql,mysqladmin,mysqlcheck,mysqldump,mysqlimport,mysqlshow,mysqlslap}
-  rm ${pkgdir}/usr/share/man/man1/{mysql,mysqladmin,mysqlcheck,mysqldump,mysqlimport,mysqlshow,mysqlslap}.1
+  rm "${pkgdir}"/usr/bin/{mysql,mysqladmin,mysqlcheck,mysqldump,mysqlimport,mysqlshow,mysqlslap}
+  rm "${pkgdir}"/usr/share/man/man1/{mysql,mysqladmin,mysqlcheck,mysqldump,mysqlimport,mysqlshow,mysqlslap}.1
 
   # not needed
-  rm -r ${pkgdir}/usr/{mysql-test,sql-bench}
-
-  # These shouldn't be here
-  rm -r ${pkgdir}/usr/docs
-
-  install -d ${pkgdir}/var/lib/mysql
-  cp -r ${pkgdir}/usr/data/* ${pkgdir}/var/lib/mysql/
-  chmod -R 700 ${pkgdir}/var/lib/mysql
-  rm -r ${pkgdir}/usr/data
+  rm -r "${pkgdir}"/usr/{data,mysql-test,sql-bench}
+  rm "${pkgdir}"/usr/share/man/man1/mysql-test-run.pl.1
+
+  install -dm700 "${pkgdir}"/var/lib/mysql
 }
diff --git a/extra/telepathy-glib/PKGBUILD b/extra/telepathy-glib/PKGBUILD
index 2feeea9b5..7f7261081 100644
--- a/extra/telepathy-glib/PKGBUILD
+++ b/extra/telepathy-glib/PKGBUILD
@@ -1,9 +1,9 @@
-# $Id: PKGBUILD 124130 2011-05-16 17:05:51Z ibiru $
+# $Id: PKGBUILD 126144 2011-06-01 19:05:33Z ibiru $
 # Maintainer: Ionut Biru <ibiru@archlinux.org>
 # Contributor: Kessia 'even' Pinheiro <kessiapinheiro at gmail.com
 # Contributor: Bjorn Lindeijer <bjorn lindeijer nl>
 
 pkgname=telepathy-glib
-pkgver=0.14.6
+pkgver=0.14.7
 pkgrel=1
 pkgdesc="GLib bindings for the Telepathy D-Bus protocol"
 arch=('i686' 'x86_64')
@@ -14,7 +14,7 @@ options=('!libtool' '!emptydirs')
 depends=('dbus-glib')
 makedepends=('libxslt' 'vala' 'gobject-introspection')
 source=("http://telepathy.freedesktop.org/releases/${pkgname}/${pkgname}-${pkgver}.tar.gz")
-md5sums=('ab8cf90283ef3382de1d20d87c9d970a')
+md5sums=('3230f7389cd3a0ebe4436eb1f7e40c18')
 
 build() {
   cd "${srcdir}/${pkgname}-${pkgver}"
diff --git a/extra/vala/PKGBUILD b/extra/vala/PKGBUILD
index 8772c333f..339e2f86d 100644
--- a/extra/vala/PKGBUILD
+++ b/extra/vala/PKGBUILD
@@ -1,9 +1,9 @@
-# $Id: PKGBUILD 117513 2011-04-04 09:41:11Z ibiru $
+# $Id: PKGBUILD 126139 2011-06-01 18:48:36Z ibiru $
 # Maintainer : Ionut Biru <ibiru@archlinux.org>
 # Contributor: Timm Preetz <timm@preetz.us>
 
 pkgname=vala
-pkgver=0.12.0
+pkgver=0.12.1
 pkgrel=1
 pkgdesc="Compiler for the GObject type system"
 arch=('i686' 'x86_64')
@@ -12,8 +12,8 @@ license=('LGPL')
 depends=('glib2')
 makedepends=('libxslt')
 options=('!libtool')
-source=(http://ftp.gnome.org/pub/gnome/sources/${pkgname}/0.12/${pkgname}-${pkgver}.tar.bz2)
-sha256sums=('9a398e16fba2c78c9bcadb05e489c9bc318e34901d43451ac5d2ce4bc46b1225')
+source=(http://ftp.gnome.org/pub/gnome/sources/${pkgname}/0.12/${pkgname}-${pkgver}.tar.xz)
+sha256sums=('f102bb64549ebe88955bb9fa0f502d974413aec71fec88e3544c65adfeb0afb4')
 
 build() {
   cd "${srcdir}/${pkgname}-${pkgver}"
diff --git a/extra/wireshark/PKGBUILD b/extra/wireshark/PKGBUILD
index a83355164..5181428a6 100644
--- a/extra/wireshark/PKGBUILD
+++ b/extra/wireshark/PKGBUILD
@@ -1,17 +1,17 @@
-# $Id: PKGBUILD 121168 2011-04-29 06:39:53Z stephane $
+# $Id: PKGBUILD 126090 2011-06-01 11:36:55Z guillaume $
 # Maintainer: Guillaume ALAUX <guillaume at alaux dot net>
 # Contributor: Florian Pritz <bluewind at jabber dot ccc dot de>
 pkgname=(wireshark-cli wireshark-gtk)
 pkgbase=wireshark
-pkgver=1.4.6
-pkgrel=2
+pkgver=1.4.7
+pkgrel=1
 arch=('i686' 'x86_64')
 license=('GPL2')
 makedepends=('bison' 'flex' 'gtk2' 'krb5' 'libpcap' 'bash' 'gnutls' 'libcap')
 url="http://www.wireshark.org/"
 options=(!libtool)
 source=(http://www.wireshark.org/download/src/${pkgbase}-${pkgver}.tar.bz2)
-md5sums=('fd301004ebc5fac8e56c2f0d4ef6173f')
+md5sums=('b5065426d5524ddc1667314f8256c2b1')
 
 build() {
   cd "${srcdir}/${pkgbase}-${pkgver}"
diff --git a/multilib-testing/lib32-mesa/PKGBUILD b/multilib-testing/lib32-mesa/PKGBUILD
index bcb02cb8c..6e6ac7c57 100644
--- a/multilib-testing/lib32-mesa/PKGBUILD
+++ b/multilib-testing/lib32-mesa/PKGBUILD
@@ -1,4 +1,4 @@
-# $Id: PKGBUILD 48014 2011-05-27 05:54:14Z lcarlier $
+# $Id: PKGBUILD 48547 2011-06-01 17:35:49Z lcarlier $
 # Contributor: Jan de Groot <jgc@archlinux.org>
 # Contributor: Andreas Radke <andyrtr@archlinux.org>
 
@@ -10,7 +10,7 @@ _git=true
 #_git=false
 
 if [ "${_git}" = "true" ]; then
-  pkgver=7.10.99.git20110526
+  pkgver=7.10.99.git20110531
 else
   pkgver=7.10.2
 fi
@@ -22,9 +22,9 @@ makedepends=('glproto>=1.4.12' 'pkgconfig' 'lib32-libdrm>=2.4.25' 'lib32-libxxf8
 url="http://mesa3d.sourceforge.net"
 license=('custom')
 if [ "${_git}" = "true" ]; then
-  # mesa git shot from mastee (will become 7.11) branch - see for state: http://cgit.freedesktop.org/mesa/mesa/commit/?id=f7b3f40b70dc7dd602897d364011089047583c5d
-  source=('ftp://ftp.archlinux.org/other/mesa/mesa-f7b3f40b70dc7dd602897d364011089047583c5d.tar.bz2')
-  md5sums=('ca2b343a0b8077fda38077c547b80fc3')
+  # mesa git shot from mastee (will become 7.11) branch - see for state: http://cgit.freedesktop.org/mesa/mesa/commit/?id=b1246cf13bc4e301b499a95d33e0cab26655fed5
+  source=('ftp://ftp.archlinux.org/other/mesa/mesa-b1246cf13bc4e301b499a95d33e0cab26655fed5.tar.bz2')
+  md5sums=('c2ff7ab905adf67a5d6c8acbbf495b12')
 else
   source=("ftp://ftp.freedesktop.org/pub/mesa/${pkgver}/MesaLib-${pkgver}.tar.bz2"
           nouveau-fix-header.patch)
   md5sums=('f5de82852f1243f42cc004039e10b771'
            '67c87b77cc2236b52a3b47dad3fbb5d4')
@@ -67,11 +67,11 @@ build() {
       --enable-gles1 \
       --enable-gles2 \
       --enable-egl \
+      --enable-texture-float \
       --enable-32-bit \
       --libdir=/usr/lib32
 #      --enable-gallium-svga \
 #      --enable-texture-float (enable floating-point textures and renderbuffers) - http://www.phoronix.com/scan.php?page=news_item&px=OTMzMg
 #      # The source code to implement ARB_texture_float extension is included and can be toggled on at compile time only by those who purchased a license from SGI, or are in a country where the patent does not apply.
 #      --enable-shared-dricore - http://bugs.gentoo.org/show_bug.cgi?id=357177
   else
     ./configure --prefix=/usr \
diff --git a/staging/boost/4994-compile-fix-for-Python32-v2.patch b/staging/boost/4994-compile-fix-for-Python32-v2.patch
new file mode 100644
index 000000000..22613b3f2
--- /dev/null
+++ b/staging/boost/4994-compile-fix-for-Python32-v2.patch
@@ -0,0 +1,16 @@
+Index: libs/python/src/converter/builtin_converters.cpp
+===================================================================
+--- libs/python/src/converter/builtin_converters.cpp	(revision 67279)
++++ libs/python/src/converter/builtin_converters.cpp	(working copy)
+@@ -431,7 +431,11 @@
+       if (!result.empty())
+       {
+           int err = PyUnicode_AsWideChar(
++#if PY_VERSION_HEX >= 0x03020000
++              intermediate
++#else
+               (PyUnicodeObject *)intermediate
++#endif
+             , &result[0]
+             , result.size());
+
diff --git a/staging/boost/PKGBUILD b/staging/boost/PKGBUILD
new file mode 100644
index 000000000..9329f188a
--- /dev/null
+++ b/staging/boost/PKGBUILD
@@ -0,0 +1,112 @@
+# $Id: PKGBUILD 126149 2011-06-01 20:01:54Z ibiru $
+# Maintainer: kevin <kevin@archlinux.org>
+# Contributor: Giovanni Scafora <giovanni@archlinux.org>
+# Contributor: Kritoke <kritoke@gamebox.net>
+# Contributor: Luca Roccia <little_rock@users.sourceforge.net>
+
+pkgbase=boost
+pkgname=('boost-libs' 'boost')
+pkgver=1.46.1
+_boostver=${pkgver//./_}
+pkgrel=2
+arch=('i686' 'x86_64')
+url="http://www.boost.org/"
+makedepends=('icu' 'python' 'python2' 'bzip2' 'zlib' 'openmpi')
+source=(http://downloads.sourceforge.net/sourceforge/${pkgbase}/${pkgbase}_${_boostver}.tar.gz
+        4994-compile-fix-for-Python32-v2.patch
+        boost-1.46.0-spirit.patch)
+license=('custom')
+md5sums=('341e5d993b19d099bf1a548495ea91ec'
+         'cb59e8adbf2a45ef9264a2f4ab92b849'
+         '9d6e2f13fef23bf27d7bdddc104e182a')
+
+_stagedir="${srcdir}/stagedir"
+
+build() {
+  # set python path for bjam
+  cd "${srcdir}/${pkgbase}_${_boostver}/tools"
+  echo "using python : 2.7 : /usr/bin/python2 ;" >> build/v2/user-config.jam
+  echo "using python : 3.2 : /usr/bin/python : /usr/include/python3.2mu : /usr/lib ;" >> build/v2/user-config.jam
+  echo "using mpi ;" >> build/v2/user-config.jam
+
+  # build bjam
+  cd "${srcdir}/${pkgbase}_${_boostver}/tools/build/v2/engine/src"
+  ./build.sh cc
+
+  _bindir="bin.linuxx86"
+  [ "${CARCH}" = "x86_64" ] && _bindir="bin.linuxx86_64"
+
+  install -d "${_stagedir}"/usr/bin
+  install ${_bindir}/bjam "${_stagedir}"/usr/bin/bjam
+
+  # build bcp
+  cd "${srcdir}/${pkgbase}_${_boostver}/tools/bcp"
+  ../build/v2/engine/src/${_bindir}/bjam --toolset=gcc
+  install -m755 "${srcdir}/${pkgbase}_${_boostver}/dist/bin/bcp" \
+    ${_stagedir}/usr/bin/bcp
+
+  # build libs
+  cd "${srcdir}/${pkgbase}_${_boostver}"
+
+  #python 3.2 support
+  #https://svn.boost.org/trac/boost/ticket/4994
+  patch -Np0 -i "${srcdir}/4994-compile-fix-for-Python32-v2.patch"
+  patch -Np0 -i "${srcdir}/boost-1.46.0-spirit.patch"
+
+  # default "minimal" install: "release link=shared,static
+  # runtime-link=shared threading=single,multi"
+  # --layout=tagged will add the "-mt" suffix for multithreaded libraries
+  # and installs includes in /usr/include/boost.
+  # --layout=system no longer adds the -mt suffix for multi-threaded libs.
+  # install to ${_stagedir} in preparation for split packaging
+
+  ./tools/build/v2/engine/src/${_bindir}/bjam \
+    release debug-symbols=off threading=multi \
+    runtime-link=shared link=shared,static \
+    cflags=-fno-strict-aliasing \
+    toolset=gcc \
+    --prefix="${_stagedir}" \
+    -sTOOLS=gcc \
+    --layout=system \
+    ${MAKEFLAGS} \
+    install
+
+  # pyste is unmaintained: http://www.boost.org/doc/libs/1_46_0/libs/python/doc/index.html
+  # build pyste
+  #cd "${srcdir}/${pkgbase}_${_boostver}/libs/python/pyste/install"
+  #python2 setup.py install --root=${_stagedir} --optimize=1
+}
+
+package_boost() {
+  pkgdesc="Free peer-reviewed portable C++ source libraries - Development"
+  depends=("boost-libs=${pkgver}")
+  optdepends=('python: for python bindings'
+              'python2: for python2 bindings')
+
+  install -d "${pkgdir}"/usr/{include,lib}
+  # headers/source files
+  cp -r "${_stagedir}"/include/ "${pkgdir}"/usr/
+
+  # static libs
+  cp -r "${_stagedir}"/lib/*.a "${pkgdir}"/usr/lib/
+
+  # utilities (bjam, bcp, pyste)
+  cp -r "${_stagedir}"/usr/* "${pkgdir}"/usr/
+
+  # license
+  install -D -m644 "${srcdir}/${pkgbase}_${_boostver}/LICENSE_1_0.txt" \
+    "${pkgdir}"/usr/share/licenses/boost/LICENSE_1_0.txt
+}
+
+package_boost-libs() {
+  pkgdesc="Free peer-reviewed portable C++ source libraries - Runtime"
+  depends=('gcc-libs' 'bzip2' 'zlib' 'icu')
+  optdepends=('openmpi: for mpi support')
+
+  install -d "${pkgdir}/usr/lib"
+  #shared libs
+  cp -r "${_stagedir}"/lib/*.so{,.*} "${pkgdir}/usr/lib/"
+
+  # license
+  install -D -m644 "${srcdir}/${pkgbase}_${_boostver}/LICENSE_1_0.txt" \
+    "${pkgdir}"/usr/share/licenses/boost-libs/LICENSE_1_0.txt
+}
diff --git a/staging/boost/boost-1.46.0-spirit.patch b/staging/boost/boost-1.46.0-spirit.patch
new file mode 100644
index 000000000..6fae331ee
--- /dev/null
+++ b/staging/boost/boost-1.46.0-spirit.patch
@@ -0,0 +1,59 @@
+Index: boost/spirit/home/qi/nonterminal/detail/parameterized.hpp
+===================================================================
+--- boost/spirit/home/qi/nonterminal/detail/parameterized.hpp	(revision 68724)
++++ boost/spirit/home/qi/nonterminal/detail/parameterized.hpp	(revision 68725)
+@@ -14,6 +14,7 @@
+
+ #include <boost/ref.hpp>
+
++#include <boost/spirit/home/support/handles_container.hpp>
+ #include <boost/spirit/home/qi/parser.hpp>
+
+ namespace boost { namespace spirit { namespace qi
+@@ -59,4 +60,16 @@ namespace boost { namespace spirit { nam
+     };
+ }}}
+
++namespace boost { namespace spirit { namespace traits
++{
++    ///////////////////////////////////////////////////////////////////////////
++    template <typename Subject, typename Params, typename Attribute
++      , typename Context, typename Iterator>
++    struct handles_container<qi::parameterized_nonterminal<Subject, Params>
++      , Attribute, Context, Iterator>
++      : handles_container<typename remove_const<Subject>::type
++        , Attribute, Context, Iterator>
++    {};
++}}}
++
+ #endif
+Index: boost/spirit/home/karma/nonterminal/detail/parameterized.hpp
+===================================================================
+--- boost/spirit/home/karma/nonterminal/detail/parameterized.hpp	(revision 68724)
++++ boost/spirit/home/karma/nonterminal/detail/parameterized.hpp	(revision 68725)
+@@ -14,6 +14,7 @@
+
+ #include <boost/ref.hpp>
+
++#include <boost/spirit/home/support/handles_container.hpp>
+ #include <boost/spirit/home/karma/generator.hpp>
+
+ namespace boost { namespace spirit { namespace karma
+@@ -60,4 +61,17 @@ namespace boost { namespace spirit { nam
+     };
+ }}}
+
++
++namespace boost { namespace spirit { namespace traits
++{
++    ///////////////////////////////////////////////////////////////////////////
++    template <typename Subject, typename Params, typename Attribute
++      , typename Context, typename Iterator>
++    struct handles_container<karma::parameterized_nonterminal<Subject, Params>
++      , Attribute, Context, Iterator>
++      : handles_container<typename remove_const<Subject>::type
++        , Attribute, Context, Iterator>
++    {};
++}}}
++
+ #endif
diff --git a/staging/brltty/PKGBUILD b/staging/brltty/PKGBUILD
new file mode 100644
index 000000000..e173c7e6c
--- /dev/null
+++ b/staging/brltty/PKGBUILD
@@ -0,0 +1,49 @@
+# $Id: PKGBUILD 126155 2011-06-01 20:36:52Z andyrtr $
+# Maintainer:
+# Contributor: Jan de Groot <jgc@archlinux.org>
+# Contributor: Giovanni Scafora <giovanni@archlinux.org>
+
+pkgname=brltty
+pkgver=4.2
+pkgrel=4
+pkgdesc="Braille display driver for Linux/Unix"
+arch=('i686' 'x86_64')
+url="http://mielke.cc/brltty"
+license=('GPL' 'LGPL')
+depends=('libxaw' 'at-spi' 'gpm' 'icu' 'python2' 'tcl' 'atk' 'libxtst')
+makedepends=('pyrex' 'bluez')
+optdepends=('bluez: bluetooth support')
+backup=(etc/brltty.conf etc/conf.d/brltty.conf)
+options=('!makeflags' '!emptydirs')
+source=(http://mielke.cc/${pkgname}/releases/${pkgname}-${pkgver}.tar.gz
+        'brltty-4.2-S_ISCHR.patch'
+        'brltty'
+        'brltty.conf')
+md5sums=('192af5e634210616928496645e392097'
+         '5954b289efaf2ff17676d06de9a88854'
+         '831ebaf0c56091702929c68805d20c4f'
+         'a8ab8b3dd059e96e1734bc9cdcf844fc')
+
+build() {
+  cd "${srcdir}/${pkgname}-${pkgver}"
+  CFLAGS+="${CFLAGS} -D_GNU_SOURCE" \
+  ./configure --prefix=/usr \
+      --sysconfdir=/etc \
+      --mandir=/usr/share/man \
+      --localstatedir=/var \
+      --enable-gpm \
+      --disable-java-bindings \
+      --disable-caml-bindings \
+      PYTHON=/usr/bin/python2
+
+  patch -Np1 -i ${srcdir}/brltty-4.2-S_ISCHR.patch
+  make
+}
+
+package() {
+  cd "${srcdir}/${pkgname}-${pkgver}"
+  make INSTALL_ROOT="${pkgdir}" install
+  install -D -m755 ${srcdir}/brltty ${pkgdir}/etc/rc.d/brltty
+  install -D -m644 ${srcdir}/brltty.conf ${pkgdir}/etc/conf.d/brltty.conf
+  install -D -m644 Documents/brltty.conf ${pkgdir}/etc/brltty.conf
+}
diff --git a/staging/brltty/brltty b/staging/brltty/brltty
new file mode 100755
index 000000000..5ed21a52d
--- /dev/null
+++ b/staging/brltty/brltty
@@ -0,0 +1,68 @@
+#!/bin/bash
+
+daemon_name=brltty
+
+. /etc/rc.conf
+. /etc/rc.d/functions
+. /etc/conf.d/$daemon_name.conf
+
+get_pid() {
+  pidof -o %PPID $daemon_name
+}
+
+case "$1" in
+  start)
+    stat_busy "Starting $daemon_name daemon"
+
+    PID=$(get_pid)
+    if [ -z "$PID" ]; then
+      [ -f /var/run/$daemon_name.pid ] && rm -f /var/run/$daemon_name.pid
+      # RUN
+      $daemon_name $brltty_args
+      #
+      if [ $? -gt 0 ]; then
+        stat_fail
+        exit 1
+      else
+        echo $(get_pid) > /var/run/$daemon_name.pid
+        add_daemon $daemon_name
+        stat_done
+      fi
+    else
+      stat_fail
+      exit 1
+    fi
+    ;;
+
+  stop)
+    stat_busy "Stopping $daemon_name daemon"
+    PID=$(get_pid)
+    # KILL
+    [ ! -z "$PID" ] && kill $PID &> /dev/null
+    #
+    if [ $? -gt 0 ]; then
+      stat_fail
+      exit 1
+    else
+      rm -f /var/run/$daemon_name.pid &> /dev/null
+      rm_daemon $daemon_name
+      stat_done
+    fi
+    ;;
+
+  restart)
+    $0 stop
+    sleep 3
+    $0 start
+    ;;
+
+  status)
+    stat_busy "Checking $daemon_name status";
+    ck_status $daemon_name
+    ;;
+
+  *)
+    echo "usage: $0 {start|stop|restart|status}"
+esac
+
+exit 0
diff --git a/staging/brltty/brltty-4.2-S_ISCHR.patch b/staging/brltty/brltty-4.2-S_ISCHR.patch
new file mode 100644
index 000000000..eddac9616
--- /dev/null
+++ b/staging/brltty/brltty-4.2-S_ISCHR.patch
@@ -0,0 +1,11 @@
+diff -up brltty-4.2/Programs/sys_linux.c.BAD brltty-4.2/Programs/sys_linux.c
+--- brltty-4.2/Programs/sys_linux.c.BAD	2010-05-21 09:04:10.003122084 -0400
++++ brltty-4.2/Programs/sys_linux.c	2010-05-21 09:04:19.376198268 -0400
+@@ -23,6 +23,7 @@
+ #include <errno.h>
+ #include <fcntl.h>
+ #include <sys/ioctl.h>
++#include <sys/stat.h>
+ #include <linux/kd.h>
+
+ #ifdef HAVE_LINUX_INPUT_H
diff --git a/staging/brltty/brltty.conf b/staging/brltty/brltty.conf
new file mode 100644
index 000000000..94115e1d5
--- /dev/null
+++ b/staging/brltty/brltty.conf
@@ -0,0 +1,2 @@
+# Specify any arguments to pass to brltty here.
+brltty_args=""
\ No newline at end of file diff --git a/staging/enchant/PKGBUILD b/staging/enchant/PKGBUILD new file mode 100644 index 000000000..1f95981e7 --- /dev/null +++ b/staging/enchant/PKGBUILD @@ -0,0 +1,30 @@ +# $Id: PKGBUILD 126152 2011-06-01 20:31:07Z andyrtr $ +# Maintainer: Andrea Scarpino <andrea@archlinux.org> +# Contributor: dorphell <dorphell@archlinux.org> + +pkgname=enchant +pkgver=1.6.0 +pkgrel=2 +pkgdesc="A wrapper library for generic spell checking" +arch=('i686' 'x86_64') +url="http://www.abisource.com/enchant/" +license=('LGPL') +depends=('aspell' 'dbus-glib' 'hunspell') +makedepends=('hspell') +options=('!libtool') +source=("http://www.abisource.com/downloads/${pkgname}/${pkgver}/${pkgname}-${pkgver}.tar.gz") +md5sums=('de11011aff801dc61042828041fb59c7') + +build() { + cd "${srcdir}/${pkgname}-${pkgver}" + ./configure --prefix=/usr \ + --disable-static \ + --disable-ispell \ + --with-myspell-dir=/usr/share/myspell + make +} + +package() { + cd "${srcdir}/${pkgname}-${pkgver}" + make DESTDIR="${pkgdir}" install +} diff --git a/staging/gptfdisk/PKGBUILD b/staging/gptfdisk/PKGBUILD new file mode 100644 index 000000000..c3e5215e6 --- /dev/null +++ b/staging/gptfdisk/PKGBUILD @@ -0,0 +1,35 @@ +# $Id: PKGBUILD 126131 2011-06-01 17:33:51Z foutrelis $ +# Maintainer: Evangelos Foutras <foutrelis@gmail.com> +# Contributor: Tobias Powalowski <tpowa@archlinux.org> +# Contributor: Hokum <hokum_at_mail_dot_ru> + +pkgname=gptfdisk +pkgver=0.7.1 +pkgrel=3 +pkgdesc="A text-mode partitioning tool that works on Globally Unique Identifier (GUID) Partition Table (GPT) disks" +arch=('i686' 'x86_64') +url="http://www.rodsbooks.com/gdisk/" +license=('GPL2') +depends=('gcc-libs' 'util-linux' 'popt' 'icu') +provides=('gdisk') +conflicts=('gdisk') +replaces=('gdisk') +source=(http://downloads.sourceforge.net/project/$pkgname/$pkgname/$pkgver/$pkgname-$pkgver.tgz) +md5sums=('7c8d810df61e81c821bef399b832e89e') + +build() { + cd "$srcdir/$pkgname-$pkgver" + + make +} + +package () { + cd "$srcdir/$pkgname-$pkgver" + + install -d "$pkgdir"/{sbin,usr/share/{man/man8,gdisk}} + install -t "$pkgdir/sbin" gdisk sgdisk fixparts + install -m644 -t "$pkgdir/usr/share/man/man8" {gdisk,sgdisk}.8 + install -m644 -t "$pkgdir/usr/share/gdisk" README NEWS +} + +# vim:set ts=2 sw=2 et: diff --git a/staging/hunspell/PKGBUILD b/staging/hunspell/PKGBUILD new file mode 100644 index 000000000..fefa04db4 --- /dev/null +++ b/staging/hunspell/PKGBUILD @@ -0,0 +1,28 @@ +# $Id: PKGBUILD 125861 2011-05-30 15:55:43Z andyrtr $ +# Maintainer: Andreas Radke <andyrtr@archlinux.org> +# Contributor: Hussam Al-Tayeb <ht990332@gmail.com> + +pkgname=hunspell +pkgver=1.3.2 +pkgrel=1 +pkgdesc="Spell checker and morphological analyzer library and program" +arch=('i686' 'x86_64') +url="http://hunspell.sourceforge.net/" +license=('GPL' 'LGPL' 'MPL') +depends=('gcc-libs' 'readline') +optdepends=('perl: for ispellaff2myspell') +options=('!libtool') +source=(http://downloads.sourceforge.net/hunspell/hunspell-$pkgver.tar.gz) +md5sums=('3121aaf3e13e5d88dfff13fb4a5f1ab8') + +build() { + cd "$srcdir/hunspell-$pkgver" + ./configure --prefix=/usr --disable-static \ + --with-ui --with-readline --with-experimental + make +} + +package() { + cd "$srcdir/hunspell-$pkgver" + make DESTDIR="$pkgdir" install +} diff --git a/staging/kdesdk/PKGBUILD b/staging/kdesdk/PKGBUILD new file mode 100644 index 000000000..afb12e671 --- /dev/null +++ b/staging/kdesdk/PKGBUILD @@ -0,0 +1,243 @@ +# $Id: PKGBUILD 126168 2011-06-02 00:51:07Z andrea $ +# Maintainer: Andrea Scarpino 
<andrea@archlinux.org> +# Contributor: Pierre Schmitz <pierre@archlinux.de> + +pkgbase=kdesdk +pkgname=('kdesdk-cervisia' + 'kdesdk-dolphin-plugins' + 'kdesdk-kapptemplate' + 'kdesdk-kate' + 'kdesdk-kcachegrind' + 'kdesdk-kdeaccounts-plugin' + 'kdesdk-kdepalettes' + 'kdesdk-kioslave' + 'kdesdk-kmtrace' + 'kdesdk-kompare' + 'kdesdk-kpartloader' + 'kdesdk-kprofilemethod' + 'kdesdk-kstartperf' + 'kdesdk-kuiviewer' + 'kdesdk-lokalize' + 'kdesdk-okteta' + 'kdesdk-poxml' + 'kdesdk-scripts' + 'kdesdk-strigi-analyzer' + 'kdesdk-umbrello') +pkgver=4.6.3 +pkgrel=2 +arch=('i686' 'x86_64') +url='http://www.kde.org' +license=('GPL' 'LGPL' 'FDL') +groups=('kde' 'kdesdk') +makedepends=('pkgconfig' 'cmake' 'automoc4' 'boost' 'kdepim-runtime' 'subversion' + 'antlr2' 'kdebase-konqueror') +source=("http://download.kde.org/stable/${pkgver}/src/${pkgbase}-${pkgver}.tar.bz2" + 'fix-python2-path.patch') +sha1sums=('6faecbd828fda6cf0aced642287d982d3541d746' + 'd05ca0231869c484fd3861955d960a60aff7dcfb') + +build() { + cd ${srcdir}/${pkgbase}-${pkgver} + + # Fix python2 path + patch -Np1 -i ${srcdir}/fix-python2-path.patch + sed -i -e "s|#![ ]*/usr/bin/python$|#!/usr/bin/python2|" \ + -e "s|#![ ]*/usr/bin/env python$|#!/usr/bin/env python2|" \ + $(find . -name '*.py') + + cd ${srcdir} + mkdir build + cd build + cmake ../${pkgbase}-${pkgver} \ + -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_SKIP_RPATH=ON \ + -DCMAKE_INSTALL_PREFIX=/usr + make +} + +package_kdesdk-cervisia() { + pkgdesc='CVS Frontend' + depends=('kdebase-runtime') + url="http://kde.org/applications/development/cervisia/" + install='kdesdk.install' + cd $srcdir/build/cervisia + make DESTDIR=$pkgdir install + cd $srcdir/build/doc/cervisia + make DESTDIR=$pkgdir install +} + +package_kdesdk-dolphin-plugins() { + pkgdesc='Extra Dolphin plugins' + depends=('kdebase-dolphin' 'subversion' 'git' 'kdesdk-kompare') + install='kdesdk.install' + cd $srcdir/build/dolphin-plugins/git + make DESTDIR=$pkgdir install + cd $srcdir/build/dolphin-plugins/svn + make DESTDIR=$pkgdir install +} + +package_kdesdk-kapptemplate() { + pkgdesc='KDE Template Generator' + depends=('kdebase-runtime') + url="http://kde.org/applications/development/kapptemplate/" + install='kdesdk.install' + cd $srcdir/build/kapptemplate + make DESTDIR=$pkgdir install + cd $srcdir/build/doc/kapptemplate + make DESTDIR=$pkgdir install +} + +package_kdesdk-kate() { + pkgdesc='Advanced Text Editor' + depends=('kdebase-runtime' 'kdebase-lib') + url="http://kde.org/applications/utilities/kate/" + install='kdesdk-kate.install' + cd $srcdir/build/kate + make DESTDIR=$pkgdir install + cd $srcdir/build/doc/kate + make DESTDIR=$pkgdir install +} + +package_kdesdk-kcachegrind() { + pkgdesc='Visualization of Performance Profiling Data' + depends=('kdebase-runtime' 'python2') + optdepends=('php: PHP support') + url="http://kde.org/applications/development/kcachegrind/" + install='kdesdk.install' + cd $srcdir/build/kcachegrind + make DESTDIR=$pkgdir install + cd $srcdir/build/doc/kcachegrind + make DESTDIR=$pkgdir install +} + +package_kdesdk-kdeaccounts-plugin() { + pkgdesc='KDE Repository Accounts' + depends=('kdepim-runtime') + cd $srcdir/build/kdeaccounts-plugin + make DESTDIR=$pkgdir install +} + +package_kdesdk-kdepalettes() { + pkgdesc='Palettes for the Gimp that match the KDE standard color palette' + optdepends=('gimp') + install -D -m644 $srcdir/${pkgbase}-${pkgver}/kdepalettes/KDE_Gimp \ + $pkgdir/usr/share/gimp/2.0/palettes/KDE.gpl +} + +package_kdesdk-kioslave() { + pkgdesc='KDED Subversion Module' + 
depends=('kdebase-runtime' 'subversion')
+  cd $srcdir/build/kioslave
+  make DESTDIR=$pkgdir install
+}
+
+package_kdesdk-kmtrace() {
+  pkgdesc='A KDE tool to assist with malloc debugging using the glibc "mtrace" functionality'
+  depends=('kdebase-runtime')
+  cd $srcdir/build/kmtrace
+  make DESTDIR=$pkgdir install
+  cd $srcdir/build/doc/kmtrace
+  make DESTDIR=$pkgdir install
+}
+
+package_kdesdk-kompare() {
+  pkgdesc='Diff/Patch Frontend'
+  depends=('kdebase-runtime')
+  url="http://kde.org/applications/development/kompare/"
+  install='kdesdk.install'
+  cd $srcdir/build/kompare
+  make DESTDIR=$pkgdir install
+  cd $srcdir/build/doc/kompare
+  make DESTDIR=$pkgdir install
+}
+
+package_kdesdk-kpartloader() {
+  pkgdesc='A test application for KParts'
+  depends=('kdebase-runtime')
+  install='kdesdk.install'
+  cd $srcdir/build/kpartloader
+  make DESTDIR=$pkgdir install
+}
+
+package_kdesdk-kprofilemethod() {
+  pkgdesc='Macros to help with profiling'
+  cd $srcdir/build/kprofilemethod
+  make DESTDIR=$pkgdir install
+}
+
+package_kdesdk-kstartperf() {
+  pkgdesc='Startup time measurement tool for KDE applications'
+  depends=('kdebase-runtime')
+  cd $srcdir/build/kstartperf
+  make DESTDIR=$pkgdir install
+}
+
+package_kdesdk-kuiviewer() {
+  pkgdesc='Qt Designer UI File Viewer'
+  depends=('kdebase-runtime')
+  url="http://kde.org/applications/development/kuiviewer/"
+  install='kdesdk.install'
+  cd $srcdir/build/kuiviewer
+  make DESTDIR=$pkgdir install
+}
+
+package_kdesdk-lokalize() {
+  pkgdesc='Computer-Aided Translation System'
+  depends=('kdebase-runtime' 'kdebindings-python')
+  url="http://kde.org/applications/development/lokalize/"
+  optdepends=('translate-toolkit: enable extra python script')
+  install='kdesdk.install'
+  cd $srcdir/build/lokalize
+  make DESTDIR=$pkgdir install
+  cd $srcdir/build/doc/lokalize
+  make DESTDIR=$pkgdir install
+}
+
+package_kdesdk-okteta() {
+  pkgdesc='Hex Editor'
+  depends=('kdebase-runtime')
+  replaces=('kdeutils-okteta')
+  conflicts=('kdeutils-okteta')
+  url="http://kde.org/applications/utilities/okteta"
+  install='kdesdk-okteta.install'
+  cd $srcdir/build/okteta
+  make DESTDIR=$pkgdir install
+  cd $srcdir/build/doc/okteta
+  make DESTDIR=$pkgdir install
+}
+
+package_kdesdk-poxml() {
+  pkgdesc='Translates DocBook XML files using gettext po files'
+  depends=('qt' 'antlr2')
+  cd $srcdir/build/poxml
+  make DESTDIR=$pkgdir install
+  cd $srcdir/build/doc/poxml
+  make DESTDIR=$pkgdir install
+}
+
+package_kdesdk-scripts() {
+  pkgdesc='KDE SDK scripts'
+  depends=('python2')
+  cd $srcdir/build/scripts
+  make DESTDIR=$pkgdir install
+  cd $srcdir/build/doc/scripts
+  make DESTDIR=$pkgdir install
+}
+
+package_kdesdk-strigi-analyzer() {
+  pkgdesc='Strigi-Analyzer for KDE SDK'
+  depends=('kdelibs')
+  cd $srcdir/build/strigi-analyzer
+  make DESTDIR=$pkgdir install
+}
+
+package_kdesdk-umbrello() {
+  pkgdesc='UML Modeller'
+  depends=('kdebase-runtime')
+  url="http://kde.org/applications/development/umbrello/"
+  install='kdesdk.install'
+  cd $srcdir/build/umbrello
+  make DESTDIR=$pkgdir install
+  cd $srcdir/build/doc/umbrello
+  make DESTDIR=$pkgdir install
+}
diff --git a/staging/kdesdk/fix-python2-path.patch b/staging/kdesdk/fix-python2-path.patch
new file mode 100644
index 000000000..c2c0745d1
--- /dev/null
+++ b/staging/kdesdk/fix-python2-path.patch
@@ -0,0 +1,64 @@
+--- kdesdk-4.5.80/kcachegrind/converters/hotshot2calltree~	2010-11-24 11:53:38.586666671 +0100
++++ kdesdk-4.5.80/kcachegrind/converters/hotshot2calltree	2010-11-24 11:53:38.623333337 +0100
+@@ -1,4 +1,4 @@
+-#!/usr/bin/env python ++#!/usr/bin/env python2 + # _*_ coding: latin1 _*_ + + # +--- kdesdk-4.5.80/lokalize/scripts/msgmerge.py~ 2010-11-24 11:22:42.120000002 +0100 ++++ kdesdk-4.5.80/lokalize/scripts/msgmerge.py 2010-11-24 11:22:42.146666670 +0100 +@@ -114,7 +114,7 @@ + print >>sys.stderr, "Execution failed:", e + + cmd='%s/odf/xliffmerge.py -i %s -t %s -o %s' % (ourPath,xliffpathname,xlifftemplatepathname,xliffpathname) +- if os.name!='nt': cmd='python '+cmd ++ if os.name!='nt': cmd='python2 '+cmd + else: cmd=cmd.replace('/','\\') + os.system(cmd) + +--- kdesdk-4.5.80/lokalize/scripts/xliff2odf.py~ 2010-11-24 11:24:10.853333336 +0100 ++++ kdesdk-4.5.80/lokalize/scripts/xliff2odf.py 2010-11-24 11:24:10.883333336 +0100 +@@ -42,7 +42,7 @@ + xliff2odf.convertxliff(xliffinput, translatedodfpathname, odf) + + ourpath=([p for p in sys.path if os.path.exists(p+'/xliff2odf.py')]+[''])[0] +- os.system('python "'+ourpath+'/xliff2odf-standalone.py" "%s" "%s" &'%(translatedodfpathname, Editor.currentEntryId())) ++ os.system('python2 "'+ourpath+'/xliff2odf-standalone.py" "%s" "%s" &'%(translatedodfpathname, Editor.currentEntryId())) + + try: convert() + except: print 'error occured' +--- kdesdk-4.5.80/scripts/rename_source_files~ 2010-11-24 11:45:41.040000004 +0100 ++++ kdesdk-4.5.80/scripts/rename_source_files 2010-11-24 11:45:41.093333336 +0100 +@@ -1,4 +1,4 @@ +-#! /usr/bin/env python ++#! /usr/bin/env python2 + # + # Copyright David Faure <faure@kde.org>, License LGPL v2 + # +--- kdesdk-4.5.80/scripts/svn2log.sh~ 2010-11-24 11:46:24.863333337 +0100 ++++ kdesdk-4.5.80/scripts/svn2log.sh 2010-11-24 11:46:24.896666669 +0100 +@@ -17,6 +17,6 @@ + svn cat svn://anonsvn.kde.org/home/kde/trunk/kde-common/accounts > /tmp/accounts.$PPID + + echo "Creating changelog..."; +-svn log -v --xml $1 | python $CURRENT/svn2log.py --users=/tmp/accounts.$PPID --users-charset=UTF8 ++svn log -v --xml $1 | python2 $CURRENT/svn2log.py --users=/tmp/accounts.$PPID --users-charset=UTF8 + + rm /tmp/accounts.$PPID +--- kdesdk-4.5.80/scripts/kde_generate_export_header~ 2010-11-24 11:48:49.696666669 +0100 ++++ kdesdk-4.5.80/scripts/kde_generate_export_header 2010-11-24 11:48:49.753333338 +0100 +@@ -1,4 +1,4 @@ +-#! /usr/bin/env python ++#! /usr/bin/env python2 + + import os, sys, string + +--- kdesdk-4.5.80/scripts/reviewboarddiff~ 2010-11-24 11:49:37.686666670 +0100 ++++ kdesdk-4.5.80/scripts/reviewboarddiff 2010-11-24 11:49:37.740000003 +0100 +@@ -1,4 +1,4 @@ +-#!/usr/bin/env python ++#!/usr/bin/env python2 + # encoding: utf-8 + # + # Generates reviewboard compatible diffs from git-svn repositories. 
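The fix-python2-path.patch above only rewrites the files where a python call is baked into a command string; the kdesdk build() additionally retargets every Python shebang with a single sed pass. A minimal standalone sketch of that idiom in plain shell, run from the top of whatever source tree needs it (the sed expressions are the same ones the PKGBUILD uses):

  # rewrite "/usr/bin/python" and "/usr/bin/env python" shebangs to python2
  find . -name '*.py' -print0 | xargs -0 \
      sed -i -e "s|#![ ]*/usr/bin/python$|#!/usr/bin/python2|" \
             -e "s|#![ ]*/usr/bin/env python$|#!/usr/bin/env python2|"

Using -print0/xargs -0 instead of the PKGBUILD's $(find ...) substitution keeps filenames containing spaces intact; the expressions are otherwise unchanged.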
diff --git a/staging/kdesdk/kdesdk-kate.install b/staging/kdesdk/kdesdk-kate.install new file mode 100644 index 000000000..a60d358ce --- /dev/null +++ b/staging/kdesdk/kdesdk-kate.install @@ -0,0 +1,12 @@ +post_install() { + xdg-icon-resource forceupdate --theme hicolor &> /dev/null + update-mime-database usr/share/mime &> /dev/null +} + +post_upgrade() { + post_install +} + +post_remove() { + post_install +} diff --git a/staging/kdesdk/kdesdk-okteta.install b/staging/kdesdk/kdesdk-okteta.install new file mode 100644 index 000000000..3f06b8deb --- /dev/null +++ b/staging/kdesdk/kdesdk-okteta.install @@ -0,0 +1,12 @@ +post_install() { + xdg-icon-resource forceupdate --theme hicolor &> /dev/null + update-mime-database usr/share/mime &> /dev/null +} + +post_upgrade() { + post_install +} + +post_remove() { + post_install +} diff --git a/staging/kdesdk/kdesdk.install b/staging/kdesdk/kdesdk.install new file mode 100644 index 000000000..e70c054ec --- /dev/null +++ b/staging/kdesdk/kdesdk.install @@ -0,0 +1,11 @@ +post_install() { + xdg-icon-resource forceupdate --theme hicolor &> /dev/null +} + +post_upgrade() { + post_install +} + +post_remove() { + post_install +} diff --git a/staging/libwebkit/PKGBUILD b/staging/libwebkit/PKGBUILD new file mode 100644 index 000000000..032056ae3 --- /dev/null +++ b/staging/libwebkit/PKGBUILD @@ -0,0 +1,61 @@ +# $Id: PKGBUILD 126165 2011-06-01 22:31:25Z andyrtr $ +# Maintainer: Andreas Radke <andyrtr@archlinux.org> + +pkgbase=libwebkit +pkgname=(libwebkit libwebkit3) +pkgver=1.4.0 +pkgrel=2 +pkgdesc="An opensource web content engine" +arch=('i686' 'x86_64') +url="http://webkitgtk.org/" +license=('custom') +depends=('libxt' 'libxslt' 'sqlite3' 'icu' 'gstreamer0.10-base' 'libsoup' 'enchant') +makedepends=('gperf' 'gtk-doc' 'gobject-introspection' 'python2' 'gtk2' 'gtk3') +options=('!libtool') +install=libwebkit.install +source=(http://webkitgtk.org/webkit-${pkgver}.tar.gz + gcc46.patch + replace-switch-with-given-when.patch) +md5sums=('10c969db3b5484c71df1aa9a338377ff' + '970a2fa91b9827dff8e9b9edb4867701' + '3ba708a26b7af0e1e853867966fe14f7') + +build() { + cd "${srcdir}/webkit-${pkgver}" + patch -Np1 -i "${srcdir}/gcc46.patch" + patch -Np1 -i "${srcdir}/replace-switch-with-given-when.patch" + mkdir build-gtk{2,3} + + ( cd build-gtk2 && _build --with-gtk=2.0 ) + ( cd build-gtk3 && _build --with-gtk=3.0 ) +} + +_build() { + PYTHON=/usr/bin/python2 ../configure --prefix=/usr \ + --enable-introspection \ + --with-font-backend=freetype --enable-gtk-doc \ + --with-unicode-backend=icu \ + --enable-spellcheck "$@" + make all stamp-po +} + +package_libwebkit() { + pkgdesc+=" (for GTK2)" + depends+=(gtk2) + provides=('webkitgtk-svn') + conflicts=('webkitgtk-svn') + replaces=('webkitgtk-svn') + + cd "$srcdir/webkit-$pkgver/build-gtk2" + make DESTDIR="${pkgdir}" install + install -Dm644 ../Source/WebKit/LICENSE "${pkgdir}/usr/share/licenses/libwebkit/LICENSE" +} + +package_libwebkit3() { + pkgdesc+=" (for GTK3)" + depends+=(gtk3) + + cd "${srcdir}/webkit-${pkgver}/build-gtk3" + make DESTDIR="${pkgdir}" install + install -Dm644 ../Source/WebKit/LICENSE "${pkgdir}/usr/share/licenses/libwebkit3/LICENSE" +} diff --git a/staging/libwebkit/gcc46.patch b/staging/libwebkit/gcc46.patch new file mode 100644 index 000000000..befd892d4 --- /dev/null +++ b/staging/libwebkit/gcc46.patch @@ -0,0 +1,11 @@ +Index: trunk/Source/WebCore/dom/make_names.pl
+===================================================================
+--- trunk/Source/WebCore/dom/make_names.pl (revision 73989)
++++ trunk/Source/WebCore/dom/make_names.pl (revision 84123)
+@@ -66,5 +66,5 @@
+ $gccLocation = "/usr/bin/gcc";
+ }
+-my $preprocessor = $gccLocation . " -E -P -x c++";
++my $preprocessor = $gccLocation . " -E -x c++";
+
+ GetOptions(
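The entire gcc46.patch above is the removal of -P: with it, the preprocessor strips the linemarkers (and, as of gcc 4.6, the blank-line layout) that the make_names.pl parser appears to rely on. The difference is easy to inspect by hand; some_header.h here is a placeholder for any small C++ header, not a file from the WebKit tree:

  # with -P, gcc emits no '# <line> "<file>"' markers and squeezes blank lines
  gcc -E -P -x c++ some_header.h | head
  # without it, linemarkers and the original line layout survive
  gcc -E -x c++ some_header.h | head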
diff --git a/staging/libwebkit/libwebkit.install b/staging/libwebkit/libwebkit.install new file mode 100644 index 000000000..24072f316 --- /dev/null +++ b/staging/libwebkit/libwebkit.install @@ -0,0 +1,11 @@ +post_install() { + usr/bin/glib-compile-schemas usr/share/glib-2.0/schemas +} + +post_upgrade() { + post_install +} + +post_remove() { + post_install +} diff --git a/staging/libwebkit/replace-switch-with-given-when.patch b/staging/libwebkit/replace-switch-with-given-when.patch new file mode 100644 index 000000000..4e64610bb --- /dev/null +++ b/staging/libwebkit/replace-switch-with-given-when.patch @@ -0,0 +1,45 @@ +diff -upr webkit-1.4.0.orig/Source/WebCore/make-hash-tools.pl webkit-1.4.0/Source/WebCore/make-hash-tools.pl +--- webkit-1.4.0.orig/Source/WebCore/make-hash-tools.pl 2011-04-25 22:27:15.000000000 +0300 ++++ webkit-1.4.0/Source/WebCore/make-hash-tools.pl 2011-06-02 00:41:26.000000000 +0300 +@@ -20,7 +20,7 @@ + # Boston, MA 02110-1301, USA. + + use strict; +-use Switch; ++use feature 'switch'; + use File::Basename; + + my $outdir = $ARGV[0]; +@@ -28,9 +28,9 @@ shift; + my $option = basename($ARGV[0],".gperf"); + + +-switch ($option) { ++given ($option) { + +-case "DocTypeStrings" { ++when ("DocTypeStrings") { + + my $docTypeStringsGenerated = "$outdir/DocTypeStrings.cpp"; + my $docTypeStringsGperf = $ARGV[0]; +@@ -38,9 +38,9 @@ case "DocTypeStrings" { + + system("gperf --key-positions=\"*\" -s 2 $docTypeStringsGperf > $docTypeStringsGenerated") == 0 || die "calling gperf failed: $?"; + +-} # case "DocTypeStrings" ++} # when ("DocTypeStrings") + +-case "ColorData" { ++when ("ColorData") { + + my $colorDataGenerated = "$outdir/ColorData.cpp"; + my $colorDataGperf = $ARGV[0]; +@@ -48,6 +48,6 @@ case "ColorData" { + + system("gperf --key-positions=\"*\" -D -s 2 $colorDataGperf > $colorDataGenerated") == 0 || die "calling gperf failed: $?"; + +-} # case "ColorData" ++} # when ("ColorData") + +-} # switch ($option) ++} # given ($option) diff --git a/staging/php/PKGBUILD b/staging/php/PKGBUILD new file mode 100644 index 000000000..ccbaae0cd --- /dev/null +++ b/staging/php/PKGBUILD @@ -0,0 +1,379 @@ +# $Id: PKGBUILD 126095 2011-06-01 12:12:00Z pierre $ +# Maintainer: Pierre Schmitz <pierre@archlinux.de> + +pkgbase=php +pkgname=('php' + 'php-cgi' + 'php-apache' + 'php-fpm' + 'php-embed' + 'php-pear' + 'php-curl' + 'php-enchant' + 'php-gd' + 'php-gmp' + 'php-intl' + 'php-ldap' + 'php-mcrypt' + 'php-mssql' + 'php-odbc' + 'php-pgsql' + 'php-pspell' + 'php-snmp' + 'php-sqlite' + 'php-tidy' + 'php-xsl') +pkgver=5.3.6 +pkgrel=4 +_suhosinver=${pkgver}-0.9.10 +arch=('i686' 'x86_64') +license=('PHP') +url='http://www.php.net' +makedepends=('apache' 'imap' 'postgresql-libs' 'mysql' 'libldap' 'postfix' + 'sqlite3' 'unixodbc' 'net-snmp' 'libzip' 'enchant' 'file' 'freetds' + 'libmcrypt' 'tidyhtml' 'aspell' 'libtool' 'libpng' 'libjpeg' 'icu' + 'curl' 'libxslt' 'openssl' 'bzip2' 'db' 'gmp' 'freetype2') +source=("http://www.php.net/distributions/${pkgbase}-${pkgver}.tar.bz2" + "suhosin-patch-${_suhosinver}.patch.gz" + 'php.ini.patch' 'apache.conf' 'rc.d.php-fpm' 'php-fpm.conf.in.patch' + 'logrotate.d.php-fpm') +md5sums=('2286f5a82a6e8397955a0025c1c2ad98' + 'fff1a38877142f3ae6036dbe5a85d0a6' + '39eaa70d276fc3d45d6bcf6cd5ae1106' + 'dec2cbaad64e3abf4f0ec70e1de4e8e9' + 'b01be5f816988fcee7e78225836e5e27' + 'd50ff349da08110a7cc8c691ce2d0423' + '07c4e412909ac65a44ec90e7a2c4bade') + +build() { + # ldap-sasl does not compile with --as-needed + export LDFLAGS="${LDFLAGS//-Wl,--as-needed}" + export 
LDFLAGS="${LDFLAGS//,--as-needed}" + + phpconfig="--srcdir=../${pkgbase}-${pkgver} \ + --prefix=/usr \ + --sysconfdir=/etc/php \ + --localstatedir=/var \ + --with-layout=GNU \ + --with-config-file-path=/etc/php \ + --with-config-file-scan-dir=/etc/php/conf.d \ + --enable-inline-optimization \ + --disable-debug \ + --disable-rpath \ + --disable-static \ + --enable-shared \ + --mandir=/usr/share/man \ + --without-pear \ + " + + phpextensions="--enable-bcmath=shared \ + --enable-calendar=shared \ + --enable-dba=shared \ + --enable-exif=shared \ + --enable-ftp=shared \ + --enable-gd-native-ttf \ + --enable-intl=shared \ + --enable-json=shared \ + --enable-mbregex \ + --enable-mbstring \ + --enable-pdo \ + --enable-phar=shared \ + --enable-posix=shared \ + --enable-session \ + --enable-shmop=shared \ + --enable-soap=shared \ + --enable-sockets=shared \ + --enable-sqlite-utf8 \ + --enable-sysvmsg=shared \ + --enable-sysvsem=shared \ + --enable-sysvshm=shared \ + --enable-xml \ + --enable-zip=shared \ + --with-bz2=shared \ + --with-curl=shared \ + --with-db4=/usr \ + --with-enchant=shared,/usr \ + --with-freetype-dir=shared,/usr \ + --with-gd=shared \ + --with-gdbm=shared \ + --with-gettext=shared \ + --with-gmp=shared \ + --with-iconv=shared \ + --with-icu-dir=/usr \ + --with-imap-ssl=shared \ + --with-imap=shared \ + --with-jpeg-dir=shared,/usr \ + --with-ldap=shared \ + --with-ldap-sasl \ + --with-mcrypt=shared \ + --with-mhash \ + --with-mssql=shared \ + --with-mysql-sock=/var/run/mysqld/mysqld.sock \ + --with-mysql=shared,mysqlnd \ + --with-mysqli=shared,mysqlnd \ + --with-openssl=shared \ + --with-pcre-regex=/usr \ + --with-pdo-mysql=shared,mysqlnd \ + --with-pdo-odbc=shared,unixODBC,/usr \ + --with-pdo-pgsql=shared \ + --with-pdo-sqlite=shared,/usr \ + --with-pgsql=shared \ + --with-png-dir=shared,/usr \ + --with-pspell=shared \ + --with-regex=php \ + --with-snmp=shared \ + --with-sqlite3=shared,/usr \ + --with-sqlite=shared \ + --with-tidy=shared \ + --with-unixODBC=shared,/usr \ + --with-xmlrpc=shared \ + --with-xsl=shared \ + --with-zlib \ + --without-db2 \ + --without-db3 \ + " + + EXTENSION_DIR=/usr/lib/php/modules + export EXTENSION_DIR + PEAR_INSTALLDIR=/usr/share/pear + export PEAR_INSTALLDIR + + cd ${srcdir}/${pkgbase}-${pkgver} + + # apply suhosin patch + patch -p1 -i ${srcdir}/suhosin-patch-${_suhosinver}.patch + + # adjust paths + patch -p0 -i ${srcdir}/php.ini.patch + patch -p0 -i ${srcdir}/php-fpm.conf.in.patch + + # php + mkdir ${srcdir}/build-php + cd ${srcdir}/build-php + ln -s ../${pkgbase}-${pkgver}/configure + ./configure ${phpconfig} \ + --disable-cgi \ + --with-readline \ + --enable-pcntl \ + ${phpextensions} + make + + # cgi and fcgi + # reuse the previous run; this will save us a lot of time + cp -a ${srcdir}/build-php ${srcdir}/build-cgi + cd ${srcdir}/build-cgi + ./configure ${phpconfig} \ + --disable-cli \ + --enable-cgi \ + ${phpextensions} + make + + # apache + cp -a ${srcdir}/build-php ${srcdir}/build-apache + cd ${srcdir}/build-apache + ./configure ${phpconfig} \ + --disable-cli \ + --with-apxs2 \ + ${phpextensions} + make + + # fpm + cp -a ${srcdir}/build-php ${srcdir}/build-fpm + cd ${srcdir}/build-fpm + ./configure ${phpconfig} \ + --disable-cli \ + --enable-fpm \ + --with-fpm-user=http \ + --with-fpm-group=http \ + ${phpextensions} + make + + # embed + cp -a ${srcdir}/build-php ${srcdir}/build-embed + cd ${srcdir}/build-embed + ./configure ${phpconfig} \ + --disable-cli \ + --enable-embed=shared \ + ${phpextensions} + make + + # pear + cp -a 
${srcdir}/build-php ${srcdir}/build-pear + cd ${srcdir}/build-pear + ./configure ${phpconfig} \ + --disable-cgi \ + --with-readline \ + --enable-pcntl \ + --with-pear \ + ${phpextensions} + make +} + +# check() { +# cd ${srcdir}/build-php +# make test +# } + +package_php() { + pkgdesc='An HTML-embedded scripting language' + depends=('pcre' 'libxml2' 'bzip2' 'openssl') + replaces=('php-fileinfo') + provides=('php-fileinfo') + conflicts=('php-fileinfo') + backup=('etc/php/php.ini') + + cd ${srcdir}/build-php + make -j1 INSTALL_ROOT=${pkgdir} install + install -d -m755 ${pkgdir}/usr/share/pear + # install php.ini + install -D -m644 ${srcdir}/${pkgbase}-${pkgver}/php.ini-production ${pkgdir}/etc/php/php.ini + install -d -m755 ${pkgdir}/etc/php/conf.d/ + + # remove static modules + rm -f ${pkgdir}/usr/lib/php/modules/*.a + # remove modules provided by sub packages + rm -f ${pkgdir}/usr/lib/php/modules/{curl,enchant,gd,gmp,intl,ldap,mcrypt,mssql,odbc,pdo_odbc,pgsql,pdo_pgsql,pspell,snmp,sqlite3,pdo_sqlite,tidy,xsl}.so +} + +package_php-cgi() { + pkgdesc='CGI and FCGI SAPI for PHP' + depends=('php' 'pcre' 'libxml2') + + install -D -m755 ${srcdir}/build-cgi/sapi/cgi/php-cgi ${pkgdir}/usr/bin/php-cgi +} + +package_php-apache() { + pkgdesc='Apache SAPI for PHP' + depends=('php' 'apache' 'pcre' 'libxml2') + backup=('etc/httpd/conf/extra/php5_module.conf') + + install -D -m755 ${srcdir}/build-apache/libs/libphp5.so ${pkgdir}/usr/lib/httpd/modules/libphp5.so + install -D -m644 ${srcdir}/apache.conf ${pkgdir}/etc/httpd/conf/extra/php5_module.conf +} + +package_php-fpm() { + pkgdesc='FastCGI Process Manager for PHP' + depends=('php') + backup=('etc/php/php-fpm.conf') + + install -D -m755 ${srcdir}/build-fpm/sapi/fpm/php-fpm ${pkgdir}/usr/sbin/php-fpm + install -D -m644 ${srcdir}/build-fpm/sapi/fpm/php-fpm.8 ${pkgdir}/usr/share/man/man8/php-fpm.8 + install -D -m644 ${srcdir}/build-fpm/sapi/fpm/php-fpm.conf ${pkgdir}/etc/php/php-fpm.conf + install -D -m755 ${srcdir}/rc.d.php-fpm ${pkgdir}/etc/rc.d/php-fpm + install -D -m644 ${srcdir}/logrotate.d.php-fpm ${pkgdir}/etc/logrotate.d/php-fpm + install -d -m755 ${pkgdir}/etc/php/fpm.d +} + +package_php-embed() { + pkgdesc='Embed SAPI for PHP' + depends=('php' 'pcre' 'libxml2') + + install -D -m755 ${srcdir}/build-embed/libs/libphp5.so ${pkgdir}/usr/lib/libphp5.so + install -D -m644 ${srcdir}/${pkgbase}-${pkgver}/sapi/embed/php_embed.h ${pkgdir}/usr/include/php/sapi/embed/php_embed.h +} + +package_php-pear() { + pkgdesc='PHP Extension and Application Repository' + depends=('php' 'bash') + backup=('etc/php/pear.conf') + + cd ${srcdir}/build-pear + make -j1 install-pear INSTALL_ROOT=${pkgdir} + local i + while read i; do + [ ! 
-e "$i" ] || rm -rf "$i" + done < <(find ${pkgdir} -name '.*') +} + +package_php-curl() { + depends=('php' 'curl') + pkgdesc='curl module for PHP' + install -D -m755 ${srcdir}/build-php/modules/curl.so ${pkgdir}/usr/lib/php/modules/curl.so +} + +package_php-enchant() { + depends=('php' 'enchant') + pkgdesc='enchant module for PHP' + install -D -m755 ${srcdir}/build-php/modules/enchant.so ${pkgdir}/usr/lib/php/modules/enchant.so +} + +package_php-gd() { + depends=('php' 'libpng' 'libjpeg' 'freetype2') + pkgdesc='gd module for PHP' + install -D -m755 ${srcdir}/build-php/modules/gd.so ${pkgdir}/usr/lib/php/modules/gd.so +} + +package_php-gmp() { + depends=('php' 'gmp') + pkgdesc='gmp module for PHP' + install -D -m755 ${srcdir}/build-php/modules/gmp.so ${pkgdir}/usr/lib/php/modules/gmp.so +} + +package_php-intl() { + depends=('php' 'icu') + pkgdesc='intl module for PHP' + install -D -m755 ${srcdir}/build-php/modules/intl.so ${pkgdir}/usr/lib/php/modules/intl.so +} + +package_php-ldap() { + depends=('php' 'libldap') + pkgdesc='ldap module for PHP' + install -D -m755 ${srcdir}/build-php/modules/ldap.so ${pkgdir}/usr/lib/php/modules/ldap.so +} + +package_php-mcrypt() { + depends=('php' 'libmcrypt' 'libtool') + pkgdesc='mcrypt module for PHP' + install -D -m755 ${srcdir}/build-php/modules/mcrypt.so ${pkgdir}/usr/lib/php/modules/mcrypt.so +} + +package_php-mssql() { + depends=('php' 'freetds') + pkgdesc='mssql module for PHP' + install -D -m755 ${srcdir}/build-php/modules/mssql.so ${pkgdir}/usr/lib/php/modules/mssql.so +} + +package_php-odbc() { + depends=('php' 'unixodbc') + pkgdesc='ODBC modules for PHP' + install -D -m755 ${srcdir}/build-php/modules/odbc.so ${pkgdir}/usr/lib/php/modules/odbc.so + install -D -m755 ${srcdir}/build-php/modules/pdo_odbc.so ${pkgdir}/usr/lib/php/modules/pdo_odbc.so +} + +package_php-pgsql() { + depends=('php' 'postgresql-libs') + pkgdesc='PostgreSQL modules for PHP' + install -D -m755 ${srcdir}/build-php/modules/pgsql.so ${pkgdir}/usr/lib/php/modules/pgsql.so + install -D -m755 ${srcdir}/build-php/modules/pdo_pgsql.so ${pkgdir}/usr/lib/php/modules/pdo_pgsql.so +} + +package_php-pspell() { + depends=('php' 'aspell') + pkgdesc='pspell module for PHP' + install -D -m755 ${srcdir}/build-php/modules/pspell.so ${pkgdir}/usr/lib/php/modules/pspell.so +} + +package_php-snmp() { + depends=('php' 'net-snmp') + pkgdesc='snmp module for PHP' + install -D -m755 ${srcdir}/build-php/modules/snmp.so ${pkgdir}/usr/lib/php/modules/snmp.so +} + +package_php-sqlite() { + depends=('php' 'sqlite3') + pkgdesc='sqlite3 module for PHP' + install -D -m755 ${srcdir}/build-php/modules/sqlite3.so ${pkgdir}/usr/lib/php/modules/sqlite3.so + install -D -m755 ${srcdir}/build-php/modules/pdo_sqlite.so ${pkgdir}/usr/lib/php/modules/pdo_sqlite.so +} + +package_php-tidy() { + depends=('php' 'tidyhtml') + pkgdesc='tidy module for PHP' + install -D -m755 ${srcdir}/build-php/modules/tidy.so ${pkgdir}/usr/lib/php/modules/tidy.so +} + +package_php-xsl() { + depends=('php' 'libxslt') + pkgdesc='xsl module for PHP' + install -D -m755 ${srcdir}/build-php/modules/xsl.so ${pkgdir}/usr/lib/php/modules/xsl.so +} diff --git a/staging/php/apache.conf b/staging/php/apache.conf new file mode 100644 index 000000000..c3ca0aad5 --- /dev/null +++ b/staging/php/apache.conf @@ -0,0 +1,13 @@ +# Required modules: dir_module, php5_module + +<IfModule dir_module> + <IfModule php5_module> + DirectoryIndex index.php index.html + <FilesMatch "\.php$"> + SetHandler application/x-httpd-php + </FilesMatch> + <FilesMatch "\.phps$"> + 
SetHandler application/x-httpd-php-source + </FilesMatch> + </IfModule> +</IfModule> diff --git a/staging/php/logrotate.d.php-fpm b/staging/php/logrotate.d.php-fpm new file mode 100644 index 000000000..7a1ba2597 --- /dev/null +++ b/staging/php/logrotate.d.php-fpm @@ -0,0 +1,6 @@ +/var/log/php-fpm.log { + missingok + postrotate + /etc/rc.d/php-fpm logrotate >/dev/null || true + endscript +} diff --git a/staging/php/php-fpm.conf.in.patch b/staging/php/php-fpm.conf.in.patch new file mode 100644 index 000000000..93c62430a --- /dev/null +++ b/staging/php/php-fpm.conf.in.patch @@ -0,0 +1,80 @@ +--- sapi/fpm/php-fpm.conf.in 2010-12-11 08:31:47.695294987 +0100 ++++ sapi/fpm/php-fpm.conf.in 2010-12-11 08:31:55.907812237 +0100 +@@ -12,7 +12,7 @@ + ; Relative path can also be used. They will be prefixed by: + ; - the global prefix if it's been set (-p arguement) + ; - @prefix@ otherwise +-;include=etc/fpm.d/*.conf ++;include=/etc/php/fpm.d/*.conf + + ;;;;;;;;;;;;;;;;;; + ; Global Options ; +@@ -22,7 +22,7 @@ + ; Pid file + ; Note: the default prefix is @EXPANDED_LOCALSTATEDIR@ + ; Default Value: none +-;pid = run/php-fpm.pid ++pid = run/php-fpm/php-fpm.pid + + ; Error log file + ; Note: the default prefix is @EXPANDED_LOCALSTATEDIR@ +@@ -93,7 +93,8 @@ + ; specific port; + ; '/path/to/unix/socket' - to listen on a unix socket. + ; Note: This value is mandatory. +-listen = 127.0.0.1:9000 ++;listen = 127.0.0.1:9000 ++listen = /var/run/php-fpm/php-fpm.sock + + ; Set listen(2) backlog. A value of '-1' means unlimited. + ; Default Value: 128 (-1 on FreeBSD and OpenBSD) +@@ -112,9 +113,9 @@ + ; BSD-derived systems allow connections regardless of permissions. + ; Default Values: user and group are set as the running user + ; mode is set to 0666 +-;listen.owner = @php_fpm_user@ +-;listen.group = @php_fpm_group@ +-;listen.mode = 0666 ++listen.owner = @php_fpm_user@ ++listen.group = @php_fpm_group@ ++listen.mode = 0660 + + ; Unix user/group of processes + ; Note: The user is mandatory. If the group is not set, the default user's group +@@ -154,23 +155,23 @@ + ; The number of child processes created on startup. + ; Note: Used only when pm is set to 'dynamic' + ; Default Value: min_spare_servers + (max_spare_servers - min_spare_servers) / 2 +-;pm.start_servers = 20 ++pm.start_servers = 20 + + ; The desired minimum number of idle server processes. + ; Note: Used only when pm is set to 'dynamic' + ; Note: Mandatory when pm is set to 'dynamic' +-;pm.min_spare_servers = 5 ++pm.min_spare_servers = 5 + + ; The desired maximum number of idle server processes. + ; Note: Used only when pm is set to 'dynamic' + ; Note: Mandatory when pm is set to 'dynamic' +-;pm.max_spare_servers = 35 ++pm.max_spare_servers = 35 + + ; The number of requests each child process should execute before respawning. + ; This can be useful to work around memory leaks in 3rd party libraries. For + ; endless request processing specify '0'. Equivalent to PHP_FCGI_MAX_REQUESTS. + ; Default Value: 0 +-;pm.max_requests = 500 ++pm.max_requests = 500 + + ; The URI to view the FPM status page. If this value is not set, no URI will be + ; recognized as a status page. By default, the status page shows the following +@@ -264,7 +265,7 @@ + ; Chdir to this directory at the start. + ; Note: relative path can be used. + ; Default Value: current directory or / when chroot +-;chdir = /var/www ++;chdir = /srv/http + + ; Redirect worker stdout and stderr into main error log. If not set, stdout and + ; stderr will be redirected to /dev/null according to FastCGI specs. 
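The net effect of the php-fpm.conf patch below is that the pool listens on a 0660 unix socket owned by http:http instead of TCP 127.0.0.1:9000, so FastCGI clients must be pointed at the socket path. A quick smoke test after /etc/rc.d/php-fpm start, assuming the cgi-fcgi client from the separate fcgi package and a placeholder script at /srv/http/index.php:

  ls -l /var/run/php-fpm/php-fpm.sock    # expect: srw-rw---- ... http http
  SCRIPT_NAME=/index.php \
  SCRIPT_FILENAME=/srv/http/index.php \
  REQUEST_METHOD=GET \
      cgi-fcgi -bind -connect /var/run/php-fpm/php-fpm.sock

Web servers need the equivalent change, e.g. an nginx fastcgi_pass or Apache FastCGI target of the socket rather than 127.0.0.1:9000.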
diff --git a/staging/php/php.ini.patch b/staging/php/php.ini.patch new file mode 100644 index 000000000..356e190b4 --- /dev/null +++ b/staging/php/php.ini.patch @@ -0,0 +1,126 @@ +--- php.ini-production 2011-02-09 01:25:44.000000000 +0100 ++++ php.ini-production 2011-03-19 11:11:44.496987763 +0100 +@@ -376,7 +376,7 @@ + ; or per-virtualhost web server configuration file. This directive is + ; *NOT* affected by whether Safe Mode is turned On or Off. + ; http://php.net/open-basedir +-;open_basedir = ++open_basedir = /srv/http/:/home/:/tmp/:/usr/share/pear/ + + ; This directive allows you to disable certain functions for security reasons. + ; It receives a comma-delimited list of function names. This directive is +@@ -781,7 +781,7 @@ + ;;;;;;;;;;;;;;;;;;;;;;;;; + + ; UNIX: "/path1:/path2" +-;include_path = ".:/php/includes" ++include_path = ".:/usr/share/pear" + ; + ; Windows: "\path1;\path2" + ;include_path = ".;c:\php\includes" +@@ -804,7 +804,7 @@ + + ; Directory in which the loadable extensions (modules) reside. + ; http://php.net/extension-dir +-; extension_dir = "./" ++extension_dir = "/usr/lib/php/modules/" + ; On windows: + ; extension_dir = "ext" + +@@ -938,53 +938,49 @@ + ; If you only provide the name of the extension, PHP will look for it in its + ; default extension directory. + ; +-; Windows Extensions +-; Note that ODBC support is built in, so no dll is needed for it. +-; Note that many DLL files are located in the extensions/ (PHP 4) ext/ (PHP 5) +-; extension folders as well as the separate PECL DLL download (PHP 5). +-; Be sure to appropriately set the extension_dir directive. +-; +-;extension=php_bz2.dll +-;extension=php_curl.dll +-;extension=php_fileinfo.dll +-;extension=php_gd2.dll +-;extension=php_gettext.dll +-;extension=php_gmp.dll +-;extension=php_intl.dll +-;extension=php_imap.dll +-;extension=php_interbase.dll +-;extension=php_ldap.dll +-;extension=php_mbstring.dll +-;extension=php_exif.dll ; Must be after mbstring as it depends on it +-;extension=php_mysql.dll +-;extension=php_mysqli.dll +-;extension=php_oci8.dll ; Use with Oracle 10gR2 Instant Client +-;extension=php_oci8_11g.dll ; Use with Oracle 11g Instant Client +-;extension=php_openssl.dll +-;extension=php_pdo_firebird.dll +-;extension=php_pdo_mssql.dll +-;extension=php_pdo_mysql.dll +-;extension=php_pdo_oci.dll +-;extension=php_pdo_odbc.dll +-;extension=php_pdo_pgsql.dll +-;extension=php_pdo_sqlite.dll +-;extension=php_pgsql.dll +-;extension=php_pspell.dll +-;extension=php_shmop.dll +- +-; The MIBS data available in the PHP distribution must be installed. 
+-; See http://www.php.net/manual/en/snmp.installation.php
+-;extension=php_snmp.dll
+-
+-;extension=php_soap.dll
+-;extension=php_sockets.dll
+-;extension=php_sqlite.dll
+-;extension=php_sqlite3.dll
+-;extension=php_sybase_ct.dll
+-;extension=php_tidy.dll
+-;extension=php_xmlrpc.dll
+-;extension=php_xsl.dll
+-;extension=php_zip.dll
++;extension=bcmath.so
++;extension=bz2.so
++;extension=calendar.so
++;extension=curl.so
++;extension=dba.so
++;extension=enchant.so
++;extension=exif.so
++;extension=ftp.so
++;extension=gd.so
++extension=gettext.so
++;extension=gmp.so
++;extension=iconv.so
++;extension=imap.so
++;extension=intl.so
++;extension=json.so
++;extension=ldap.so
++;extension=mcrypt.so
++;extension=mssql.so
++;extension=mysqli.so
++;extension=mysql.so
++;extension=odbc.so
++;extension=openssl.so
++;extension=pdo_mysql.so
++;extension=pdo_odbc.so
++;extension=pdo_pgsql.so
++;extension=pdo_sqlite.so
++;extension=pgsql.so
++;extension=phar.so
++;extension=posix.so
++;extension=pspell.so
++;extension=shmop.so
++;extension=snmp.so
++;extension=soap.so
++;extension=sockets.so
++;extension=sqlite3.so
++;extension=sqlite.so
++;extension=sysvmsg.so
++;extension=sysvsem.so
++;extension=sysvshm.so
++;extension=tidy.so
++;extension=xmlrpc.so
++;extension=xsl.so
++;extension=zip.so
+ 
+ ;;;;;;;;;;;;;;;;;;;
+ ; Module Settings ;
diff --git a/staging/php/rc.d.php-fpm b/staging/php/rc.d.php-fpm
new file mode 100644
index 000000000..54bcf4d5b
--- /dev/null
+++ b/staging/php/rc.d.php-fpm
@@ -0,0 +1,158 @@
+#!/bin/bash
+
+. /etc/rc.conf
+. /etc/rc.d/functions
+
+
+wait_for_pid () {
+  try=0
+  while test $try -lt 35 ; do
+    case "$1" in
+      'created')
+        if [ -f "$2" ] ; then
+          try=''
+          break
+        fi
+        ;;
+      'removed')
+        if [ ! -f "$2" ] ; then
+          try=''
+          break
+        fi
+        ;;
+    esac
+
+    stat_append '.'
+    try=`expr $try + 1`
+    sleep 1
+  done
+}
+
+test_config() {
+  stat_busy 'Checking configuration'
+  if [ $(id -u) -ne 0 ]; then
+    stat_append '(This script must be run as root)'
+    stat_die
+  fi
+
+  if [ ! -r /etc/php/php-fpm.conf ]; then
+    stat_append '(/etc/php/php-fpm.conf not found)'
+    stat_die
+  fi
+
+  local test; test=$(/usr/sbin/php-fpm -t 2>&1)
+  if [ $? -gt 0 ]; then
+    stat_append '(error in /etc/php/php-fpm.conf)'
+    stat_die
+  elif echo $test | grep -qi 'error'; then
+    stat_append '(error in /etc/php/php.ini)'
+    stat_die
+  fi
+
+  [ -d /var/run/php-fpm ] || install -d -m755 /var/run/php-fpm
+
+  stat_done
+}
+
+case "$1" in
+  start)
+    test_config
+    stat_busy 'Starting php-fpm'
+
+    /usr/sbin/php-fpm
+
+    if [ "$?" != 0 ] ; then
+      stat_fail
+      exit 1
+    fi
+
+    wait_for_pid created /var/run/php-fpm/php-fpm.pid
+
+    if [ -n "$try" ] ; then
+      stat_fail
+      exit 1
+    else
+      add_daemon php-fpm
+      stat_done
+    fi
+    ;;
+
+  stop)
+    test_config
+    stat_busy 'Gracefully shutting down php-fpm'
+
+    if [ ! -r /var/run/php-fpm/php-fpm.pid ] ; then
+      stat_fail
+      exit 1
+    fi
+
+    kill -QUIT `cat /var/run/php-fpm/php-fpm.pid`
+
+    wait_for_pid removed /var/run/php-fpm/php-fpm.pid
+
+    if [ -n "$try" ] ; then
+      stat_fail
+      exit 1
+    else
+      rm_daemon php-fpm
+      stat_done
+    fi
+    ;;
+
+  force-quit)
+    stat_busy 'Terminating php-fpm'
+
+    if [ ! -r /var/run/php-fpm/php-fpm.pid ] ; then
+      stat_fail
+      exit 1
+    fi
+
+    kill -TERM `cat /var/run/php-fpm/php-fpm.pid`
+
+    wait_for_pid removed /var/run/php-fpm/php-fpm.pid
+
+    if [ -n "$try" ] ; then
+      stat_fail
+      exit 1
+    else
+      rm_daemon php-fpm
+      stat_done
+    fi
+    ;;
+
+  restart)
+    $0 stop
+    $0 start
+    ;;
+
+  reload)
+    test_config
+    stat_busy 'Reload service php-fpm'
+
+    if [ ! -r /var/run/php-fpm/php-fpm.pid ] ; then
+      stat_fail
+      exit 1
+    fi
+
+    kill -USR2 `cat /var/run/php-fpm/php-fpm.pid`
+    stat_done
+    ;;
+
+  logrotate)
+    stat_busy 'Reopen php-fpm log'
+
+    if [ ! -r /var/run/php-fpm/php-fpm.pid ] ; then
+      stat_fail
+      exit 1
+    fi
+
+    kill -USR1 `cat /var/run/php-fpm/php-fpm.pid`
+    stat_done
+    ;;
+
+  *)
+    echo "usage: $0 {start|stop|force-quit|restart|reload|logrotate}"
+    exit 1
+    ;;
+
+esac
diff --git a/staging/php/suhosin-patch-5.3.6-0.9.10.patch.gz b/staging/php/suhosin-patch-5.3.6-0.9.10.patch.gz
Binary files differ
new file mode 100644
index 000000000..7167ce2d0
--- /dev/null
+++ b/staging/php/suhosin-patch-5.3.6-0.9.10.patch.gz
diff --git a/staging/xulrunner/PKGBUILD b/staging/xulrunner/PKGBUILD
new file mode 100644
index 000000000..d9ad07e87
--- /dev/null
+++ b/staging/xulrunner/PKGBUILD
@@ -0,0 +1,63 @@
+# $Id: PKGBUILD 126161 2011-06-01 20:45:14Z ibiru $
+# Maintainer: Jan de Groot <jgc@archlinux.org>
+# Contributor: Alexander Baldeck <alexander@archlinux.org>
+pkgname=xulrunner
+pkgver=2.0.1
+_ffoxver=4.0.1
+pkgrel=2
+pkgdesc="Mozilla Runtime Environment"
+arch=('i686' 'x86_64')
+license=('MPL' 'GPL' 'LGPL')
+depends=('gtk2' 'gcc-libs' 'libidl2' 'mozilla-common' 'nss' 'libxt' 'libxrender' 'hunspell' 'startup-notification' 'mime-types' 'dbus-glib' 'alsa-lib' 'libevent' 'sqlite3>=3.7.4' 'libnotify')
+makedepends=('zip' 'pkg-config' 'diffutils' 'python2' 'wireless_tools' 'yasm' 'mesa')
+url="http://wiki.mozilla.org/XUL:Xul_Runner"
+source=(http://releases.mozilla.org/pub/mozilla.org/firefox/releases/${_ffoxver}/source/firefox-${_ffoxver}.source.tar.bz2
+        mozconfig
+        mozilla-pkgconfig.patch
+        xulrunner-version.patch
+        xulrunner-omnijar.patch
+        port_gnomevfs_to_gio.patch)
+options=('!emptydirs')
+md5sums=('9abda7d23151e97913c8555a64c13f34'
+         '2358a2ddd35bcdd62ff42442dfe548d9'
+         '639ea80e823543dd415b90c0ee804186'
+         'a0236f6c3e55f60b7888d8cf137ff209'
+         '0bf82bc6677e3ce57fd20a147fe8d7b1'
+         '42f83468b296452fb754a81a4317ca64')
+build() {
+  cd "${srcdir}/mozilla-2.0"
+  cp "${srcdir}/mozconfig" .mozconfig
+
+  #fix libdir/sdkdir - fedora
+  patch -Np1 -i "${srcdir}/mozilla-pkgconfig.patch"
+
+  #Force installation to the same path for every version
+  patch -Np1 -i "${srcdir}/xulrunner-version.patch"
+
+  #https://bugzilla.mozilla.org/show_bug.cgi?id=620931
+  patch -Np1 -i "${srcdir}/xulrunner-omnijar.patch"
+
+  #https://bugzilla.mozilla.org/show_bug.cgi?id=494163
+  patch -Np1 -i "${srcdir}/port_gnomevfs_to_gio.patch"
+
+  unset CFLAGS
+  unset CXXFLAGS
+
+  export CXXFLAGS="-fpermissive"
+
+  make -j1 -f client.mk build MOZ_MAKE_FLAGS="$MAKEFLAGS"
+}
+
+package() {
+  cd "${srcdir}/mozilla-2.0"
+  make -j1 -f client.mk DESTDIR="${pkgdir}" install
+
+  #Remove included dictionaries, add symlink to system myspell path.
+  #Note: this will cause file conflicts when users have installed dictionaries in the old location
+  rm -rf "${pkgdir}/usr/lib/xulrunner-2.0/dictionaries"
+  ln -sf /usr/share/myspell/dicts "${pkgdir}/usr/lib/xulrunner-2.0/dictionaries"
+
+  # add xulrunner library path to ld.so.conf
+  install -d ${pkgdir}/etc/ld.so.conf.d
+  echo "/usr/lib/xulrunner-2.0" > ${pkgdir}/etc/ld.so.conf.d/xulrunner.conf
+}
diff --git a/staging/xulrunner/mozconfig b/staging/xulrunner/mozconfig
new file mode 100644
index 000000000..03a352920
--- /dev/null
+++ b/staging/xulrunner/mozconfig
@@ -0,0 +1,56 @@
+. 
$topsrcdir/xulrunner/config/mozconfig + +ac_add_options --prefix=/usr +ac_add_options --libdir=/usr/lib +ac_add_options --with-system-nspr +ac_add_options --with-system-nss +ac_add_options --with-system-jpeg +ac_add_options --with-system-zlib +ac_add_options --with-system-bz2 +ac_add_options --with-system-png +ac_add_options --with-system-libevent +ac_add_options --enable-system-hunspell +ac_add_options --enable-system-sqlite +ac_add_options --enable-system-cairo +ac_add_options --with-pthreads + +ac_add_options --enable-default-toolkit=cairo-gtk2 +ac_add_options --enable-safe-browsing +ac_add_options --enable-extensions=default +ac_add_options --enable-startup-notification +ac_add_options --enable-pango +ac_add_options --enable-svg +ac_add_options --enable-canvas +ac_add_options --enable-smil +ac_add_options --enable-canvas3d +ac_add_options --enable-places +ac_add_options --enable-shared-js +ac_add_options --enable-url-classifier + +ac_add_options --enable-optimize +ac_add_options --enable-strip +ac_add_options --enable-install-strip +ac_add_options --enable-jemalloc +ac_add_options --enable-xterm-updates +ac_add_options --enable-printing +ac_add_options --enable-xinerama + +ac_add_options --disable-javaxpcom +ac_add_options --disable-crashreporter +ac_add_options --disable-updater +ac_add_options --disable-tests +ac_add_options --disable-xprint +ac_add_options --disable-mochitest +ac_add_options --disable-debug +ac_add_options --disable-installer +ac_add_options --disable-pedantic + +ac_add_options --enable-gio +ac_add_options --disable-gnomevfs +ac_add_options --enable-gconf +ac_add_options --enable-libnotify + +export BUILD_OFFICIAL=1 +export MOZILLA_OFFICIAL=1 +mk_add_options BUILD_OFFICIAL=1 +mk_add_options MOZILLA_OFFICIAL=1 diff --git a/staging/xulrunner/mozilla-pkgconfig.patch b/staging/xulrunner/mozilla-pkgconfig.patch new file mode 100644 index 000000000..2203efcde --- /dev/null +++ b/staging/xulrunner/mozilla-pkgconfig.patch @@ -0,0 +1,60 @@ +diff -Nur mozilla-2.0.orig/xulrunner/installer/libxul-embedding.pc.in mozilla-2.0/xulrunner/installer/libxul-embedding.pc.in +--- mozilla-2.0.orig/xulrunner/installer/libxul-embedding.pc.in 2011-03-03 14:12:04.000000000 -0800 ++++ mozilla-2.0/xulrunner/installer/libxul-embedding.pc.in 2011-03-13 03:25:12.350027359 -0700 +@@ -6,5 +6,6 @@ + Name: libxul-embedding + Description: Static library for version-independent embedding of the Mozilla runtime + Version: %MOZILLA_VERSION% ++Requires: %NSPR_NAME% >= %NSPR_VERSION% + Libs: -L${sdkdir}/lib -lxpcomglue -ldl + Cflags: -DXPCOM_GLUE -I${includedir} %WCHAR_CFLAGS% +diff -Nur mozilla-2.0.orig/xulrunner/installer/libxul.pc.in mozilla-2.0/xulrunner/installer/libxul.pc.in +--- mozilla-2.0.orig/xulrunner/installer/libxul.pc.in 2011-03-03 14:12:04.000000000 -0800 ++++ mozilla-2.0/xulrunner/installer/libxul.pc.in 2011-03-13 03:25:28.010027359 -0700 +@@ -1,5 +1,6 @@ + prefix=%prefix% + sdkdir=%sdkdir% ++libdir=%libdir% + includedir=%includedir% + idldir=%idldir% + +diff -Nur mozilla-2.0.orig/xulrunner/installer/Makefile.in mozilla-2.0/xulrunner/installer/Makefile.in +--- mozilla-2.0.orig/xulrunner/installer/Makefile.in 2011-03-03 14:12:04.000000000 -0800 ++++ mozilla-2.0/xulrunner/installer/Makefile.in 2011-03-13 03:25:55.570027359 -0700 +@@ -121,6 +121,7 @@ + -e "s|%includedir%|$(includedir)|" \ + -e "s|%idldir%|$(idldir)|" \ + -e "s|%sdkdir%|$(sdkdir)|" \ ++ -e "s|%libdir%|$(installdir)|" \ + -e "s|%MOZ_APP_NAME%|$(MOZ_APP_NAME)|" \ + -e "s|%MOZILLA_VERSION%|$(MOZ_APP_VERSION)|" \ + -e 
"s|%WCHAR_CFLAGS%|$(WCHAR_CFLAGS)|" \ +diff -Nur mozilla-2.0.orig/xulrunner/installer/mozilla-gtkmozembed-embedding.pc.in mozilla-2.0/xulrunner/installer/mozilla-gtkmozembed-embedding.pc.in +--- mozilla-2.0.orig/xulrunner/installer/mozilla-gtkmozembed-embedding.pc.in 2011-03-03 14:12:04.000000000 -0800 ++++ mozilla-2.0/xulrunner/installer/mozilla-gtkmozembed-embedding.pc.in 2011-03-13 03:26:18.676694023 -0700 +@@ -1,5 +1,6 @@ + prefix=%prefix% + sdkdir=%sdkdir% ++libdir=%libdir% + includedir=%includedir% + + Name: mozilla-gtkembedmoz +diff -Nur mozilla-2.0.orig/xulrunner/installer/mozilla-gtkmozembed.pc.in mozilla-2.0/xulrunner/installer/mozilla-gtkmozembed.pc.in +--- mozilla-2.0.orig/xulrunner/installer/mozilla-gtkmozembed.pc.in 2011-03-03 14:12:04.000000000 -0800 ++++ mozilla-2.0/xulrunner/installer/mozilla-gtkmozembed.pc.in 2011-03-13 03:26:41.566694025 -0700 +@@ -1,5 +1,6 @@ + prefix=%prefix% + sdkdir=%sdkdir% ++libdir=%libdir% + includedir=%includedir% + + Name: mozilla-gtkembedmoz +diff -Nur mozilla-2.0.orig/xulrunner/installer/mozilla-js.pc.in mozilla-2.0/xulrunner/installer/mozilla-js.pc.in +--- mozilla-2.0.orig/xulrunner/installer/mozilla-js.pc.in 2011-03-03 14:12:04.000000000 -0800 ++++ mozilla-2.0/xulrunner/installer/mozilla-js.pc.in 2011-03-13 03:27:19.680027357 -0700 +@@ -7,4 +7,4 @@ + Version: %MOZILLA_VERSION% + Requires: %NSPR_NAME% >= %NSPR_VERSION% + Libs: -L${sdkdir}/lib %MOZ_JS_LINK% +-Cflags: -I${includedir} -DXP_UNIX -DJS_THREADSAFE ++Cflags: -I${includedir} -I${includedir}/js -DXP_UNIX -DJS_THREADSAFE diff --git a/staging/xulrunner/port_gnomevfs_to_gio.patch b/staging/xulrunner/port_gnomevfs_to_gio.patch new file mode 100644 index 000000000..797baff42 --- /dev/null +++ b/staging/xulrunner/port_gnomevfs_to_gio.patch @@ -0,0 +1,1316 @@ +diff -r 49a1b2aa43c5 extensions/gio/Makefile.in +--- /dev/null Thu Jan 01 00:00:00 1970 +0000 ++++ b/extensions/gio/Makefile.in Tue Jan 11 11:17:52 2011 +0100 +@@ -0,0 +1,69 @@ ++# vim:set ts=8 sw=8 sts=8 noet: ++# ***** BEGIN LICENSE BLOCK ***** ++# Version: MPL 1.1/GPL 2.0/LGPL 2.1 ++# ++# The contents of this file are subject to the Mozilla Public License Version ++# 1.1 (the "License"); you may not use this file except in compliance with ++# the License. You may obtain a copy of the License at ++# http://www.mozilla.org/MPL/ ++# ++# Software distributed under the License is distributed on an "AS IS" basis, ++# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License ++# for the specific language governing rights and limitations under the ++# License. ++# ++# The Original Code is the Mozilla gnome-vfs extension. ++# ++# The Initial Developer of the Original Code is IBM Corporation. ++# ++# Portions created by IBM Corporation are Copyright (C) 2004 ++# IBM Corporation. All Rights Reserved. ++# ++# Contributor(s): ++# Darin Fisher <darin@meer.net> ++# Jan Horak <jhorak@redhat.com> ++# ++# Alternatively, the contents of this file may be used under the terms of ++# either the GNU General Public License Version 2 or later (the "GPL"), or ++# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), ++# in which case the provisions of the GPL or the LGPL are applicable instead ++# of those above. 
If you wish to allow use of your version of this file only ++# under the terms of either the GPL or the LGPL, and not to allow others to ++# use your version of this file under the terms of the MPL, indicate your ++# decision by deleting the provisions above and replace them with the notice ++# and other provisions required by the GPL or the LGPL. If you do not delete ++# the provisions above, a recipient may use your version of this file under ++# the terms of any one of the MPL, the GPL or the LGPL. ++# ++# ***** END LICENSE BLOCK ***** ++ ++DEPTH = ../.. ++topsrcdir = @top_srcdir@ ++srcdir = @srcdir@ ++VPATH = @srcdir@ ++ ++include $(DEPTH)/config/autoconf.mk ++ ++MODULE = nkgio ++LIBRARY_NAME = nkgio ++SHORT_LIBNAME = nkgio ++IS_COMPONENT = 1 ++ ++CPPSRCS = \ ++ nsGIOProtocolHandler.cpp \ ++ $(NULL) ++ ++LOCAL_INCLUDES = $(MOZ_GIO_CFLAGS) ++ ++EXTRA_DSO_LDOPTS = \ ++ $(XPCOM_GLUE_LDOPTS) \ ++ $(NSPR_LIBS) \ ++ $(MOZ_GIO_LIBS) \ ++ $(NULL) ++ ++# make sure this component is never statically linked into the main ++# application. this is necessary since we don't want to force users ++# to install gio in order to use the rest of mozilla ;-) ++FORCE_SHARED_LIB= 1 ++ ++include $(topsrcdir)/config/rules.mk +diff -r 49a1b2aa43c5 extensions/gio/makefiles.sh +--- /dev/null Thu Jan 01 00:00:00 1970 +0000 ++++ b/extensions/gio/makefiles.sh Tue Jan 11 11:17:52 2011 +0100 +@@ -0,0 +1,41 @@ ++#! /bin/sh ++# ***** BEGIN LICENSE BLOCK ***** ++# Version: MPL 1.1/GPL 2.0/LGPL 2.1 ++# ++# The contents of this file are subject to the Mozilla Public License Version ++# 1.1 (the "License"); you may not use this file except in compliance with ++# the License. You may obtain a copy of the License at ++# http://www.mozilla.org/MPL/ ++# ++# Software distributed under the License is distributed on an "AS IS" basis, ++# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License ++# for the specific language governing rights and limitations under the ++# License. ++# ++# The Original Code is Mozilla Build System ++# ++# The Initial Developer of the Original Code is ++# Ben Turner <mozilla@songbirdnest.com> ++# ++# Portions created by the Initial Developer are Copyright (C) 2007 ++# the Initial Developer. All Rights Reserved. ++# ++# Contributor(s): ++# ++# Alternatively, the contents of this file may be used under the terms of ++# either the GNU General Public License Version 2 or later (the "GPL"), or ++# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), ++# in which case the provisions of the GPL or the LGPL are applicable instead ++# of those above. If you wish to allow use of your version of this file only ++# under the terms of either the GPL or the LGPL, and not to allow others to ++# use your version of this file under the terms of the MPL, indicate your ++# decision by deleting the provisions above and replace them with the notice ++# and other provisions required by the GPL or the LGPL. If you do not delete ++# the provisions above, a recipient may use your version of this file under ++# the terms of any one of the MPL, the GPL or the LGPL. 
++# ++# ***** END LICENSE BLOCK ***** ++ ++add_makefiles " ++ extensions/gio/Makefile ++" +diff -r 49a1b2aa43c5 extensions/gio/nsGIOProtocolHandler.cpp +--- /dev/null Thu Jan 01 00:00:00 1970 +0000 ++++ b/extensions/gio/nsGIOProtocolHandler.cpp Tue Jan 11 11:17:52 2011 +0100 +@@ -0,0 +1,1163 @@ ++/* vim:set ts=2 sw=2 et cindent: */ ++/* ***** BEGIN LICENSE BLOCK ***** ++ * Version: MPL 1.1/GPL 2.0/LGPL 2.1 ++ * ++ * The contents of this file are subject to the Mozilla Public License Version ++ * 1.1 (the "License"); you may not use this file except in compliance with ++ * the License. You may obtain a copy of the License at ++ * http://www.mozilla.org/MPL/ ++ * ++ * Software distributed under the License is distributed on an "AS IS" basis, ++ * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License ++ * for the specific language governing rights and limitations under the ++ * License. ++ * ++ * The Original Code is the Mozilla gnome-vfs extension. ++ * ++ * The Initial Developer of the Original Code is IBM Corporation. ++ * ++ * Portions created by IBM Corporation are Copyright (C) 2004 ++ * IBM Corporation. All Rights Reserved. ++ * ++ * Contributor(s): ++ * Darin Fisher <darin@meer.net> ++ * Jan Horak <jhorak@redhat.com> ++ * ++ * Alternatively, the contents of this file may be used under the terms of ++ * either the GNU General Public License Version 2 or later (the "GPL"), or ++ * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), ++ * in which case the provisions of the GPL or the LGPL are applicable instead ++ * of those above. If you wish to allow use of your version of this file only ++ * under the terms of either the GPL or the LGPL, and not to allow others to ++ * use your version of this file under the terms of the MPL, indicate your ++ * decision by deleting the provisions above and replace them with the notice ++ * and other provisions required by the GPL or the LGPL. If you do not delete ++ * the provisions above, a recipient may use your version of this file under ++ * the terms of any one of the MPL, the GPL or the LGPL. ++ * ++ * ***** END LICENSE BLOCK ***** */ ++ ++/* ++ * This code is based on original Mozilla gnome-vfs extension. It implements ++ * input stream provided by GVFS/GIO. 
++*/ ++#include "mozilla/ModuleUtils.h" ++#include "nsIPrefService.h" ++#include "nsIPrefBranch2.h" ++#include "nsIObserver.h" ++#include "nsThreadUtils.h" ++#include "nsProxyRelease.h" ++#include "nsIStringBundle.h" ++#include "nsIStandardURL.h" ++#include "nsMimeTypes.h" ++#include "nsNetUtil.h" ++#include "mozilla/Monitor.h" ++#include <gio/gio.h> ++ ++#define MOZ_GIO_SCHEME "moz-gio" ++#define MOZ_GIO_SUPPORTED_PROTOCOLS "network.gio.supported-protocols" ++ ++//----------------------------------------------------------------------------- ++ ++// NSPR_LOG_MODULES=gio:5 ++#ifdef PR_LOGGING ++static PRLogModuleInfo *sGIOLog; ++#define LOG(args) PR_LOG(sGIOLog, PR_LOG_DEBUG, args) ++#else ++#define LOG(args) ++#endif ++ ++ ++//----------------------------------------------------------------------------- ++static nsresult ++MapGIOResult(gint code) ++{ ++ switch (code) ++ { ++ case G_IO_ERROR_NOT_FOUND: return NS_ERROR_FILE_NOT_FOUND; // shows error ++ case G_IO_ERROR_INVALID_ARGUMENT: return NS_ERROR_INVALID_ARG; ++ case G_IO_ERROR_NOT_SUPPORTED: return NS_ERROR_NOT_AVAILABLE; ++ case G_IO_ERROR_NO_SPACE: return NS_ERROR_FILE_NO_DEVICE_SPACE; ++ case G_IO_ERROR_READ_ONLY: return NS_ERROR_FILE_READ_ONLY; ++ case G_IO_ERROR_PERMISSION_DENIED: return NS_ERROR_FILE_ACCESS_DENIED; // wrong password/login ++ case G_IO_ERROR_CLOSED: return NS_BASE_STREAM_CLOSED; // was EOF ++ case G_IO_ERROR_NOT_DIRECTORY: return NS_ERROR_FILE_NOT_DIRECTORY; ++ case G_IO_ERROR_PENDING: return NS_ERROR_IN_PROGRESS; ++ case G_IO_ERROR_EXISTS: return NS_ERROR_FILE_ALREADY_EXISTS; ++ case G_IO_ERROR_IS_DIRECTORY: return NS_ERROR_FILE_IS_DIRECTORY; ++ case G_IO_ERROR_NOT_MOUNTED: return NS_ERROR_NOT_CONNECTED; // shows error ++ case G_IO_ERROR_HOST_NOT_FOUND: return NS_ERROR_UNKNOWN_HOST; // shows error ++ case G_IO_ERROR_CANCELLED: return NS_ERROR_ABORT; ++ case G_IO_ERROR_NOT_EMPTY: return NS_ERROR_FILE_DIR_NOT_EMPTY; ++ case G_IO_ERROR_FILENAME_TOO_LONG: return NS_ERROR_FILE_NAME_TOO_LONG; ++ case G_IO_ERROR_INVALID_FILENAME: return NS_ERROR_FILE_INVALID_PATH; ++ case G_IO_ERROR_TIMED_OUT: return NS_ERROR_NET_TIMEOUT; // shows error ++ case G_IO_ERROR_WOULD_BLOCK: return NS_BASE_STREAM_WOULD_BLOCK; ++ case G_IO_ERROR_FAILED_HANDLED: return NS_ERROR_ABORT; // Cancel on login dialog ++ ++/* unhandled: ++ G_IO_ERROR_NOT_REGULAR_FILE, ++ G_IO_ERROR_NOT_SYMBOLIC_LINK, ++ G_IO_ERROR_NOT_MOUNTABLE_FILE, ++ G_IO_ERROR_TOO_MANY_LINKS, ++ G_IO_ERROR_ALREADY_MOUNTED, ++ G_IO_ERROR_CANT_CREATE_BACKUP, ++ G_IO_ERROR_WRONG_ETAG, ++ G_IO_ERROR_WOULD_RECURSE, ++ G_IO_ERROR_BUSY, ++ G_IO_ERROR_WOULD_MERGE, ++ G_IO_ERROR_TOO_MANY_OPEN_FILES ++*/ ++ // Make GCC happy ++ default: ++ return NS_ERROR_FAILURE; ++ } ++ ++ return NS_ERROR_FAILURE; ++} ++ ++static nsresult ++MapGIOResult(GError *result) ++{ ++ if (!result) ++ return NS_OK; ++ else ++ return MapGIOResult(result->code); ++} ++/** Return values for mount operation. ++ * These enums are used as mount operation return values. 
++ */ ++typedef enum { ++ MOUNT_OPERATION_IN_PROGRESS, /** \enum operation in progress */ ++ MOUNT_OPERATION_SUCCESS, /** \enum operation successful */ ++ MOUNT_OPERATION_FAILED /** \enum operation not successful */ ++} MountOperationResult; ++//----------------------------------------------------------------------------- ++/** ++ * Sort function compares according to file type (directory/file) ++ * and alphabethical order ++ * @param a pointer to GFileInfo object to compare ++ * @param b pointer to GFileInfo object to compare ++ * @return -1 when first object should be before the second, 0 when equal, ++ * +1 when second object should be before the first ++ */ ++static gint ++FileInfoComparator(gconstpointer a, gconstpointer b) ++{ ++ GFileInfo *ia = ( GFileInfo *) a; ++ GFileInfo *ib = ( GFileInfo *) b; ++ if (g_file_info_get_file_type(ia) == G_FILE_TYPE_DIRECTORY ++ && g_file_info_get_file_type(ib) != G_FILE_TYPE_DIRECTORY) ++ return -1; ++ if (g_file_info_get_file_type(ib) == G_FILE_TYPE_DIRECTORY ++ && g_file_info_get_file_type(ia) != G_FILE_TYPE_DIRECTORY) ++ return 1; ++ ++ return strcasecmp(g_file_info_get_name(ia), g_file_info_get_name(ib)); ++} ++ ++/* Declaration of mount callback functions */ ++static void mount_enclosing_volume_finished (GObject *source_object, ++ GAsyncResult *res, ++ gpointer user_data); ++static void mount_operation_ask_password (GMountOperation *mount_op, ++ const char *message, ++ const char *default_user, ++ const char *default_domain, ++ GAskPasswordFlags flags, ++ gpointer user_data); ++//----------------------------------------------------------------------------- ++ ++class nsGIOInputStream : public nsIInputStream ++{ ++ public: ++ NS_DECL_ISUPPORTS ++ NS_DECL_NSIINPUTSTREAM ++ ++ nsGIOInputStream(const nsCString &uriSpec) ++ : mSpec(uriSpec) ++ , mChannel(nsnull) ++ , mHandle(nsnull) ++ , mStream(nsnull) ++ , mBytesRemaining(PR_UINT32_MAX) ++ , mStatus(NS_OK) ++ , mDirList(nsnull) ++ , mDirListPtr(nsnull) ++ , mDirBufCursor(0) ++ , mDirOpen(PR_FALSE) ++ , mMonitorMountInProgress("GIOInputStream::MountFinished") { } ++ ++ ~nsGIOInputStream() { Close(); } ++ ++ void SetChannel(nsIChannel *channel) ++ { ++ // We need to hold an owning reference to our channel. This is done ++ // so we can access the channel's notification callbacks to acquire ++ // a reference to a nsIAuthPrompt if we need to handle an interactive ++ // mount operation. ++ // ++ // However, the channel can only be accessed on the main thread, so ++ // we have to be very careful with ownership. Moreover, it doesn't ++ // support threadsafe addref/release, so proxying is the answer. ++ // ++ // Also, it's important to note that this likely creates a reference ++ // cycle since the channel likely owns this stream. This reference ++ // cycle is broken in our Close method. 
++ ++ NS_ADDREF(mChannel = channel); ++ } ++ void SetMountResult(MountOperationResult result, gint error_code); ++ private: ++ nsresult DoOpen(); ++ nsresult DoRead(char *aBuf, PRUint32 aCount, PRUint32 *aCountRead); ++ nsresult SetContentTypeOfChannel(const char *contentType); ++ nsresult MountVolume(); ++ nsresult DoOpenDirectory(); ++ nsresult DoOpenFile(GFileInfo *info); ++ nsCString mSpec; ++ nsIChannel *mChannel; // manually refcounted ++ GFile *mHandle; ++ GFileInputStream *mStream; ++ PRUint64 mBytesRemaining; ++ nsresult mStatus; ++ GList *mDirList; ++ GList *mDirListPtr; ++ nsCString mDirBuf; ++ PRUint32 mDirBufCursor; ++ PRPackedBool mDirOpen; ++ MountOperationResult mMountRes; ++ mozilla::Monitor mMonitorMountInProgress; ++ gint mMountErrorCode; ++}; ++/** ++ * Set result of mount operation and notify monitor waiting for results. ++ * This method is called in main thread as long as it is used only ++ * in mount_enclosing_volume_finished function. ++ * @param result Result of mount operation ++ */ ++void ++nsGIOInputStream::SetMountResult(MountOperationResult result, gint error_code) ++{ ++ mozilla::MonitorAutoEnter mon(mMonitorMountInProgress); ++ mMountRes = result; ++ mMountErrorCode = error_code; ++ mon.Notify(); ++} ++ ++/** ++ * Start mount operation and wait in loop until it is finished. This method is ++ * called from thread which is trying to read from location. ++ */ ++nsresult ++nsGIOInputStream::MountVolume() { ++ GMountOperation* mount_op = g_mount_operation_new(); ++ g_signal_connect (mount_op, "ask-password", ++ G_CALLBACK (mount_operation_ask_password), mChannel); ++ mMountRes = MOUNT_OPERATION_IN_PROGRESS; ++ /* g_file_mount_enclosing_volume uses a dbus request to mount the volume. ++ Callback mount_enclosing_volume_finished is called in main thread ++ (not this thread on which this method is called). */ ++ g_file_mount_enclosing_volume(mHandle, ++ G_MOUNT_MOUNT_NONE, ++ mount_op, ++ NULL, ++ mount_enclosing_volume_finished, ++ this); ++ mozilla::MonitorAutoEnter mon(mMonitorMountInProgress); ++ /* Waiting for finish of mount operation thread */ ++ while (mMountRes == MOUNT_OPERATION_IN_PROGRESS) ++ mon.Wait(); ++ ++ g_object_unref(mount_op); ++ ++ if (mMountRes == MOUNT_OPERATION_FAILED) { ++ return MapGIOResult(mMountErrorCode); ++ } else { ++ return NS_OK; ++ } ++} ++ ++/** ++ * Create list of infos about objects in opened directory ++ * Return: NS_OK when list obtained, otherwise error code according ++ * to failed operation. 
++ */ ++nsresult ++nsGIOInputStream::DoOpenDirectory() ++{ ++ GError *error = NULL; ++ ++ GFileEnumerator *f_enum = g_file_enumerate_children(mHandle, ++ "standard::*,time::*", ++ G_FILE_QUERY_INFO_NONE, ++ NULL, ++ &error); ++ if (!f_enum) { ++ nsresult rv = MapGIOResult(error); ++ g_warning("Cannot read from directory: %s", error->message); ++ g_error_free(error); ++ return rv; ++ } ++ // fill list of file infos ++ GFileInfo *info = g_file_enumerator_next_file(f_enum, NULL, &error); ++ while (info) { ++ mDirList = g_list_append(mDirList, info); ++ info = g_file_enumerator_next_file(f_enum, NULL, &error); ++ } ++ g_object_unref(f_enum); ++ if (error) { ++ g_warning("Error reading directory content: %s", error->message); ++ nsresult rv = MapGIOResult(error); ++ g_error_free(error); ++ return rv; ++ } ++ mDirOpen = PR_TRUE; ++ ++ // Sort list of file infos by using FileInfoComparator function ++ mDirList = g_list_sort(mDirList, FileInfoComparator); ++ mDirListPtr = mDirList; ++ ++ // Write base URL (make sure it ends with a '/') ++ mDirBuf.Append("300: "); ++ mDirBuf.Append(mSpec); ++ if (mSpec.get()[mSpec.Length() - 1] != '/') ++ mDirBuf.Append('/'); ++ mDirBuf.Append('\n'); ++ ++ // Write column names ++ mDirBuf.Append("200: filename content-length last-modified file-type\n"); ++ ++ // Write charset (assume UTF-8) ++ // XXX is this correct? ++ mDirBuf.Append("301: UTF-8\n"); ++ SetContentTypeOfChannel(APPLICATION_HTTP_INDEX_FORMAT); ++ return NS_OK; ++} ++ ++/** ++ * Create file stream and set mime type for channel ++ * @param info file info used to determine mime type ++ * @return NS_OK when file stream created successfuly, error code otherwise ++ */ ++nsresult ++nsGIOInputStream::DoOpenFile(GFileInfo *info) ++{ ++ GError *error = NULL; ++ ++ mStream = g_file_read(mHandle, NULL, &error); ++ if (!mStream) { ++ nsresult rv = MapGIOResult(error); ++ g_warning("Cannot read from file: %s", error->message); ++ g_error_free(error); ++ return rv; ++ } ++ ++ const char * content_type = g_file_info_get_content_type(info); ++ if (content_type) { ++ char *mime_type = g_content_type_get_mime_type(content_type); ++ if (mime_type) { ++ if (strcmp(mime_type, APPLICATION_OCTET_STREAM) != 0) { ++ SetContentTypeOfChannel(mime_type); ++ } ++ g_free(mime_type); ++ } ++ } else { ++ g_warning("Missing content type."); ++ } ++ ++ mBytesRemaining = g_file_info_get_size(info); ++ // Update the content length attribute on the channel. We do this ++ // synchronously without proxying. This hack is not as bad as it looks! ++ mChannel->SetContentLength(mBytesRemaining); ++ ++ return NS_OK; ++} ++ ++/** ++ * Start file open operation, mount volume when needed and according to file type ++ * create file output stream or read directory content. 
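For reference, the 300/200/301 header written in DoOpenDirectory above, together with the 201 rows emitted later in DoRead, forms an application/http-index-format listing. A plausible listing for a small smb share, with illustrative host, file names, and timestamps:

300: smb://server/share/
200: filename content-length last-modified file-type
301: UTF-8
201: docs 0 Tue,%2011%20Jan%202011%2011:17:52%20GMT DIRECTORY
201: notes.txt 1024 Tue,%2011%20Jan%202011%2011:17:52%20GMT FILE

Directories sort ahead of files because of FileInfoComparator, and spaces inside the emitted fields are %20-escaped so the space-separated columns stay parseable.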
++ * @return NS_OK when file or directory opened successfully, error code otherwise ++ */ ++nsresult ++nsGIOInputStream::DoOpen() ++{ ++ nsresult rv; ++ GError *error = NULL; ++ ++ NS_ASSERTION(mHandle == nsnull, "already open"); ++ ++ mHandle = g_file_new_for_uri( mSpec.get() ); ++ ++ GFileInfo *info = g_file_query_info(mHandle, ++ "standard::*", ++ G_FILE_QUERY_INFO_NONE, ++ NULL, ++ &error); ++ ++ if (error) { ++ if (error->domain == G_IO_ERROR && error->code == G_IO_ERROR_NOT_MOUNTED) { ++ // location is not yet mounted, try to mount ++ g_error_free(error); ++ if (NS_IsMainThread()) ++ return NS_ERROR_NOT_CONNECTED; ++ error = NULL; ++ rv = MountVolume(); ++ if (rv != NS_OK) { ++ return rv; ++ } ++ // get info again ++ info = g_file_query_info(mHandle, ++ "standard::*", ++ G_FILE_QUERY_INFO_NONE, ++ NULL, ++ &error); ++ // second try to get file info from remote files after media mount ++ if (!info) { ++ g_warning("Unable to get file info: %s", error->message); ++ rv = MapGIOResult(error); ++ g_error_free(error); ++ return rv; ++ } ++ } else { ++ g_warning("Unable to get file info: %s", error->message); ++ rv = MapGIOResult(error); ++ g_error_free(error); ++ return rv; ++ } ++ } ++ // Get file type to handle directories and file differently ++ GFileType f_type = g_file_info_get_file_type(info); ++ if (f_type == G_FILE_TYPE_DIRECTORY) { ++ // directory ++ rv = DoOpenDirectory(); ++ } else if (f_type != G_FILE_TYPE_UNKNOWN) { ++ // file ++ rv = DoOpenFile(info); ++ } else { ++ g_warning("Unable to get file type."); ++ rv = NS_ERROR_FILE_NOT_FOUND; ++ } ++ if (info) ++ g_object_unref(info); ++ return rv; ++} ++ ++/** ++ * Read content of file or create file list from directory ++ * @param aBuf read destination buffer ++ * @param aCount length of destination buffer ++ * @param aCountRead number of read characters ++ * @return NS_OK when read successfully, NS_BASE_STREAM_CLOSED when end of file, ++ * error code otherwise ++ */ ++nsresult ++nsGIOInputStream::DoRead(char *aBuf, PRUint32 aCount, PRUint32 *aCountRead) ++{ ++ nsresult rv = NS_ERROR_NOT_AVAILABLE; ++ if (mStream) { ++ // file read ++ GError *error = NULL; ++ PRUint32 bytes_read = g_input_stream_read(G_INPUT_STREAM(mStream), ++ aBuf, ++ aCount, ++ NULL, ++ &error); ++ if (error) { ++ rv = MapGIOResult(error); ++ *aCountRead = 0; ++ g_warning("Cannot read from file: %s", error->message); ++ g_error_free(error); ++ return rv; ++ } ++ *aCountRead = bytes_read; ++ mBytesRemaining -= *aCountRead; ++ return NS_OK; ++ } ++ else if (mDirOpen) { ++ // directory read ++ while (aCount && rv != NS_BASE_STREAM_CLOSED) ++ { ++ // Copy data out of our buffer ++ PRUint32 bufLen = mDirBuf.Length() - mDirBufCursor; ++ if (bufLen) ++ { ++ PRUint32 n = PR_MIN(bufLen, aCount); ++ memcpy(aBuf, mDirBuf.get() + mDirBufCursor, n); ++ *aCountRead += n; ++ aBuf += n; ++ aCount -= n; ++ mDirBufCursor += n; ++ } ++ ++ if (!mDirListPtr) // Are we at the end of the directory list? ++ { ++ rv = NS_BASE_STREAM_CLOSED; ++ } ++ else if (aCount) // Do we need more data? ++ { ++ GFileInfo *info = (GFileInfo *) mDirListPtr->data; ++ ++ // Prune '.' and '..' from directory listing. ++ const char * fname = g_file_info_get_name(info); ++ if (fname && fname[0] == '.' && ++ (fname[1] == '\0' || (fname[1] == '.' 
&& fname[2] == '\0'))) ++ { ++ mDirListPtr = mDirListPtr->next; ++ continue; ++ } ++ ++ mDirBuf.Assign("201: "); ++ ++ // The "filename" field ++ nsCString escName; ++ nsCOMPtr<nsINetUtil> nu = do_GetService(NS_NETUTIL_CONTRACTID); ++ if (nu && fname) { ++ nu->EscapeString(nsDependentCString(fname), ++ nsINetUtil::ESCAPE_URL_PATH, escName); ++ ++ mDirBuf.Append(escName); ++ mDirBuf.Append(' '); ++ } ++ ++ // The "content-length" field ++ // XXX truncates size from 64-bit to 32-bit ++ mDirBuf.AppendInt(PRInt32(g_file_info_get_size(info))); ++ mDirBuf.Append(' '); ++ ++ // The "last-modified" field ++ // ++ // NSPR promises: PRTime is compatible with time_t ++ // we just need to convert from seconds to microseconds ++ GTimeVal gtime; ++ g_file_info_get_modification_time(info, &gtime); ++ ++ PRExplodedTime tm; ++ PRTime pt = ((PRTime) gtime.tv_sec) * 1000000; ++ PR_ExplodeTime(pt, PR_GMTParameters, &tm); ++ { ++ char buf[64]; ++ PR_FormatTimeUSEnglish(buf, sizeof(buf), ++ "%a,%%20%d%%20%b%%20%Y%%20%H:%M:%S%%20GMT ", &tm); ++ mDirBuf.Append(buf); ++ } ++ ++ // The "file-type" field ++ switch (g_file_info_get_file_type(info)) ++ { ++ case G_FILE_TYPE_REGULAR: ++ mDirBuf.Append("FILE "); ++ break; ++ case G_FILE_TYPE_DIRECTORY: ++ mDirBuf.Append("DIRECTORY "); ++ break; ++ case G_FILE_TYPE_SYMBOLIC_LINK: ++ mDirBuf.Append("SYMBOLIC-LINK "); ++ break; ++ default: ++ break; ++ } ++ mDirBuf.Append('\n'); ++ ++ mDirBufCursor = 0; ++ mDirListPtr = mDirListPtr->next; ++ } ++ } ++ } ++ return rv; ++} ++ ++/** ++ * This class is used to implement SetContentTypeOfChannel. ++ */ ++class nsGIOSetContentTypeEvent : public nsRunnable ++{ ++ public: ++ nsGIOSetContentTypeEvent(nsIChannel *channel, const char *contentType) ++ : mChannel(channel), mContentType(contentType) ++ { ++ // stash channel reference in mChannel. no AddRef here! see note ++ // in SetContentTypeOfchannel. ++ } ++ ++ NS_IMETHOD Run() ++ { ++ mChannel->SetContentType(mContentType); ++ return NS_OK; ++ } ++ ++ private: ++ nsIChannel *mChannel; ++ nsCString mContentType; ++}; ++ ++nsresult ++nsGIOInputStream::SetContentTypeOfChannel(const char *contentType) ++{ ++ // We need to proxy this call over to the main thread. We post an ++ // asynchronous event in this case so that we don't delay reading data, and ++ // we know that this is safe to do since the channel's reference will be ++ // released asynchronously as well. We trust the ordering of the main ++ // thread's event queue to protect us against memory corruption. ++ ++ nsresult rv; ++ nsCOMPtr<nsIRunnable> ev = ++ new nsGIOSetContentTypeEvent(mChannel, contentType); ++ if (!ev) ++ { ++ rv = NS_ERROR_OUT_OF_MEMORY; ++ } ++ else ++ { ++ rv = NS_DispatchToMainThread(ev); ++ } ++ return rv; ++} ++ ++NS_IMPL_THREADSAFE_ISUPPORTS1(nsGIOInputStream, nsIInputStream) ++ ++/** ++ * Free all used memory and close stream. ++ */ ++NS_IMETHODIMP ++nsGIOInputStream::Close() ++{ ++ if (mStream) ++ { ++ g_object_unref(mStream); ++ mStream = nsnull; ++ } ++ ++ if (mHandle) ++ { ++ g_object_unref(mHandle); ++ mHandle = nsnull; ++ } ++ ++ if (mDirList) ++ { ++ // Destroy the list of GIOFileInfo objects...
++ g_list_foreach(mDirList, (GFunc) g_object_unref, nsnull); ++ g_list_free(mDirList); ++ mDirList = nsnull; ++ mDirListPtr = nsnull; ++ } ++ ++ if (mChannel) ++ { ++ nsresult rv = NS_OK; ++ ++ nsCOMPtr<nsIThread> thread = do_GetMainThread(); ++ if (thread) ++ rv = NS_ProxyRelease(thread, mChannel); ++ ++ NS_ASSERTION(thread && NS_SUCCEEDED(rv), "leaking channel reference"); ++ mChannel = nsnull; ++ } ++ ++ mSpec.Truncate(); // free memory ++ ++ // Prevent future reads from re-opening the handle. ++ if (NS_SUCCEEDED(mStatus)) ++ mStatus = NS_BASE_STREAM_CLOSED; ++ ++ return NS_OK; ++} ++ ++/** ++ * Return number of remaining bytes available on input ++ * @param aResult remaining bytes ++ */ ++NS_IMETHODIMP ++nsGIOInputStream::Available(PRUint32 *aResult) ++{ ++ if (NS_FAILED(mStatus)) ++ return mStatus; ++ ++ /* When remaining bytes are bigger than max PRUint32 value an aResult must ++ be set to PRUint32 maximum */ ++ if (mBytesRemaining > PR_UINT32_MAX) ++ *aResult = PR_UINT32_MAX; ++ else ++ *aResult = mBytesRemaining; ++ ++ return NS_OK; ++} ++ ++/** ++ * Trying to read from stream. When location is not available it tries to mount it. ++ * @param aBuf buffer to put read data ++ * @param aCount length of aBuf ++ * @param aCountRead number of bytes actually read ++ */ ++NS_IMETHODIMP ++nsGIOInputStream::Read(char *aBuf, ++ PRUint32 aCount, ++ PRUint32 *aCountRead) ++{ ++ *aCountRead = 0; ++ // Check if file is already opened, otherwise open it ++ if (!mStream && !mDirOpen && mStatus == NS_OK) { ++ mStatus = DoOpen(); ++ if (NS_FAILED(mStatus)) { ++ return mStatus; ++ } ++ } ++ ++ mStatus = DoRead(aBuf, aCount, aCountRead); ++ // Check if all data has been read ++ if (mStatus == NS_BASE_STREAM_CLOSED) ++ return NS_OK; ++ ++ // Check whenever any error appears while reading ++ return mStatus; ++} ++ ++NS_IMETHODIMP ++nsGIOInputStream::ReadSegments(nsWriteSegmentFun aWriter, ++ void *aClosure, ++ PRUint32 aCount, ++ PRUint32 *aResult) ++{ ++ // There is no way to implement this using GnomeVFS, but fortunately ++ // that doesn't matter. Because we are a blocking input stream, Necko ++ // isn't going to call our ReadSegments method. ++ NS_NOTREACHED("nsGIOInputStream::ReadSegments"); ++ return NS_ERROR_NOT_IMPLEMENTED; ++} ++ ++NS_IMETHODIMP ++nsGIOInputStream::IsNonBlocking(PRBool *aResult) ++{ ++ *aResult = PR_FALSE; ++ return NS_OK; ++} ++ ++//----------------------------------------------------------------------------- ++ ++/** ++ * Called when finishing mount operation. Result of operation is set in ++ * nsGIOInputStream. This function is called in main thread as an async request ++ * typically from dbus. ++ * @param source_object GFile object which requested the mount ++ * @param res result object ++ * @param user_data pointer to nsGIOInputStream ++ */ ++static void ++mount_enclosing_volume_finished (GObject *source_object, ++ GAsyncResult *res, ++ gpointer user_data) ++{ ++ GError *error = NULL; ++ ++ nsGIOInputStream* istream = static_cast<nsGIOInputStream*>(user_data); ++ ++ g_file_mount_enclosing_volume_finish(G_FILE (source_object), res, &error); ++ ++ if (error) { ++ g_warning("Mount failed: %s %d", error->message, error->code); ++ istream->SetMountResult(MOUNT_OPERATION_FAILED, error->code); ++ g_error_free(error); ++ } else { ++ istream->SetMountResult(MOUNT_OPERATION_SUCCESS, 0); ++ } ++} ++ ++/** ++ * This function is called when username or password are requested from user. ++ * This function is called in main thread as async request from dbus. 
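Before the patch's own handler below, the bare GMountOperation contract it implements is worth spelling out: an "ask-password" handler must either supply credentials and reply G_MOUNT_OPERATION_HANDLED, or reply G_MOUNT_OPERATION_ABORTED; never replying stalls the mount. A minimal GLib-only sketch with hypothetical fixed credentials, no Mozilla types involved:

#include <gio/gio.h>

/* Illustrative handler, not patch code: answer or abort, but always reply. */
static void
on_ask_password(GMountOperation *op, const char *message,
                const char *default_user, const char *default_domain,
                GAskPasswordFlags flags, gpointer user_data)
{
  if (flags & G_ASK_PASSWORD_NEED_DOMAIN) {       /* cannot satisfy: abort */
    g_mount_operation_reply(op, G_MOUNT_OPERATION_ABORTED);
    return;
  }
  if (flags & G_ASK_PASSWORD_NEED_USERNAME)
    g_mount_operation_set_username(op, default_user ? default_user : "guest");
  if (flags & G_ASK_PASSWORD_NEED_PASSWORD)
    g_mount_operation_set_password(op, "secret"); /* placeholder value */
  g_mount_operation_reply(op, G_MOUNT_OPERATION_HANDLED);
}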
++ * @param mount_op mount operation ++ * @param message message to show to user ++ * @param default_user preffered user ++ * @param default_domain domain name ++ * @param flags what type of information is required ++ * @param user_data nsIChannel ++ */ ++static void ++mount_operation_ask_password (GMountOperation *mount_op, ++ const char *message, ++ const char *default_user, ++ const char *default_domain, ++ GAskPasswordFlags flags, ++ gpointer user_data) ++{ ++ nsIChannel *channel = (nsIChannel *) user_data; ++ if (!channel) { ++ g_mount_operation_reply(mount_op, G_MOUNT_OPERATION_ABORTED); ++ return; ++ } ++ // We can't handle request for domain ++ if (flags & G_ASK_PASSWORD_NEED_DOMAIN) { ++ g_mount_operation_reply(mount_op, G_MOUNT_OPERATION_ABORTED); ++ return; ++ } ++ ++ nsCOMPtr<nsIAuthPrompt> prompt; ++ NS_QueryNotificationCallbacks(channel, prompt); ++ ++ // If no auth prompt, then give up. We could failover to using the ++ // WindowWatcher service, but that might defeat a consumer's purposeful ++ // attempt to disable authentication (for whatever reason). ++ if (!prompt) { ++ g_mount_operation_reply(mount_op, G_MOUNT_OPERATION_ABORTED); ++ return; ++ } ++ // Parse out the host and port... ++ nsCOMPtr<nsIURI> uri; ++ channel->GetURI(getter_AddRefs(uri)); ++ if (!uri) { ++ g_mount_operation_reply(mount_op, G_MOUNT_OPERATION_ABORTED); ++ return; ++ } ++ ++ nsCAutoString scheme, hostPort; ++ uri->GetScheme(scheme); ++ uri->GetHostPort(hostPort); ++ ++ // It doesn't make sense for either of these strings to be empty. What kind ++ // of funky URI is this? ++ if (scheme.IsEmpty() || hostPort.IsEmpty()) { ++ g_mount_operation_reply(mount_op, G_MOUNT_OPERATION_ABORTED); ++ return; ++ } ++ // Construct the single signon key. Altering the value of this key will ++ // cause people's remembered passwords to be forgotten. Think carefully ++ // before changing the way this key is constructed. ++ nsAutoString key, realm; ++ ++ NS_ConvertUTF8toUTF16 dispHost(scheme); ++ dispHost.Append(NS_LITERAL_STRING("://")); ++ dispHost.Append(NS_ConvertUTF8toUTF16(hostPort)); ++ ++ key = dispHost; ++ if (*default_domain != '\0') ++ { ++ // We assume the realm string is ASCII. That might be a bogus assumption, ++ // but we have no idea what encoding GnomeVFS is using, so for now we'll ++ // limit ourselves to ISO-Latin-1. XXX What is a better solution? ++ realm.Append('"'); ++ realm.Append(NS_ConvertASCIItoUTF16(default_domain)); ++ realm.Append('"'); ++ key.Append(' '); ++ key.Append(realm); ++ } ++ // Construct the message string... ++ // ++ // We use Necko's string bundle here. This code really should be encapsulated ++ // behind some Necko API, after all this code is based closely on the code in ++ // nsHttpChannel.cpp. 
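Concretely, for an smb mount of //server/share with a WORKGROUP domain, the single-signon key assembled above comes out as (illustrative values):

smb://server "WORKGROUP"

that is, scheme://hostPort, a space, then the quoted realm; saved logins are keyed on exactly this string, which is why the comment warns against changing its layout.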
++ nsCOMPtr<nsIStringBundleService> bundleSvc = ++ do_GetService(NS_STRINGBUNDLE_CONTRACTID); ++ if (!bundleSvc) { ++ g_mount_operation_reply(mount_op, G_MOUNT_OPERATION_ABORTED); ++ return; ++ } ++ nsCOMPtr<nsIStringBundle> bundle; ++ bundleSvc->CreateBundle("chrome://global/locale/commonDialogs.properties", ++ getter_AddRefs(bundle)); ++ if (!bundle) { ++ g_mount_operation_reply(mount_op, G_MOUNT_OPERATION_ABORTED); ++ return; ++ } ++ nsAutoString nsmessage; ++ ++ if (flags & G_ASK_PASSWORD_NEED_PASSWORD) { ++ if (flags & G_ASK_PASSWORD_NEED_USERNAME) { ++ if (!realm.IsEmpty()) { ++ const PRUnichar *strings[] = { realm.get(), dispHost.get() }; ++ bundle->FormatStringFromName(NS_LITERAL_STRING("EnterLoginForRealm").get(), ++ strings, 2, getter_Copies(nsmessage)); ++ } else { ++ const PRUnichar *strings[] = { dispHost.get() }; ++ bundle->FormatStringFromName(NS_LITERAL_STRING("EnterUserPasswordFor").get(), ++ strings, 1, getter_Copies(nsmessage)); ++ } ++ } else { ++ NS_ConvertUTF8toUTF16 userName(default_user); ++ const PRUnichar *strings[] = { userName.get(), dispHost.get() }; ++ bundle->FormatStringFromName(NS_LITERAL_STRING("EnterPasswordFor").get(), ++ strings, 2, getter_Copies(nsmessage)); ++ } ++ } else { ++ g_warning("Unknown mount operation request (flags: %x)", flags); ++ } ++ ++ if (nsmessage.IsEmpty()) { ++ g_mount_operation_reply(mount_op, G_MOUNT_OPERATION_ABORTED); ++ return; ++ } ++ // Prompt the user... ++ nsresult rv; ++ PRBool retval = PR_FALSE; ++ PRUnichar *user = nsnull, *pass = nsnull; ++ if (default_user) { ++ // user will be freed by PromptUsernameAndPassword ++ user = ToNewUnicode(NS_ConvertUTF8toUTF16(default_user)); ++ } ++ if (flags & G_ASK_PASSWORD_NEED_USERNAME) { ++ rv = prompt->PromptUsernameAndPassword(nsnull, nsmessage.get(), ++ key.get(), ++ nsIAuthPrompt::SAVE_PASSWORD_PERMANENTLY, ++ &user, &pass, &retval); ++ } else { ++ rv = prompt->PromptPassword(nsnull, nsmessage.get(), ++ key.get(), ++ nsIAuthPrompt::SAVE_PASSWORD_PERMANENTLY, ++ &pass, &retval); ++ } ++ if (NS_FAILED(rv) || !retval) { // was || user == '\0' || pass == '\0' ++ g_mount_operation_reply(mount_op, G_MOUNT_OPERATION_ABORTED); ++ return; ++ } ++ /* GIO should accept UTF8 */ ++ g_mount_operation_set_username(mount_op, NS_ConvertUTF16toUTF8(user).get()); ++ g_mount_operation_set_password(mount_op, NS_ConvertUTF16toUTF8(pass).get()); ++ nsMemory::Free(user); ++ nsMemory::Free(pass); ++ g_mount_operation_reply(mount_op, G_MOUNT_OPERATION_HANDLED); ++} ++ ++//----------------------------------------------------------------------------- ++ ++class nsGIOProtocolHandler : public nsIProtocolHandler ++ , public nsIObserver ++{ ++ public: ++ NS_DECL_ISUPPORTS ++ NS_DECL_NSIPROTOCOLHANDLER ++ NS_DECL_NSIOBSERVER ++ ++ nsresult Init(); ++ ++ private: ++ void InitSupportedProtocolsPref(nsIPrefBranch *prefs); ++ PRBool IsSupportedProtocol(const nsCString &spec); ++ ++ nsCString mSupportedProtocols; ++}; ++ ++NS_IMPL_ISUPPORTS2(nsGIOProtocolHandler, nsIProtocolHandler, nsIObserver) ++ ++nsresult ++nsGIOProtocolHandler::Init() ++{ ++#ifdef PR_LOGGING ++ sGIOLog = PR_NewLogModule("gio"); ++#endif ++ ++ nsCOMPtr<nsIPrefBranch2> prefs = do_GetService(NS_PREFSERVICE_CONTRACTID); ++ if (prefs) ++ { ++ InitSupportedProtocolsPref(prefs); ++ prefs->AddObserver(MOZ_GIO_SUPPORTED_PROTOCOLS, this, PR_FALSE); ++ } ++ ++ return NS_OK; ++} ++ ++void ++nsGIOProtocolHandler::InitSupportedProtocolsPref(nsIPrefBranch *prefs) ++{ ++ // Get user preferences to determine which protocol is supported. 
++ // Gvfs/GIO has a set of supported protocols like obex, network, archive, ++ // computer, dav, cdda, gphoto2, trash, etc. Some of these seems to be ++ // irrelevant to process by browser. By default accept only smb and sftp ++ // protocols so far. ++ nsresult rv = prefs->GetCharPref(MOZ_GIO_SUPPORTED_PROTOCOLS, ++ getter_Copies(mSupportedProtocols)); ++ if (NS_SUCCEEDED(rv)) { ++ mSupportedProtocols.StripWhitespace(); ++ ToLowerCase(mSupportedProtocols); ++ } ++ else ++ mSupportedProtocols.Assign("smb:,sftp:"); // use defaults ++ ++ LOG(("gio: supported protocols \"%s\"\n", mSupportedProtocols.get())); ++} ++ ++PRBool ++nsGIOProtocolHandler::IsSupportedProtocol(const nsCString &aSpec) ++{ ++ const char *specString = aSpec.get(); ++ const char *colon = strchr(specString, ':'); ++ if (!colon) ++ return PR_FALSE; ++ ++ PRUint32 length = colon - specString + 1; ++ ++ // <scheme> + ':' ++ nsCString scheme(specString, length); ++ ++ char *found = PL_strcasestr(mSupportedProtocols.get(), scheme.get()); ++ if (!found) ++ return PR_FALSE; ++ ++ if (found[length] != ',' && found[length] != '\0') ++ return PR_FALSE; ++ ++ return PR_TRUE; ++} ++ ++NS_IMETHODIMP ++nsGIOProtocolHandler::GetScheme(nsACString &aScheme) ++{ ++ aScheme.Assign(MOZ_GIO_SCHEME); ++ return NS_OK; ++} ++ ++NS_IMETHODIMP ++nsGIOProtocolHandler::GetDefaultPort(PRInt32 *aDefaultPort) ++{ ++ *aDefaultPort = -1; ++ return NS_OK; ++} ++ ++NS_IMETHODIMP ++nsGIOProtocolHandler::GetProtocolFlags(PRUint32 *aProtocolFlags) ++{ ++ // Is URI_STD true of all GnomeVFS URI types? ++ *aProtocolFlags = URI_STD | URI_DANGEROUS_TO_LOAD; ++ return NS_OK; ++} ++ ++NS_IMETHODIMP ++nsGIOProtocolHandler::NewURI(const nsACString &aSpec, ++ const char *aOriginCharset, ++ nsIURI *aBaseURI, ++ nsIURI **aResult) ++{ ++ const nsCString flatSpec(aSpec); ++ LOG(("gio: NewURI [spec=%s]\n", flatSpec.get())); ++ ++ if (!aBaseURI) ++ { ++ // XXX Is it good to support all GIO protocols? ++ if (!IsSupportedProtocol(flatSpec)) ++ return NS_ERROR_UNKNOWN_PROTOCOL; ++ ++ PRInt32 colon_location = flatSpec.FindChar(':'); ++ if (colon_location <= 0) ++ return NS_ERROR_UNKNOWN_PROTOCOL; ++ ++ // Verify that GIO supports this URI scheme. ++ PRBool uri_scheme_supported = PR_FALSE; ++ ++ GVfs *gvfs = g_vfs_get_default(); ++ ++ if (!gvfs) { ++ g_warning("Cannot get GVfs object."); ++ return NS_ERROR_UNKNOWN_PROTOCOL; ++ } ++ ++ const gchar* const * uri_schemes = g_vfs_get_supported_uri_schemes(gvfs); ++ ++ while (*uri_schemes != NULL) { ++ // While flatSpec ends with ':' the uri_scheme does not. Therefore do not ++ // compare last character. 
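The whitelist test in IsSupportedProtocol above is purely textual: PL_strcasestr over the comma-separated pref, then a check that the match ends at a ',' or at the end of the list. A self-contained restatement in plain C, assuming glibc's strcasestr stands in for NSPR's PL_strcasestr (illustrative, not patch code):

#define _GNU_SOURCE /* for strcasestr */
#include <assert.h>
#include <stddef.h>
#include <string.h>

/* Returns 1 iff spec's "scheme:" prefix appears as a complete entry in the
 * comma-separated whitelist, mirroring IsSupportedProtocol above. */
static int
is_supported(const char *whitelist, const char *spec)
{
  const char *colon = strchr(spec, ':');
  if (!colon)
    return 0;
  size_t len = (size_t)(colon - spec) + 1;  /* <scheme> plus the ':' */
  char scheme[64];
  if (len >= sizeof(scheme))
    return 0;
  memcpy(scheme, spec, len);
  scheme[len] = '\0';
  const char *found = strcasestr(whitelist, scheme);
  return found != NULL && (found[len] == ',' || found[len] == '\0');
}

int main(void)
{
  assert(is_supported("smb:,sftp:", "smb://server/share"));
  assert(is_supported("smb:,sftp:", "SFTP://host/"));  /* case-insensitive */
  assert(!is_supported("smb:,sftp:", "dav://host/"));  /* not whitelisted */
  assert(!is_supported("smb:,sftp:", "no-scheme"));    /* no ':' present */
  return 0;
}

Note one quirk the restatement shares with the original: the search is not anchored to entry starts, so a scheme that is a suffix of a whitelisted entry ("ftp:" against "sftp:") would also appear to pass.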
++ if (StringHead(flatSpec, colon_location).Equals(*uri_schemes)) { ++ uri_scheme_supported = PR_TRUE; ++ break; ++ } ++ uri_schemes++; ++ } ++ ++ if (!uri_scheme_supported) { ++ return NS_ERROR_UNKNOWN_PROTOCOL; ++ } ++ } ++ ++ nsresult rv; ++ nsCOMPtr<nsIStandardURL> url = ++ do_CreateInstance(NS_STANDARDURL_CONTRACTID, &rv); ++ if (NS_FAILED(rv)) ++ return rv; ++ ++ rv = url->Init(nsIStandardURL::URLTYPE_STANDARD, -1, flatSpec, ++ aOriginCharset, aBaseURI); ++ if (NS_SUCCEEDED(rv)) ++ rv = CallQueryInterface(url, aResult); ++ return rv; ++ ++} ++ ++NS_IMETHODIMP ++nsGIOProtocolHandler::NewChannel(nsIURI *aURI, nsIChannel **aResult) ++{ ++ NS_ENSURE_ARG_POINTER(aURI); ++ nsresult rv; ++ ++ nsCAutoString spec; ++ rv = aURI->GetSpec(spec); ++ if (NS_FAILED(rv)) ++ return rv; ++ ++ nsRefPtr<nsGIOInputStream> stream = new nsGIOInputStream(spec); ++ if (!stream) ++ { ++ rv = NS_ERROR_OUT_OF_MEMORY; ++ } ++ else ++ { ++ // start out assuming an unknown content-type. we'll set the content-type ++ // to something better once we open the URI. ++ rv = NS_NewInputStreamChannel(aResult, ++ aURI, ++ stream, ++ NS_LITERAL_CSTRING(UNKNOWN_CONTENT_TYPE)); ++ if (NS_SUCCEEDED(rv)) ++ stream->SetChannel(*aResult); ++ } ++ return rv; ++} ++ ++NS_IMETHODIMP ++nsGIOProtocolHandler::AllowPort(PRInt32 aPort, ++ const char *aScheme, ++ PRBool *aResult) ++{ ++ // Don't override anything. ++ *aResult = PR_FALSE; ++ return NS_OK; ++} ++ ++NS_IMETHODIMP ++nsGIOProtocolHandler::Observe(nsISupports *aSubject, ++ const char *aTopic, ++ const PRUnichar *aData) ++{ ++ if (strcmp(aTopic, NS_PREFBRANCH_PREFCHANGE_TOPIC_ID) == 0) { ++ nsCOMPtr<nsIPrefBranch> prefs = do_QueryInterface(aSubject); ++ InitSupportedProtocolsPref(prefs); ++ } ++ return NS_OK; ++} ++ ++//----------------------------------------------------------------------------- ++ ++#define NS_GIOPROTOCOLHANDLER_CID \ ++{ /* ee706783-3af8-4d19-9e84-e2ebfe213480 */ \ ++ 0xee706783, \ ++ 0x3af8, \ ++ 0x4d19, \ ++ {0x9e, 0x84, 0xe2, 0xeb, 0xfe, 0x21, 0x34, 0x80} \ ++} ++ ++NS_GENERIC_FACTORY_CONSTRUCTOR_INIT(nsGIOProtocolHandler, Init) ++NS_DEFINE_NAMED_CID(NS_GIOPROTOCOLHANDLER_CID); ++ ++static const mozilla::Module::CIDEntry kVFSCIDs[] = { ++ { &kNS_GIOPROTOCOLHANDLER_CID, false, NULL, nsGIOProtocolHandlerConstructor }, ++ { NULL } ++}; ++ ++static const mozilla::Module::ContractIDEntry kVFSContracts[] = { ++ { NS_NETWORK_PROTOCOL_CONTRACTID_PREFIX MOZ_GIO_SCHEME, &kNS_GIOPROTOCOLHANDLER_CID }, ++ { NULL } ++}; ++ ++static const mozilla::Module kVFSModule = { ++ mozilla::Module::kVersion, ++ kVFSCIDs, ++ kVFSContracts ++}; ++ ++NSMODULE_DEFN(nsGIOModule) = &kVFSModule; +diff -r 49a1b2aa43c5 netwerk/base/src/nsIOService.cpp +--- a/netwerk/base/src/nsIOService.cpp Tue Dec 21 12:42:59 2010 +0100 ++++ b/netwerk/base/src/nsIOService.cpp Tue Jan 11 11:17:52 2011 +0100 +@@ -454,6 +454,27 @@ + } + + #ifdef MOZ_X11 ++ // check to see whether GVFS can handle this URI scheme. if it can ++ // create a nsIURI for the "scheme:", then we assume it has support for ++ // the requested protocol. otherwise, we failover to using the default ++ // protocol handler. ++ ++ rv = CallGetService(NS_NETWORK_PROTOCOL_CONTRACTID_PREFIX"moz-gio", ++ result); ++ if (NS_SUCCEEDED(rv)) { ++ nsCAutoString spec(scheme); ++ spec.Append(':'); ++ ++ nsIURI *uri; ++ rv = (*result)->NewURI(spec, nsnull, nsnull, &uri); ++ if (NS_SUCCEEDED(rv)) { ++ NS_RELEASE(uri); ++ return rv; ++ } ++ ++ NS_RELEASE(*result); ++ } ++ + // check to see whether GnomeVFS can handle this URI scheme. 
if it can + // create a nsIURI for the "scheme:", then we assume it has support for + // the requested protocol. otherwise, we failover to using the default diff --git a/staging/xulrunner/xulrunner-omnijar.patch b/staging/xulrunner/xulrunner-omnijar.patch new file mode 100644 index 000000000..66ec5206c --- /dev/null +++ b/staging/xulrunner/xulrunner-omnijar.patch @@ -0,0 +1,1737 @@ +# HG changeset patch +# Parent a7dea879b4b445a23186f438900562155bb39e99 +Bug 620931 part 1 - Use chrome manifest to register resource://gre-resources/ + +diff --git a/layout/style/jar.mn b/layout/style/jar.mn +--- a/layout/style/jar.mn ++++ b/layout/style/jar.mn +@@ -1,8 +1,10 @@ + toolkit.jar: + * res/ua.css (ua.css) + res/html.css (html.css) + res/quirk.css (quirk.css) + res/viewsource.css (viewsource.css) + * res/forms.css (forms.css) + res/arrow.gif (arrow.gif) + res/arrowd.gif (arrowd.gif) ++ ++% resource gre-resources %res/ +diff --git a/netwerk/protocol/res/nsResProtocolHandler.cpp b/netwerk/protocol/res/nsResProtocolHandler.cpp +--- a/netwerk/protocol/res/nsResProtocolHandler.cpp ++++ b/netwerk/protocol/res/nsResProtocolHandler.cpp +@@ -75,17 +75,16 @@ static nsResProtocolHandler *gResHandler + // + // this enables PR_LOG_ALWAYS level information and places all output in + // the file log.txt + // + static PRLogModuleInfo *gResLog; + #endif + + #define kGRE NS_LITERAL_CSTRING("gre") +-#define kGRE_RESOURCES NS_LITERAL_CSTRING("gre-resources") + + //---------------------------------------------------------------------------- + // nsResURL : overrides nsStandardURL::GetFile to provide nsIFile resolution + //---------------------------------------------------------------------------- + + nsresult + nsResURL::EnsureFile() + { +@@ -197,28 +196,16 @@ nsResProtocolHandler::Init() + NS_ENSURE_SUCCESS(rv, rv); + + // + // make resource://gre/ point to the GRE directory + // + rv = AddSpecialDir(NS_GRE_DIR, kGRE); + NS_ENSURE_SUCCESS(rv, rv); + +- // make resource://gre-resources/ point to gre toolkit[.jar]/res +- nsCOMPtr<nsIURI> greURI; +- nsCOMPtr<nsIURI> greResURI; +- GetSubstitution(kGRE, getter_AddRefs(greURI)); +-#ifdef MOZ_CHROME_FILE_FORMAT_JAR +- NS_NAMED_LITERAL_CSTRING(strGRE_RES_URL, "jar:chrome/toolkit.jar!/res/"); +-#else +- NS_NAMED_LITERAL_CSTRING(strGRE_RES_URL, "chrome/toolkit/res/"); +-#endif +- rv = mIOService->NewURI(strGRE_RES_URL, nsnull, greURI, +- getter_AddRefs(greResURI)); +- SetSubstitution(kGRE_RESOURCES, greResURI); + //XXXbsmedberg Neil wants a resource://pchrome/ for the profile chrome dir... + // but once I finish multiple chrome registration I'm not sure that it is needed + + // XXX dveditz: resource://pchrome/ defeats profile directory salting + // if web content can load it. Tread carefully. 
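Part 1 of this patch trades the hand-rolled gre-resources substitution (removed above) for the declarative % resource line added to layout/style/jar.mn, so resource://gre-resources/ keeps resolving under toolkit's res/ in both the jarred and flat chrome layouts. A quick illustrative check from C++, using the same nsIResProtocolHandler::ResolveURI API the PathifyURI hunk further down relies on (not part of the patch):

nsCOMPtr<nsIIOService> io = do_GetIOService();
nsCOMPtr<nsIURI> uri;
NS_NewURI(getter_AddRefs(uri),
          NS_LITERAL_CSTRING("resource://gre-resources/html.css"));
nsCOMPtr<nsIProtocolHandler> ph;
io->GetProtocolHandler("resource", getter_AddRefs(ph));
nsCOMPtr<nsIResProtocolHandler> rph = do_QueryInterface(ph);
nsCAutoString spec;
rph->ResolveURI(uri, spec);  // expect a jar: or file: URL ending in res/html.css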
+ + return rv; + } +@@ -242,22 +229,16 @@ nsResProtocolHandler::Init(nsIFile *aOmn + // these entries should be kept in sync with the normal Init function + + // resource:/// points to jar:omni.jar!/ + SetSubstitution(EmptyCString(), uri); + + // resource://gre/ points to jar:omni.jar!/ + SetSubstitution(kGRE, uri); + +- urlStr += "chrome/toolkit/res/"; +- rv = mIOService->NewURI(urlStr, nsnull, nsnull, getter_AddRefs(uri)); +- NS_ENSURE_SUCCESS(rv, rv); +- +- // resource://gre-resources/ points to jar:omni.jar!/chrome/toolkit/res/ +- SetSubstitution(kGRE_RESOURCES, uri); + return NS_OK; + } + #endif + + #ifdef MOZ_IPC + static PLDHashOperator + EnumerateSubstitution(const nsACString& aKey, + nsIURI* aURI, +# HG changeset patch +# Parent 3038cccba1a071d6b418e15442d0f2d9f3dcb11d +Bug 620931 part 2 - When building --with-libxul-sdk, use the right preferences directory + +diff --git a/browser/locales/Makefile.in b/browser/locales/Makefile.in +--- a/browser/locales/Makefile.in ++++ b/browser/locales/Makefile.in +@@ -183,17 +183,17 @@ install:: $(addsuffix .xml,$(SEARCH_PLUG + $(SYSINSTALL) $(IFLAGS1) $^ $(DESTDIR)$(mozappdir)/searchplugins + + + libs-%: + $(NSINSTALL) -D $(DIST)/install + @$(MAKE) -C ../../toolkit/locales libs-$* BOTH_MANIFESTS=1 + @$(MAKE) -C ../../services/sync/locales AB_CD=$* XPI_NAME=locale-$* BOTH_MANIFESTS=1 + @$(MAKE) -C ../../extensions/spellcheck/locales AB_CD=$* XPI_NAME=locale-$* BOTH_MANIFESTS=1 +- @$(MAKE) libs AB_CD=$* XPI_NAME=locale-$* PREF_DIR=defaults/pref BOTH_MANIFESTS=1 ++ @$(MAKE) libs AB_CD=$* XPI_NAME=locale-$* PREF_DIR=$(PREF_DIR) BOTH_MANIFESTS=1 + @$(MAKE) -C $(DEPTH)/$(MOZ_BRANDING_DIRECTORY)/locales AB_CD=$* XPI_NAME=locale-$* BOTH_MANIFESTS=1 + + + repackage-win32-installer: WIN32_INSTALLER_OUT="$(_ABS_DIST)/$(PKG_INST_PATH)$(PKG_INST_BASENAME).exe" + repackage-win32-installer: $(WIN32_INSTALLER_IN) $(SUBMAKEFILES) + @echo "Repackaging $(WIN32_INSTALLER_IN) into $(WIN32_INSTALLER_OUT)." + $(MAKE) -C $(DEPTH)/$(MOZ_BRANDING_DIRECTORY) export + $(MAKE) -C ../installer/windows CONFIG_DIR=l10ngen l10ngen/setup.exe l10ngen/7zSD.sfx +diff --git a/toolkit/mozapps/installer/packager.mk b/toolkit/mozapps/installer/packager.mk +--- a/toolkit/mozapps/installer/packager.mk ++++ b/toolkit/mozapps/installer/packager.mk +@@ -307,17 +307,17 @@ OMNIJAR_FILES = \ + res \ + defaults \ + greprefs.js \ + jsloader \ + $(NULL) + + NON_OMNIJAR_FILES += \ + chrome/icons/\* \ +- defaults/pref/channel-prefs.js \ ++ $(PREF_DIR)/channel-prefs.js \ + res/cursors/\* \ + res/MainMenu.nib/\* \ + $(NULL) + + PACK_OMNIJAR = \ + rm -f omni.jar components/binary.manifest && \ + grep -h '^binary-component' components/*.manifest > binary.manifest ; \ + sed -e 's/^binary-component/\#binary-component/' components/components.manifest > components.manifest && \ +# HG changeset patch +# Parent cd8df8030f7ad7530692bd7c4391a8009df56a02 +Bug 620931 part 3 - Allow GRE and XUL application to use omni.jar independently + +We now store two independent locations for an omni.jar, allowing GRE/XRE and +XUL application to each have their own omni.jar. And since xulrunner setups +are very independent from the XUL applications, we implement support for both +omni.jar and non omni.jar cases in the same runtime, with the side effect of +allowing to switch from one to the other manually without rebuilding the +binaries. + +We let the mozilla::Omnijar API handle both cases, so that callers don't need +too much work to support them. 
+ +We also make the preferences service load the same set of preferences in all +the various cases (unified vs. separate, omni.jar vs. no omni.jar). + +The child process launcher for IPC is modified to pass the base directories +needed for the mozilla::Omnijar API initialization in the child process. + +Finally, the startupcache file name canonicalization is modified to separate +APP and GRE resources. + +diff --git a/ipc/glue/GeckoChildProcessHost.cpp b/ipc/glue/GeckoChildProcessHost.cpp +--- a/ipc/glue/GeckoChildProcessHost.cpp ++++ b/ipc/glue/GeckoChildProcessHost.cpp +@@ -440,26 +440,29 @@ GeckoChildProcessHost::PerformAsyncLaunc + // other end of the socketpair() from us + + std::vector<std::string> childArgv; + + childArgv.push_back(exePath.value()); + + childArgv.insert(childArgv.end(), aExtraOpts.begin(), aExtraOpts.end()); + +-#ifdef MOZ_OMNIJAR + // Make sure the child process can find the omnijar + // See XRE_InitCommandLine in nsAppRunner.cpp +- nsCAutoString omnijarPath; +- if (mozilla::OmnijarPath()) { +- mozilla::OmnijarPath()->GetNativePath(omnijarPath); +- childArgv.push_back("-omnijar"); +- childArgv.push_back(omnijarPath.get()); ++ nsCAutoString path; ++ nsCOMPtr<nsIFile> file = mozilla::Omnijar::GetBase(mozilla::Omnijar::GRE); ++ if (file && NS_SUCCEEDED(file->GetNativePath(path))) { ++ childArgv.push_back("-grebase"); ++ childArgv.push_back(path.get()); + } +-#endif ++ file = mozilla::Omnijar::GetBase(mozilla::Omnijar::APP); ++ if (file && NS_SUCCEEDED(file->GetNativePath(path))) { ++ childArgv.push_back("-appbase"); ++ childArgv.push_back(path.get()); ++ } + + childArgv.push_back(pidstring); + + #if defined(MOZ_CRASHREPORTER) + # if defined(OS_LINUX) + int childCrashFd, childCrashRemapFd; + if (!CrashReporter::CreateNotificationPipeForChild( + &childCrashFd, &childCrashRemapFd)) +@@ -552,26 +555,29 @@ GeckoChildProcessHost::PerformAsyncLaunc + for (std::vector<std::string>::iterator it = aExtraOpts.begin(); + it != aExtraOpts.end(); + ++it) { + cmdLine.AppendLooseValue(UTF8ToWide(*it)); + } + + cmdLine.AppendLooseValue(std::wstring(mGroupId.get())); + +-#ifdef MOZ_OMNIJAR + // Make sure the child process can find the omnijar + // See XRE_InitCommandLine in nsAppRunner.cpp +- nsAutoString omnijarPath; +- if (mozilla::OmnijarPath()) { +- mozilla::OmnijarPath()->GetPath(omnijarPath); +- cmdLine.AppendLooseValue(UTF8ToWide("-omnijar")); +- cmdLine.AppendLooseValue(omnijarPath.get()); ++ nsAutoString path; ++ nsCOMPtr<nsIFile> file = mozilla::Omnijar::GetBase(mozilla::Omnijar::GRE); ++ if (file && NS_SUCCEEDED(file->GetPath(path))) { ++ cmdLine.AppendLooseValue(UTF8ToWide("-grebase")); ++ cmdLine.AppendLooseValue(path.get()); + } +-#endif ++ file = mozilla::Omnijar::GetBase(mozilla::Omnijar::APP); ++ if (file && NS_SUCCEEDED(file->GetPath(path))) { ++ cmdLine.AppendLooseValue(UTF8ToWide("-appbase")); ++ cmdLine.AppendLooseValue(path.get()); ++ } + + cmdLine.AppendLooseValue(UTF8ToWide(pidstring)); + + #if defined(MOZ_CRASHREPORTER) + cmdLine.AppendLooseValue( + UTF8ToWide(CrashReporter::GetChildNotificationPipe())); + #endif + +diff --git a/js/src/xpconnect/loader/mozJSComponentLoader.cpp b/js/src/xpconnect/loader/mozJSComponentLoader.cpp +--- a/js/src/xpconnect/loader/mozJSComponentLoader.cpp ++++ b/js/src/xpconnect/loader/mozJSComponentLoader.cpp +@@ -81,16 +81,17 @@ + #include "nsIConsoleService.h" + #include "nsIStorageStream.h" + #include "nsIStringStream.h" + #include "prmem.h" + #if defined(XP_WIN) + #include "nsILocalFileWin.h" + #endif + #include "xpcprivate.h" 
++#include "nsIResProtocolHandler.h" + + #ifdef MOZ_ENABLE_LIBXUL + #include "mozilla/scache/StartupCache.h" + #include "mozilla/scache/StartupCacheUtils.h" + #endif + #include "mozilla/Omnijar.h" + + #include "jsdbgapi.h" +@@ -621,34 +622,21 @@ mozJSComponentLoader::LoadModule(nsILoca + + const mozilla::Module* + mozJSComponentLoader::LoadModuleFromJAR(nsILocalFile *aJarFile, + const nsACString &aComponentPath) + { + #if !defined(XPCONNECT_STANDALONE) + nsresult rv; + +- nsCAutoString fullSpec; +- +-#ifdef MOZ_OMNIJAR +- PRBool equal; +- rv = aJarFile->Equals(mozilla::OmnijarPath(), &equal); +- if (NS_SUCCEEDED(rv) && equal) { +- fullSpec = "resource://gre/"; +- } else { +-#endif +- nsCAutoString fileSpec; +- NS_GetURLSpecFromActualFile(aJarFile, fileSpec); +- fullSpec = "jar:"; +- fullSpec += fileSpec; +- fullSpec += "!/"; +-#ifdef MOZ_OMNIJAR +- } +-#endif +- ++ nsCAutoString fullSpec, fileSpec; ++ NS_GetURLSpecFromActualFile(aJarFile, fileSpec); ++ fullSpec = "jar:"; ++ fullSpec += fileSpec; ++ fullSpec += "!/"; + fullSpec += aComponentPath; + + nsCOMPtr<nsIURI> uri; + rv = NS_NewURI(getter_AddRefs(uri), fullSpec); + if (NS_FAILED(rv)) + return NULL; + + nsAutoString hashstring; +@@ -833,57 +821,138 @@ class JSScriptHolder + JSScriptHolder(JSContext *cx, JSScript *script) + : mCx(cx), mScript(script) {} + ~JSScriptHolder() { ::JS_DestroyScript(mCx, mScript); } + private: + JSContext *mCx; + JSScript *mScript; + }; + ++static const char baseName[2][5] = { "gre/", "app/" }; ++ ++static inline PRBool ++canonicalizeBase(nsCAutoString &spec, nsACString &out, mozilla::Omnijar::Type aType) ++{ ++ nsCAutoString base; ++ nsresult rv = mozilla::Omnijar::GetURIString(aType, base); ++ ++ if (NS_FAILED(rv) || !base.Length()) ++ return PR_FALSE; ++ ++ if (base.Compare(spec.get(), PR_FALSE, base.Length())) ++ return PR_FALSE; ++ ++ out.Append("/resource/"); ++ out.Append(baseName[aType]); ++ out.Append(Substring(spec, base.Length())); ++ return PR_TRUE; ++} + /** + * PathifyURI transforms mozilla .js uris into useful zip paths + * to make it makes it easier to manipulate startup cache entries + * using standard zip tools. + * Transformations applied: +- * * jsloader/<scheme> prefix is used to group mozJSComponentLoader cache entries in ++ * * jsloader/ prefix is used to group mozJSComponentLoader cache entries in + * a top-level zip directory. +- * * In MOZ_OMNIJAR case resource:/// and resource://gre/ URIs refer to the same path +- * so treat both of them as resource://gre/ ++ * * resource:// URIs are resolved to their corresponding file/jar URI to ++ * canonicalize resources URIs other than gre and app. ++ * * Paths under GRE or APP directory have their base path replaced with ++ * resource/gre or resource/app to avoid depending on install location. ++ * * jar:file:///path/to/file.jar!/sub/path urls are replaced with ++ * /path/to/file.jar/sub/path + * * .bin suffix is added to the end of the path to indicate that jsloader/ entries + * are binary representations of JS source. 
+ * For example: +- * resource://gre/modules/XPCOMUtils.jsm becomes +- * jsloader/resource/gre/modules/XPCOMUtils.jsm.bin ++ * resource://gre/modules/XPCOMUtils.jsm or ++ * file://$GRE_DIR/modules/XPCOMUtils.jsm or ++ * jar:file://$GRE_DIR/omni.jar!/modules/XPCOMUtils.jsm become ++ * jsloader/resource/gre/modules/XPCOMUtils.jsm.bin ++ * file://$PROFILE_DIR/extensions/{uuid}/components/component.js becomes ++ * jsloader/$PROFILE_DIR/extensions/%7Buuid%7D/components/component.js.bin ++ * jar:file://$PROFILE_DIR/extensions/some.xpi!/components/component.js becomes ++ * jsloader/$PROFILE_DIR/extensions/some.xpi/components/component.js.bin + */ + static nsresult + PathifyURI(nsIURI *in, nsACString &out) + { +- out = "jsloader/"; +- nsCAutoString scheme; +- nsresult rv = in->GetScheme(scheme); +- NS_ENSURE_SUCCESS(rv, rv); +- out.Append(scheme); +- nsCAutoString host; +- // OK for GetHost to fail since it's not implemented sometimes +- in->GetHost(host); +-#ifdef MOZ_OMNIJAR +- if (scheme.Equals("resource") && host.Length() == 0){ +- host = "gre"; +- } +-#endif +- if (host.Length()) { +- out.Append("/"); +- out.Append(host); +- } +- nsCAutoString path; +- rv = in->GetPath(path); +- NS_ENSURE_SUCCESS(rv, rv); +- out.Append(path); +- out.Append(".bin"); +- return NS_OK; ++ PRBool equals; ++ nsresult rv; ++ nsCOMPtr<nsIURI> uri = in; ++ nsCAutoString spec; ++ ++ out = "jsloader"; ++ ++ // Resolve resource:// URIs. At the end of this if/else block, we ++ // have both spec and uri variables identifying the same URI. ++ if (NS_SUCCEEDED(in->SchemeIs("resource", &equals)) && equals) { ++ nsCOMPtr<nsIIOService> ioService = do_GetIOService(&rv); ++ NS_ENSURE_SUCCESS(rv, rv); ++ ++ nsCOMPtr<nsIProtocolHandler> ph; ++ rv = ioService->GetProtocolHandler("resource", getter_AddRefs(ph)); ++ NS_ENSURE_SUCCESS(rv, rv); ++ ++ nsCOMPtr<nsIResProtocolHandler> irph(do_QueryInterface(ph, &rv)); ++ NS_ENSURE_SUCCESS(rv, rv); ++ ++ rv = irph->ResolveURI(in, spec); ++ NS_ENSURE_SUCCESS(rv, rv); ++ ++ rv = ioService->NewURI(spec, nsnull, nsnull, getter_AddRefs(uri)); ++ NS_ENSURE_SUCCESS(rv, rv); ++ } else { ++ rv = in->GetSpec(spec); ++ NS_ENSURE_SUCCESS(rv, rv); ++ } ++ ++ if (!canonicalizeBase(spec, out, mozilla::Omnijar::GRE) && ++ !canonicalizeBase(spec, out, mozilla::Omnijar::APP)) { ++ if (NS_SUCCEEDED(uri->SchemeIs("file", &equals)) && equals) { ++ nsCOMPtr<nsIFileURL> baseFileURL; ++ baseFileURL = do_QueryInterface(uri, &rv); ++ NS_ENSURE_SUCCESS(rv, rv); ++ ++ nsCAutoString path; ++ rv = baseFileURL->GetPath(path); ++ NS_ENSURE_SUCCESS(rv, rv); ++ ++ out.Append(path); ++ } else if (NS_SUCCEEDED(uri->SchemeIs("jar", &equals)) && equals) { ++ nsCOMPtr<nsIJARURI> jarURI = do_QueryInterface(uri, &rv); ++ NS_ENSURE_SUCCESS(rv, rv); ++ ++ nsCOMPtr<nsIURI> jarFileURI; ++ rv = jarURI->GetJARFile(getter_AddRefs(jarFileURI)); ++ NS_ENSURE_SUCCESS(rv, rv); ++ ++ nsCOMPtr<nsIFileURL> jarFileURL; ++ jarFileURL = do_QueryInterface(jarFileURI, &rv); ++ NS_ENSURE_SUCCESS(rv, rv); ++ ++ nsCAutoString path; ++ rv = jarFileURL->GetPath(path); ++ NS_ENSURE_SUCCESS(rv, rv); ++ out.Append(path); ++ ++ rv = jarURI->GetJAREntry(path); ++ NS_ENSURE_SUCCESS(rv, rv); ++ out.Append("/"); ++ out.Append(path); ++ } else { // Very unlikely ++ nsCAutoString spec; ++ rv = uri->GetSpec(spec); ++ NS_ENSURE_SUCCESS(rv, rv); ++ ++ out.Append("/"); ++ out.Append(spec); ++ } ++ } ++ ++ out.Append(".bin"); ++ return NS_OK; + } + + /* static */ + #ifdef MOZ_ENABLE_LIBXUL + nsresult + mozJSComponentLoader::ReadScript(StartupCache* cache, nsIURI *uri, 
+ JSContext *cx, JSScript **script) + { +diff --git a/modules/libjar/nsJAR.cpp b/modules/libjar/nsJAR.cpp +--- a/modules/libjar/nsJAR.cpp ++++ b/modules/libjar/nsJAR.cpp +@@ -171,26 +171,23 @@ nsJAR::Open(nsIFile* zipFile) + if (mLock) return NS_ERROR_FAILURE; // Already open! + + mZipFile = zipFile; + mOuterZipEntry.Truncate(); + + mLock = PR_NewLock(); + NS_ENSURE_TRUE(mLock, NS_ERROR_OUT_OF_MEMORY); + +-#ifdef MOZ_OMNIJAR + // The omnijar is special, it is opened early on and closed late + // this avoids reopening it +- PRBool equals; +- nsresult rv = zipFile->Equals(mozilla::OmnijarPath(), &equals); +- if (NS_SUCCEEDED(rv) && equals) { +- mZip = mozilla::OmnijarReader(); ++ nsZipArchive *zip = mozilla::Omnijar::GetReader(zipFile); ++ if (zip) { ++ mZip = zip; + return NS_OK; + } +-#endif + return mZip->OpenArchive(zipFile); + } + + NS_IMETHODIMP + nsJAR::OpenInner(nsIZipReader *aZipReader, const char *aZipEntry) + { + NS_ENSURE_ARG_POINTER(aZipReader); + NS_ENSURE_ARG_POINTER(aZipEntry); +@@ -234,23 +231,22 @@ nsJAR::Close() + mLock = nsnull; + } + + mParsedManifest = PR_FALSE; + mManifestData.Reset(); + mGlobalStatus = JAR_MANIFEST_NOT_PARSED; + mTotalItemsInManifest = 0; + +-#ifdef MOZ_OMNIJAR +- if (mZip == mozilla::OmnijarReader()) { ++ if ((mZip == mozilla::Omnijar::GetReader(mozilla::Omnijar::GRE)) || ++ (mZip == mozilla::Omnijar::GetReader(mozilla::Omnijar::APP))) { + mZip.forget(); + mZip = new nsZipArchive(); + return NS_OK; + } +-#endif + return mZip->CloseArchive(); + } + + NS_IMETHODIMP + nsJAR::Test(const char *aEntryName) + { + return mZip->Test(aEntryName); + } +@@ -391,22 +387,21 @@ nsJAR::GetInputStreamWithSpec(const nsAC + NS_IMETHODIMP + nsJAR::GetCertificatePrincipal(const char* aFilename, nsIPrincipal** aPrincipal) + { + //-- Parameter check + if (!aPrincipal) + return NS_ERROR_NULL_POINTER; + *aPrincipal = nsnull; + +-#ifdef MOZ_OMNIJAR + // Don't check signatures in the omnijar - this is only + // interesting for extensions/XPIs. 
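The nsJAR hunks here show the shape of the new two-slot API that replaces the single OmnijarReader(): GRE and APP may each have, or not have, an omni.jar, and callers probe both slots. A condensed sketch of the recurring pattern; the exact declarations live in Omnijar.h, which this diff does not show:

// Sketch, not patch code: prefer the APP jar, fall back to the GRE jar.
nsZipArchive *zip = mozilla::Omnijar::GetReader(mozilla::Omnijar::APP);
if (!zip)
  zip = mozilla::Omnijar::GetReader(mozilla::Omnijar::GRE);
if (zip) {
  // Shared, long-lived reader: never CloseArchive() it, as nsJAR::Close
  // above takes care to avoid.
}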
+- if (mZip == mozilla::OmnijarReader()) ++ if ((mZip == mozilla::Omnijar::GetReader(mozilla::Omnijar::GRE)) || ++ (mZip == mozilla::Omnijar::GetReader(mozilla::Omnijar::APP))) + return NS_OK; +-#endif + + //-- Parse the manifest + nsresult rv = ParseManifest(); + if (NS_FAILED(rv)) return rv; + if (mGlobalStatus == JAR_NO_MANIFEST) + return NS_OK; + + PRInt16 requestedStatus; +diff --git a/modules/libpref/src/nsPrefService.cpp b/modules/libpref/src/nsPrefService.cpp +--- a/modules/libpref/src/nsPrefService.cpp ++++ b/modules/libpref/src/nsPrefService.cpp +@@ -67,20 +67,18 @@ + + #include "prefapi.h" + #include "prefread.h" + #include "prefapi_private_data.h" + #include "PrefTuple.h" + + #include "nsITimelineService.h" + +-#ifdef MOZ_OMNIJAR + #include "mozilla/Omnijar.h" + #include "nsZipArchive.h" +-#endif + + // Definitions + #define INITIAL_PREF_FILES 10 + static NS_DEFINE_CID(kZipReaderCID, NS_ZIPREADER_CID); + + // Prototypes + static nsresult openPrefFile(nsIFile* aFile); + static nsresult pref_InitInitialObjects(void); +@@ -793,124 +791,144 @@ static nsresult pref_LoadPrefsInDirList( + pref_LoadPrefsInDir(dir, nsnull, 0); + } + } + } + } + return NS_OK; + } + +-//---------------------------------------------------------------------------------------- +-// Initialize default preference JavaScript buffers from +-// appropriate TEXT resources +-//---------------------------------------------------------------------------------------- +-static nsresult pref_InitDefaults() +-{ +- nsCOMPtr<nsIFile> greprefsFile; +- nsresult rv; +- +- rv = NS_GetSpecialDirectory(NS_GRE_DIR, getter_AddRefs(greprefsFile)); +- NS_ENSURE_SUCCESS(rv, rv); +- +- rv = greprefsFile->AppendNative(NS_LITERAL_CSTRING("greprefs.js")); +- NS_ENSURE_SUCCESS(rv, rv); +- +- rv = openPrefFile(greprefsFile); +- if (NS_FAILED(rv)) { +- NS_WARNING("Error parsing GRE default preferences. 
Is this an old-style embedding app?"); +- } +- +- return NS_OK; +-} +- +-#ifdef MOZ_OMNIJAR + static nsresult pref_ReadPrefFromJar(nsZipArchive* jarReader, const char *name) + { + nsZipItemPtr<char> manifest(jarReader, name, true); + NS_ENSURE_TRUE(manifest.Buffer(), NS_ERROR_NOT_AVAILABLE); + + PrefParseState ps; + PREF_InitParseState(&ps, PREF_ReaderCallback, NULL); + nsresult rv = PREF_ParseBuf(&ps, manifest, manifest.Length()); + PREF_FinalizeParseState(&ps); + + return rv; + } + +-static nsresult pref_InitAppDefaultsFromOmnijar() +-{ +- nsresult rv; +- +- nsZipArchive* jarReader = mozilla::OmnijarReader(); +- if (!jarReader) +- return pref_InitDefaults(); +- +- rv = pref_ReadPrefFromJar(jarReader, "greprefs.js"); +- NS_ENSURE_SUCCESS(rv, rv); +- +- nsZipFind *findPtr; +- rv = jarReader->FindInit("defaults/pref/*.js$", &findPtr); +- NS_ENSURE_SUCCESS(rv, rv); +- +- nsAutoPtr<nsZipFind> find(findPtr); +- +- nsTArray<nsCString> prefEntries; +- const char *entryName; +- PRUint16 entryNameLen; +- while (NS_SUCCEEDED(find->FindNext(&entryName, &entryNameLen))) { +- prefEntries.AppendElement(Substring(entryName, entryName + entryNameLen)); +- } +- +- prefEntries.Sort(); +- for (PRUint32 i = prefEntries.Length(); i--; ) { +- rv = pref_ReadPrefFromJar(jarReader, prefEntries[i].get()); +- if (NS_FAILED(rv)) +- NS_WARNING("Error parsing preferences."); +- } +- +- return NS_OK; +-} +-#endif +- ++//---------------------------------------------------------------------------------------- ++// Initialize default preference JavaScript buffers from ++// appropriate TEXT resources ++//---------------------------------------------------------------------------------------- + static nsresult pref_InitInitialObjects() + { + nsresult rv; + +- // first we parse the GRE default prefs. This also works if we're not using a GRE, +-#ifdef MOZ_OMNIJAR +- rv = pref_InitAppDefaultsFromOmnijar(); +-#else +- rv = pref_InitDefaults(); +-#endif +- NS_ENSURE_SUCCESS(rv, rv); ++ // In omni.jar case, we load the following prefs: ++ // - jar:$gre/omni.jar!/greprefs.js ++ // - jar:$gre/omni.jar!/defaults/pref/*.js ++ // In non omni.jar case, we load: ++ // - $gre/greprefs.js ++ // ++ // When $app == $gre, we additionally load, in all cases: ++ // - $gre/defaults/pref/*.js ++ // This is kept for bug 591866 (channel-prefs.js should not be in omni.jar). ++ // We load all files instead of channel-prefs.js only to have the same ++ // behaviour as $app != $gre. 
++ // ++ // When $app != $gre, we additionally load, in omni.jar case: ++ // - jar:$app/omni.jar!/defaults/preferences/*.js ++ // - $app/defaults/preferences/*.js ++ // and in non omni.jar case: ++ // - $app/defaults/preferences/*.js + +- nsCOMPtr<nsIFile> defaultPrefDir; +- // now parse the "application" default preferences +- rv = NS_GetSpecialDirectory(NS_APP_PREF_DEFAULTS_50_DIR, getter_AddRefs(defaultPrefDir)); +- NS_ENSURE_SUCCESS(rv, rv); ++ nsZipFind *findPtr; ++ nsAutoPtr<nsZipFind> find; ++ nsTArray<nsCString> prefEntries; ++ const char *entryName; ++ PRUint16 entryNameLen; + +- /* these pref file names should not be used: we process them after all other application pref files for backwards compatibility */ +- static const char* specialFiles[] = { ++ nsZipArchive* jarReader = mozilla::Omnijar::GetReader(mozilla::Omnijar::GRE); ++ if (jarReader) { ++ // Load jar:$gre/omni.jar!/greprefs.js ++ rv = pref_ReadPrefFromJar(jarReader, "greprefs.js"); ++ NS_ENSURE_SUCCESS(rv, rv); ++ ++ // Load jar:$gre/omni.jar!/defaults/pref/*.js ++ rv = jarReader->FindInit("defaults/pref/*.js$", &findPtr); ++ NS_ENSURE_SUCCESS(rv, rv); ++ ++ find = findPtr; ++ while (NS_SUCCEEDED(find->FindNext(&entryName, &entryNameLen))) { ++ prefEntries.AppendElement(Substring(entryName, entryName + entryNameLen)); ++ } ++ ++ prefEntries.Sort(); ++ for (PRUint32 i = prefEntries.Length(); i--; ) { ++ rv = pref_ReadPrefFromJar(jarReader, prefEntries[i].get()); ++ if (NS_FAILED(rv)) ++ NS_WARNING("Error parsing preferences."); ++ } ++ } else { ++ // Load $gre/greprefs.js ++ nsCOMPtr<nsIFile> greprefsFile; ++ rv = NS_GetSpecialDirectory(NS_GRE_DIR, getter_AddRefs(greprefsFile)); ++ NS_ENSURE_SUCCESS(rv, rv); ++ ++ rv = greprefsFile->AppendNative(NS_LITERAL_CSTRING("greprefs.js")); ++ NS_ENSURE_SUCCESS(rv, rv); ++ ++ rv = openPrefFile(greprefsFile); ++ if (NS_FAILED(rv)) ++ NS_WARNING("Error parsing GRE default preferences. 
Is this an old-style embedding app?"); ++ } ++ ++ if (!mozilla::Omnijar::HasOmnijar(mozilla::Omnijar::APP)) { ++ // Load $gre/defaults/pref/*.js ++ nsCOMPtr<nsIFile> defaultPrefDir; ++ ++ rv = NS_GetSpecialDirectory(NS_APP_PREF_DEFAULTS_50_DIR, getter_AddRefs(defaultPrefDir)); ++ NS_ENSURE_SUCCESS(rv, rv); ++ ++ /* these pref file names should not be used: we process them after all other application pref files for backwards compatibility */ ++ static const char* specialFiles[] = { + #if defined(XP_MAC) || defined(XP_MACOSX) + "macprefs.js" + #elif defined(XP_WIN) + "winpref.js" + #elif defined(XP_UNIX) + "unix.js" +-#if defined(_AIX) ++#if defined(VMS) ++ , "openvms.js" ++#elif defined(_AIX) + , "aix.js" + #endif + #elif defined(XP_OS2) + "os2pref.js" ++#elif defined(XP_BEOS) ++ "beos.js" + #endif +- }; ++ }; + +- rv = pref_LoadPrefsInDir(defaultPrefDir, specialFiles, NS_ARRAY_LENGTH(specialFiles)); +- if (NS_FAILED(rv)) { +- NS_WARNING("Error parsing application default preferences."); ++ rv = pref_LoadPrefsInDir(defaultPrefDir, specialFiles, NS_ARRAY_LENGTH(specialFiles)); ++ if (NS_FAILED(rv)) ++ NS_WARNING("Error parsing application default preferences."); ++ } ++ ++ // Load jar:$app/omni.jar!/defaults/preferences/*.js ++ nsZipArchive *appJarReader = mozilla::Omnijar::GetReader(mozilla::Omnijar::APP); ++ if (appJarReader) { ++ rv = appJarReader->FindInit("defaults/preferences/*.js$", &findPtr); ++ NS_ENSURE_SUCCESS(rv, rv); ++ find = findPtr; ++ prefEntries.Clear(); ++ while (NS_SUCCEEDED(find->FindNext(&entryName, &entryNameLen))) { ++ prefEntries.AppendElement(Substring(entryName, entryName + entryNameLen)); ++ } ++ prefEntries.Sort(); ++ for (PRUint32 i = prefEntries.Length(); i--; ) { ++ rv = pref_ReadPrefFromJar(appJarReader, prefEntries[i].get()); ++ if (NS_FAILED(rv)) ++ NS_WARNING("Error parsing preferences."); ++ } + } + + rv = pref_LoadPrefsInDirList(NS_APP_PREFS_DEFAULTS_DIR_LIST); + NS_ENSURE_SUCCESS(rv, rv); + + NS_CreateServicesFromCategory(NS_PREFSERVICE_APPDEFAULTS_TOPIC_ID, + nsnull, NS_PREFSERVICE_APPDEFAULTS_TOPIC_ID); + +diff --git a/netwerk/protocol/res/nsResProtocolHandler.cpp b/netwerk/protocol/res/nsResProtocolHandler.cpp +--- a/netwerk/protocol/res/nsResProtocolHandler.cpp ++++ b/netwerk/protocol/res/nsResProtocolHandler.cpp +@@ -152,97 +152,62 @@ nsResProtocolHandler::nsResProtocolHandl + } + + nsResProtocolHandler::~nsResProtocolHandler() + { + gResHandler = nsnull; + } + + nsresult +-nsResProtocolHandler::AddSpecialDir(const char* aSpecialDir, const nsACString& aSubstitution) +-{ +- nsCOMPtr<nsIFile> file; +- nsresult rv = NS_GetSpecialDirectory(aSpecialDir, getter_AddRefs(file)); +- NS_ENSURE_SUCCESS(rv, rv); +- +- nsCOMPtr<nsIURI> uri; +- rv = mIOService->NewFileURI(file, getter_AddRefs(uri)); +- NS_ENSURE_SUCCESS(rv, rv); +- +- return SetSubstitution(aSubstitution, uri); +-} +- +-nsresult + nsResProtocolHandler::Init() + { + if (!mSubstitutions.Init(32)) + return NS_ERROR_UNEXPECTED; + + nsresult rv; + + mIOService = do_GetIOService(&rv); + NS_ENSURE_SUCCESS(rv, rv); + +-#ifdef MOZ_OMNIJAR +- nsCOMPtr<nsIFile> omniJar(mozilla::OmnijarPath()); +- if (omniJar) +- return Init(omniJar); +-#endif +- +- // these entries should be kept in sync with the omnijar Init function ++ nsCAutoString appURI, greURI; ++ rv = mozilla::Omnijar::GetURIString(mozilla::Omnijar::APP, appURI); ++ NS_ENSURE_SUCCESS(rv, rv); ++ rv = mozilla::Omnijar::GetURIString(mozilla::Omnijar::GRE, greURI); ++ NS_ENSURE_SUCCESS(rv, rv); + + // +- // make resource:/// point to the application 
directory ++ // make resource:/// point to the application directory or omnijar + // +- rv = AddSpecialDir(NS_OS_CURRENT_PROCESS_DIR, EmptyCString()); ++ nsCOMPtr<nsIURI> uri; ++ rv = NS_NewURI(getter_AddRefs(uri), appURI.Length() ? appURI : greURI); ++ NS_ENSURE_SUCCESS(rv, rv); ++ ++ rv = SetSubstitution(EmptyCString(), uri); + NS_ENSURE_SUCCESS(rv, rv); + + // + // make resource://gre/ point to the GRE directory + // +- rv = AddSpecialDir(NS_GRE_DIR, kGRE); ++ if (appURI.Length()) { // We already have greURI in uri if appURI.Length() is 0. ++ rv = NS_NewURI(getter_AddRefs(uri), greURI); ++ NS_ENSURE_SUCCESS(rv, rv); ++ } ++ ++ rv = SetSubstitution(kGRE, uri); + NS_ENSURE_SUCCESS(rv, rv); + + //XXXbsmedberg Neil wants a resource://pchrome/ for the profile chrome dir... + // but once I finish multiple chrome registration I'm not sure that it is needed + + // XXX dveditz: resource://pchrome/ defeats profile directory salting + // if web content can load it. Tread carefully. + + return rv; + } + +-#ifdef MOZ_OMNIJAR +-nsresult +-nsResProtocolHandler::Init(nsIFile *aOmniJar) +-{ +- nsresult rv; +- nsCOMPtr<nsIURI> uri; +- nsCAutoString omniJarSpec; +- NS_GetURLSpecFromActualFile(aOmniJar, omniJarSpec, mIOService); +- +- nsCAutoString urlStr("jar:"); +- urlStr += omniJarSpec; +- urlStr += "!/"; +- +- rv = mIOService->NewURI(urlStr, nsnull, nsnull, getter_AddRefs(uri)); +- NS_ENSURE_SUCCESS(rv, rv); +- +- // these entries should be kept in sync with the normal Init function +- +- // resource:/// points to jar:omni.jar!/ +- SetSubstitution(EmptyCString(), uri); +- +- // resource://gre/ points to jar:omni.jar!/ +- SetSubstitution(kGRE, uri); +- +- return NS_OK; +-} +-#endif +- + #ifdef MOZ_IPC + static PLDHashOperator + EnumerateSubstitution(const nsACString& aKey, + nsIURI* aURI, + void* aArg) + { + nsTArray<ResourceMapping>* resources = + static_cast<nsTArray<ResourceMapping>*>(aArg); +diff --git a/startupcache/StartupCache.cpp b/startupcache/StartupCache.cpp +--- a/startupcache/StartupCache.cpp ++++ b/startupcache/StartupCache.cpp +@@ -237,27 +237,36 @@ StartupCache::GetBuffer(const char* id, + nsZipItemPtr<char> zipItem(mArchive, id, true); + if (zipItem) { + *outbuf = zipItem.Forget(); + *length = zipItem.Length(); + return NS_OK; + } + } + +-#ifdef MOZ_OMNIJAR +- if (mozilla::OmnijarReader()) { ++ if (mozilla::Omnijar::GetReader(mozilla::Omnijar::APP)) { + // no need to checksum omnijarred entries +- nsZipItemPtr<char> zipItem(mozilla::OmnijarReader(), id); ++ nsZipItemPtr<char> zipItem(mozilla::Omnijar::GetReader(mozilla::Omnijar::APP), id); + if (zipItem) { + *outbuf = zipItem.Forget(); + *length = zipItem.Length(); + return NS_OK; + } + } +-#endif ++ ++ if (mozilla::Omnijar::GetReader(mozilla::Omnijar::GRE)) { ++ // no need to checksum omnijarred entries ++ nsZipItemPtr<char> zipItem(mozilla::Omnijar::GetReader(mozilla::Omnijar::GRE), id); ++ if (zipItem) { ++ *outbuf = zipItem.Forget(); ++ *length = zipItem.Length(); ++ return NS_OK; ++ } ++ } ++ + return NS_ERROR_NOT_AVAILABLE; + } + + // Makes a copy of the buffer, client retains ownership of inbuf. 
+ nsresult + StartupCache::PutBuffer(const char* id, const char* inbuf, PRUint32 len) + { + WaitOnWriteThread(); +diff --git a/toolkit/xre/nsAppRunner.cpp b/toolkit/xre/nsAppRunner.cpp +--- a/toolkit/xre/nsAppRunner.cpp ++++ b/toolkit/xre/nsAppRunner.cpp +@@ -3897,35 +3897,45 @@ XRE_InitCommandLine(int aArgc, char* aAr + CommandLine::Init(aArgc, canonArgs); + + for (int i = 0; i < aArgc; ++i) + free(canonArgs[i]); + delete[] canonArgs; + #endif + #endif + +-#ifdef MOZ_OMNIJAR +- const char *omnijarPath = nsnull; +- ArgResult ar = CheckArg("omnijar", PR_FALSE, &omnijarPath); ++ const char *path = nsnull; ++ ArgResult ar = CheckArg("grebase", PR_FALSE, &path); + if (ar == ARG_BAD) { +- PR_fprintf(PR_STDERR, "Error: argument -omnijar requires an omnijar path\n"); ++ PR_fprintf(PR_STDERR, "Error: argument -grebase requires a path argument\n"); + return NS_ERROR_FAILURE; + } + +- if (!omnijarPath) ++ if (!path) + return rv; + +- nsCOMPtr<nsILocalFile> omnijar; +- rv = NS_NewNativeLocalFile(nsDependentCString(omnijarPath), PR_TRUE, +- getter_AddRefs(omnijar)); +- if (NS_SUCCEEDED(rv)) +- mozilla::SetOmnijar(omnijar); +-#endif +- +- return rv; ++ nsCOMPtr<nsILocalFile> greBase; ++ rv = XRE_GetFileFromPath(path, getter_AddRefs(greBase)); ++ if (NS_FAILED(rv)) ++ return rv; ++ ++ ar = CheckArg("appbase", PR_FALSE, &path); ++ if (ar == ARG_BAD) { ++ PR_fprintf(PR_STDERR, "Error: argument -appbase requires a path argument\n"); ++ return NS_ERROR_FAILURE; ++ } ++ ++ nsCOMPtr<nsILocalFile> appBase; ++ if (path) { ++ rv = XRE_GetFileFromPath(path, getter_AddRefs(appBase)); ++ if (NS_FAILED(rv)) ++ return rv; ++ } ++ ++ return mozilla::Omnijar::SetBase(greBase, appBase); + } + + nsresult + XRE_DeinitCommandLine() + { + nsresult rv = NS_OK; + + #if defined(MOZ_IPC) +diff --git a/toolkit/xre/nsEmbedFunctions.cpp b/toolkit/xre/nsEmbedFunctions.cpp +--- a/toolkit/xre/nsEmbedFunctions.cpp ++++ b/toolkit/xre/nsEmbedFunctions.cpp +@@ -512,19 +512,17 @@ XRE_InitChildProcess(int aArgc, + } + + // Run the UI event loop on the main thread. + uiMessageLoop.MessageLoop::Run(); + + // Allow ProcessChild to clean up after itself before going out of + // scope and being deleted + process->CleanUp(); +-#ifdef MOZ_OMNIJAR +- mozilla::SetOmnijar(nsnull); +-#endif ++ mozilla::Omnijar::SetBase(nsnull, nsnull); + } + } + + NS_LogTerm(); + return XRE_DeinitCommandLine(); + } + + MessageLoop* +diff --git a/xpcom/build/Makefile.in b/xpcom/build/Makefile.in +--- a/xpcom/build/Makefile.in ++++ b/xpcom/build/Makefile.in +@@ -64,28 +64,25 @@ CSRCS = \ + $(NULL) + + CPPSRCS = \ + $(XPCOM_GLUE_SRC_LCPPSRCS) \ + $(XPCOM_GLUENS_SRC_LCPPSRCS) \ + nsXPComInit.cpp \ + nsXPCOMStrings.cpp \ + Services.cpp \ ++ Omnijar.cpp \ + $(NULL) + + ifndef MOZ_ENABLE_LIBXUL + ifeq (,$(filter-out WINNT WINCE OS2,$(OS_ARCH))) + CPPSRCS += dlldeps.cpp + endif + endif + +-ifdef MOZ_OMNIJAR +-CPPSRCS += Omnijar.cpp +-endif +- + SHARED_LIBRARY_LIBS = \ + $(DEPTH)/chrome/src/$(LIB_PREFIX)chrome_s.$(LIB_SUFFIX) \ + ../ds/$(LIB_PREFIX)xpcomds_s.$(LIB_SUFFIX) \ + ../io/$(LIB_PREFIX)xpcomio_s.$(LIB_SUFFIX) \ + ../components/$(LIB_PREFIX)xpcomcomponents_s.$(LIB_SUFFIX) \ + ../threads/$(LIB_PREFIX)xpcomthreads_s.$(LIB_SUFFIX) \ + ../proxy/src/$(LIB_PREFIX)xpcomproxy_s.$(LIB_SUFFIX) \ + ../base/$(LIB_PREFIX)xpcombase_s.$(LIB_SUFFIX) \ +diff --git a/xpcom/build/Omnijar.cpp b/xpcom/build/Omnijar.cpp +--- a/xpcom/build/Omnijar.cpp ++++ b/xpcom/build/Omnijar.cpp +@@ -16,16 +16,17 @@ + * + * The Initial Developer of the Original Code is + * Mozilla Foundation. 
+ * Portions created by the Initial Developer are Copyright (C) 2010 + * the Initial Developer. All Rights Reserved. + * + * Contributor(s): + * Michael Wu <mwu@mozilla.com> ++ * Mike Hommey <mh@glandium.org> + * + * Alternatively, the contents of this file may be used under the terms of + * either the GNU General Public License Version 2 or later (the "GPL"), or + * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), + * in which case the provisions of the GPL or the LGPL are applicable instead + * of those above. If you wish to allow use of your version of this file only + * under the terms of either the GPL or the LGPL, and not to allow others to + * use your version of this file under the terms of the MPL, indicate your +@@ -33,69 +34,175 @@ + * and other provisions required by the GPL or the LGPL. If you do not delete + * the provisions above, a recipient may use your version of this file under + * the terms of any one of the MPL, the GPL or the LGPL. + * + * ***** END LICENSE BLOCK ***** */ + + #include "Omnijar.h" + +-#include "nsILocalFile.h" +-#include "nsXULAppAPI.h" ++#include "nsIFile.h" + #include "nsZipArchive.h" ++#include "nsNetUtil.h" + +-static nsILocalFile* sOmnijarPath = nsnull; +-static nsZipArchive* sOmnijarReader = nsnull; ++namespace mozilla { + +-static void +-SetupReader() ++nsIFile *Omnijar::sPath[2] = { nsnull, nsnull }; ++PRBool Omnijar::sIsOmnijar[2] = { PR_FALSE, PR_FALSE }; ++ ++#ifdef MOZ_ENABLE_LIBXUL ++nsZipArchive *Omnijar::sReader[2] = { nsnull, nsnull }; ++#endif ++ ++static already_AddRefed<nsIFile> ++ComputePath(nsIFile *aPath, PRBool &aIsOmnijar) + { +- if (!sOmnijarPath) { +- return; ++ PRBool isDir; ++ aIsOmnijar = PR_FALSE; ++ if (!aPath || NS_FAILED(aPath->IsDirectory(&isDir)) || !isDir) ++ return nsnull; ++ ++ nsCOMPtr<nsIFile> path; ++#ifdef MOZ_ENABLE_LIBXUL ++ // Search for omni.jar in the given directory ++ if (!isDir || NS_FAILED(aPath->Clone(getter_AddRefs(path)))) ++ return nsnull; ++ ++ if (NS_FAILED(path->AppendNative(NS_LITERAL_CSTRING("omni.jar")))) ++ return nsnull; ++ ++ if (NS_FAILED(path->Exists(&aIsOmnijar))) ++ return nsnull; ++#endif ++ ++ if (!aIsOmnijar && NS_FAILED(aPath->Clone(getter_AddRefs(path)))) ++ return nsnull; ++ ++ return path.forget(); ++} ++ ++nsresult ++Omnijar::SetBase(nsIFile *aGrePath, nsIFile *aAppPath) ++{ ++ NS_ABORT_IF_FALSE(aGrePath || !aAppPath, "Omnijar::SetBase(NULL, something) call forbidden"); ++ ++#ifdef MOZ_ENABLE_LIBXUL ++ if (sReader[GRE]) { ++ sReader[GRE]->CloseArchive(); ++ delete sReader[GRE]; ++ } ++ if (sReader[APP]) { ++ sReader[APP]->CloseArchive(); ++ delete sReader[APP]; ++ } ++ sReader[APP] = sReader[GRE] = nsnull; ++#endif ++ ++ nsresult rv; ++ PRBool equals; ++ if (aAppPath) { ++ rv = aAppPath->Equals(aGrePath, &equals); ++ NS_ENSURE_SUCCESS(rv, rv); ++ } else { ++ equals = PR_TRUE; + } + +- nsZipArchive* zipReader = new nsZipArchive(); +- if (!zipReader) { +- NS_IF_RELEASE(sOmnijarPath); +- return; ++ nsCOMPtr<nsIFile> grePath = ComputePath(aGrePath, sIsOmnijar[GRE]); ++ nsCOMPtr<nsIFile> appPath = ComputePath(equals ? 
nsnull : aAppPath, sIsOmnijar[APP]); ++ ++ NS_IF_RELEASE(sPath[GRE]); ++ sPath[GRE] = grePath; ++ NS_IF_ADDREF(sPath[GRE]); ++ ++ NS_IF_RELEASE(sPath[APP]); ++ sPath[APP] = appPath; ++ NS_IF_ADDREF(sPath[APP]); ++ ++ return NS_OK; ++} ++ ++already_AddRefed<nsIFile> ++Omnijar::GetBase(Type aType) ++{ ++ NS_ABORT_IF_FALSE(sPath[0], "Omnijar not initialized"); ++ ++ if (!sIsOmnijar[aType]) { ++ NS_IF_ADDREF(sPath[aType]); ++ return sPath[aType]; + } + +- if (NS_FAILED(zipReader->OpenArchive(sOmnijarPath))) { ++ nsCOMPtr<nsIFile> file, path; ++ if (NS_FAILED(sPath[aType]->Clone(getter_AddRefs(file)))) ++ return nsnull; ++ ++ if (NS_FAILED(file->GetParent(getter_AddRefs(path)))) ++ return nsnull; ++ return path.forget(); ++} ++ ++#ifdef MOZ_ENABLE_LIBXUL ++nsZipArchive * ++Omnijar::GetReader(Type aType) ++{ ++ if (!sIsOmnijar[aType]) ++ return nsnull; ++ ++ if (sReader[aType]) ++ return sReader[aType]; ++ ++ nsZipArchive* zipReader = new nsZipArchive(); ++ if (!zipReader) ++ return nsnull; ++ ++ if (NS_FAILED(zipReader->OpenArchive(sPath[aType]))) { + delete zipReader; +- NS_IF_RELEASE(sOmnijarPath); +- return; ++ return nsnull; + } + +- sOmnijarReader = zipReader; ++ return (sReader[aType] = zipReader); + } + +-nsILocalFile* +-mozilla::OmnijarPath() ++nsZipArchive * ++Omnijar::GetReader(nsIFile *aPath) + { +- if (!sOmnijarReader) +- SetupReader(); ++ PRBool equals; ++ nsresult rv; + +- return sOmnijarPath; ++ if (sIsOmnijar[GRE]) { ++ rv = sPath[GRE]->Equals(aPath, &equals); ++ if (NS_SUCCEEDED(rv) && equals) ++ return GetReader(GRE); ++ } ++ if (sIsOmnijar[APP]) { ++ rv = sPath[APP]->Equals(aPath, &equals); ++ if (NS_SUCCEEDED(rv) && equals) ++ return GetReader(APP); ++ } ++ return nsnull; ++} ++#endif ++ ++nsresult ++Omnijar::GetURIString(Type aType, nsCString &result) ++{ ++ NS_ABORT_IF_FALSE(sPath[0], "Omnijar not initialized"); ++ ++ result = ""; ++ ++ if ((aType == APP) && (!sPath[APP])) ++ return NS_OK; ++ ++ nsCAutoString omniJarSpec; ++ nsresult rv = NS_GetURLSpecFromActualFile(sPath[aType], omniJarSpec); ++ NS_ENSURE_SUCCESS(rv, rv); ++ ++ if (sIsOmnijar[aType]) { ++ result = "jar:"; ++ result += omniJarSpec; ++ result += "!"; ++ } else { ++ result = omniJarSpec; ++ } ++ result += "/"; ++ return NS_OK; + } + +-nsZipArchive* +-mozilla::OmnijarReader() +-{ +- if (!sOmnijarReader) +- SetupReader(); +- +- return sOmnijarReader; +-} +- +-void +-mozilla::SetOmnijar(nsILocalFile* aPath) +-{ +- NS_IF_RELEASE(sOmnijarPath); +- if (sOmnijarReader) { +- sOmnijarReader->CloseArchive(); +- delete sOmnijarReader; +- sOmnijarReader = nsnull; +- } +- +- sOmnijarPath = aPath; +- NS_IF_ADDREF(sOmnijarPath); +-} +- ++} /* namespace mozilla */ +diff --git a/xpcom/build/Omnijar.h b/xpcom/build/Omnijar.h +--- a/xpcom/build/Omnijar.h ++++ b/xpcom/build/Omnijar.h +@@ -16,16 +16,17 @@ + * + * The Initial Developer of the Original Code is + * Mozilla Foundation. + * Portions created by the Initial Developer are Copyright (C) 2010 + * the Initial Developer. All Rights Reserved. + * + * Contributor(s): + * Michael Wu <mwu@mozilla.com> ++ * Mike Hommey <mh@glandium.org> + * + * Alternatively, the contents of this file may be used under the terms of + * either the GNU General Public License Version 2 or later (the "GPL"), or + * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), + * in which case the provisions of the GPL or the LGPL are applicable instead + * of those above. 
If you wish to allow use of your version of this file only + * under the terms of either the GPL or the LGPL, and not to allow others to + * use your version of this file under the terms of the MPL, indicate your +@@ -34,29 +35,137 @@ + * the provisions above, a recipient may use your version of this file under + * the terms of any one of the MPL, the GPL or the LGPL. + * + * ***** END LICENSE BLOCK ***** */ + + #ifndef mozilla_Omnijar_h + #define mozilla_Omnijar_h + +-class nsILocalFile; ++#include "nscore.h" ++#include "nsTArray.h" ++#include "nsCOMPtr.h" ++#include "nsString.h" ++ ++class nsIFile; + class nsZipArchive; +- +-#ifdef MOZ_OMNIJAR ++class nsIURI; + + namespace mozilla { + ++#ifdef MOZ_ENABLE_LIBXUL ++#define OMNIJAR_EXPORT ++#else ++#define OMNIJAR_EXPORT NS_EXPORT ++#endif ++ ++class OMNIJAR_EXPORT Omnijar { ++private: + /** +- * This returns the path to the omnijar. +- * If the omnijar isn't available, this function will return null. +- * Callers should fallback to flat packaging if null. ++ * Store an nsIFile for either a base directory when there is no omni.jar, ++ * or omni.jar itself. We can store two paths here, one for GRE ++ * (corresponding to resource://gre/) and one for APP ++ * (corresponding to resource:/// and resource://app/), but only ++ * store one when both point to the same location (unified). + */ +-nsILocalFile *OmnijarPath(); +-nsZipArchive *OmnijarReader(); +-void SetOmnijar(nsILocalFile* aPath); ++static nsIFile *sPath[2]; ++/** ++ * Store whether the corresponding sPath is an omni.jar or a directory ++ */ ++static PRBool sIsOmnijar[2]; ++ ++#ifdef MOZ_ENABLE_LIBXUL ++/** ++ * Cached nsZipArchives for the corresponding sPath ++ */ ++static nsZipArchive *sReader[2]; ++#endif ++ ++public: ++enum Type { ++ GRE = 0, ++ APP = 1 ++}; ++ ++/** ++ * Returns whether SetBase has been called at least once with ++ * a valid nsIFile ++ */ ++static PRBool ++IsInitialized() ++{ ++ // GRE path is always set after initialization. ++ return sPath[0] != nsnull; ++} ++ ++/** ++ * Sets the base directories for GRE and APP. APP base directory ++ * may be nsnull, in case the APP and GRE directories are the same. ++ */ ++static nsresult SetBase(nsIFile *aGrePath, nsIFile *aAppPath); ++ ++/** ++ * Returns an nsIFile pointing to the omni.jar file for GRE or APP. ++ * Returns nsnull when there is no corresponding omni.jar. ++ * Also returns nsnull for APP in the unified case. ++ */ ++static already_AddRefed<nsIFile> ++GetPath(Type aType) ++{ ++ NS_ABORT_IF_FALSE(sPath[0], "Omnijar not initialized"); ++ ++ if (sIsOmnijar[aType]) { ++ NS_IF_ADDREF(sPath[aType]); ++ return sPath[aType]; ++ } ++ return nsnull; ++} ++ ++/** ++ * Returns whether GRE or APP use an omni.jar. Returns PR_False when ++ * using an omni.jar in the unified case. ++ */ ++static PRBool ++HasOmnijar(Type aType) ++{ ++ return sIsOmnijar[aType]; ++} ++ ++/** ++ * Returns the base directory for GRE or APP. In the unified case, ++ * returns nsnull for APP. ++ */ ++static already_AddRefed<nsIFile> GetBase(Type aType); ++ ++/** ++ * Returns a nsZipArchive pointer for the omni.jar file for GRE or ++ * APP. Returns nsnull in the same cases GetPath() would. ++ */ ++#ifdef MOZ_ENABLE_LIBXUL ++static nsZipArchive *GetReader(Type aType); ++#else ++static nsZipArchive *GetReader(Type aType) { return nsnull; } ++#endif ++ ++/** ++ * Returns a nsZipArchive pointer for the given path IAOI the given ++ * path is the omni.jar for either GRE or APP. 
++ */ ++#ifdef MOZ_ENABLE_LIBXUL ++static nsZipArchive *GetReader(nsIFile *aPath); ++#else ++static nsZipArchive *GetReader(nsIFile *aPath) { return nsnull; } ++#endif ++ ++/** ++ * Returns the URI string corresponding to the omni.jar or directory ++ * for GRE or APP. i.e. jar:/path/to/omni.jar!/ for omni.jar and ++ * /path/to/base/dir/ otherwise. Returns an empty string for APP in ++ * the unified case. ++ * The returned URI is guaranteed to end with a slash. ++ */ ++static nsresult GetURIString(Type aType, nsCString &result); ++ ++}; /* class Omnijar */ + + } /* namespace mozilla */ + +-#endif /* MOZ_OMNIJAR */ +- + #endif /* mozilla_Omnijar_h */ +diff --git a/xpcom/build/nsXPComInit.cpp b/xpcom/build/nsXPComInit.cpp +--- a/xpcom/build/nsXPComInit.cpp ++++ b/xpcom/build/nsXPComInit.cpp +@@ -462,35 +462,35 @@ NS_InitXPCOM2(nsIServiceManager* *result + nsDirectoryService::gService->Set(NS_XPCOM_LIBRARY_FILE, xpcomLib); + } + + if (appFileLocationProvider) { + rv = nsDirectoryService::gService->RegisterProvider(appFileLocationProvider); + if (NS_FAILED(rv)) return rv; + } + +-#ifdef MOZ_OMNIJAR + NS_TIME_FUNCTION_MARK("Next: Omnijar init"); + +- if (!mozilla::OmnijarPath()) { +- nsCOMPtr<nsILocalFile> omnijar; ++ if (!mozilla::Omnijar::IsInitialized()) { ++ nsCOMPtr<nsILocalFile> greDir, appDir; + nsCOMPtr<nsIFile> file; + +- rv = NS_ERROR_FAILURE; + nsDirectoryService::gService->Get(NS_GRE_DIR, + NS_GET_IID(nsIFile), + getter_AddRefs(file)); +- if (file) +- rv = file->Append(NS_LITERAL_STRING("omni.jar")); +- if (NS_SUCCEEDED(rv)) +- omnijar = do_QueryInterface(file); +- if (NS_SUCCEEDED(rv)) +- mozilla::SetOmnijar(omnijar); ++ greDir = do_QueryInterface(file); ++ ++ nsDirectoryService::gService->Get(NS_XPCOM_CURRENT_PROCESS_DIR, ++ NS_GET_IID(nsIFile), ++ getter_AddRefs(file)); ++ appDir = do_QueryInterface(file); ++ ++ rv = mozilla::Omnijar::SetBase(greDir, appDir); ++ NS_ENSURE_SUCCESS(rv, rv); + } +-#endif + + #ifdef MOZ_IPC + if ((sCommandLineWasInitialized = !CommandLine::IsInitialized())) { + NS_TIME_FUNCTION_MARK("Next: IPC command line init"); + + #ifdef OS_WIN + CommandLine::Init(0, nsnull); + #else +@@ -769,18 +769,16 @@ ShutdownXPCOM(nsIServiceManager* servMgr + sCommandLineWasInitialized = false; + } + if (sExitManager) { + delete sExitManager; + sExitManager = nsnull; + } + #endif + +-#ifdef MOZ_OMNIJAR +- mozilla::SetOmnijar(nsnull); +-#endif ++ mozilla::Omnijar::SetBase(nsnull, nsnull); + + NS_LogTerm(); + + return NS_OK; + } + + } // namespace mozilla +diff --git a/xpcom/components/nsComponentManager.cpp b/xpcom/components/nsComponentManager.cpp +--- a/xpcom/components/nsComponentManager.cpp ++++ b/xpcom/components/nsComponentManager.cpp +@@ -175,18 +175,16 @@ NS_DEFINE_CID(kCategoryManagerCID, NS_CA + #define COMPMGR_TIME_FUNCTION_CONTRACTID(cid) \ + NS_TIME_FUNCTION_MIN_FMT(5, "%s (line %d) (contractid: %s)", MOZ_FUNCTION_NAME, \ + __LINE__, (cid)) + #else + #define COMPMGR_TIME_FUNCTION_CID(cid) do {} while (0) + #define COMPMGR_TIME_FUNCTION_CONTRACTID(cid) do {} while (0) + #endif + +-#define kOMNIJAR_PREFIX NS_LITERAL_CSTRING("resource:///") +- + nsresult + nsGetServiceFromCategory::operator()(const nsIID& aIID, void** aInstancePtr) const + { + nsresult rv; + nsXPIDLCString value; + nsCOMPtr<nsICategoryManager> catman; + nsComponentManagerImpl *compMgr = nsComponentManagerImpl::gComponentManager; + if (!compMgr) { +@@ -390,47 +388,44 @@ nsresult nsComponentManagerImpl::Init() + + nsCategoryManager::GetSingleton()->SuppressNotifications(true); + + 
RegisterModule(&kXPCOMModule, NULL); + + for (PRUint32 i = 0; i < sStaticModules->Length(); ++i) + RegisterModule((*sStaticModules)[i], NULL); + +-#ifdef MOZ_OMNIJAR +- if (mozilla::OmnijarPath()) { +- nsCOMPtr<nsIZipReader> omnijarReader = new nsJAR(); +- rv = omnijarReader->Open(mozilla::OmnijarPath()); +- if (NS_SUCCEEDED(rv)) +- RegisterJarManifest(omnijarReader, "chrome.manifest", false); ++ nsCOMPtr<nsIFile> appOmnijar = mozilla::Omnijar::GetPath(mozilla::Omnijar::APP); ++ if (appOmnijar) { ++ cl = sModuleLocations->InsertElementAt(1); // Insert after greDir ++ cl->type = NS_COMPONENT_LOCATION; ++ cl->location = do_QueryInterface(appOmnijar); ++ cl->jar = true; + } +-#endif ++ nsCOMPtr<nsIFile> greOmnijar = mozilla::Omnijar::GetPath(mozilla::Omnijar::GRE); ++ if (greOmnijar) { ++ cl = sModuleLocations->InsertElementAt(0); ++ cl->type = NS_COMPONENT_LOCATION; ++ cl->location = do_QueryInterface(greOmnijar); ++ cl->jar = true; ++ } + + for (PRUint32 i = 0; i < sModuleLocations->Length(); ++i) { + ComponentLocation& l = sModuleLocations->ElementAt(i); + if (!l.jar) { + RegisterManifestFile(l.type, l.location, false); + continue; + } + + nsCOMPtr<nsIZipReader> reader = do_CreateInstance(kZipReaderCID, &rv); + rv = reader->Open(l.location); + if (NS_SUCCEEDED(rv)) + RegisterJarManifest(reader, "chrome.manifest", false); + } + +-#ifdef MOZ_OMNIJAR +- if (mozilla::OmnijarPath()) { +- cl = sModuleLocations->InsertElementAt(0); +- cl->type = NS_COMPONENT_LOCATION; +- cl->location = mozilla::OmnijarPath(); +- cl->jar = true; +- } +-#endif +- + nsCategoryManager::GetSingleton()->SuppressNotifications(false); + + mStatus = NORMAL; + + return NS_OK; + } + + void +# HG changeset patch +# Parent ff1b810f78226d7f4010909d3cde05a57fdcf20c +Bug 620931 part 4 - Fix resource://app/ to always point to the same as resource:/// + +diff --git a/netwerk/protocol/res/nsResProtocolHandler.cpp b/netwerk/protocol/res/nsResProtocolHandler.cpp +--- a/netwerk/protocol/res/nsResProtocolHandler.cpp ++++ b/netwerk/protocol/res/nsResProtocolHandler.cpp +@@ -74,16 +74,17 @@ static nsResProtocolHandler *gResHandler + // set NSPR_LOG_FILE=log.txt + // + // this enables PR_LOG_ALWAYS level information and places all output in + // the file log.txt + // + static PRLogModuleInfo *gResLog; + #endif + ++#define kAPP NS_LITERAL_CSTRING("app") + #define kGRE NS_LITERAL_CSTRING("gre") + + //---------------------------------------------------------------------------- + // nsResURL : overrides nsStandardURL::GetFile to provide nsIFile resolution + //---------------------------------------------------------------------------- + + nsresult + nsResURL::EnsureFile() +@@ -179,16 +180,22 @@ nsResProtocolHandler::Init() + nsCOMPtr<nsIURI> uri; + rv = NS_NewURI(getter_AddRefs(uri), appURI.Length() ? appURI : greURI); + NS_ENSURE_SUCCESS(rv, rv); + + rv = SetSubstitution(EmptyCString(), uri); + NS_ENSURE_SUCCESS(rv, rv); + + // ++ // make resource://app/ point to the application directory or omnijar ++ // ++ rv = SetSubstitution(kAPP, uri); ++ NS_ENSURE_SUCCESS(rv, rv); ++ ++ // + // make resource://gre/ point to the GRE directory + // + if (appURI.Length()) { // We already have greURI in uri if appURI.Length() is 0. 
+ rv = NS_NewURI(getter_AddRefs(uri), greURI); + NS_ENSURE_SUCCESS(rv, rv); + } + + rv = SetSubstitution(kGRE, uri); +diff --git a/toolkit/xre/nsXREDirProvider.cpp b/toolkit/xre/nsXREDirProvider.cpp +--- a/toolkit/xre/nsXREDirProvider.cpp ++++ b/toolkit/xre/nsXREDirProvider.cpp +@@ -300,19 +300,16 @@ nsXREDirProvider::GetFile(const char* aP + } + } + else if (!strcmp(aProperty, XRE_EXECUTABLE_FILE) && gArgv[0]) { + nsCOMPtr<nsILocalFile> lf; + rv = XRE_GetBinaryPath(gArgv[0], getter_AddRefs(lf)); + if (NS_SUCCEEDED(rv)) + file = lf; + } +- else if (!strcmp(aProperty, "resource:app")) { +- rv = GetAppDir()->Clone(getter_AddRefs(file)); +- } + + else if (!strcmp(aProperty, NS_APP_PROFILE_DIR_STARTUP) && mProfileDir) { + return mProfileDir->Clone(aFile); + } + else if (!strcmp(aProperty, NS_APP_PROFILE_LOCAL_DIR_STARTUP)) { + if (mProfileLocalDir) + return mProfileLocalDir->Clone(aFile); + +# HG changeset patch +# Parent 7d2228db71a299afca60babff632a967d2d6c456 +Bug 620931 part 5 - Enable omni.jar by default on xulrunner + +diff --git a/xulrunner/confvars.sh b/xulrunner/confvars.sh +--- a/xulrunner/confvars.sh ++++ b/xulrunner/confvars.sh +@@ -36,15 +36,16 @@ + # + # ***** END LICENSE BLOCK ***** + + MOZ_APP_NAME=xulrunner + MOZ_APP_DISPLAYNAME=XULRunner + MOZ_UPDATER=1 + MOZ_XULRUNNER=1 + MOZ_ENABLE_LIBXUL=1 ++MOZ_CHROME_FILE_FORMAT=omni + MOZ_STATIC_BUILD_UNSUPPORTED=1 + MOZ_APP_VERSION=$MOZILLA_VERSION + if test "$MOZ_STORAGE"; then + MOZ_PLACES=1 + fi + MOZ_EXTENSIONS_DEFAULT=" gnomevfs" + MOZ_URL_CLASSIFIER=1 diff --git a/staging/xulrunner/xulrunner-version.patch b/staging/xulrunner/xulrunner-version.patch new file mode 100644 index 000000000..8e81b23ea --- /dev/null +++ b/staging/xulrunner/xulrunner-version.patch @@ -0,0 +1,12 @@ +diff -Nur mozilla-2.0.orig/xulrunner/installer/Makefile.in mozilla-2.0/xulrunner/installer/Makefile.in +--- mozilla-2.0.orig/xulrunner/installer/Makefile.in 2011-03-03 14:12:04.000000000 -0800 ++++ mozilla-2.0/xulrunner/installer/Makefile.in 2011-03-13 01:58:19.663360705 -0800 +@@ -44,6 +44,8 @@ + + include $(DEPTH)/config/autoconf.mk + ++MOZ_APP_VERSION="2.0" ++ + NO_PKG_FILES = \ + xulrunner-config \ + regchrome* \ diff --git a/testing/dvdrip/PKGBUILD b/testing/dvdrip/PKGBUILD index 17838b169..119a2a1d9 100644 --- a/testing/dvdrip/PKGBUILD +++ b/testing/dvdrip/PKGBUILD @@ -1,10 +1,10 @@ -# $Id: PKGBUILD 125147 2011-05-25 19:02:09Z foutrelis $ +# $Id: PKGBUILD 126069 2011-06-01 10:23:05Z foutrelis $ # Maintainer: Giovanni Scafora <giovanni@archlinux.org> # Contributor: Fredrik Hammar <Horney_C86@Hotmail.com> pkgname=dvdrip pkgver=0.98.11 -pkgrel=7 +pkgrel=8 pkgdesc="A Gtk frontend for transcode writen in Perl" arch=('i686' 'x86_64') license=('custom') diff --git a/testing/dvdrip/dvdrip.install b/testing/dvdrip/dvdrip.install index 4cd6160c4..e111ef946 100644 --- a/testing/dvdrip/dvdrip.install +++ b/testing/dvdrip/dvdrip.install @@ -1,19 +1,11 @@ -# this is the scrollkeeper handling sample file - post_install() { - echo "update desktop mime database..." update-desktop-database -q } post_upgrade() { - post_install $1 -} - -pre_remove() { - /bin/true + post_install } post_remove() { - echo "update desktop mime database..." 
- update-desktop-database -q + post_install } diff --git a/testing/imagemagick/PKGBUILD b/testing/imagemagick/PKGBUILD index 39f9e8830..29f188e18 100644 --- a/testing/imagemagick/PKGBUILD +++ b/testing/imagemagick/PKGBUILD @@ -1,9 +1,9 @@ -# $Id: PKGBUILD 124573 2011-05-23 01:23:35Z eric $ +# $Id: PKGBUILD 126048 2011-06-01 05:22:12Z eric $ # Maintainer: Eric Bélanger <eric@archlinux.org> pkgbase=imagemagick pkgname=('imagemagick' 'imagemagick-doc') -pkgver=6.6.9.10 +pkgver=6.7.0.2 pkgrel=1 arch=('i686' 'x86_64') url="http://www.imagemagick.org/" @@ -13,9 +13,9 @@ depends=('libtool' 'lcms' 'libxt' 'gcc-libs' 'bzip2' 'xz' 'freetype2' 'fontconfi makedepends=('ghostscript' 'openexr' 'libwmf' 'librsvg' 'libxml2' 'jasper' 'libpng') source=(ftp://ftp.imagemagick.org/pub/ImageMagick/ImageMagick-${pkgver%.*}-${pkgver##*.}.tar.xz \ perlmagick.rpath.patch) -md5sums=('cf8940e964be608ddae152b90a576282' +md5sums=('23a53b96b8e75c3ffd8cbbbfc1041b2f' 'ff9974decbfe9846f8e347239d87e4eb') -sha1sums=('18d141e904853c68a43f53862bc67e3c8b66d664' +sha1sums=('ff8e666d58a27af1ce3ab2c5408d10c233a4d809' '23405f80904b1de94ebd7bd6fe2a332471b8c283') build() { diff --git a/testing/module-init-tools/PKGBUILD b/testing/module-init-tools/PKGBUILD new file mode 100644 index 000000000..4ecfab9c9 --- /dev/null +++ b/testing/module-init-tools/PKGBUILD @@ -0,0 +1,39 @@ +# $Id: PKGBUILD 126053 2011-06-01 05:32:56Z andyrtr $ +# Maintainer: Aaron Griffin <aaron@archlinux.org> +# Contributor: judd <jvinet@zeroflux.org> + +pkgname=module-init-tools +pkgver=3.13 +pkgrel=1 +pkgdesc="utilities needed by Linux systems for managing loadable kernel modules" +arch=('i686' 'x86_64') +url="http://kerneltools.org" +license=('GPL') +depends=('glibc') +backup=('etc/modprobe.d/modprobe.conf') +source=(http://www.kernel.org/pub/linux/utils/kernel/module-init-tools/module-init-tools-$pkgver.tar.bz2 + modprobe.conf) +md5sums=('dc575e7df00d9f745bf23b32f927b7a6' + '316f1bda4c21af02b30252eb014a0a55') + +build() { + cd $srcdir/$pkgname-$pkgver + + # do not regenerate man pages + touch *.{5,8} + + ./configure --prefix=/usr --exec-prefix=/ --enable-zlib + make +} + +package() { + cd $srcdir/$pkgname-$pkgver + + make DESTDIR=$pkgdir install + + # Install our custom (read: empty) modprobe.conf + install -Dm644 $srcdir/modprobe.conf $pkgdir/etc/modprobe.d/modprobe.conf + + # fix man page (FS#17559) + sed -i "s#mod#man5/mod#" $pkgdir/usr/share/man/man5/modprobe.d.5 +} diff --git a/testing/module-init-tools/modprobe.conf b/testing/module-init-tools/modprobe.conf new file mode 100644 index 000000000..83865a3af --- /dev/null +++ b/testing/module-init-tools/modprobe.conf @@ -0,0 +1,3 @@ +# +# /etc/modprobe.d/modprobe.conf (for v2.6 kernels) +# diff --git a/testing/pidgin/PKGBUILD b/testing/pidgin/PKGBUILD new file mode 100644 index 000000000..3ed646b74 --- /dev/null +++ b/testing/pidgin/PKGBUILD @@ -0,0 +1,114 @@ +# $Id: PKGBUILD 124935 2011-05-25 09:46:33Z foutrelis $ +# Maintainer: Evangelos Foutras <foutrelis@gmail.com> +# Contributor: Ionut Biru <ibiru@archlinux.org> +# Contributor: Andrea Scarpino <andrea@archlinux.org> +# Contributor: Alexander Fehr <pizzapunk gmail com> +# Contributor: Lucien Immink <l.immink@student.fnt.hvu.nl> + +pkgname=('pidgin' 'libpurple' 'finch') +pkgver=2.7.11 +pkgrel=6 +arch=('i686' 'x86_64') +url="http://pidgin.im/" +license=('GPL') +makedepends=('startup-notification' 'gtkspell' 'libxss' 'nss' 'libsasl' 'libsm' + 'python2' 'hicolor-icon-theme' 'silc-toolkit' 'gstreamer0.10' + 'farsight2' 'avahi' 'tk' 'ca-certificates' 'intltool' 
+ 'networkmanager') +options=('!libtool') +source=(http://downloads.sourceforge.net/$pkgname/$pkgname-$pkgver.tar.bz2 + nm09-pidgin.patch + nm09-more.patch) +md5sums=('07c2a2535b4d7436b5ec7685fe063fec' + '744a21b4dbaf949dba7cd3b75b12b4fe' + 'a673659d86c7a65aa710f7c8c7feda82') + +build() { + cd "$srcdir/$pkgname-$pkgver" + + # Update for NetworkManager 0.9 connection states + # (http://developer.pidgin.im/ticket/13505) + # (http://developer.pidgin.im/ticket/13859) + patch -Np1 -i "$srcdir/nm09-pidgin.patch" + patch -Np1 -i "$srcdir/nm09-more.patch" + + # Use Python 2 + sed -i 's/env python$/\02/' */plugins/*.py \ + libpurple/purple-{remote,notifications-example,url-handler} + + ./configure \ + --prefix=/usr \ + --sysconfdir=/etc \ + --disable-schemas-install \ + --disable-meanwhile \ + --disable-gnutls \ + --enable-cyrus-sasl \ + --disable-doxygen \ + --enable-nm \ + --with-python=/usr/bin/python2 \ + --with-system-ssl-certs=/etc/ssl/certs + make +} + +package_pidgin(){ + pkgdesc="Multi-protocol instant messaging client" + depends=("libpurple=$pkgver-$pkgrel" 'startup-notification' 'gtkspell' + 'libxss' 'libsm' 'gstreamer0.10' 'hicolor-icon-theme') + optdepends=('aspell: for spelling correction' + 'ca-certificates: SSL CA certificates' + 'gstreamer0.10-good-plugins: video and voice support' + 'tk: Tcl/Tk scripting support') + install=pidgin.install + + cd "$srcdir/pidgin-$pkgver" + + # For linking + make -C libpurple DESTDIR="$pkgdir" install-libLTLIBRARIES + + make -C pidgin DESTDIR="$pkgdir" install + make -C doc DESTDIR="$pkgdir" install + + # Remove files that are packaged in libpurle + make -C libpurple DESTDIR="$pkgdir" uninstall-libLTLIBRARIES + + install -Dm644 pidgin.desktop "$pkgdir"/usr/share/applications/pidgin.desktop + + rm "$pkgdir/usr/share/man/man1/finch.1" +} + +package_libpurple(){ + pkgdesc="IM library extracted from Pidgin" + depends=('farsight2' 'libsasl' 'dbus-glib' 'silc-toolkit' 'nss' + 'cyrus-sasl-plugins') + optdepends=('avahi: Bonjour protocol support' + 'dbus-python: for purple-remote and purple-url-handler') + + cd "$srcdir/pidgin-$pkgver" + + for _dir in libpurple share/sounds share/ca-certs m4macros po; do + make -C "$_dir" DESTDIR="$pkgdir" install + done +} + +package_finch(){ + pkgdesc="A ncurses-based messaging client" + depends=("libpurple=$pkgver-$pkgrel" 'python2' 'gstreamer0.10') + optdepends=('avahi: Bonjour protocol support' + 'ca-certificates: SSL CA certificates' + 'tk: Tcl/Tk scripting support') + + cd "$srcdir/pidgin-$pkgver" + + # For linking + make -C libpurple DESTDIR="$pkgdir" install-libLTLIBRARIES + + make -C finch DESTDIR="$pkgdir" install + make -C doc DESTDIR="$pkgdir" install + + # Remove files that are packaged in libpurle + make -C libpurple DESTDIR="$pkgdir" uninstall-libLTLIBRARIES + + rm "$pkgdir"/usr/share/man/man1/pidgin.1 +} + +# vim:set ts=2 sw=2 et: diff --git a/testing/pidgin/nm09-more.patch b/testing/pidgin/nm09-more.patch new file mode 100644 index 000000000..8c708df9a --- /dev/null +++ b/testing/pidgin/nm09-more.patch @@ -0,0 +1,49 @@ +diff -up pidgin-2.7.11/libpurple/network.c.nm09more pidgin-2.7.11/libpurple/network.c +--- pidgin-2.7.11/libpurple/network.c.nm09more 2011-04-26 12:01:27.700085246 -0500 ++++ pidgin-2.7.11/libpurple/network.c 2011-05-24 13:13:28.185165657 -0500 +@@ -833,8 +833,20 @@ purple_network_is_available(void) + purple_debug_warning("network", "NetworkManager not active. 
Assuming connection exists.\n"); + } + +- if (nm_state == NM_STATE_UNKNOWN || nm_state == NM_STATE_CONNECTED) +- return TRUE; ++ switch (nm_state) ++ { ++ case NM_STATE_UNKNOWN: ++#if NM_CHECK_VERSION(0,8,992) ++ case NM_STATE_CONNECTED_LOCAL: ++ case NM_STATE_CONNECTED_SITE: ++ case NM_STATE_CONNECTED_GLOBAL: ++#else ++ case NM_STATE_CONNECTED: ++#endif ++ return TRUE; ++ default: ++ break; ++ } + + return FALSE; + +@@ -1170,9 +1182,14 @@ purple_network_init(void) + NM_DBUS_SERVICE, + NM_DBUS_PATH, + NM_DBUS_INTERFACE); ++ /* NM 0.6 signal */ + dbus_g_proxy_add_signal(nm_proxy, "StateChange", G_TYPE_UINT, G_TYPE_INVALID); + dbus_g_proxy_connect_signal(nm_proxy, "StateChange", + G_CALLBACK(nm_state_change_cb), NULL, NULL); ++ /* NM 0.7 and later signal */ ++ dbus_g_proxy_add_signal(nm_proxy, "StateChanged", G_TYPE_UINT, G_TYPE_INVALID); ++ dbus_g_proxy_connect_signal(nm_proxy, "StateChanged", ++ G_CALLBACK(nm_state_change_cb), NULL, NULL); + + dbus_proxy = dbus_g_proxy_new_for_name(nm_conn, + DBUS_SERVICE_DBUS, +@@ -1207,6 +1224,7 @@ purple_network_uninit(void) + #ifdef HAVE_NETWORKMANAGER + if (nm_proxy) { + dbus_g_proxy_disconnect_signal(nm_proxy, "StateChange", G_CALLBACK(nm_state_change_cb), NULL); ++ dbus_g_proxy_disconnect_signal(nm_proxy, "StateChanged", G_CALLBACK(nm_state_change_cb), NULL); + g_object_unref(G_OBJECT(nm_proxy)); + } + if (dbus_proxy) { diff --git a/testing/pidgin/nm09-pidgin.patch b/testing/pidgin/nm09-pidgin.patch new file mode 100644 index 000000000..1c2471d1f --- /dev/null +++ b/testing/pidgin/nm09-pidgin.patch @@ -0,0 +1,38 @@ +diff -up pidgin-2.7.10/libpurple/network.c.foo pidgin-2.7.10/libpurple/network.c +--- pidgin-2.7.10/libpurple/network.c.foo 2011-03-10 02:21:43.920933267 -0600 ++++ pidgin-2.7.10/libpurple/network.c 2011-03-10 02:23:11.466838793 -0600 +@@ -71,6 +71,10 @@ + #include <dbus/dbus-glib.h> + #include <NetworkManager.h> + ++#if !defined(NM_CHECK_VERSION) ++#define NM_CHECK_VERSION(x,y,z) 0 ++#endif ++ + static DBusGConnection *nm_conn = NULL; + static DBusGProxy *nm_proxy = NULL; + static DBusGProxy *dbus_proxy = NULL; +@@ -863,7 +867,13 @@ nm_update_state(NMState state) + + switch(state) + { ++#if NM_CHECK_VERSION(0,8,992) ++ case NM_STATE_CONNECTED_LOCAL: ++ case NM_STATE_CONNECTED_SITE: ++ case NM_STATE_CONNECTED_GLOBAL: ++#else + case NM_STATE_CONNECTED: ++#endif + /* Call res_init in case DNS servers have changed */ + res_init(); + /* update STUN IP in case we it changed (theoretically we could +@@ -880,6 +890,9 @@ nm_update_state(NMState state) + case NM_STATE_ASLEEP: + case NM_STATE_CONNECTING: + case NM_STATE_DISCONNECTED: ++#if NM_CHECK_VERSION(0,8,992) ++ case NM_STATE_DISCONNECTING: ++#endif + if (prev != NM_STATE_CONNECTED && prev != NM_STATE_UNKNOWN) + break; + if (ui_ops != NULL && ui_ops->network_disconnected != NULL) diff --git a/testing/pidgin/pidgin.install b/testing/pidgin/pidgin.install new file mode 100644 index 000000000..1a05f573e --- /dev/null +++ b/testing/pidgin/pidgin.install @@ -0,0 +1,11 @@ +post_install() { + gtk-update-icon-cache -q -t -f usr/share/icons/hicolor +} + +post_upgrade() { + post_install +} + +post_remove() { + post_install +} diff --git a/testing/subversion/PKGBUILD b/testing/subversion/PKGBUILD new file mode 100644 index 000000000..8c218677d --- /dev/null +++ b/testing/subversion/PKGBUILD @@ -0,0 +1,92 @@ +# $Id: PKGBUILD 126171 2011-06-02 01:15:51Z stephane $ +# Maintainer: Paul Mattal <paul@archlinux.org> +# Contributor: Jason Chu <jason@archlinux.org> + +pkgname=subversion +pkgver=1.6.17 +pkgrel=1 
+pkgdesc="Replacement for CVS, another versioning system (SVN)" +arch=('i686' 'x86_64') +license=('apache' 'bsd') +depends=('neon' 'apr-util') +makedepends=('krb5' 'apache' 'python2' 'perl' 'swig' 'ruby' 'java-runtime' + 'autoconf' 'sqlite3' 'db' 'e2fsprogs' 'libgnome-keyring' 'kdelibs') +source=(http://subversion.tigris.org/downloads/$pkgname-$pkgver.tar.bz2 + svnserve svn svnserve.conf svnmerge.py + subversion.rpath.fix.patch + subversion.suppress.deprecation.warnings.patch) + +backup=('etc/xinetd.d/svn' 'etc/conf.d/svnserve') +url="http://subversion.apache.org/" +provides=('svn') +options=('!makeflags' '!libtool') +optdepends=('libgnome-keyring' 'kdeutils-kwallet' 'bash-completion: for svn bash completion') + +build() { + cd ${srcdir}/${pkgname}-${pkgver} + + export PYTHON=/usr/bin/python2 + + # apply patches + patch -p0 < $srcdir/subversion.rpath.fix.patch + patch -p1 -i $srcdir/subversion.suppress.deprecation.warnings.patch + + # configure + autoreconf + ./configure --prefix=/usr --with-apr=/usr --with-apr-util=/usr \ + --with-zlib=/usr --with-neon=/usr --with-apxs \ + --with-sqlite=/usr --with-berkeley-db=:/usr/include/:/usr/lib:db-5.1 \ + --enable-javahl --with-gnome-keyring --with-kwallet + + # build + (make external-all && make LT_LDFLAGS="-L$Fdestdir/usr/lib" local-all ) +} + +package() { + cd ${srcdir}/${pkgname}-${pkgver} + + # install + export LD_LIBRARY_PATH=${pkgdir}/usr/lib:$LD_LIBRARY_PATH + make DESTDIR=${pkgdir} install + + make DESTDIR=${pkgdir} swig-py + make install-swig-py DESTDIR=${pkgdir} + + install -d ${pkgdir}/usr/lib/python2.7 + mv ${pkgdir}/usr/lib/svn-python/ ${pkgdir}/usr/lib/python2.7/site-packages + + install -d ${pkgdir}/usr/share/subversion + install -d -m 755 tools/hook-scripts ${pkgdir}/usr/share/subversion/ + rm -f ${pkgdir}/usr/share/subversion/hook-scripts/*.in + + make DESTDIR=${pkgdir} swig-pl + make install-swig-pl DESTDIR=${pkgdir} INSTALLDIRS=vendor + rm -f ${pkgdir}/usr/lib/perl5/vendor_perl/auto/SVN/_Core/.packlist + rm -rf ${pkgdir}/usr/lib/perl5/core_perl + + make DESTDIR=${pkgdir} swig-rb + make install-swig-rb DESTDIR=${pkgdir} + + make DESTDIR=${pkgdir} javahl + make DESTDIR=${pkgdir} install-javahl + + install -d ${pkgdir}/etc/{rc.d,xinetd.d,conf.d} + + install -m 755 ${srcdir}/svnserve ${pkgdir}/etc/rc.d + install -m 644 ${srcdir}/svn ${pkgdir}/etc/xinetd.d + install -m 644 ${srcdir}/svnserve.conf ${pkgdir}/etc/conf.d/svnserve + install -m 755 ${srcdir}/svnmerge.py ${pkgdir}/usr/bin/svnmerge + install -D -m 644 ${srcdir}/subversion-$pkgver/COPYING \ + ${pkgdir}/usr/share/licenses/$pkgname/LICENSE + + # bash completion + install -Dm 644 ${srcdir}/${pkgname}-${pkgver}/tools/client-side/bash_completion \ + ${pkgdir}/etc/bash_completion.d/subversion +} +md5sums=('81e5dc5beee4b3fc025ac70c0b6caa14' + 'a2b029e8385007ffb99b437b30521c90' + 'a0db6dd43af33952739b6ec089852630' + 'c459e299192552f61578f3438abf0664' + 'a6371baeda7e224504629ecdda2749b4' + '6b4340ba9d8845cd8497e013ae01be3f' + '1166f3b7413d7e7450299b3525680bbe') diff --git a/testing/subversion/subversion.rpath.fix.patch b/testing/subversion/subversion.rpath.fix.patch new file mode 100644 index 000000000..ba6ee9e4e --- /dev/null +++ b/testing/subversion/subversion.rpath.fix.patch @@ -0,0 +1,10 @@ +--- Makefile.in.orig 2009-02-16 14:10:48.000000000 -0200 ++++ Makefile.in 2009-06-04 00:56:29.000000000 -0300 +@@ -678,6 +678,7 @@ + + $(SWIG_PL_DIR)/native/Makefile: $(SWIG_PL_DIR)/native/Makefile.PL + cd $(SWIG_PL_DIR)/native; $(PERL) Makefile.PL ++ cd $(SWIG_PL_DIR)/native; sed -i 
's|LD_RUN_PATH|DIE_RPATH_DIE|g' Makefile{,.{client,delta,fs,ra,repos,wc}} + + swig-pl_DEPS = autogen-swig-pl libsvn_swig_perl \ + $(SWIG_PL_DIR)/native/Makefile diff --git a/testing/subversion/subversion.suppress.deprecation.warnings.patch b/testing/subversion/subversion.suppress.deprecation.warnings.patch new file mode 100644 index 000000000..94ce89b18 --- /dev/null +++ b/testing/subversion/subversion.suppress.deprecation.warnings.patch @@ -0,0 +1,22 @@ +diff -urN subversion-1.6.9/subversion/bindings/swig/python/svn/core.py subversion-1.6.9-fixed/subversion/bindings/swig/python/svn/core.py +--- subversion-1.6.9/subversion/bindings/swig/python/svn/core.py 2009-02-13 11:22:26.000000000 -0500 ++++ subversion-1.6.9-fixed/subversion/bindings/swig/python/svn/core.py 2010-02-08 07:46:29.000000000 -0500 +@@ -19,6 +19,7 @@ + from libsvn.core import * + import libsvn.core as _libsvncore + import atexit as _atexit ++import warnings + + class SubversionException(Exception): + def __init__(self, message=None, apr_err=None, child=None, +@@ -44,7 +45,9 @@ + Exception.__init__(self, *args) + + self.apr_err = apr_err +- self.message = message ++ with warnings.catch_warnings(): ++ warnings.simplefilter("ignore", DeprecationWarning) ++ self.message = message + self.child = child + self.file = file + self.line = line diff --git a/testing/subversion/svn b/testing/subversion/svn new file mode 100644 index 000000000..8988aaf63 --- /dev/null +++ b/testing/subversion/svn @@ -0,0 +1,11 @@ +service svn +{ + flags = REUSE + socket_type = stream + wait = no + user = root + server = /usr/bin/svnserve + server_args = -i + log_on_failure += USERID + disable = yes +} diff --git a/testing/subversion/svnmerge.py b/testing/subversion/svnmerge.py new file mode 100644 index 000000000..d8931648f --- /dev/null +++ b/testing/subversion/svnmerge.py @@ -0,0 +1,2370 @@ +#!/usr/bin/env python2 +# -*- coding: utf-8 -*- +# Copyright (c) 2005, Giovanni Bajo +# Copyright (c) 2004-2005, Awarix, Inc. +# All rights reserved. +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA +# +# Author: Archie Cobbs <archie at awarix dot com> +# Rewritten in Python by: Giovanni Bajo <rasky at develer dot com> +# +# Acknowledgments: +# John Belmonte <john at neggie dot net> - metadata and usability +# improvements +# Blair Zajac <blair at orcaware dot com> - random improvements +# Raman Gupta <rocketraman at fastmail dot fm> - bidirectional and transitive +# merging support +# Dustin J. Mitchell <dustin at zmanda dot com> - support for multiple +# location identifier formats +# +# $HeadURL$ +# $LastChangedDate$ +# $LastChangedBy$ +# $LastChangedRevision$ +# +# Requisites: +# svnmerge.py has been tested with all SVN major versions since 1.1 (both +# client and server). It is unknown if it works with previous versions. 
+# +# Differences from svnmerge.sh: +# - More portable: tested as working in FreeBSD and OS/2. +# - Add double-verbose mode, which shows every svn command executed (-v -v). +# - "svnmerge avail" now only shows commits in source, not also commits in +# other parts of the repository. +# - Add "svnmerge block" to flag some revisions as blocked, so that +# they will not show up anymore in the available list. Added also +# the complementary "svnmerge unblock". +# - "svnmerge avail" has grown two new options: +# -B to display a list of the blocked revisions +# -A to display both the blocked and the available revisions. +# - Improved generated commit message to make it machine parsable even when +# merging commits which are themselves merges. +# - Add --force option to skip working copy check +# - Add --record-only option to "svnmerge merge" to avoid performing +# an actual merge, yet record that a merge happened. +# - Can use a variety of location-identifier formats +# +# TODO: +# - Add "svnmerge avail -R": show logs in reverse order +# +# Information for Hackers: +# +# Identifiers for branches: +# A branch is identified in three ways within this source: +# - as a working copy (variable name usually includes 'dir') +# - as a fully qualified URL +# - as a path identifier (an opaque string indicating a particular path +# in a particular repository; variable name includes 'pathid') +# A "target" is generally user-specified, and may be a working copy or +# a URL. + +import sys, os, getopt, re, types, tempfile, time, locale +from bisect import bisect +from xml.dom import pulldom + +NAME = "svnmerge" +if not hasattr(sys, "version_info") or sys.version_info < (2, 0): + error("requires Python 2.0 or newer") + +# Set up the separator used to separate individual log messages from +# each revision merged into the target location. Also, create a +# regular expression that will find this same separator in already +# committed log messages, so that the separator used for this run of +# svnmerge.py will have one more LOG_SEPARATOR appended to the longest +# separator found in all the commits. +LOG_SEPARATOR = 8 * '.' +LOG_SEPARATOR_RE = re.compile('^((%s)+)' % re.escape(LOG_SEPARATOR), + re.MULTILINE) + +# Each line of the embedded log messages will be prefixed by LOG_LINE_PREFIX. 
+LOG_LINE_PREFIX = 2 * ' ' + +# Set python to the default locale as per environment settings, same as svn +# TODO we should really parse config and if log-encoding is specified, set +# the locale to match that encoding +locale.setlocale(locale.LC_ALL, '') + +# We want the svn output (such as svn info) to be non-localized +# Using LC_MESSAGES should not affect localized output of svn log, for example +if os.environ.has_key("LC_ALL"): + del os.environ["LC_ALL"] +os.environ["LC_MESSAGES"] = "C" + +############################################################################### +# Support for older Python versions +############################################################################### + +# True/False constants are Python 2.2+ +try: + True, False +except NameError: + True, False = 1, 0 + +def lstrip(s, ch): + """Replacement for str.lstrip (support for arbitrary chars to strip was + added in Python 2.2.2).""" + i = 0 + try: + while s[i] == ch: + i = i+1 + return s[i:] + except IndexError: + return "" + +def rstrip(s, ch): + """Replacement for str.rstrip (support for arbitrary chars to strip was + added in Python 2.2.2).""" + try: + if s[-1] != ch: + return s + i = -2 + while s[i] == ch: + i = i-1 + return s[:i+1] + except IndexError: + return "" + +def strip(s, ch): + """Replacement for str.strip (support for arbitrary chars to strip was + added in Python 2.2.2).""" + return lstrip(rstrip(s, ch), ch) + +def rsplit(s, sep, maxsplits=0): + """Like str.rsplit, which is Python 2.4+ only.""" + L = s.split(sep) + if not 0 < maxsplits <= len(L): + return L + return [sep.join(L[0:-maxsplits])] + L[-maxsplits:] + +############################################################################### + +def kwextract(s): + """Extract info from a svn keyword string.""" + try: + return strip(s, "$").strip().split(": ")[1] + except IndexError: + return "<unknown>" + +__revision__ = kwextract('$Rev$') +__date__ = kwextract('$Date$') + +# Additional options, not (yet?) mapped to command line flags +default_opts = { + "svn": "svn", + "prop": NAME + "-integrated", + "block-prop": NAME + "-blocked", + "commit-verbose": True, + "verbose": 0, +} +logs = {} + +def console_width(): + """Get the width of the console screen (if any).""" + try: + return int(os.environ["COLUMNS"]) + except (KeyError, ValueError): + pass + + try: + # Call the Windows API (requires ctypes library) + from ctypes import windll, create_string_buffer + h = windll.kernel32.GetStdHandle(-11) + csbi = create_string_buffer(22) + res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi) + if res: + import struct + (bufx, bufy, + curx, cury, wattr, + left, top, right, bottom, + maxx, maxy) = struct.unpack("hhhhHhhhhhh", csbi.raw) + return right - left + 1 + except ImportError: + pass + + # Parse the output of stty -a + if os.isatty(1): + out = os.popen("stty -a").read() + m = re.search(r"columns (\d+);", out) + if m: + return int(m.group(1)) + + # sensible default + return 80 + +def error(s): + """Subroutine to output an error and bail.""" + print >> sys.stderr, "%s: %s" % (NAME, s) + sys.exit(1) + +def report(s): + """Subroutine to output progress message, unless in quiet mode.""" + if opts["verbose"]: + print "%s: %s" % (NAME, s) + +def prefix_lines(prefix, lines): + """Given a string representing one or more lines of text, insert the + specified prefix at the beginning of each line, and return the result. 
+ The input must be terminated by a newline.""" + assert lines[-1] == "\n" + return prefix + lines[:-1].replace("\n", "\n"+prefix) + "\n" + +def recode_stdout_to_file(s): + if locale.getdefaultlocale()[1] is None or not hasattr(sys.stdout, "encoding") \ + or sys.stdout.encoding is None: + return s + u = s.decode(sys.stdout.encoding) + return u.encode(locale.getdefaultlocale()[1]) + +class LaunchError(Exception): + """Signal a failure in execution of an external command. Parameters are the + exit code of the process, the original command line, and the output of the + command.""" + +try: + """Launch a sub-process. Return its output (both stdout and stderr), + optionally split by lines (if split_lines is True). Raise a LaunchError + exception if the exit code of the process is non-zero (failure). + + This function has two implementations, one based on subprocess (preferred), + and one based on popen (for compatibility). + """ + import subprocess + import shlex + + def launch(cmd, split_lines=True): + # Requiring python 2.4 or higher, on some platforms we get + # much faster performance from the subprocess module (where python + # doesn't try to close an exhorbitant number of file descriptors) + stdout = "" + stderr = "" + try: + if os.name == 'nt': + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, \ + close_fds=False, stderr=subprocess.PIPE) + else: + # Use shlex to break up the parameters intelligently, + # respecting quotes. shlex can't handle unicode. + args = shlex.split(cmd.encode('ascii')) + p = subprocess.Popen(args, stdout=subprocess.PIPE, \ + close_fds=False, stderr=subprocess.PIPE) + stdoutAndErr = p.communicate() + stdout = stdoutAndErr[0] + stderr = stdoutAndErr[1] + except OSError, inst: + # Using 1 as failure code; should get actual number somehow? For + # examples see svnmerge_test.py's TestCase_launch.test_failure and + # TestCase_launch.test_failurecode. 
+ raise LaunchError(1, cmd, stdout + " " + stderr + ": " + str(inst)) + + if p.returncode == 0: + if split_lines: + # Setting keepends=True for compatibility with previous logic + # (where file.readlines() preserves newlines) + return stdout.splitlines(True) + else: + return stdout + else: + raise LaunchError(p.returncode, cmd, stdout + stderr) +except ImportError: + # support versions of python before 2.4 (slower on some systems) + def launch(cmd, split_lines=True): + if os.name not in ['nt', 'os2']: + import popen2 + p = popen2.Popen4(cmd) + p.tochild.close() + if split_lines: + out = p.fromchild.readlines() + else: + out = p.fromchild.read() + ret = p.wait() + if ret == 0: + ret = None + else: + ret >>= 8 + else: + i,k = os.popen4(cmd) + i.close() + if split_lines: + out = k.readlines() + else: + out = k.read() + ret = k.close() + + if ret is None: + return out + raise LaunchError(ret, cmd, out) + +def launchsvn(s, show=False, pretend=False, **kwargs): + """Launch SVN and grab its output.""" + username = password = configdir = "" + if opts.get("username", None): + username = "--username=" + opts["username"] + if opts.get("password", None): + password = "--password=" + opts["password"] + if opts.get("config-dir", None): + configdir = "--config-dir=" + opts["config-dir"] + cmd = ' '.join(filter(None, [opts["svn"], "--non-interactive", + username, password, configdir, s])) + if show or opts["verbose"] >= 2: + print cmd + if pretend: + return None + return launch(cmd, **kwargs) + +def svn_command(s): + """Do (or pretend to do) an SVN command.""" + out = launchsvn(s, show=opts["show-changes"] or opts["dry-run"], + pretend=opts["dry-run"], + split_lines=False) + if not opts["dry-run"]: + print out + +def check_dir_clean(dir): + """Check the current status of dir for local mods.""" + if opts["force"]: + report('skipping status check because of --force') + return + report('checking status of "%s"' % dir) + + # Checking with -q does not show unversioned files or external + # directories. Though it displays a debug message for external + # directories, after a blank line. So, practically, the first line + # matters: if it's non-empty there is a modification. + out = launchsvn("status -q %s" % dir) + if out and out[0].strip(): + error('"%s" has local modifications; it must be clean' % dir) + +class PathIdentifier: + """Abstraction for a path identifier, so that we can start talking + about it before we know the form that it takes in the properties (its + external_form). Objects are referenced in the class variable 'locobjs', + keyed by all known forms.""" + + # a map of UUID (or None) to repository root URL. 
+ repo_hints = {} + + # a map from any known string form to the corresponding PathIdentifier + locobjs = {} + + def __init__(self, repo_relative_path, uuid=None, url=None, external_form=None): + self.repo_relative_path = repo_relative_path + self.uuid = uuid + self.url = url + self.external_form = external_form + + def __repr__(self): + return "<PathIdentifier " + ', '.join('%s=%r' % i for i in self.__dict__.items()) + '>' + + def __str__(self): + """Return a printable string representation""" + if self.external_form: + return self.external_form + if self.url: + return self.format('url') + if self.uuid: + return self.format('uuid') + return self.format('path') + + def from_pathid(pathid_str): + """convert pathid_str to a PathIdentifier""" + if not PathIdentifier.locobjs.has_key(pathid_str): + if is_url(pathid_str): + # we can determine every form; PathIdentifier.hint knows how to do that + PathIdentifier.hint(pathid_str) + elif pathid_str[:7] == 'uuid://': + mo = re.match('uuid://([^/]*)(.*)', pathid_str) + if not mo: + error("Invalid path identifier '%s'" % pathid_str) + uuid, repo_relative_path = mo.groups() + pathid = PathIdentifier(repo_relative_path, uuid=uuid) + # we can cache this by uuid:// pathid and by repo-relative path + PathIdentifier.locobjs[pathid_str] = PathIdentifier.locobjs[repo_relative_path] = pathid + elif pathid_str and pathid_str[0] == '/': + # strip any trailing slashes + pathid_str = pathid_str.rstrip('/') + pathid = PathIdentifier(repo_relative_path=pathid_str) + # we can only cache this by repo-relative path + PathIdentifier.locobjs[pathid_str] = pathid + else: + error("Invalid path identifier '%s'" % pathid_str) + return PathIdentifier.locobjs[pathid_str] + from_pathid = staticmethod(from_pathid) + + def from_target(target): + """Convert a target (either a working copy path or an URL) into a + path identifier.""" + # prime the cache first if we don't know about this target yet + if not PathIdentifier.locobjs.has_key(target): + PathIdentifier.hint(target) + + try: + return PathIdentifier.locobjs[target] + except KeyError: + error("Could not recognize path identifier '%s'" % target) + from_target = staticmethod(from_target) + + def hint(target): + """Cache some information about target, as it may be referenced by + repo-relative path in subversion properties; the cache can help to + expand such a relative path to a full path identifier.""" + if PathIdentifier.locobjs.has_key(target): return + if not is_url(target) and not is_wc(target): return + + url = target_to_url(target) + + root = get_repo_root(url) + assert root[-1] != "/" + assert url[:len(root)] == root, "url=%r, root=%r" % (url, root) + repo_relative_path = url[len(root):] + + try: + uuid = get_svninfo(target)['Repository UUID'] + uuid_pathid = 'uuid://%s%s' % (uuid, repo_relative_path) + except KeyError: + uuid = None + uuid_pathid = None + + locobj = PathIdentifier.locobjs.get(url) or \ + (uuid_pathid and PathIdentifier.locobjs.get(uuid_pathid)) + if not locobj: + locobj = PathIdentifier(repo_relative_path, uuid=uuid, url=url) + + PathIdentifier.repo_hints[uuid] = root # (uuid may be None) + + PathIdentifier.locobjs[target] = locobj + PathIdentifier.locobjs[url] = locobj + if uuid_pathid: + PathIdentifier.locobjs[uuid_pathid] = locobj + if not PathIdentifier.locobjs.has_key(repo_relative_path): + PathIdentifier.locobjs[repo_relative_path] = locobj + hint = staticmethod(hint) + + def format(self, fmt): + if fmt == 'path': + return self.repo_relative_path + elif fmt == 'uuid': + return "uuid://%s%s" % 
(self.uuid, self.repo_relative_path)
+ elif fmt == 'url':
+ return self.url
+ else:
+ error("Unknown path type '%s'" % fmt)
+
+ def match_substring(self, str):
+ """Test whether str is a substring of any representation of this
+ PathIdentifier."""
+ if self.repo_relative_path.find(str) >= 0:
+ return True
+
+ if self.uuid:
+ if ("uuid://%s%s" % (self.uuid, self.repo_relative_path)).find(str) >= 0:
+ return True
+
+ if self.url:
+ if (self.url + self.repo_relative_path).find(str) >= 0:
+ return True
+
+ return False
+
+ def get_url(self):
+ """Convert a pathid into a URL. If this is not possible, error out."""
+ if self.url:
+ return self.url
+ # if we have a uuid and happen to know the URL for it, use that
+ elif self.uuid and PathIdentifier.repo_hints.has_key(self.uuid):
+ self.url = PathIdentifier.repo_hints[self.uuid] + self.repo_relative_path
+ PathIdentifier.locobjs[self.url] = self
+ return self.url
+ # if we've only seen one rep, use that (a guess, but an educated one)
+ elif not self.uuid and len(PathIdentifier.repo_hints) == 1:
+ uuid, root = PathIdentifier.repo_hints.items()[0]
+ if uuid:
+ self.uuid = uuid
+ PathIdentifier.locobjs['uuid://%s%s' % (uuid, self.repo_relative_path)] = self
+ self.url = root + self.repo_relative_path
+ PathIdentifier.locobjs[self.url] = self
+ report("Guessing that '%s' refers to '%s'" % (self, self.url))
+ return self.url
+ else:
+ error("Cannot determine URL for '%s'; " % self +
+ "Explicit source argument (-S/--source) required.\n")
+
+class RevisionLog:
+ """
+ A log of the revisions which affected a given URL between two
+ revisions.
+ """
+
+ def __init__(self, url, begin, end, find_propchanges=False):
+ """
+ Create a new RevisionLog object, which stores, in self.revs, a list
+ of the revisions which affected the specified URL between begin and
+ end. If find_propchanges is True, self.propchange_revs will contain a
+ list of the revisions which changed properties directly on the
+ specified URL. URL must be the URL for a directory in the repository.
+ """
+ self.url = url
+
+ # Setup the log options (--quiet, so we don't show log messages)
+ log_opts = '--xml --quiet -r%s:%s "%s"' % (begin, end, url)
+ if find_propchanges:
+ # The --verbose flag lets us grab merge tracking information
+ # by looking at propchanges
+ log_opts = "--verbose " + log_opts
+
+ # Read the log to look for revision numbers and merge-tracking info
+ self.revs = []
+ self.propchange_revs = []
+ repos_pathid = PathIdentifier.from_target(url)
+ for chg in SvnLogParser(launchsvn("log %s" % log_opts,
+ split_lines=False)):
+ self.revs.append(chg.revision())
+ for p in chg.paths():
+ if p.action() == 'M' and p.pathid() == repos_pathid.repo_relative_path:
+ self.propchange_revs.append(chg.revision())
+
+ # Save the range of the log
+ self.begin = int(begin)
+ if end == "HEAD":
+ # If end is not provided, we do not know which is the latest
+ # revision in the repository. So we set 'end' to the latest
+ # known revision.
+ self.end = self.revs[-1]
+ else:
+ self.end = int(end)
+
+ self._merges = None
+ self._blocks = None
+
+ def merge_metadata(self):
+ """
+ Return a VersionedProperty object, with a cached view of the merge
+ metadata in the range of this log.
+ """ + + # Load merge metadata if necessary + if not self._merges: + self._merges = VersionedProperty(self.url, opts["prop"]) + self._merges.load(self) + + return self._merges + + def block_metadata(self): + if not self._blocks: + self._blocks = VersionedProperty(self.url, opts["block-prop"]) + self._blocks.load(self) + + return self._blocks + + +class VersionedProperty: + """ + A read-only, cached view of a versioned property. + + self.revs contains a list of the revisions in which the property changes. + self.values stores the new values at each corresponding revision. If the + value of the property is unknown, it is set to None. + + Initially, we set self.revs to [0] and self.values to [None]. This + indicates that, as of revision zero, we know nothing about the value of + the property. + + Later, if you run self.load(log), we cache the value of this property over + the entire range of the log by noting each revision in which the property + was changed. At the end of the range of the log, we invalidate our cache + by adding the value "None" to our cache for any revisions which fall out + of the range of our log. + + Once self.revs and self.values are filled, we can find the value of the + property at any arbitrary revision using a binary search on self.revs. + Once we find the last revision during which the property was changed, + we can lookup the associated value in self.values. (If the associated + value is None, the associated value was not cached and we have to do + a full propget.) + + An example: We know that the 'svnmerge' property was added in r10, and + changed in r21. We gathered log info up until r40. + + revs = [0, 10, 21, 40] + values = [None, "val1", "val2", None] + + What these values say: + - From r0 to r9, we know nothing about the property. + - In r10, the property was set to "val1". This property stayed the same + until r21, when it was changed to "val2". + - We don't know what happened after r40. + """ + + def __init__(self, url, name): + """View the history of a versioned property at URL with name""" + self.url = url + self.name = name + + # We know nothing about the value of the property. Setup revs + # and values to indicate as such. + self.revs = [0] + self.values = [None] + + # We don't have any revisions cached + self._initial_value = None + self._changed_revs = [] + self._changed_values = [] + + def load(self, log): + """ + Load the history of property changes from the specified + RevisionLog object. + """ + + # Get the property value before the range of the log + if log.begin > 1: + self.revs.append(log.begin-1) + try: + self._initial_value = self.raw_get(log.begin-1) + except LaunchError: + # The specified URL might not exist before the + # range of the log. If so, we can safely assume + # that the property was empty at that time. + self._initial_value = { } + self.values.append(self._initial_value) + else: + self._initial_value = { } + self.values[0] = self._initial_value + + # Cache the property values in the log range + old_value = self._initial_value + for rev in log.propchange_revs: + new_value = self.raw_get(rev) + if new_value != old_value: + self._changed_revs.append(rev) + self._changed_values.append(new_value) + self.revs.append(rev) + self.values.append(new_value) + old_value = new_value + + # Indicate that we know nothing about the value of the property + # after the range of the log. + if log.revs: + self.revs.append(log.end+1) + self.values.append(None) + + def raw_get(self, rev=None): + """ + Get the property at revision REV. 
If rev is not specified, get + the property at revision HEAD. + """ + return get_revlist_prop(self.url, self.name, rev) + + def get(self, rev=None): + """ + Get the property at revision REV. If rev is not specified, get + the property at revision HEAD. + """ + + if rev is not None: + + # Find the index using a binary search + i = bisect(self.revs, rev) - 1 + + # Return the value of the property, if it was cached + if self.values[i] is not None: + return self.values[i] + + # Get the current value of the property + return self.raw_get(rev) + + def changed_revs(self, key=None): + """ + Get a list of the revisions in which the specified dictionary + key was changed in this property. If key is not specified, + return a list of revisions in which any key was changed. + """ + if key is None: + return self._changed_revs + else: + changed_revs = [] + old_val = self._initial_value + for rev, val in zip(self._changed_revs, self._changed_values): + if val.get(key) != old_val.get(key): + changed_revs.append(rev) + old_val = val + return changed_revs + + def initialized_revs(self): + """ + Get a list of the revisions in which keys were added or + removed in this property. + """ + initialized_revs = [] + old_len = len(self._initial_value) + for rev, val in zip(self._changed_revs, self._changed_values): + if len(val) != old_len: + initialized_revs.append(rev) + old_len = len(val) + return initialized_revs + +class RevisionSet: + """ + A set of revisions, held in dictionary form for easy manipulation. If we + were to rewrite this script for Python 2.3+, we would subclass this from + set (or UserSet). As this class does not include branch + information, it's assumed that one instance will be used per + branch. + """ + def __init__(self, parm): + """Constructs a RevisionSet from a string in property form, or from + a dictionary whose keys are the revisions. 
Raises ValueError if the + input string is invalid.""" + + self._revs = {} + + revision_range_split_re = re.compile('[-:]') + + if isinstance(parm, types.DictType): + self._revs = parm.copy() + elif isinstance(parm, types.ListType): + for R in parm: + self._revs[int(R)] = 1 + else: + parm = parm.strip() + if parm: + for R in parm.split(","): + rev_or_revs = re.split(revision_range_split_re, R) + if len(rev_or_revs) == 1: + self._revs[int(rev_or_revs[0])] = 1 + elif len(rev_or_revs) == 2: + for rev in range(int(rev_or_revs[0]), + int(rev_or_revs[1])+1): + self._revs[rev] = 1 + else: + raise ValueError, 'Ill formatted revision range: ' + R + + def sorted(self): + revnums = self._revs.keys() + revnums.sort() + return revnums + + def normalized(self): + """Returns a normalized version of the revision set, which is an + ordered list of couples (start,end), with the minimum number of + intervals.""" + revnums = self.sorted() + revnums.reverse() + ret = [] + while revnums: + s = e = revnums.pop() + while revnums and revnums[-1] in (e, e+1): + e = revnums.pop() + ret.append((s, e)) + return ret + + def __str__(self): + """Convert the revision set to a string, using its normalized form.""" + L = [] + for s,e in self.normalized(): + if s == e: + L.append(str(s)) + else: + L.append(str(s) + "-" + str(e)) + return ",".join(L) + + def __contains__(self, rev): + return self._revs.has_key(rev) + + def __sub__(self, rs): + """Compute subtraction as in sets.""" + revs = {} + for r in self._revs.keys(): + if r not in rs: + revs[r] = 1 + return RevisionSet(revs) + + def __and__(self, rs): + """Compute intersections as in sets.""" + revs = {} + for r in self._revs.keys(): + if r in rs: + revs[r] = 1 + return RevisionSet(revs) + + def __nonzero__(self): + return len(self._revs) != 0 + + def __len__(self): + """Return the number of revisions in the set.""" + return len(self._revs) + + def __iter__(self): + return iter(self.sorted()) + + def __or__(self, rs): + """Compute set union.""" + revs = self._revs.copy() + revs.update(rs._revs) + return RevisionSet(revs) + +def merge_props_to_revision_set(merge_props, pathid): + """A converter which returns a RevisionSet instance containing the + revisions from PATH as known to BRANCH_PROPS. BRANCH_PROPS is a + dictionary of pathid -> revision set branch integration information + (as returned by get_merge_props()).""" + if not merge_props.has_key(pathid): + error('no integration info available for path "%s"' % pathid) + return RevisionSet(merge_props[pathid]) + +def dict_from_revlist_prop(propvalue): + """Given a property value as a string containing per-source revision + lists, return a dictionary whose key is a source path identifier + and whose value is the revisions for that source.""" + prop = {} + + # Multiple sources are separated by any whitespace. + for L in propvalue.split(): + # We use rsplit to play safe and allow colons in pathids. 
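[Editorial sketch, not part of the patch: a quick illustration of the revision-set algebra RevisionSet provides, assuming the class as defined above:]

    a = RevisionSet("13-15,18-22")
    b = RevisionSet("14,19-21")
    assert 14 in b and 16 not in a
    assert a.normalized() == [(13, 15), (18, 22)]
    assert str(a - b) == "13,15,18,22"
    assert str(a & b) == "14,19-21"
    assert str(a | b) == "13-15,18-22"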
+ pathid_str, revs = rsplit(L.strip(), ":", 1) + + pathid = PathIdentifier.from_pathid(pathid_str) + + # cache the "external" form we saw + pathid.external_form = pathid_str + + prop[pathid] = revs + return prop + +def get_revlist_prop(url_or_dir, propname, rev=None): + """Given a repository URL or working copy path and a property + name, extract the values of the property which store per-source + revision lists and return a dictionary whose key is a source path + identifier, and whose value is the revisions for that source.""" + + # Note that propget does not return an error if the property does + # not exist, it simply does not output anything. So we do not need + # to check for LaunchError here. + args = '--strict "%s" "%s"' % (propname, url_or_dir) + if rev: + args = '-r %s %s' % (rev, args) + out = launchsvn('propget %s' % args, split_lines=False) + + return dict_from_revlist_prop(out) + +def get_merge_props(dir): + """Extract the merged revisions.""" + return get_revlist_prop(dir, opts["prop"]) + +def get_block_props(dir): + """Extract the blocked revisions.""" + return get_revlist_prop(dir, opts["block-prop"]) + +def get_blocked_revs(dir, source_pathid): + p = get_block_props(dir) + if p.has_key(source_pathid): + return RevisionSet(p[source_pathid]) + return RevisionSet("") + +def format_merge_props(props, sep=" "): + """Formats the hash PROPS as a string suitable for use as a + Subversion property value.""" + assert sep in ["\t", "\n", " "] # must be a whitespace + props = props.items() + props.sort() + L = [] + for h, r in props: + L.append("%s:%s" % (h, r)) + return sep.join(L) + +def _run_propset(dir, prop, value): + """Set the property 'prop' of directory 'dir' to value 'value'. We go + through a temporary file to not run into command line length limits.""" + try: + fd, fname = tempfile.mkstemp() + f = os.fdopen(fd, "wb") + except AttributeError: + # Fallback for Python <= 2.3 which does not have mkstemp (mktemp + # suffers from race conditions. Not that we care...) + fname = tempfile.mktemp() + f = open(fname, "wb") + + try: + f.write(value) + f.close() + report("property data written to temp file: %s" % value) + svn_command('propset "%s" -F "%s" "%s"' % (prop, fname, dir)) + finally: + os.remove(fname) + +def set_props(dir, name, props): + props = format_merge_props(props) + if props: + _run_propset(dir, name, props) + else: + # Check if NAME exists on DIR before trying to delete it. + # As of 1.6 propdel no longer supports deleting a + # non-existent property. 
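[Editorial sketch, not part of the patch: a merge-tracking property value is whitespace-separated "pathid:revlist" entries. A simplified, self-contained stand-in for dict_from_revlist_prop(); the real function builds PathIdentifier objects and uses the script's own rsplit() helper:]

    def parse_revlist_prop(propvalue):
        # Split entries on whitespace, then split each entry on the LAST
        # colon, so colons inside pathids (e.g. in URLs) survive.
        prop = {}
        for entry in propvalue.split():
            pathid, revs = entry.rsplit(":", 1)
            prop[pathid] = revs
        return prop

    assert parse_revlist_prop("/branches/foo:1-10,15 /trunk:1-30") == \
           {"/branches/foo": "1-10,15", "/trunk": "1-30"}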
+ out = launchsvn('propget "%s" "%s"' % (name, dir)) + if out: + svn_command('propdel "%s" "%s"' % (name, dir)) + +def set_merge_props(dir, props): + set_props(dir, opts["prop"], props) + +def set_block_props(dir, props): + set_props(dir, opts["block-prop"], props) + +def set_blocked_revs(dir, source_pathid, revs): + props = get_block_props(dir) + if revs: + props[source_pathid] = str(revs) + elif props.has_key(source_pathid): + del props[source_pathid] + set_block_props(dir, props) + +def is_url(url): + """Check if url looks like a valid url.""" + return re.search(r"^[a-zA-Z][-+\.\w]*://[^\s]+$", url) is not None and url[:4] != 'uuid' + +def check_url(url): + """Similar to is_url, but actually invoke get_svninfo to find out""" + return get_svninfo(url) != {} + +def is_pathid(pathid): + return isinstance(pathid, PathIdentifier) + +def is_wc(dir): + """Check if a directory is a working copy.""" + return os.path.isdir(os.path.join(dir, ".svn")) or \ + os.path.isdir(os.path.join(dir, "_svn")) + +_cache_svninfo = {} +def get_svninfo(target): + """Extract the subversion information for a target (through 'svn info'). + This function uses an internal cache to let clients query information + many times.""" + if _cache_svninfo.has_key(target): + return _cache_svninfo[target] + info = {} + for L in launchsvn('info "%s"' % target): + L = L.strip() + if not L: + continue + key, value = L.split(": ", 1) + info[key] = value.strip() + _cache_svninfo[target] = info + return info + +def target_to_url(target): + """Convert working copy path or repos URL to a repos URL.""" + if is_wc(target): + info = get_svninfo(target) + return info["URL"] + return target + +_cache_reporoot = {} +def get_repo_root(target): + """Compute the root repos URL given a working-copy path, or a URL.""" + # Try using "svn info WCDIR". This works only on SVN clients >= 1.3 + if not is_url(target): + try: + info = get_svninfo(target) + root = info["Repository Root"] + _cache_reporoot[root] = None + return root + except KeyError: + pass + url = target_to_url(target) + assert url[-1] != '/' + else: + url = target + + # Go through the cache of the repository roots. This avoids extra + # server round-trips if we are asking the root of different URLs + # in the same repository (the cache in get_svninfo() cannot detect + # that of course and would issue a remote command). + assert is_url(url) + for r in _cache_reporoot: + if url.startswith(r): + return r + + # Try using "svn info URL". This works only on SVN clients >= 1.2 + try: + info = get_svninfo(url) + # info may be {}, in which case we'll see KeyError here + root = info["Repository Root"] + _cache_reporoot[root] = None + return root + except (KeyError, LaunchError): + pass + + # Constrained to older svn clients, we are stuck with this ugly + # trial-and-error implementation. It could be made faster with a + # binary search. + while url: + temp = os.path.dirname(url) + try: + launchsvn('proplist "%s"' % temp) + except LaunchError: + _cache_reporoot[url] = None + return rstrip(url, "/") + url = temp + + error("svn repos root of %s not found" % target) + +class SvnLogParser: + """ + Parse the "svn log", going through the XML output and using pulldom (which + would even allow streaming the command output). 
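[Editorial sketch, not part of the patch: get_svninfo() above relies on 'svn info' emitting "Key: value" lines. A minimal sketch of that parsing on canned output; the URL and UUID are invented:]

    sample_info = (
        "Path: .\n"
        "URL: http://svn.example.com/repos/trunk\n"
        "Repository Root: http://svn.example.com/repos\n"
        "Repository UUID: c0ffee00-1234-5678-9abc-def012345678\n"
        "Revision: 42\n"
        "Node Kind: directory\n")
    info = {}
    for line in sample_info.splitlines():
        line = line.strip()
        if not line:
            continue
        # Same split the script performs for each non-empty line.
        key, value = line.split(": ", 1)
        info[key] = value.strip()
    assert info["Repository Root"] == "http://svn.example.com/repos"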
+ """ + def __init__(self, xml): + self._events = pulldom.parseString(xml) + def __getitem__(self, idx): + for event, node in self._events: + if event == pulldom.START_ELEMENT and node.tagName == "logentry": + self._events.expandNode(node) + return self.SvnLogRevision(node) + raise IndexError, "Could not find 'logentry' tag in xml" + + class SvnLogRevision: + def __init__(self, xmlnode): + self.n = xmlnode + def revision(self): + return int(self.n.getAttribute("revision")) + def author(self): + return self.n.getElementsByTagName("author")[0].firstChild.data + def paths(self): + return [self.SvnLogPath(n) + for n in self.n.getElementsByTagName("path")] + + class SvnLogPath: + def __init__(self, xmlnode): + self.n = xmlnode + def action(self): + return self.n.getAttribute("action") + def pathid(self): + return self.n.firstChild.data + def copyfrom_rev(self): + try: return self.n.getAttribute("copyfrom-rev") + except KeyError: return None + def copyfrom_pathid(self): + try: return self.n.getAttribute("copyfrom-path") + except KeyError: return None + +def get_copyfrom(target): + """Get copyfrom info for a given target (it represents the + repository-relative path from where it was branched). NOTE: + repos root has no copyfrom info. In this case None is returned. + + Returns the: + - source file or directory from which the copy was made + - revision from which that source was copied + - revision in which the copy was committed + """ + repos_path = PathIdentifier.from_target(target).repo_relative_path + for chg in SvnLogParser(launchsvn('log -v --xml --stop-on-copy "%s"' + % target, split_lines=False)): + for p in chg.paths(): + if p.action() == 'A' and p.pathid() == repos_path: + # These values will be None if the corresponding elements are + # not found in the log. + return p.copyfrom_pathid(), p.copyfrom_rev(), chg.revision() + return None,None,None + +def get_latest_rev(url): + """Get the latest revision of the repository of which URL is part.""" + try: + info = get_svninfo(url) + if not info.has_key("Revision"): + error("Not a valid URL: %s" % url) + return info["Revision"] + except LaunchError: + # Alternative method for latest revision checking (for svn < 1.2) + report('checking latest revision of "%s"' % url) + L = launchsvn('proplist --revprop -r HEAD "%s"' % opts["source-url"])[0] + rev = re.search("revision (\d+)", L).group(1) + report('latest revision of "%s" is %s' % (url, rev)) + return rev + +def get_created_rev(url): + """Lookup the revision at which the path identified by the + provided URL was first created.""" + oldest_rev = -1 + report('determining oldest revision for URL "%s"' % url) + ### TODO: Refactor this to use a modified RevisionLog class. + lines = None + cmd = "log -r1:HEAD --stop-on-copy -q " + url + try: + lines = launchsvn(cmd + " --limit=1") + except LaunchError: + # Assume that --limit isn't supported by the installed 'svn'. + lines = launchsvn(cmd) + if lines and len(lines) > 1: + i = lines[1].find(" ") + if i != -1: + oldest_rev = int(lines[1][1:i]) + if oldest_rev == -1: + error('unable to determine oldest revision for URL "%s"' % url) + return oldest_rev + +def get_commit_log(url, revnum): + """Return the log message for a specific integer revision + number.""" + out = launchsvn("log --incremental -r%d %s" % (revnum, url)) + return recode_stdout_to_file("".join(out[1:])) + +def construct_merged_log_message(url, revnums): + """Return a commit log message containing all the commit messages + in the specified revisions at the given URL. 
The separator used + in this log message is determined by searching for the longest + svnmerge separator existing in the commit log messages and + extending it by one more separator. This results in a new commit + log message that is clearer in describing merges that contain + other merges. Trailing newlines are removed from the embedded + log messages.""" + messages = [''] + longest_sep = '' + for r in revnums.sorted(): + message = get_commit_log(url, r) + if message: + message = re.sub(r'(\r\n|\r|\n)', "\n", message) + message = rstrip(message, "\n") + "\n" + messages.append(prefix_lines(LOG_LINE_PREFIX, message)) + for match in LOG_SEPARATOR_RE.findall(message): + sep = match[1] + if len(sep) > len(longest_sep): + longest_sep = sep + + longest_sep += LOG_SEPARATOR + "\n" + messages.append('') + return longest_sep.join(messages) + +def get_default_source(branch_target, branch_props): + """Return the default source for branch_target (given its branch_props). + Error out if there is ambiguity.""" + if not branch_props: + error("no integration info available") + + props = branch_props.copy() + pathid = PathIdentifier.from_target(branch_target) + + # To make bidirectional merges easier, find the target's + # repository local path so it can be removed from the list of + # possible integration sources. + if props.has_key(pathid): + del props[pathid] + + if len(props) > 1: + err_msg = "multiple sources found. " + err_msg += "Explicit source argument (-S/--source) required.\n" + err_msg += "The merge sources available are:" + for prop in props: + err_msg += "\n " + str(prop) + error(err_msg) + + return props.keys()[0] + +def should_find_reflected(branch_dir): + should_find_reflected = opts["bidirectional"] + + # If the source has integration info for the target, set find_reflected + # even if --bidirectional wasn't specified + if not should_find_reflected: + source_props = get_merge_props(opts["source-url"]) + should_find_reflected = source_props.has_key(PathIdentifier.from_target(branch_dir)) + + return should_find_reflected + +def analyze_revs(target_pathid, url, begin=1, end=None, + find_reflected=False): + """For the source of the merges in the source URL being merged into + target_pathid, analyze the revisions in the interval begin-end (which + defaults to 1-HEAD), to find out which revisions are changes in + the url, which are changes elsewhere (so-called 'phantom' + revisions), optionally which are reflected changes (to avoid + conflicts that can occur when doing bidirectional merging between + branches), and which revisions initialize merge tracking against other + branches. Return a tuple of four RevisionSet's: + (real_revs, phantom_revs, reflected_revs, initialized_revs). + + NOTE: To maximize speed, if "end" is not provided, the function is + not able to find phantom revisions following the last real + revision in the URL. + """ + + begin = str(begin) + if end is None: + end = "HEAD" + else: + end = str(end) + if long(begin) > long(end): + return RevisionSet(""), RevisionSet(""), \ + RevisionSet(""), RevisionSet("") + + logs[url] = RevisionLog(url, begin, end, find_reflected) + revs = RevisionSet(logs[url].revs) + + if end == "HEAD": + # If end is not provided, we do not know which is the latest revision + # in the repository. So return the phantom revision set only up to + # the latest known revision. 
+ end = str(list(revs)[-1]) + + phantom_revs = RevisionSet("%s-%s" % (begin, end)) - revs + + if find_reflected: + reflected_revs = logs[url].merge_metadata().changed_revs(target_pathid) + reflected_revs += logs[url].block_metadata().changed_revs(target_pathid) + else: + reflected_revs = [] + + initialized_revs = RevisionSet(logs[url].merge_metadata().initialized_revs()) + reflected_revs = RevisionSet(reflected_revs) + + return revs, phantom_revs, reflected_revs, initialized_revs + +def analyze_source_revs(branch_target, source_url, **kwargs): + """For the given branch and source, extract the real and phantom + source revisions.""" + branch_url = target_to_url(branch_target) + branch_pathid = PathIdentifier.from_target(branch_target) + + # Extract the latest repository revision from the URL of the branch + # directory (which is already cached at this point). + end_rev = get_latest_rev(source_url) + + # Calculate the base of analysis. If there is a "1-XX" interval in the + # merged_revs, we do not need to check those. + base = 1 + r = opts["merged-revs"].normalized() + if r and r[0][0] == 1: + base = r[0][1] + 1 + + # See if the user filtered the revision set. If so, we are not + # interested in something outside that range. + if opts["revision"]: + revs = RevisionSet(opts["revision"]).sorted() + if base < revs[0]: + base = revs[0] + if end_rev > revs[-1]: + end_rev = revs[-1] + + return analyze_revs(branch_pathid, source_url, base, end_rev, **kwargs) + +def minimal_merge_intervals(revs, phantom_revs): + """Produce the smallest number of intervals suitable for merging. revs + is the RevisionSet which we want to merge, and phantom_revs are phantom + revisions which can be used to concatenate intervals, thus minimizing the + number of operations.""" + revnums = revs.normalized() + ret = [] + + cur = revnums.pop() + while revnums: + next = revnums.pop() + assert next[1] < cur[0] # otherwise it is not ordered + assert cur[0] - next[1] > 1 # otherwise it is not normalized + for i in range(next[1]+1, cur[0]): + if i not in phantom_revs: + ret.append(cur) + cur = next + break + else: + cur = (next[0], cur[1]) + + ret.append(cur) + ret.reverse() + return ret + +def display_revisions(revs, display_style, revisions_msg, source_url): + """Show REVS as dictated by DISPLAY_STYLE, either numerically, in + log format, or as diffs. When displaying revisions numerically, + prefix output with REVISIONS_MSG when in verbose mode. Otherwise, + request logs or diffs using SOURCE_URL.""" + if display_style == "revisions": + if revs: + report(revisions_msg) + print revs + elif display_style == "logs": + for start,end in revs.normalized(): + svn_command('log --incremental -v -r %d:%d %s' % \ + (start, end, source_url)) + elif display_style in ("diffs", "summarize"): + if display_style == 'summarize': + summarize = '--summarize ' + else: + summarize = '' + + for start, end in revs.normalized(): + print + if start == end: + print "%s: changes in revision %d follow" % (NAME, start) + else: + print "%s: changes in revisions %d-%d follow" % (NAME, + start, end) + print + + # Note: the starting revision number to 'svn diff' is + # NOT inclusive so we have to subtract one from ${START}. 
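[Editorial sketch, not part of the patch: minimal_merge_intervals() defined above exploits phantom revisions to coalesce intervals, so fewer 'svn merge' invocations are needed. A worked example, assuming the definitions above:]

    want = RevisionSet("4-6,9-10")     # revisions to merge
    phantoms = RevisionSet("7-8")      # gap revisions that are phantom
    # Only phantoms separate 4-6 from 9-10, so one interval suffices:
    assert minimal_merge_intervals(want, phantoms) == [(4, 10)]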
+ svn_command("diff -r %d:%d %s %s" % (start - 1, end, summarize, + source_url)) + else: + assert False, "unhandled display style: %s" % display_style + +def action_init(target_dir, target_props): + """Initialize for merges.""" + # Check that directory is ready for being modified + check_dir_clean(target_dir) + + target_pathid = PathIdentifier.from_target(target_dir) + source_pathid = opts['source-pathid'] + if source_pathid == target_pathid: + error("cannot init integration source path '%s'\nIts path identifier does not " + "differ from the path identifier of the current directory, '%s'." + % (source_pathid, target_pathid)) + + source_url = opts['source-url'] + + # If the user hasn't specified the revisions to use, see if the + # "source" is a copy from the current tree and if so, we can use + # the version data obtained from it. + revision_range = opts["revision"] + if not revision_range: + # If source was originally copied from target, and we are merging + # changes from source to target (the copy target is the merge source, + # and the copy source is the merge target), then we want to mark as + # integrated up to the rev in which the copy was committed which + # created the merge source: + cf_source, cf_rev, copy_committed_in_rev = get_copyfrom(source_url) + + cf_pathid = None + if cf_source: + cf_url = get_repo_root(source_url) + cf_source + if is_url(cf_url) and check_url(cf_url): + cf_pathid = PathIdentifier.from_target(cf_url) + + if target_pathid == cf_pathid: + report('the source "%s" was copied from "%s" in rev %s and committed in rev %s' % + (source_url, target_dir, cf_rev, copy_committed_in_rev)) + revision_range = "1-" + str(copy_committed_in_rev) + + if not revision_range: + # If the reverse is true: copy source is the merge source, and + # the copy target is the merge target, then we want to mark as + # integrated up to the specific rev of the merge target from + # which the merge source was copied. (Longer discussion at: + # http://subversion.tigris.org/issues/show_bug.cgi?id=2810 ) + cf_source, cf_rev, copy_committed_in_rev = get_copyfrom(target_dir) + + cf_pathid = None + if cf_source: + cf_url = get_repo_root(target_dir) + cf_source + if is_url(cf_url) and check_url(cf_url): + cf_pathid = PathIdentifier.from_target(cf_url) + + source_pathid = PathIdentifier.from_target(source_url) + if source_pathid == cf_pathid: + report('the target "%s" was copied the source "%s" in rev %s and committed in rev %s' % + (target_dir, source_url, cf_rev, copy_committed_in_rev)) + revision_range = "1-" + cf_rev + + # When neither the merge source nor target is a copy of the other, and + # the user did not specify a revision range, then choose a default which is + # the current revision; saying, in effect, "everything has been merged, so + # mark as integrated up to the latest rev on source url). + if not revision_range: + revision_range = "1-" + get_latest_rev(source_url) + + revs = RevisionSet(revision_range) + + report('marking "%s" as already containing revisions "%s" of "%s"' % + (target_dir, revs, source_url)) + + revs = str(revs) + # If the local svnmerge-integrated property already has an entry + # for the source-pathid, simply error out. 
+ if not opts["force"] and target_props.has_key(source_pathid): + error('Repository-relative path %s has already been initialized at %s\n' + 'Use --force to re-initialize' % (source_pathid, target_dir)) + # set the pathid's external_form based on the user's options + source_pathid.external_form = source_pathid.format(opts['location-type']) + + revs = str(revs) + target_props[source_pathid] = revs + + # Set property + set_merge_props(target_dir, target_props) + + # Write out commit message if desired + if opts["commit-file"]: + f = open(opts["commit-file"], "w") + print >>f, 'Initialized merge tracking via "%s" with revisions "%s" from ' \ + % (NAME, revs) + print >>f, '%s' % source_url + f.close() + report('wrote commit message to "%s"' % opts["commit-file"]) + +def action_avail(branch_dir, branch_props): + """Show commits available for merges.""" + source_revs, phantom_revs, reflected_revs, initialized_revs = \ + analyze_source_revs(branch_dir, opts["source-url"], + find_reflected= + should_find_reflected(branch_dir)) + report('skipping phantom revisions: %s' % phantom_revs) + if reflected_revs: + report('skipping reflected revisions: %s' % reflected_revs) + report('skipping initialized revisions: %s' % initialized_revs) + + blocked_revs = get_blocked_revs(branch_dir, opts["source-pathid"]) + avail_revs = source_revs - opts["merged-revs"] - blocked_revs - \ + reflected_revs - initialized_revs + + # Compose the set of revisions to show + revs = RevisionSet("") + report_msg = "revisions available to be merged are:" + if "avail" in opts["avail-showwhat"]: + revs |= avail_revs + if "blocked" in opts["avail-showwhat"]: + revs |= blocked_revs + report_msg = "revisions blocked are:" + + # Limit to revisions specified by -r (if any) + if opts["revision"]: + revs = revs & RevisionSet(opts["revision"]) + + display_revisions(revs, opts["avail-display"], + report_msg, + opts["source-url"]) + +def action_integrated(branch_dir, branch_props): + """Show change sets already merged. This set of revisions is + calculated from taking svnmerge-integrated property from the + branch, and subtracting any revision older than the branch + creation revision.""" + # Extract the integration info for the branch_dir + branch_props = get_merge_props(branch_dir) + revs = merge_props_to_revision_set(branch_props, opts["source-pathid"]) + + # Lookup the oldest revision on the branch path. + oldest_src_rev = get_created_rev(opts["source-url"]) + + # Subtract any revisions which pre-date the branch. 
+ report("subtracting revisions which pre-date the source URL (%d)" % + oldest_src_rev) + revs = revs - RevisionSet(range(1, oldest_src_rev)) + + # Limit to revisions specified by -r (if any) + if opts["revision"]: + revs = revs & RevisionSet(opts["revision"]) + + display_revisions(revs, opts["integrated-display"], + "revisions already integrated are:", opts["source-url"]) + +def action_merge(branch_dir, branch_props): + """Record merge meta data, and do the actual merge (if not + requested otherwise via --record-only).""" + # Check branch directory is ready for being modified + check_dir_clean(branch_dir) + + source_revs, phantom_revs, reflected_revs, initialized_revs = \ + analyze_source_revs(branch_dir, opts["source-url"], + find_reflected= + should_find_reflected(branch_dir)) + + if opts["revision"]: + revs = RevisionSet(opts["revision"]) + else: + revs = source_revs + + blocked_revs = get_blocked_revs(branch_dir, opts["source-pathid"]) + merged_revs = opts["merged-revs"] + + # Show what we're doing + if opts["verbose"]: # just to avoid useless calculations + if merged_revs & revs: + report('"%s" already contains revisions %s' % (branch_dir, + merged_revs & revs)) + if phantom_revs: + report('memorizing phantom revision(s): %s' % phantom_revs) + if reflected_revs: + report('memorizing reflected revision(s): %s' % reflected_revs) + if blocked_revs & revs: + report('skipping blocked revisions(s): %s' % (blocked_revs & revs)) + if initialized_revs: + report('skipping initialized revision(s): %s' % initialized_revs) + + # Compute final merge set. + revs = revs - merged_revs - blocked_revs - reflected_revs - \ + phantom_revs - initialized_revs + if not revs: + report('no revisions to merge, exiting') + return + + # When manually marking revisions as merged, we only update the + # integration meta data, and don't perform an actual merge. + record_only = opts["record-only"] + + if record_only: + report('recording merge of revision(s) %s from "%s"' % + (revs, opts["source-url"])) + else: + report('merging in revision(s) %s from "%s"' % + (revs, opts["source-url"])) + + # Do the merge(s). Note: the starting revision number to 'svn merge' + # is NOT inclusive so we have to subtract one from start. + # We try to keep the number of merge operations as low as possible, + # because it is faster and reduces the number of conflicts. + old_block_props = get_block_props(branch_dir) + merge_metadata = logs[opts["source-url"]].merge_metadata() + block_metadata = logs[opts["source-url"]].block_metadata() + for start,end in minimal_merge_intervals(revs, phantom_revs): + if not record_only: + # Preset merge/blocked properties to the source value at + # the start rev to avoid spurious property conflicts + set_merge_props(branch_dir, merge_metadata.get(start - 1)) + set_block_props(branch_dir, block_metadata.get(start - 1)) + # Do the merge + svn_command("merge --force -r %d:%d %s %s" % \ + (start - 1, end, opts["source-url"], branch_dir)) + # TODO: to support graph merging, add logic to merge the property + # meta-data manually + + # Update the set of merged revisions. 
+ merged_revs = merged_revs | revs | reflected_revs | phantom_revs | initialized_revs + branch_props[opts["source-pathid"]] = str(merged_revs) + set_merge_props(branch_dir, branch_props) + # Reset the blocked revs + set_block_props(branch_dir, old_block_props) + + # Write out commit message if desired + if opts["commit-file"]: + f = open(opts["commit-file"], "w") + if record_only: + print >>f, 'Recorded merge of revisions %s via %s from ' % \ + (revs, NAME) + else: + print >>f, 'Merged revisions %s via %s from ' % \ + (revs, NAME) + print >>f, '%s' % opts["source-url"] + if opts["commit-verbose"]: + print >>f + print >>f, construct_merged_log_message(opts["source-url"], revs), + + f.close() + report('wrote commit message to "%s"' % opts["commit-file"]) + +def action_block(branch_dir, branch_props): + """Block revisions.""" + # Check branch directory is ready for being modified + check_dir_clean(branch_dir) + + source_revs, phantom_revs, reflected_revs, initialized_revs = \ + analyze_source_revs(branch_dir, opts["source-url"]) + revs_to_block = source_revs - opts["merged-revs"] + + # Limit to revisions specified by -r (if any) + if opts["revision"]: + revs_to_block = RevisionSet(opts["revision"]) & revs_to_block + + if not revs_to_block: + error('no available revisions to block') + + # Change blocked information + blocked_revs = get_blocked_revs(branch_dir, opts["source-pathid"]) + blocked_revs = blocked_revs | revs_to_block + set_blocked_revs(branch_dir, opts["source-pathid"], blocked_revs) + + # Write out commit message if desired + if opts["commit-file"]: + f = open(opts["commit-file"], "w") + print >>f, 'Blocked revisions %s via %s' % (revs_to_block, NAME) + if opts["commit-verbose"]: + print >>f + print >>f, construct_merged_log_message(opts["source-url"], + revs_to_block), + + f.close() + report('wrote commit message to "%s"' % opts["commit-file"]) + +def action_unblock(branch_dir, branch_props): + """Unblock revisions.""" + # Check branch directory is ready for being modified + check_dir_clean(branch_dir) + + blocked_revs = get_blocked_revs(branch_dir, opts["source-pathid"]) + revs_to_unblock = blocked_revs + + # Limit to revisions specified by -r (if any) + if opts["revision"]: + revs_to_unblock = revs_to_unblock & RevisionSet(opts["revision"]) + + if not revs_to_unblock: + error('no available revisions to unblock') + + # Change blocked information + blocked_revs = blocked_revs - revs_to_unblock + set_blocked_revs(branch_dir, opts["source-pathid"], blocked_revs) + + # Write out commit message if desired + if opts["commit-file"]: + f = open(opts["commit-file"], "w") + print >>f, 'Unblocked revisions %s via %s' % (revs_to_unblock, NAME) + if opts["commit-verbose"]: + print >>f + print >>f, construct_merged_log_message(opts["source-url"], + revs_to_unblock), + f.close() + report('wrote commit message to "%s"' % opts["commit-file"]) + +def action_rollback(branch_dir, branch_props): + """Rollback previously integrated revisions.""" + + # Make sure the revision arguments are present + if not opts["revision"]: + error("The '-r' option is mandatory for rollback") + + # Check branch directory is ready for being modified + check_dir_clean(branch_dir) + + # Extract the integration info for the branch_dir + branch_props = get_merge_props(branch_dir) + # Get the list of all revisions already merged into this source-pathid. + merged_revs = merge_props_to_revision_set(branch_props, + opts["source-pathid"]) + + # At which revision was the src created? 
+ oldest_src_rev = get_created_rev(opts["source-url"]) + src_pre_exist_range = RevisionSet("1-%d" % oldest_src_rev) + + # Limit to revisions specified by -r (if any) + revs = merged_revs & RevisionSet(opts["revision"]) + + # make sure there's some revision to rollback + if not revs: + report("Nothing to rollback in revision range r%s" % opts["revision"]) + return + + # If even one specified revision lies outside the lifetime of the + # merge source, error out. + if revs & src_pre_exist_range: + err_str = "Specified revision range falls out of the rollback range.\n" + err_str += "%s was created at r%d" % (opts["source-pathid"], + oldest_src_rev) + error(err_str) + + record_only = opts["record-only"] + + if record_only: + report('recording rollback of revision(s) %s from "%s"' % + (revs, opts["source-url"])) + else: + report('rollback of revision(s) %s from "%s"' % + (revs, opts["source-url"])) + + # Do the reverse merge(s). Note: the starting revision number + # to 'svn merge' is NOT inclusive so we have to subtract one from start. + # We try to keep the number of merge operations as low as possible, + # because it is faster and reduces the number of conflicts. + rollback_intervals = minimal_merge_intervals(revs, []) + # rollback in the reverse order of merge + rollback_intervals.reverse() + for start, end in rollback_intervals: + if not record_only: + # Do the merge + svn_command("merge --force -r %d:%d %s %s" % \ + (end, start - 1, opts["source-url"], branch_dir)) + + # Write out commit message if desired + # calculate the phantom revs first + if opts["commit-file"]: + f = open(opts["commit-file"], "w") + if record_only: + print >>f, 'Recorded rollback of revisions %s via %s from ' % \ + (revs , NAME) + else: + print >>f, 'Rolled back revisions %s via %s from ' % \ + (revs , NAME) + print >>f, '%s' % opts["source-url"] + + f.close() + report('wrote commit message to "%s"' % opts["commit-file"]) + + # Update the set of merged revisions. + merged_revs = merged_revs - revs + branch_props[opts["source-pathid"]] = str(merged_revs) + set_merge_props(branch_dir, branch_props) + +def action_uninit(branch_dir, branch_props): + """Uninit SOURCE URL.""" + # Check branch directory is ready for being modified + check_dir_clean(branch_dir) + + # If the source-pathid does not have an entry in the svnmerge-integrated + # property, simply error out. 
+ if not branch_props.has_key(opts["source-pathid"]): + error('Repository-relative path "%s" does not contain merge ' + 'tracking information for "%s"' \ + % (opts["source-pathid"], branch_dir)) + + del branch_props[opts["source-pathid"]] + + # Set merge property with the selected source deleted + set_merge_props(branch_dir, branch_props) + + # Set blocked revisions for the selected source to None + set_blocked_revs(branch_dir, opts["source-pathid"], None) + + # Write out commit message if desired + if opts["commit-file"]: + f = open(opts["commit-file"], "w") + print >>f, 'Removed merge tracking for "%s" for ' % NAME + print >>f, '%s' % opts["source-url"] + f.close() + report('wrote commit message to "%s"' % opts["commit-file"]) + +############################################################################### +# Command line parsing -- options and commands management +############################################################################### + +class OptBase: + def __init__(self, *args, **kwargs): + self.help = kwargs["help"] + del kwargs["help"] + self.lflags = [] + self.sflags = [] + for a in args: + if a.startswith("--"): self.lflags.append(a) + elif a.startswith("-"): self.sflags.append(a) + else: + raise TypeError, "invalid flag name: %s" % a + if kwargs.has_key("dest"): + self.dest = kwargs["dest"] + del kwargs["dest"] + else: + if not self.lflags: + raise TypeError, "cannot deduce dest name without long options" + self.dest = self.lflags[0][2:] + if kwargs: + raise TypeError, "invalid keyword arguments: %r" % kwargs.keys() + def repr_flags(self): + f = self.sflags + self.lflags + r = f[0] + for fl in f[1:]: + r += " [%s]" % fl + return r + +class Option(OptBase): + def __init__(self, *args, **kwargs): + self.default = kwargs.setdefault("default", 0) + del kwargs["default"] + self.value = kwargs.setdefault("value", None) + del kwargs["value"] + OptBase.__init__(self, *args, **kwargs) + def apply(self, state, value): + assert value == "" + if self.value is not None: + state[self.dest] = self.value + else: + state[self.dest] += 1 + +class OptionArg(OptBase): + def __init__(self, *args, **kwargs): + self.default = kwargs["default"] + del kwargs["default"] + self.metavar = kwargs.setdefault("metavar", None) + del kwargs["metavar"] + OptBase.__init__(self, *args, **kwargs) + + if self.metavar is None: + if self.dest is not None: + self.metavar = self.dest.upper() + else: + self.metavar = "arg" + if self.default: + self.help += " (default: %s)" % self.default + def apply(self, state, value): + assert value is not None + state[self.dest] = value + def repr_flags(self): + r = OptBase.repr_flags(self) + return r + " " + self.metavar + +class CommandOpts: + class Cmd: + def __init__(self, *args): + self.name, self.func, self.usage, self.help, self.opts = args + def short_help(self): + return self.help.split(".")[0] + def __str__(self): + return self.name + def __call__(self, *args, **kwargs): + return self.func(*args, **kwargs) + + def __init__(self, global_opts, common_opts, command_table, version=None): + self.progname = NAME + self.version = version.replace("%prog", self.progname) + self.cwidth = console_width() - 2 + self.ctable = command_table.copy() + self.gopts = global_opts[:] + self.copts = common_opts[:] + self._add_builtins() + for k in self.ctable.keys(): + cmd = self.Cmd(k, *self.ctable[k]) + opts = [] + for o in cmd.opts: + if isinstance(o, types.StringType) or \ + isinstance(o, types.UnicodeType): + o = self._find_common(o) + opts.append(o) + cmd.opts = opts + 
self.ctable[k] = cmd + + def _add_builtins(self): + self.gopts.append( + Option("-h", "--help", help="show help for this command and exit")) + if self.version is not None: + self.gopts.append( + Option("-V", "--version", help="show version info and exit")) + self.ctable["help"] = (self._cmd_help, + "help [COMMAND]", + "Display help for a specific command. If COMMAND is omitted, " + "display brief command description.", + []) + + def _cmd_help(self, cmd=None, *args): + if args: + self.error("wrong number of arguments", "help") + if cmd is not None: + cmd = self._command(cmd) + self.print_command_help(cmd) + else: + self.print_command_list() + + def _paragraph(self, text, width=78): + chunks = re.split("\s+", text.strip()) + chunks.reverse() + lines = [] + while chunks: + L = chunks.pop() + while chunks and len(L) + len(chunks[-1]) + 1 <= width: + L += " " + chunks.pop() + lines.append(L) + return lines + + def _paragraphs(self, text, *args, **kwargs): + pars = text.split("\n\n") + lines = self._paragraph(pars[0], *args, **kwargs) + for p in pars[1:]: + lines.append("") + lines.extend(self._paragraph(p, *args, **kwargs)) + return lines + + def _print_wrapped(self, text, indent=0): + text = self._paragraphs(text, self.cwidth - indent) + print text.pop(0) + for t in text: + print " " * indent + t + + def _find_common(self, fl): + for o in self.copts: + if fl in o.lflags+o.sflags: + return o + assert False, fl + + def _compute_flags(self, opts, check_conflicts=True): + back = {} + sfl = "" + lfl = [] + for o in opts: + sapp = lapp = "" + if isinstance(o, OptionArg): + sapp, lapp = ":", "=" + for s in o.sflags: + if check_conflicts and back.has_key(s): + raise RuntimeError, "option conflict: %s" % s + back[s] = o + sfl += s[1:] + sapp + for l in o.lflags: + if check_conflicts and back.has_key(l): + raise RuntimeError, "option conflict: %s" % l + back[l] = o + lfl.append(l[2:] + lapp) + return sfl, lfl, back + + def _extract_command(self, args): + """ + Try to extract the command name from the argument list. This is + non-trivial because we want to allow command-specific options even + before the command itself. + """ + opts = self.gopts[:] + for cmd in self.ctable.values(): + opts.extend(cmd.opts) + sfl, lfl, _ = self._compute_flags(opts, check_conflicts=False) + + lopts,largs = getopt.getopt(args, sfl, lfl) + if not largs: + return None + return self._command(largs[0]) + + def _fancy_getopt(self, args, opts, state=None): + if state is None: + state= {} + for o in opts: + if not state.has_key(o.dest): + state[o.dest] = o.default + + sfl, lfl, back = self._compute_flags(opts) + try: + lopts,args = getopt.gnu_getopt(args, sfl, lfl) + except AttributeError: + # Before Python 2.3, there was no gnu_getopt support. + # So we can't parse intermixed positional arguments + # and options. 
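[Editorial sketch, not part of the patch: _compute_flags() above assembles the short- and long-option specs that getopt expects, with ':' and '=' marking options that take an argument. For instance, Option("-v", "--verbose") plus OptionArg("-r", "--revision", ...) yield roughly sfl = "vr:" and lfl = ["verbose", "revision="], so parsing behaves as:]

    import getopt
    optlist, args = getopt.getopt(["-r", "3-5", "-v", "merge"],
                                  "vr:", ["verbose", "revision="])
    assert optlist == [("-r", "3-5"), ("-v", "")]
    assert args == ["merge"]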
+ lopts,args = getopt.getopt(args, sfl, lfl) + + for o,v in lopts: + back[o].apply(state, v) + return state, args + + def _command(self, cmd): + if not self.ctable.has_key(cmd): + self.error("unknown command: '%s'" % cmd) + return self.ctable[cmd] + + def parse(self, args): + if not args: + self.print_small_help() + sys.exit(0) + + cmd = None + try: + cmd = self._extract_command(args) + opts = self.gopts[:] + if cmd: + opts.extend(cmd.opts) + args.remove(cmd.name) + state, args = self._fancy_getopt(args, opts) + except getopt.GetoptError, e: + self.error(e, cmd) + + # Handle builtins + if self.version is not None and state["version"]: + self.print_version() + sys.exit(0) + if state["help"]: # special case for --help + if cmd: + self.print_command_help(cmd) + sys.exit(0) + cmd = self.ctable["help"] + else: + if cmd is None: + self.error("command argument required") + if str(cmd) == "help": + cmd(*args) + sys.exit(0) + return cmd, args, state + + def error(self, s, cmd=None): + print >>sys.stderr, "%s: %s" % (self.progname, s) + if cmd is not None: + self.print_command_help(cmd) + else: + self.print_small_help() + sys.exit(1) + def print_small_help(self): + print "Type '%s help' for usage" % self.progname + def print_usage_line(self): + print "usage: %s <subcommand> [options...] [args...]\n" % self.progname + def print_command_list(self): + print "Available commands (use '%s help COMMAND' for more details):\n" \ + % self.progname + cmds = self.ctable.keys() + cmds.sort() + indent = max(map(len, cmds)) + for c in cmds: + h = self.ctable[c].short_help() + print " %-*s " % (indent, c), + self._print_wrapped(h, indent+6) + def print_command_help(self, cmd): + cmd = self.ctable[str(cmd)] + print 'usage: %s %s\n' % (self.progname, cmd.usage) + self._print_wrapped(cmd.help) + def print_opts(opts, self=self): + if not opts: return + flags = [o.repr_flags() for o in opts] + indent = max(map(len, flags)) + for f,o in zip(flags, opts): + print " %-*s :" % (indent, f), + self._print_wrapped(o.help, indent+5) + print '\nCommand options:' + print_opts(cmd.opts) + print '\nGlobal options:' + print_opts(self.gopts) + + def print_version(self): + print self.version + +############################################################################### +# Options and Commands description +############################################################################### + +global_opts = [ + Option("-F", "--force", + help="force operation even if the working copy is not clean, or " + "there are pending updates"), + Option("-n", "--dry-run", + help="don't actually change anything, just pretend; " + "implies --show-changes"), + Option("-s", "--show-changes", + help="show subversion commands that make changes"), + Option("-v", "--verbose", + help="verbose mode: output more information about progress"), + OptionArg("-u", "--username", + default=None, + help="invoke subversion commands with the supplied username"), + OptionArg("-p", "--password", + default=None, + help="invoke subversion commands with the supplied password"), + OptionArg("-c", "--config-dir", metavar="DIR", + default=None, + help="cause subversion commands to consult runtime config directory DIR"), +] + +common_opts = [ + Option("-b", "--bidirectional", + value=True, + default=False, + help="remove reflected and initialized revisions from merge candidates. 
" + "Not required but may be specified to speed things up slightly"), + OptionArg("-f", "--commit-file", metavar="FILE", + default="svnmerge-commit-message.txt", + help="set the name of the file where the suggested log message " + "is written to"), + Option("-M", "--record-only", + value=True, + default=False, + help="do not perform an actual merge of the changes, yet record " + "that a merge happened"), + OptionArg("-r", "--revision", + metavar="REVLIST", + default="", + help="specify a revision list, consisting of revision numbers " + 'and ranges separated by commas, e.g., "534,537-539,540"'), + OptionArg("-S", "--source", "--head", + default=None, + help="specify a merge source for this branch. It can be either " + "a working directory path, a full URL, or an unambiguous " + "substring of one of the locations for which merge tracking was " + "already initialized. Needed only to disambiguate in case of " + "multiple merge sources"), +] + +command_table = { + "init": (action_init, + "init [OPTION...] [SOURCE]", + """Initialize merge tracking from SOURCE on the current working + directory. + + If SOURCE is specified, all the revisions in SOURCE are marked as already + merged; if this is not correct, you can use --revision to specify the + exact list of already-merged revisions. + + If SOURCE is omitted, then it is computed from the "svn cp" history of the + current working directory (searching back for the branch point); in this + case, %s assumes that no revision has been integrated yet since + the branch point (unless you teach it with --revision).""" % NAME, + [ + "-f", "-r", # import common opts + OptionArg("-L", "--location-type", + dest="location-type", + default="path", + help="Use this type of location identifier in the new " + + "Subversion properties; 'uuid', 'url', or 'path' " + + "(default)"), + ]), + + "avail": (action_avail, + "avail [OPTION...] [PATH]", + """Show unmerged revisions available for PATH as a revision list. + If --revision is given, the revisions shown will be limited to those + also specified in the option. + + When svnmerge is used to bidirectionally merge changes between a + branch and its source, it is necessary to not merge the same changes + forth and back: e.g., if you committed a merge of a certain + revision of the branch into the source, you do not want that commit + to appear as available to merged into the branch (as the code + originated in the branch itself!). svnmerge will automatically + exclude these so-called "reflected" revisions.""", + [ + Option("-A", "--all", + dest="avail-showwhat", + value=["blocked", "avail"], + default=["avail"], + help="show both available and blocked revisions (aka ignore " + "blocked revisions)"), + "-b", + Option("-B", "--blocked", + dest="avail-showwhat", + value=["blocked"], + help="show the blocked revision list (see '%s block')" % NAME), + Option("-d", "--diff", + dest="avail-display", + value="diffs", + default="revisions", + help="show corresponding diff instead of revision list"), + Option("--summarize", + dest="avail-display", + value="summarize", + help="show summarized diff instead of revision list"), + Option("-l", "--log", + dest="avail-display", + value="logs", + help="show corresponding log history instead of revision list"), + "-r", + "-S", + ]), + + "integrated": (action_integrated, + "integrated [OPTION...] [PATH]", + """Show merged revisions available for PATH as a revision list. 
+ If --revision is given, the revisions shown will be limited to
+ those also specified in the option.""",
+ [
+ Option("-d", "--diff",
+ dest="integrated-display",
+ value="diffs",
+ default="revisions",
+ help="show corresponding diff instead of revision list"),
+ Option("-l", "--log",
+ dest="integrated-display",
+ value="logs",
+ help="show corresponding log history instead of revision list"),
+ "-r",
+ "-S",
+ ]),
+
+ "rollback": (action_rollback,
+ "rollback [OPTION...] [PATH]",
+ """Rollback previously merged-in revisions from PATH. The
+ --revision option is mandatory, and specifies which revisions
+ will be rolled back. Only the previously integrated merges
+ will be rolled back.
+
+ When manually rolling back changes, --record-only can be used to
+ instruct %s that a manual rollback of a certain revision
+ already happened, so that it can record it and offer that
+ revision for merge henceforth.""" % (NAME),
+ [
+ "-f", "-r", "-S", "-M", # import common opts
+ ]),
+
+ "merge": (action_merge,
+ "merge [OPTION...] [PATH]",
+ """Merge revisions into PATH from its source. If --revision is omitted,
+ all the available revisions will be merged. In any case, already merged-in
+ revisions will NOT be merged again.
+
+ When svnmerge is used to bidirectionally merge changes between a
+ branch and its source, it is necessary to not merge the same changes
+ forth and back: e.g., if you committed a merge of a certain
+ revision of the branch into the source, you do not want that commit
+ to appear as available to be merged into the branch (as the code
+ originated in the branch itself!). svnmerge will automatically
+ exclude these so-called "reflected" revisions.
+
+ When manually merging changes across branches, --record-only can
+ be used to instruct %s that a manual merge of a certain revision
+ already happened, so that it can record it and not offer that
+ revision for merge anymore. Conversely, when there are revisions
+ which should not be merged, use '%s block'.""" % (NAME, NAME),
+ [
+ "-b", "-f", "-r", "-S", "-M", # import common opts
+ ]),
+
+ "block": (action_block,
+ "block [OPTION...] [PATH]",
+ """Block revisions within PATH so that they disappear from the available
+ list. This is useful to hide revisions which will not be integrated.
+ If --revision is omitted, it defaults to all the available revisions.
+
+ Do not use this option to hide revisions that were manually merged
+ into the branch. Instead, use '%s merge --record-only', which
+ records that a merge happened (as opposed to a merge which should
+ not happen).""" % NAME,
+ [
+ "-f", "-r", "-S", # import common opts
+ ]),
+
+ "unblock": (action_unblock,
+ "unblock [OPTION...] [PATH]",
+ """Revert the effect of '%s block'. If --revision is omitted, all the
+ blocked revisions are unblocked.""" % NAME,
+ [
+ "-f", "-r", "-S", # import common opts
+ ]),
+
+ "uninit": (action_uninit,
+ "uninit [OPTION...] [PATH]",
+ """Remove merge tracking information from PATH. It cleans any kind of merge
+ tracking information (including the list of blocked revisions).
+    If there are multiple sources, use --source to indicate which source
+    you want to forget about.""",
+        [
+            "-f", "-S",  # import common opts
+        ]),
+}
+
+
+def main(args):
+    global opts
+
+    # Initialize default options
+    opts = default_opts.copy()
+    logs.clear()
+
+    optsparser = CommandOpts(global_opts, common_opts, command_table,
+                             version="%%prog r%s\n modified: %s\n\n"
+                                     "Copyright (C) 2004,2005 Awarix Inc.\n"
+                                     "Copyright (C) 2005, Giovanni Bajo"
+                                     % (__revision__, __date__))
+
+    cmd, args, state = optsparser.parse(args)
+    opts.update(state)
+
+    source = opts.get("source", None)
+    branch_dir = "."
+
+    if str(cmd) == "init":
+        if len(args) == 1:
+            source = args[0]
+        elif len(args) > 1:
+            optsparser.error("wrong number of parameters", cmd)
+    elif str(cmd) in command_table.keys():
+        if len(args) == 1:
+            branch_dir = args[0]
+        elif len(args) > 1:
+            optsparser.error("wrong number of parameters", cmd)
+    else:
+        assert False, "command not handled: %s" % cmd
+
+    # Validate branch_dir
+    if not is_wc(branch_dir):
+        if str(cmd) == "avail":
+            info = None
+            # Note that "svn info" does not error-exit when an invalid
+            # target is specified (as one might expect), so the
+            # try/except block is not strictly necessary; it is kept to
+            # make the intended handling explicit.
+            try:
+                info = get_svninfo(branch_dir)
+            except LaunchError:
+                pass
+            # Test that we definitely targeted a subversion directory,
+            # mirroring the purpose of the earlier is_wc() call.
+            if info is None or not info.has_key("Node Kind") or info["Node Kind"] != "directory":
+                error('"%s" is neither a valid URL, nor a working directory' % branch_dir)
+        else:
+            error('"%s" is not a subversion working directory' % branch_dir)
+
+    # Give out some hints as to potential pathids.
+    PathIdentifier.hint(branch_dir)
+    if source: PathIdentifier.hint(source)
+
+    # Extract the integration info for the branch_dir
+    branch_props = get_merge_props(branch_dir)
+
+    # Calculate source_url and source_path
+    report("calculate source path for the branch")
+    if not source:
+        if str(cmd) == "init":
+            cf_source, cf_rev, copy_committed_in_rev = get_copyfrom(branch_dir)
+            if not cf_source:
+                error('no copyfrom info available. '
+                      'Explicit source argument (-S/--source) required.')
+            opts["source-url"] = get_repo_root(branch_dir) + cf_source
+            opts["source-pathid"] = PathIdentifier.from_target(opts["source-url"])
+
+            if not opts["revision"]:
+                opts["revision"] = "1-" + cf_rev
+        else:
+            opts["source-pathid"] = get_default_source(branch_dir, branch_props)
+            opts["source-url"] = opts["source-pathid"].get_url()
+
+        assert is_pathid(opts["source-pathid"])
+        assert is_url(opts["source-url"])
+    else:
+        # The source was given as a command line argument and is stored in
+        # SOURCE. Ensure that the specified source does not end in a /,
+        # otherwise it's easy to have the same source path listed more
+        # than once in the integrated version properties, with and without
+        # trailing /'s.
+        source = rstrip(source, "/")
+        if not is_wc(source) and not is_url(source):
+            # Check if it is a substring of a pathid recorded
+            # within the branch properties.
+            found = []
+            for pathid in branch_props.keys():
+                if pathid.match_substring(source):
+                    found.append(pathid)
+            if len(found) == 1:
+                # (assumes pathid is a repository-relative-path)
+                source_pathid = found[0]
+                source = source_pathid.get_url()
+            else:
+                error('"%s" is neither a valid URL, nor an unambiguous '
+                      'substring of a repository path, nor a working directory'
+                      % source)
+        else:
+            source_pathid = PathIdentifier.from_target(source)
+
+        source_pathid = PathIdentifier.from_target(source)
+        if str(cmd) == "init" and \
+                source_pathid == PathIdentifier.from_target("."):
+            error("cannot init integration source path '%s'\n"
+                  "Its repository-relative path must differ from the "
+                  "repository-relative path of the current directory."
+                  % source_pathid)
+        opts["source-pathid"] = source_pathid
+        opts["source-url"] = target_to_url(source)
+
+    # Sanity check source_url
+    assert is_url(opts["source-url"])
+    # SVN does not support non-normalized URL (and we should not
+    # have created them)
+    assert opts["source-url"].find("/..") < 0
+
+    report('source is "%s"' % opts["source-url"])
+
+    # Get previously merged revisions (except when command is init)
+    if str(cmd) != "init":
+        opts["merged-revs"] = merge_props_to_revision_set(branch_props,
+                                                          opts["source-pathid"])
+
+    # Perform the action
+    cmd(branch_dir, branch_props)
+
+
+if __name__ == "__main__":
+    try:
+        main(sys.argv[1:])
+    except LaunchError, (ret, cmd, out):
+        err_msg = "command execution failed (exit code: %d)\n" % ret
+        err_msg += cmd + "\n"
+        err_msg += "".join(out)
+        error(err_msg)
+    except KeyboardInterrupt:
+        # Avoid traceback on CTRL+C
+        print "aborted by user"
+        sys.exit(1)
diff --git a/testing/subversion/svnserve b/testing/subversion/svnserve
new file mode 100755
index 000000000..670fee742
--- /dev/null
+++ b/testing/subversion/svnserve
@@ -0,0 +1,42 @@
+#!/bin/bash
+
+. /etc/rc.conf
+. /etc/rc.d/functions
+. /etc/conf.d/svnserve
+
+PID=`pidof -o %PPID /usr/bin/svnserve`
+case "$1" in
+  start)
+    stat_busy "Starting svnserve"
+    if [ -z "$PID" ]; then
+      if [ -n "$SVNSERVE_USER" ]; then
+        su -s '/bin/sh' $SVNSERVE_USER -c "/usr/bin/svnserve -d $SVNSERVE_ARGS" &
+      else
+        /usr/bin/svnserve -d $SVNSERVE_ARGS &
+      fi
+    fi
+    if [ ! -z "$PID" -o $? -gt 0 ]; then
+      stat_fail
+    else
+      add_daemon svnserve
+      stat_done
+    fi
+    ;;
+  stop)
+    stat_busy "Stopping svnserve"
+    [ ! -z "$PID" ] && kill $PID &> /dev/null
+    if [ $? -gt 0 ]; then
+      stat_fail
+    else
+      rm_daemon svnserve
+      stat_done
+    fi
+    ;;
+  restart)
+    $0 stop
+    sleep 1
+    $0 start
+    ;;
+  *)
+    echo "usage: $0 {start|stop|restart}"
+esac
diff --git a/testing/subversion/svnserve.conf b/testing/subversion/svnserve.conf
new file mode 100644
index 000000000..37fb7ea10
--- /dev/null
+++ b/testing/subversion/svnserve.conf
@@ -0,0 +1,7 @@
+#
+# Parameters to be passed to svnserve
+#
+#SVNSERVE_ARGS="-r /path/to/some/repos"
+SVNSERVE_ARGS=""
+
+#SVNSERVE_USER="svn"
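
The command_table in the svnmerge.py hunk above wires each subcommand name to an action callback, a usage string, a help text, and a list of applicable options, and main() later invokes the selected entry via cmd(branch_dir, branch_props). A minimal standalone sketch of that dispatch pattern, in the script's own Python 2 style (the action functions and table entries here are hypothetical stand-ins, not svnmerge's actual CommandOpts machinery):

# Illustrative dispatch-table sketch; names below are invented.
def action_init(args):
    print "init called with", args

def action_avail(args):
    print "avail called with", args

COMMANDS = {
    # name -> (callback, usage string, one-line help)
    "init":  (action_init,  "init [OPTION...] [SOURCE]", "initialize merge tracking"),
    "avail": (action_avail, "avail [OPTION...] [PATH]",  "show unmerged revisions"),
}

def dispatch(argv):
    if not argv or argv[0] not in COMMANDS:
        raise SystemExit("usage: prog <%s>" % "|".join(sorted(COMMANDS)))
    action, usage, onelinehelp = COMMANDS[argv[0]]
    action(argv[1:])

dispatch(["avail", "."])   # prints: avail called with ['.']

Keeping the table as plain data is what lets the option parser generate per-command usage and help text from the same structure it dispatches on.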
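The --revision option documented above accepts a REVLIST such as "534,537-539,540": revision numbers and inclusive ranges separated by commas. svnmerge.py parses this with its own revision-set machinery, defined outside this hunk; the following is only a simplified standalone parser to illustrate the syntax:

# Simplified REVLIST parser (illustrative; not svnmerge's own code).
def parse_revlist(revlist):
    revs = set()
    for item in revlist.split(","):
        item = item.strip()
        if not item:
            continue
        if "-" in item:
            # inclusive range, e.g. "537-539"
            lo, hi = item.split("-", 1)
            revs.update(range(int(lo), int(hi) + 1))
        else:
            revs.add(int(item))
    return revs

assert parse_revlist("534,537-539,540") == set([534, 537, 538, 539, 540])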
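In main() above, a --source argument that is neither a working copy nor a URL is resolved by substring-matching it against the pathids recorded in the branch's merge-tracking properties, and the command refuses to proceed unless exactly one pathid matches. A stripped-down sketch of that disambiguation step, using plain strings in place of PathIdentifier objects (the sample data is hypothetical):

# Disambiguate a user-supplied source against recorded merge sources.
def resolve_source(source, recorded_pathids):
    found = [p for p in recorded_pathids if source in p]
    if len(found) == 1:
        return found[0]
    raise ValueError('"%s" is not an unambiguous substring of a '
                     'recorded merge source' % source)

recorded = ["/branches/feature-x", "/trunk"]
assert resolve_source("feature", recorded) == "/branches/feature-x"

Requiring exactly one match is what makes short -S arguments safe: an ambiguous substring fails loudly instead of silently picking a source.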
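The __main__ guard above relies on the Python 2 idiom "except LaunchError, (ret, cmd, out)" to unpack the exit code, command line, and captured output that svnmerge attaches to LaunchError when an external svn invocation fails. A self-contained sketch of that convention; the exception class and the failing call below are illustrative stand-ins, not whatever helper svnmerge actually uses to run svn:

# Sketch of the LaunchError reporting convention (illustrative).
class LaunchError(Exception):
    pass

def run_or_raise(cmd, ret, out):
    # Stand-in for running an external command that fails.
    if ret != 0:
        raise LaunchError(ret, cmd, out)

try:
    run_or_raise("svn info /nonexistent", 1, ["svn: not a working copy\n"])
except LaunchError, (ret, cmd, out):
    # The tuple target unpacks the exception's args, Python 2 only.
    print "command execution failed (exit code: %d)" % ret
    print cmd
    print "".join(out),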