summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorParabola <dev@list.parabolagnulinux.org>2010-12-29 21:39:18 +0000
committerParabola <dev@list.parabolagnulinux.org>2010-12-29 21:39:18 +0000
commitebcd492d39cd0525b768a48558e50041aa0961e3 (patch)
treeaef06d0d3e3b658801d1e64b260aa1831ceeb271
parentedb96c300a560f35ed8f8d6dd87d3159e1edbbe2 (diff)
Revert "Dammit."
This reverts commit acf8c73de7e478804b5273fbdd2e7225f7976af9.
-rw-r--r--bin/activate76
-rw-r--r--bin/activate.csh32
-rw-r--r--bin/activate.fish79
-rw-r--r--bin/activate_this.py32
-rwxr-xr-xbin/easy_install10
-rwxr-xr-xbin/easy_install-2.710
-rwxr-xr-xbin/pip10
-rwxr-xr-xbin/pip-2.710
-rwxr-xr-xbin/pythonbin0 -> 3120 bytes
-rwxr-xr-xbin/python2bin0 -> 3120 bytes
l---------lib/python2.7/UserDict.py1
l---------lib/python2.7/_abcoll.py1
l---------lib/python2.7/_weakrefset.py1
l---------lib/python2.7/abc.py1
l---------lib/python2.7/codecs.py1
l---------lib/python2.7/config1
l---------lib/python2.7/copy_reg.py1
-rw-r--r--lib/python2.7/distutils/__init__.py91
-rw-r--r--lib/python2.7/distutils/distutils.cfg6
l---------lib/python2.7/encodings1
l---------lib/python2.7/fnmatch.py1
l---------lib/python2.7/genericpath.py1
l---------lib/python2.7/lib-dynload1
l---------lib/python2.7/linecache.py1
l---------lib/python2.7/locale.py1
l---------lib/python2.7/ntpath.py1
-rw-r--r--lib/python2.7/orig-prefix.txt1
l---------lib/python2.7/os.py1
l---------lib/python2.7/posixpath.py1
l---------lib/python2.7/re.py1
-rw-r--r--lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/PKG-INFO531
-rw-r--r--lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/SOURCES.txt84
-rw-r--r--lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/dependency_links.txt1
-rw-r--r--lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/entry_points.txt61
-rw-r--r--lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/top_level.txt4
-rw-r--r--lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/zip-safe1
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/easy_install.py5
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/pkg_resources.py2693
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/__init__.py104
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/archive_util.py208
-rw-r--r--lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/cli.exebin0 -> 7168 bytes
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/__init__.py22
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/alias.py82
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_egg.py540
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_rpm.py82
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_wininst.py41
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_ext.py294
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_py.py268
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/develop.py141
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/easy_install.py1865
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/egg_info.py457
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install.py124
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_egg_info.py123
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_lib.py82
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_scripts.py53
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/register.py10
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/rotate.py82
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/saveopts.py25
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/sdist.py252
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/setopt.py164
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/test.py180
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload.py183
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload_docs.py178
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/depends.py246
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/dist.py816
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/extension.py36
-rw-r--r--lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/gui.exebin0 -> 7168 bytes
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/package_index.py830
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/sandbox.py282
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/__init__.py370
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/doctest.py2679
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/server.py48
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_build_ext.py20
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_develop.py82
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_easy_install.py243
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_packageindex.py112
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_resources.py565
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_sandbox.py66
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_upload_docs.py65
-rwxr-xr-xlib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/site.py82
-rw-r--r--lib/python2.7/site-packages/easy-install.pth4
-rw-r--r--lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/PKG-INFO348
-rw-r--r--lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/SOURCES.txt57
-rw-r--r--lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/dependency_links.txt1
-rw-r--r--lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/entry_points.txt4
-rw-r--r--lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/not-zip-safe1
-rw-r--r--lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/top_level.txt1
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/__init__.py261
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/_pkgutil.py589
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/backwardcompat.py55
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/basecommand.py203
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/baseparser.py231
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/__init__.py1
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/bundle.py33
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/completion.py60
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/freeze.py109
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/help.py32
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/install.py247
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/search.py116
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/uninstall.py42
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/unzip.py9
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/zip.py346
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/download.py470
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/exceptions.py17
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/index.py686
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/locations.py45
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/log.py181
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/req.py1432
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/runner.py18
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/util.py479
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/__init__.py238
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/bazaar.py138
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/git.py204
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/mercurial.py162
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/subversion.py260
-rwxr-xr-xlib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/venv.py53
-rw-r--r--lib/python2.7/site-packages/setuptools-0.6c11-py2.7.egg-info9
-rw-r--r--lib/python2.7/site-packages/setuptools.pth1
-rw-r--r--lib/python2.7/site.py713
l---------lib/python2.7/sre.py1
l---------lib/python2.7/sre_compile.py1
l---------lib/python2.7/sre_constants.py1
l---------lib/python2.7/sre_parse.py1
l---------lib/python2.7/stat.py1
l---------lib/python2.7/types.py1
l---------lib/python2.7/warnings.py1
l---------media/admin_media1
-rw-r--r--media/airvm_button.pngbin0 -> 4931 bytes
-rw-r--r--media/archnavbar/archlogo.gifbin0 -> 1845 bytes
-rw-r--r--media/archnavbar/archlogo.pngbin0 -> 4192 bytes
-rw-r--r--media/archnavbar/archnavbar.css33
-rw-r--r--media/archnavbar/parabolabw.pngbin0 -> 6595 bytes
-rw-r--r--media/archweb-print.css11
-rw-r--r--media/archweb.css269
-rw-r--r--media/archweb.js136
-rw-r--r--media/asc.gifbin0 -> 54 bytes
-rw-r--r--media/desc.gifbin0 -> 54 bytes
-rwxr-xr-xmedia/devs/aurelien.pngbin0 -> 13049 bytes
-rwxr-xr-xmedia/devs/gtklocker.pngbin0 -> 27710 bytes
-rwxr-xr-xmedia/devs/me001-crop.pngbin0 -> 17884 bytes
-rw-r--r--media/django-jsi18n.js35
-rw-r--r--media/donate.gifbin0 -> 2951 bytes
-rw-r--r--media/favicon.icobin0 -> 1150 bytes
-rw-r--r--media/jquery-1.4.3.min.js166
-rw-r--r--media/jquery.tablesorter.min.js2
-rw-r--r--media/logos/archlinux-logo-black-1200dpi.pngbin0 -> 283011 bytes
-rw-r--r--media/logos/archlinux-logo-black-90dpi.pngbin0 -> 12971 bytes
-rw-r--r--media/logos/archlinux-logo-black-scalable.svg153
-rw-r--r--media/logos/archlinux-logo-dark-1200dpi.pngbin0 -> 291912 bytes
-rw-r--r--media/logos/archlinux-logo-dark-90dpi.pngbin0 -> 13805 bytes
-rw-r--r--media/logos/archlinux-logo-dark-scalable.svg156
-rw-r--r--media/logos/archlinux-logo-light-1200dpi.pngbin0 -> 284099 bytes
-rw-r--r--media/logos/archlinux-logo-light-90dpi.pngbin0 -> 13084 bytes
-rw-r--r--media/logos/archlinux-logo-light-scalable.svg156
-rw-r--r--media/logos/archlinux-logo-white-1200dpi.pngbin0 -> 263771 bytes
-rw-r--r--media/logos/archlinux-logo-white-90dpi.pngbin0 -> 11870 bytes
-rw-r--r--media/logos/archlinux-logo-white-scalable.svg157
-rw-r--r--media/new.pngbin0 -> 378 bytes
-rw-r--r--media/nosort.gifbin0 -> 64 bytes
-rw-r--r--media/rss.pngbin0 -> 725 bytes
-rw-r--r--media/sevenl_button.pngbin0 -> 6840 bytes
-rw-r--r--media/silhouette.pngbin0 -> 33090 bytes
-rw-r--r--media/vnet_button.pngbin0 -> 4908 bytes
-rw-r--r--repos/community.db130
-rw-r--r--repos/community.db.1130
-rw-r--r--repos/community.db.2bin0 -> 602511 bytes
-rw-r--r--repos/core.dbbin0 -> 51479 bytes
-rw-r--r--repos/core.db.1130
-rw-r--r--repos/core.db.2130
-rw-r--r--repos/extra.db130
-rw-r--r--repos/extra.db.1bin0 -> 658725 bytes
-rw-r--r--repos/extra.db.2130
-rw-r--r--repos/i686/core.db.tar.gzbin0 -> 51161 bytes
-rw-r--r--repos/x86_64/core.db.tar.gzbin0 -> 51509 bytes
174 files changed, 25774 insertions, 0 deletions
diff --git a/bin/activate b/bin/activate
new file mode 100644
index 00000000..796cc838
--- /dev/null
+++ b/bin/activate
@@ -0,0 +1,76 @@
+# This file must be used with "source bin/activate" *from bash*
+# you cannot run it directly
+
+deactivate () {
+ # reset old environment variables
+ if [ -n "$_OLD_VIRTUAL_PATH" ] ; then
+ PATH="$_OLD_VIRTUAL_PATH"
+ export PATH
+ unset _OLD_VIRTUAL_PATH
+ fi
+ if [ -n "$_OLD_VIRTUAL_PYTHONHOME" ] ; then
+ PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
+ export PYTHONHOME
+ unset _OLD_VIRTUAL_PYTHONHOME
+ fi
+
+ # This should detect bash and zsh, which have a hash command that must
+ # be called to get it to forget past commands. Without forgetting
+ # past commands the $PATH changes we made may not be respected
+ if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then
+ hash -r
+ fi
+
+ if [ -n "$_OLD_VIRTUAL_PS1" ] ; then
+ PS1="$_OLD_VIRTUAL_PS1"
+ export PS1
+ unset _OLD_VIRTUAL_PS1
+ fi
+
+ unset VIRTUAL_ENV
+ if [ ! "$1" = "nondestructive" ] ; then
+ # Self destruct!
+ unset -f deactivate
+ fi
+}
+
+# unset irrelavent variables
+deactivate nondestructive
+
+VIRTUAL_ENV="/srv/http/web"
+export VIRTUAL_ENV
+
+_OLD_VIRTUAL_PATH="$PATH"
+PATH="$VIRTUAL_ENV/bin:$PATH"
+export PATH
+
+# unset PYTHONHOME if set
+# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
+# could use `if (set -u; : $PYTHONHOME) ;` in bash
+if [ -n "$PYTHONHOME" ] ; then
+ _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
+ unset PYTHONHOME
+fi
+
+if [ -z "$VIRTUAL_ENV_DISABLE_PROMPT" ] ; then
+ _OLD_VIRTUAL_PS1="$PS1"
+ if [ "x" != x ] ; then
+ PS1="$PS1"
+ else
+ if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
+ # special case for Aspen magic directories
+ # see http://www.zetadev.com/software/aspen/
+ PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
+ else
+ PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
+ fi
+ fi
+ export PS1
+fi
+
+# This should detect bash and zsh, which have a hash command that must
+# be called to get it to forget past commands. Without forgetting
+# past commands the $PATH changes we made may not be respected
+if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then
+ hash -r
+fi
diff --git a/bin/activate.csh b/bin/activate.csh
new file mode 100644
index 00000000..0774525d
--- /dev/null
+++ b/bin/activate.csh
@@ -0,0 +1,32 @@
+# This file must be used with "source bin/activate.csh" *from csh*.
+# You cannot run it directly.
+# Created by Davide Di Blasi <davidedb@gmail.com>.
+
+alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'
+
+# Unset irrelavent variables.
+deactivate nondestructive
+
+setenv VIRTUAL_ENV "/srv/http/web"
+
+set _OLD_VIRTUAL_PATH="$PATH"
+setenv PATH "$VIRTUAL_ENV/bin:$PATH"
+
+set _OLD_VIRTUAL_PROMPT="$prompt"
+
+if ("" != "") then
+ set env_name = ""
+else
+ if (`basename "$VIRTUAL_ENV"` == "__") then
+ # special case for Aspen magic directories
+ # see http://www.zetadev.com/software/aspen/
+ set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
+ else
+ set env_name = `basename "$VIRTUAL_ENV"`
+ endif
+endif
+set prompt = "[$env_name] $prompt"
+unset env_name
+
+rehash
+
diff --git a/bin/activate.fish b/bin/activate.fish
new file mode 100644
index 00000000..39c644a7
--- /dev/null
+++ b/bin/activate.fish
@@ -0,0 +1,79 @@
+# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org)
+# you cannot run it directly
+
+function deactivate -d "Exit virtualenv and return to normal shell environment"
+ # reset old environment variables
+ if test -n "$_OLD_VIRTUAL_PATH"
+ set -gx PATH $_OLD_VIRTUAL_PATH
+ set -e _OLD_VIRTUAL_PATH
+ end
+ if test -n "$_OLD_VIRTUAL_PYTHONHOME"
+ set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
+ set -e _OLD_VIRTUAL_PYTHONHOME
+ end
+
+ if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
+ functions -e fish_prompt
+ set -e _OLD_FISH_PROMPT_OVERRIDE
+ end
+
+ set -e VIRTUAL_ENV
+ if test "$argv[1]" != "nondestructive"
+ # Self destruct!
+ functions -e deactivate
+ end
+end
+
+# unset irrelavent variables
+deactivate nondestructive
+
+set -gx VIRTUAL_ENV "/srv/http/web"
+
+set -gx _OLD_VIRTUAL_PATH $PATH
+set -gx PATH "$VIRTUAL_ENV/bin" $PATH
+
+# unset PYTHONHOME if set
+if set -q PYTHONHOME
+ set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
+ set -e PYTHONHOME
+end
+
+if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
+ # fish shell uses a function, instead of env vars,
+ # to produce the prompt. Overriding the existing function is easy.
+ # However, adding to the current prompt, instead of clobbering it,
+ # is a little more work.
+ set -l oldpromptfile (tempfile)
+ if test $status
+ # save the current fish_prompt function...
+ echo "function _old_fish_prompt" >> $oldpromptfile
+ echo -n \# >> $oldpromptfile
+ functions fish_prompt >> $oldpromptfile
+ # we've made the "_old_fish_prompt" file, source it.
+ . $oldpromptfile
+ rm -f $oldpromptfile
+
+ if test -n ""
+ # We've been given us a prompt override.
+ #
+ # FIXME: Unsure how to handle this *safely*. We could just eval()
+ # whatever is given, but the risk is a bit much.
+ echo "activate.fish: Alternative prompt prefix is not supported under fish-shell." 1>&2
+ echo "activate.fish: Alter the fish_prompt in this file as needed." 1>&2
+ end
+
+ # with the original prompt function renamed, we can override with our own.
+ function fish_prompt
+ set -l _checkbase (basename "$VIRTUAL_ENV")
+ if test $_checkbase = "__"
+ # special case for Aspen magic directories
+ # see http://www.zetadev.com/software/aspen/
+ printf "%s[%s]%s %s" (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal) (_old_fish_prompt)
+ else
+ printf "%s(%s)%s%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal) (_old_fish_prompt)
+ end
+ end
+ set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
+ end
+end
+
diff --git a/bin/activate_this.py b/bin/activate_this.py
new file mode 100644
index 00000000..aff6927d
--- /dev/null
+++ b/bin/activate_this.py
@@ -0,0 +1,32 @@
+"""By using execfile(this_file, dict(__file__=this_file)) you will
+activate this virtualenv environment.
+
+This can be used when you must use an existing Python interpreter, not
+the virtualenv bin/python
+"""
+
+try:
+ __file__
+except NameError:
+ raise AssertionError(
+ "You must run this like execfile('path/to/activate_this.py', dict(__file__='path/to/activate_this.py'))")
+import sys
+import os
+
+base = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+if sys.platform == 'win32':
+ site_packages = os.path.join(base, 'Lib', 'site-packages')
+else:
+ site_packages = os.path.join(base, 'lib', 'python%s' % sys.version[:3], 'site-packages')
+prev_sys_path = list(sys.path)
+import site
+site.addsitedir(site_packages)
+sys.real_prefix = sys.prefix
+sys.prefix = base
+# Move the added items to the front of the path:
+new_sys_path = []
+for item in list(sys.path):
+ if item not in prev_sys_path:
+ new_sys_path.append(item)
+ sys.path.remove(item)
+sys.path[:0] = new_sys_path
diff --git a/bin/easy_install b/bin/easy_install
new file mode 100755
index 00000000..eeb31a5c
--- /dev/null
+++ b/bin/easy_install
@@ -0,0 +1,10 @@
+#!/srv/http/web/bin/python2
+# EASY-INSTALL-ENTRY-SCRIPT: 'distribute==0.6.14','console_scripts','easy_install'
+__requires__ = 'distribute==0.6.14'
+import sys
+from pkg_resources import load_entry_point
+
+if __name__ == '__main__':
+ sys.exit(
+ load_entry_point('distribute==0.6.14', 'console_scripts', 'easy_install')()
+ )
diff --git a/bin/easy_install-2.7 b/bin/easy_install-2.7
new file mode 100755
index 00000000..09c3bf26
--- /dev/null
+++ b/bin/easy_install-2.7
@@ -0,0 +1,10 @@
+#!/srv/http/web/bin/python2
+# EASY-INSTALL-ENTRY-SCRIPT: 'distribute==0.6.14','console_scripts','easy_install-2.7'
+__requires__ = 'distribute==0.6.14'
+import sys
+from pkg_resources import load_entry_point
+
+if __name__ == '__main__':
+ sys.exit(
+ load_entry_point('distribute==0.6.14', 'console_scripts', 'easy_install-2.7')()
+ )
diff --git a/bin/pip b/bin/pip
new file mode 100755
index 00000000..2d851f27
--- /dev/null
+++ b/bin/pip
@@ -0,0 +1,10 @@
+#!/srv/http/web/bin/python2
+# EASY-INSTALL-ENTRY-SCRIPT: 'pip==0.8.1','console_scripts','pip'
+__requires__ = 'pip==0.8.1'
+import sys
+from pkg_resources import load_entry_point
+
+if __name__ == '__main__':
+ sys.exit(
+ load_entry_point('pip==0.8.1', 'console_scripts', 'pip')()
+ )
diff --git a/bin/pip-2.7 b/bin/pip-2.7
new file mode 100755
index 00000000..23e5741e
--- /dev/null
+++ b/bin/pip-2.7
@@ -0,0 +1,10 @@
+#!/srv/http/web/bin/python2
+# EASY-INSTALL-ENTRY-SCRIPT: 'pip==0.8.1','console_scripts','pip-2.7'
+__requires__ = 'pip==0.8.1'
+import sys
+from pkg_resources import load_entry_point
+
+if __name__ == '__main__':
+ sys.exit(
+ load_entry_point('pip==0.8.1', 'console_scripts', 'pip-2.7')()
+ )
diff --git a/bin/python b/bin/python
new file mode 100755
index 00000000..888b52ee
--- /dev/null
+++ b/bin/python
Binary files differ
diff --git a/bin/python2 b/bin/python2
new file mode 100755
index 00000000..888b52ee
--- /dev/null
+++ b/bin/python2
Binary files differ
diff --git a/lib/python2.7/UserDict.py b/lib/python2.7/UserDict.py
new file mode 120000
index 00000000..1dcde33c
--- /dev/null
+++ b/lib/python2.7/UserDict.py
@@ -0,0 +1 @@
+/usr/lib/python2.7/UserDict.py \ No newline at end of file
diff --git a/lib/python2.7/_abcoll.py b/lib/python2.7/_abcoll.py
new file mode 120000
index 00000000..e39c38d2
--- /dev/null
+++ b/lib/python2.7/_abcoll.py
@@ -0,0 +1 @@
+/usr/lib/python2.7/_abcoll.py \ No newline at end of file
diff --git a/lib/python2.7/_weakrefset.py b/lib/python2.7/_weakrefset.py
new file mode 120000
index 00000000..a3c1cd4f
--- /dev/null
+++ b/lib/python2.7/_weakrefset.py
@@ -0,0 +1 @@
+/usr/lib/python2.7/_weakrefset.py \ No newline at end of file
diff --git a/lib/python2.7/abc.py b/lib/python2.7/abc.py
new file mode 120000
index 00000000..cb3e5d16
--- /dev/null
+++ b/lib/python2.7/abc.py
@@ -0,0 +1 @@
+/usr/lib/python2.7/abc.py \ No newline at end of file
diff --git a/lib/python2.7/codecs.py b/lib/python2.7/codecs.py
new file mode 120000
index 00000000..50169dc7
--- /dev/null
+++ b/lib/python2.7/codecs.py
@@ -0,0 +1 @@
+/usr/lib/python2.7/codecs.py \ No newline at end of file
diff --git a/lib/python2.7/config b/lib/python2.7/config
new file mode 120000
index 00000000..154af7a5
--- /dev/null
+++ b/lib/python2.7/config
@@ -0,0 +1 @@
+/usr/lib/python2.7/config \ No newline at end of file
diff --git a/lib/python2.7/copy_reg.py b/lib/python2.7/copy_reg.py
new file mode 120000
index 00000000..5dc0af34
--- /dev/null
+++ b/lib/python2.7/copy_reg.py
@@ -0,0 +1 @@
+/usr/lib/python2.7/copy_reg.py \ No newline at end of file
diff --git a/lib/python2.7/distutils/__init__.py b/lib/python2.7/distutils/__init__.py
new file mode 100644
index 00000000..7ebb41c0
--- /dev/null
+++ b/lib/python2.7/distutils/__init__.py
@@ -0,0 +1,91 @@
+import os
+import sys
+import warnings
+import opcode # opcode is not a virtualenv module, so we can use it to find the stdlib
+ # Important! To work on pypy, this must be a module that resides in the
+ # lib-python/modified-x.y.z directory
+
+dirname = os.path.dirname
+
+distutils_path = os.path.join(os.path.dirname(opcode.__file__), 'distutils')
+if os.path.normpath(distutils_path) == os.path.dirname(os.path.normpath(__file__)):
+ warnings.warn(
+ "The virtualenv distutils package at %s appears to be in the same location as the system distutils?")
+else:
+ __path__.insert(0, distutils_path)
+ exec open(os.path.join(distutils_path, '__init__.py')).read()
+
+import dist
+import sysconfig
+
+
+## patch build_ext (distutils doesn't know how to get the libs directory
+## path on windows - it hardcodes the paths around the patched sys.prefix)
+
+if sys.platform == 'win32':
+ from distutils.command.build_ext import build_ext as old_build_ext
+ class build_ext(old_build_ext):
+ def finalize_options (self):
+ if self.library_dirs is None:
+ self.library_dirs = []
+ elif isinstance(self.library_dirs, basestring):
+ self.library_dirs = self.library_dirs.split(os.pathsep)
+
+ self.library_dirs.insert(0, os.path.join(sys.real_prefix, "Libs"))
+ old_build_ext.finalize_options(self)
+
+ from distutils.command import build_ext as build_ext_module
+ build_ext_module.build_ext = build_ext
+
+## distutils.dist patches:
+
+old_find_config_files = dist.Distribution.find_config_files
+def find_config_files(self):
+ found = old_find_config_files(self)
+ system_distutils = os.path.join(distutils_path, 'distutils.cfg')
+ #if os.path.exists(system_distutils):
+ # found.insert(0, system_distutils)
+ # What to call the per-user config file
+ if os.name == 'posix':
+ user_filename = ".pydistutils.cfg"
+ else:
+ user_filename = "pydistutils.cfg"
+ user_filename = os.path.join(sys.prefix, user_filename)
+ if os.path.isfile(user_filename):
+ for item in list(found):
+ if item.endswith('pydistutils.cfg'):
+ found.remove(item)
+ found.append(user_filename)
+ return found
+dist.Distribution.find_config_files = find_config_files
+
+## distutils.sysconfig patches:
+
+old_get_python_inc = sysconfig.get_python_inc
+def sysconfig_get_python_inc(plat_specific=0, prefix=None):
+ if prefix is None:
+ prefix = sys.real_prefix
+ return old_get_python_inc(plat_specific, prefix)
+sysconfig_get_python_inc.__doc__ = old_get_python_inc.__doc__
+sysconfig.get_python_inc = sysconfig_get_python_inc
+
+old_get_python_lib = sysconfig.get_python_lib
+def sysconfig_get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
+ if standard_lib and prefix is None:
+ prefix = sys.real_prefix
+ return old_get_python_lib(plat_specific, standard_lib, prefix)
+sysconfig_get_python_lib.__doc__ = old_get_python_lib.__doc__
+sysconfig.get_python_lib = sysconfig_get_python_lib
+
+old_get_config_vars = sysconfig.get_config_vars
+def sysconfig_get_config_vars(*args):
+ real_vars = old_get_config_vars(*args)
+ if sys.platform == 'win32':
+ lib_dir = os.path.join(sys.real_prefix, "libs")
+ if isinstance(real_vars, dict) and 'LIBDIR' not in real_vars:
+ real_vars['LIBDIR'] = lib_dir # asked for all
+ elif isinstance(real_vars, list) and 'LIBDIR' in args:
+ real_vars = real_vars + [lib_dir] # asked for list
+ return real_vars
+sysconfig_get_config_vars.__doc__ = old_get_config_vars.__doc__
+sysconfig.get_config_vars = sysconfig_get_config_vars
diff --git a/lib/python2.7/distutils/distutils.cfg b/lib/python2.7/distutils/distutils.cfg
new file mode 100644
index 00000000..1af230ec
--- /dev/null
+++ b/lib/python2.7/distutils/distutils.cfg
@@ -0,0 +1,6 @@
+# This is a config file local to this virtualenv installation
+# You may include options that will be used by all distutils commands,
+# and by easy_install. For instance:
+#
+# [easy_install]
+# find_links = http://mylocalsite
diff --git a/lib/python2.7/encodings b/lib/python2.7/encodings
new file mode 120000
index 00000000..1250ad86
--- /dev/null
+++ b/lib/python2.7/encodings
@@ -0,0 +1 @@
+/usr/lib/python2.7/encodings \ No newline at end of file
diff --git a/lib/python2.7/fnmatch.py b/lib/python2.7/fnmatch.py
new file mode 120000
index 00000000..ec3e10cf
--- /dev/null
+++ b/lib/python2.7/fnmatch.py
@@ -0,0 +1 @@
+/usr/lib/python2.7/fnmatch.py \ No newline at end of file
diff --git a/lib/python2.7/genericpath.py b/lib/python2.7/genericpath.py
new file mode 120000
index 00000000..cb8897ce
--- /dev/null
+++ b/lib/python2.7/genericpath.py
@@ -0,0 +1 @@
+/usr/lib/python2.7/genericpath.py \ No newline at end of file
diff --git a/lib/python2.7/lib-dynload b/lib/python2.7/lib-dynload
new file mode 120000
index 00000000..c706a1eb
--- /dev/null
+++ b/lib/python2.7/lib-dynload
@@ -0,0 +1 @@
+/usr/lib/python2.7/lib-dynload \ No newline at end of file
diff --git a/lib/python2.7/linecache.py b/lib/python2.7/linecache.py
new file mode 120000
index 00000000..943c4297
--- /dev/null
+++ b/lib/python2.7/linecache.py
@@ -0,0 +1 @@
+/usr/lib/python2.7/linecache.py \ No newline at end of file
diff --git a/lib/python2.7/locale.py b/lib/python2.7/locale.py
new file mode 120000
index 00000000..92c243c6
--- /dev/null
+++ b/lib/python2.7/locale.py
@@ -0,0 +1 @@
+/usr/lib/python2.7/locale.py \ No newline at end of file
diff --git a/lib/python2.7/ntpath.py b/lib/python2.7/ntpath.py
new file mode 120000
index 00000000..5659ae14
--- /dev/null
+++ b/lib/python2.7/ntpath.py
@@ -0,0 +1 @@
+/usr/lib/python2.7/ntpath.py \ No newline at end of file
diff --git a/lib/python2.7/orig-prefix.txt b/lib/python2.7/orig-prefix.txt
new file mode 100644
index 00000000..e25db584
--- /dev/null
+++ b/lib/python2.7/orig-prefix.txt
@@ -0,0 +1 @@
+/usr \ No newline at end of file
diff --git a/lib/python2.7/os.py b/lib/python2.7/os.py
new file mode 120000
index 00000000..950fc8d2
--- /dev/null
+++ b/lib/python2.7/os.py
@@ -0,0 +1 @@
+/usr/lib/python2.7/os.py \ No newline at end of file
diff --git a/lib/python2.7/posixpath.py b/lib/python2.7/posixpath.py
new file mode 120000
index 00000000..30cb8ca5
--- /dev/null
+++ b/lib/python2.7/posixpath.py
@@ -0,0 +1 @@
+/usr/lib/python2.7/posixpath.py \ No newline at end of file
diff --git a/lib/python2.7/re.py b/lib/python2.7/re.py
new file mode 120000
index 00000000..56a07316
--- /dev/null
+++ b/lib/python2.7/re.py
@@ -0,0 +1 @@
+/usr/lib/python2.7/re.py \ No newline at end of file
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/PKG-INFO b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/PKG-INFO
new file mode 100644
index 00000000..629e916b
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/PKG-INFO
@@ -0,0 +1,531 @@
+Metadata-Version: 1.0
+Name: distribute
+Version: 0.6.14
+Summary: Easily download, build, install, upgrade, and uninstall Python packages
+Home-page: http://packages.python.org/distribute
+Author: The fellowship of the packaging
+Author-email: distutils-sig@python.org
+License: PSF or ZPL
+Description: ===============================
+ Installing and Using Distribute
+ ===============================
+
+ .. contents:: **Table of Contents**
+
+ -----------
+ Disclaimers
+ -----------
+
+ About the fork
+ ==============
+
+ `Distribute` is a fork of the `Setuptools` project.
+
+ Distribute is intended to replace Setuptools as the standard method
+ for working with Python module distributions.
+
+ The fork has two goals:
+
+ - Providing a backward compatible version to replace Setuptools
+ and make all distributions that depend on Setuptools work as
+ before, but with less bugs and behaviorial issues.
+
+ This work is done in the 0.6.x series.
+
+ Starting with version 0.6.2, Distribute supports Python 3.
+ Installing and using distribute for Python 3 code works exactly
+ the same as for Python 2 code, but Distribute also helps you to support
+ Python 2 and Python 3 from the same source code by letting you run 2to3
+ on the code as a part of the build process, by setting the keyword parameter
+ ``use_2to3`` to True. See http://packages.python.org/distribute for more
+ information.
+
+ - Refactoring the code, and releasing it in several distributions.
+ This work is being done in the 0.7.x series but not yet released.
+
+ The roadmap is still evolving, and the page that is up-to-date is
+ located at : `http://packages.python.org/distribute/roadmap`.
+
+ If you install `Distribute` and want to switch back for any reason to
+ `Setuptools`, get to the `Uninstallation instructions`_ section.
+
+ More documentation
+ ==================
+
+ You can get more information in the Sphinx-based documentation, located
+ at http://packages.python.org/distribute. This documentation includes the old
+ Setuptools documentation that is slowly replaced, and brand new content.
+
+ About the installation process
+ ==============================
+
+ The `Distribute` installer modifies your installation by de-activating an
+ existing installation of `Setuptools` in a bootstrap process. This process
+ has been tested in various installation schemes and contexts but in case of a
+ bug during this process your Python installation might be left in a broken
+ state. Since all modified files and directories are copied before the
+ installation starts, you will be able to get back to a normal state by reading
+ the instructions in the `Uninstallation instructions`_ section.
+
+ In any case, it is recommended to save you `site-packages` directory before
+ you start the installation of `Distribute`.
+
+ -------------------------
+ Installation Instructions
+ -------------------------
+
+ Distribute is only released as a source distribution.
+
+ It can be installed using pip, and can be done so with the source tarball,
+ or by using the ``distribute_setup.py`` script provided online.
+
+ ``distribute_setup.py`` is the simplest and preferred way on all systems.
+
+ distribute_setup.py
+ ===================
+
+ Download
+ `distribute_setup.py <http://python-distribute.org/distribute_setup.py>`_
+ and execute it, using the Python interpreter of your choice.
+
+ If your shell has the ``curl`` program you can do::
+
+ $ curl -O http://python-distribute.org/distribute_setup.py
+ $ python distribute_setup.py
+
+ Notice this file is also provided in the source release.
+
+ pip
+ ===
+
+ Run easy_install or pip::
+
+ $ pip install distribute
+
+ Source installation
+ ===================
+
+ Download the source tarball, uncompress it, then run the install command::
+
+ $ curl -O http://pypi.python.org/packages/source/d/distribute/distribute-0.6.14.tar.gz
+ $ tar -xzvf distribute-0.6.14.tar.gz
+ $ cd distribute-0.6.14
+ $ python setup.py install
+
+ ---------------------------
+ Uninstallation Instructions
+ ---------------------------
+
+ Like other distutils-based distributions, Distribute doesn't provide an
+ uninstaller yet. It's all done manually! We are all waiting for PEP 376
+ support in Python.
+
+ Distribute is installed in three steps:
+
+ 1. it gets out of the way an existing installation of Setuptools
+ 2. it installs a `fake` setuptools installation
+ 3. it installs distribute
+
+ Distribute can be removed like this:
+
+ - remove the ``distribute*.egg`` file located in your site-packages directory
+ - remove the ``setuptools.pth`` file located in you site-packages directory
+ - remove the easy_install script located in you ``sys.prefix/bin`` directory
+ - remove the ``setuptools*.egg`` directory located in your site-packages directory,
+ if any.
+
+ If you want to get back to setuptools:
+
+ - reinstall setuptools using its instruction.
+
+ Lastly:
+
+ - remove the *.OLD.* directory located in your site-packages directory if any,
+ **once you have checked everything was working correctly again**.
+
+ -------------------------
+ Quick help for developers
+ -------------------------
+
+ To create an egg which is compatible with Distribute, use the same
+ practice as with Setuptools, e.g.::
+
+ from setuptools import setup
+
+ setup(...
+ )
+
+ To use `pkg_resources` to access data files in the egg, you should
+ require the Setuptools distribution explicitly::
+
+ from setuptools import setup
+
+ setup(...
+ install_requires=['setuptools']
+ )
+
+ Only if you need Distribute-specific functionality should you depend
+ on it explicitly. In this case, replace the Setuptools dependency::
+
+ from setuptools import setup
+
+ setup(...
+ install_requires=['distribute']
+ )
+
+ -----------
+ Install FAQ
+ -----------
+
+ - **Why is Distribute wrapping my Setuptools installation?**
+
+ Since Distribute is a fork, and since it provides the same package
+ and modules, it renames the existing Setuptools egg and inserts a
+ new one which merely wraps the Distribute code. This way, full
+ backwards compatibility is kept for packages which rely on the
+ Setuptools modules.
+
+ At the same time, packages can meet their dependency on Setuptools
+ without actually installing it (which would disable Distribute).
+
+ - **How does Distribute interact with virtualenv?**
+
+ Everytime you create a virtualenv it will install setuptools by default.
+ You either need to re-install Distribute in it right after or pass the
+ ``--distribute`` option when creating it.
+
+ Once installed, your virtualenv will use Distribute transparently.
+
+ Although, if you have Setuptools installed in your system-wide Python,
+ and if the virtualenv you are in was generated without the `--no-site-packages`
+ option, the Distribute installation will stop.
+
+ You need in this case to build a virtualenv with the `--no-site-packages`
+ option or to install `Distribute` globally.
+
+ - **How does Distribute interacts with zc.buildout?**
+
+ You can use Distribute in your zc.buildout, with the --distribute option,
+ starting at zc.buildout 1.4.2::
+
+ $ python bootstrap.py --distribute
+
+ For previous zc.buildout versions, *the only thing* you need to do
+ is use the bootstrap at `http://python-distribute.org/bootstrap.py`. Run
+ that bootstrap and ``bin/buildout`` (and all other buildout-generated
+ scripts) will transparently use distribute instead of setuptools. You do
+ not need a specific buildout release.
+
+ A shared eggs directory is no problem (since 0.6.6): the setuptools egg is
+ left in place unmodified. So other buildouts that do not yet use the new
+ bootstrap continue to work just fine. And there is no need to list
+ ``distribute`` somewhere in your eggs: using the bootstrap is enough.
+
+ The source code for the bootstrap script is located at
+ `http://bitbucket.org/tarek/buildout-distribute`.
+
+
+
+ -----------------------------
+ Feedback and getting involved
+ -----------------------------
+
+ - Mailing list: http://mail.python.org/mailman/listinfo/distutils-sig
+ - Issue tracker: http://bitbucket.org/tarek/distribute/issues/
+ - Code Repository: http://bitbucket.org/tarek/distribute
+
+ =======
+ CHANGES
+ =======
+
+ ------
+ 0.6.14
+ ------
+
+ * Issue 170: Fixed unittest failure. Thanks to Toshio.
+ * Issue 171: Fixed race condition in unittests cause deadlocks in test suite.
+ * Issue 143: Fixed a lookup issue with easy_install.
+ Thanks to David and Zooko.
+ * Issue 174: Fixed the edit mode when its used with setuptools itself
+
+ ------
+ 0.6.13
+ ------
+
+ * Issue 160: 2.7 gives ValueError("Invalid IPv6 URL")
+ * Issue 150: Fixed using ~/.local even in a --no-site-packages virtualenv
+ * Issue 163: scan index links before external links, and don't use the md5 when
+ comparing two distributions
+
+ ------
+ 0.6.12
+ ------
+
+ * Issue 149: Fixed various failures on 2.3/2.4
+
+ ------
+ 0.6.11
+ ------
+
+ * Found another case of SandboxViolation - fixed
+ * Issue 15 and 48: Introduced a socket timeout of 15 seconds on url openings
+ * Added indexsidebar.html into MANIFEST.in
+ * Issue 108: Fixed TypeError with Python3.1
+ * Issue 121: Fixed --help install command trying to actually install.
+ * Issue 112: Added an os.makedirs so that Tarek's solution will work.
+ * Issue 133: Added --no-find-links to easy_install
+ * Added easy_install --user
+ * Issue 100: Fixed develop --user not taking '.' in PYTHONPATH into account
+ * Issue 134: removed spurious UserWarnings. Patch by VanLindberg
+ * Issue 138: cant_write_to_target error when setup_requires is used.
+ * Issue 147: respect the sys.dont_write_bytecode flag
+
+ ------
+ 0.6.10
+ ------
+
+ * Reverted change made for the DistributionNotFound exception because
+ zc.buildout uses the exception message to get the name of the
+ distribution.
+
+ -----
+ 0.6.9
+ -----
+
+ * Issue 90: unknown setuptools version can be added in the working set
+ * Issue 87: setupt.py doesn't try to convert distribute_setup.py anymore
+ Initial Patch by arfrever.
+ * Issue 89: added a side bar with a download link to the doc.
+ * Issue 86: fixed missing sentence in pkg_resources doc.
+ * Added a nicer error message when a DistributionNotFound is raised.
+ * Issue 80: test_develop now works with Python 3.1
+ * Issue 93: upload_docs now works if there is an empty sub-directory.
+ * Issue 70: exec bit on non-exec files
+ * Issue 99: now the standalone easy_install command doesn't uses a
+ "setup.cfg" if any exists in the working directory. It will use it
+ only if triggered by ``install_requires`` from a setup.py call
+ (install, develop, etc).
+ * Issue 101: Allowing ``os.devnull`` in Sandbox
+ * Issue 92: Fixed the "no eggs" found error with MacPort
+ (platform.mac_ver() fails)
+ * Issue 103: test_get_script_header_jython_workaround not run
+ anymore under py3 with C or POSIX local. Contributed by Arfrever.
+ * Issue 104: remvoved the assertion when the installation fails,
+ with a nicer message for the end user.
+ * Issue 100: making sure there's no SandboxViolation when
+ the setup script patches setuptools.
+
+ -----
+ 0.6.8
+ -----
+
+ * Added "check_packages" in dist. (added in Setuptools 0.6c11)
+ * Fixed the DONT_PATCH_SETUPTOOLS state.
+
+ -----
+ 0.6.7
+ -----
+
+ * Issue 58: Added --user support to the develop command
+ * Issue 11: Generated scripts now wrap their call to the script entry point
+ in the standard "if name == 'main'"
+ * Added the 'DONT_PATCH_SETUPTOOLS' environment variable, so virtualenv
+ can drive an installation that doesn't patch a global setuptools.
+ * Reviewed unladen-swallow specific change from
+ http://code.google.com/p/unladen-swallow/source/detail?spec=svn875&r=719
+ and determined that it no longer applies. Distribute should work fine with
+ Unladen Swallow 2009Q3.
+ * Issue 21: Allow PackageIndex.open_url to gracefully handle all cases of a
+ httplib.HTTPException instead of just InvalidURL and BadStatusLine.
+ * Removed virtual-python.py from this distribution and updated documentation
+ to point to the actively maintained virtualenv instead.
+ * Issue 64: use_setuptools no longer rebuilds the distribute egg every
+ time it is run
+ * use_setuptools now properly respects the requested version
+ * use_setuptools will no longer try to import a distribute egg for the
+ wrong Python version
+ * Issue 74: no_fake should be True by default.
+ * Issue 72: avoid a bootstrapping issue with easy_install -U
+
+ -----
+ 0.6.6
+ -----
+
+ * Unified the bootstrap file so it works on both py2.x and py3k without 2to3
+ (patch by Holger Krekel)
+
+ -----
+ 0.6.5
+ -----
+
+ * Issue 65: cli.exe and gui.exe are now generated at build time,
+ depending on the platform in use.
+
+ * Issue 67: Fixed doc typo (PEP 381/382)
+
+ * Distribute no longer shadows setuptools if we require a 0.7-series
+ setuptools. And an error is raised when installing a 0.7 setuptools with
+ distribute.
+
+ * When run from within buildout, no attempt is made to modify an existing
+ setuptools egg, whether in a shared egg directory or a system setuptools.
+
+ * Fixed a hole in sandboxing allowing builtin file to write outside of
+ the sandbox.
+
+ -----
+ 0.6.4
+ -----
+
+ * Added the generation of `distribute_setup_3k.py` during the release.
+ This close http://bitbucket.org/tarek/distribute/issue/52.
+
+ * Added an upload_docs command to easily upload project documentation to
+ PyPI's http://packages.python.org.
+ This close http://bitbucket.org/tarek/distribute/issue/56.
+
+ * Fixed a bootstrap bug on the use_setuptools() API.
+
+ -----
+ 0.6.3
+ -----
+
+ setuptools
+ ==========
+
+ * Fixed a bunch of calls to file() that caused crashes on Python 3.
+
+ bootstrapping
+ =============
+
+ * Fixed a bug in sorting that caused bootstrap to fail on Python 3.
+
+ -----
+ 0.6.2
+ -----
+
+ setuptools
+ ==========
+
+ * Added Python 3 support; see docs/python3.txt.
+ This closes http://bugs.python.org/setuptools/issue39.
+
+ * Added option to run 2to3 automatically when installing on Python 3.
+ This closes http://bitbucket.org/tarek/distribute/issue/31.
+
+ * Fixed invalid usage of requirement.parse, that broke develop -d.
+ This closes http://bugs.python.org/setuptools/issue44.
+
+ * Fixed script launcher for 64-bit Windows.
+ This closes http://bugs.python.org/setuptools/issue2.
+
+ * KeyError when compiling extensions.
+ This closes http://bugs.python.org/setuptools/issue41.
+
+ bootstrapping
+ =============
+
+ * Fixed bootstrap not working on Windows.
+ This closes http://bitbucket.org/tarek/distribute/issue/49.
+
+ * Fixed 2.6 dependencies.
+ This closes http://bitbucket.org/tarek/distribute/issue/50.
+
+ * Make sure setuptools is patched when running through easy_install
+ This closes http://bugs.python.org/setuptools/issue40.
+
+ -----
+ 0.6.1
+ -----
+
+ setuptools
+ ==========
+
+ * package_index.urlopen now catches BadStatusLine and malformed url errors.
+ This closes http://bitbucket.org/tarek/distribute/issue/16 and
+ http://bitbucket.org/tarek/distribute/issue/18.
+
+ * zip_ok is now False by default. This closes
+ http://bugs.python.org/setuptools/issue33.
+
+ * Fixed invalid URL error catching. http://bugs.python.org/setuptools/issue20.
+
+ * Fixed invalid bootstraping with easy_install installation
+ http://bitbucket.org/tarek/distribute/issue/40.
+ Thanks to Florian Schulze for the help.
+
+ * Removed buildout/bootstrap.py. A new repository will create a specific
+ bootstrap.py script.
+
+
+ bootstrapping
+ =============
+
+ * The boostrap process leave setuptools alone if detected in the system
+ and --root or --prefix is provided, but is not in the same location.
+ This closes http://bitbucket.org/tarek/distribute/issue/10.
+
+ ---
+ 0.6
+ ---
+
+ setuptools
+ ==========
+
+ * Packages required at build time where not fully present at install time.
+ This closes http://bitbucket.org/tarek/distribute/issue/12.
+
+ * Protected against failures in tarfile extraction. This closes
+ http://bitbucket.org/tarek/distribute/issue/10.
+
+ * Made Jython api_tests.txt doctest compatible. This closes
+ http://bitbucket.org/tarek/distribute/issue/7.
+
+ * sandbox.py replaced builtin type file with builtin function open. This
+ closes http://bitbucket.org/tarek/distribute/issue/6.
+
+ * Immediately close all file handles. This closes
+ http://bitbucket.org/tarek/distribute/issue/3.
+
+ * Added compatibility with Subversion 1.6. This references
+ http://bitbucket.org/tarek/distribute/issue/1.
+
+ pkg_resources
+ =============
+
+ * Avoid a call to /usr/bin/sw_vers on OSX and use the official platform API
+ instead. Based on a patch from ronaldoussoren. This closes
+ http://bitbucket.org/tarek/distribute/issue/5.
+
+ * Fixed a SandboxViolation for mkdir that could occur in certain cases.
+ This closes http://bitbucket.org/tarek/distribute/issue/13.
+
+ * Allow to find_on_path on systems with tight permissions to fail gracefully.
+ This closes http://bitbucket.org/tarek/distribute/issue/9.
+
+ * Corrected inconsistency between documentation and code of add_entry.
+ This closes http://bitbucket.org/tarek/distribute/issue/8.
+
+ * Immediately close all file handles. This closes
+ http://bitbucket.org/tarek/distribute/issue/3.
+
+ easy_install
+ ============
+
+ * Immediately close all file handles. This closes
+ http://bitbucket.org/tarek/distribute/issue/3.
+
+
+Keywords: CPAN PyPI distutils eggs package management
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: License :: OSI Approved :: Zope Public License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: System :: Archiving :: Packaging
+Classifier: Topic :: System :: Systems Administration
+Classifier: Topic :: Utilities
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/SOURCES.txt b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/SOURCES.txt
new file mode 100644
index 00000000..fc0a26bd
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/SOURCES.txt
@@ -0,0 +1,84 @@
+CHANGES.txt
+CONTRIBUTORS.txt
+DEVGUIDE.txt
+MANIFEST.in
+README.txt
+distribute_setup.py
+easy_install.py
+launcher.c
+pkg_resources.py
+setup.cfg
+setup.py
+site.py
+distribute.egg-info/PKG-INFO
+distribute.egg-info/SOURCES.txt
+distribute.egg-info/dependency_links.txt
+distribute.egg-info/entry_points.txt
+distribute.egg-info/top_level.txt
+distribute.egg-info/zip-safe
+docs/Makefile
+docs/conf.py
+docs/easy_install.txt
+docs/index.txt
+docs/pkg_resources.txt
+docs/python3.txt
+docs/roadmap.txt
+docs/setuptools.txt
+docs/using.txt
+docs/_templates/indexsidebar.html
+docs/_theme/nature/theme.conf
+docs/_theme/nature/static/nature.css_t
+docs/_theme/nature/static/pygments.css
+setuptools/__init__.py
+setuptools/archive_util.py
+setuptools/cli.exe
+setuptools/depends.py
+setuptools/dist.py
+setuptools/extension.py
+setuptools/gui.exe
+setuptools/package_index.py
+setuptools/sandbox.py
+setuptools/command/__init__.py
+setuptools/command/alias.py
+setuptools/command/bdist_egg.py
+setuptools/command/bdist_rpm.py
+setuptools/command/bdist_wininst.py
+setuptools/command/build_ext.py
+setuptools/command/build_py.py
+setuptools/command/develop.py
+setuptools/command/easy_install.py
+setuptools/command/egg_info.py
+setuptools/command/install.py
+setuptools/command/install_egg_info.py
+setuptools/command/install_lib.py
+setuptools/command/install_scripts.py
+setuptools/command/register.py
+setuptools/command/rotate.py
+setuptools/command/saveopts.py
+setuptools/command/sdist.py
+setuptools/command/setopt.py
+setuptools/command/test.py
+setuptools/command/upload.py
+setuptools/command/upload_docs.py
+setuptools/tests/__init__.py
+setuptools/tests/doctest.py
+setuptools/tests/server.py
+setuptools/tests/test_build_ext.py
+setuptools/tests/test_develop.py
+setuptools/tests/test_easy_install.py
+setuptools/tests/test_packageindex.py
+setuptools/tests/test_resources.py
+setuptools/tests/test_sandbox.py
+setuptools/tests/test_upload_docs.py
+setuptools/tests/win_script_wrapper.txt
+setuptools/tests/indexes/test_links_priority/external.html
+setuptools/tests/indexes/test_links_priority/simple/foobar/index.html
+tests/api_tests.txt
+tests/install_test.py
+tests/manual_test.py
+tests/test_distribute_setup.py
+tests/shlib_test/hello.c
+tests/shlib_test/hello.pyx
+tests/shlib_test/hellolib.c
+tests/shlib_test/setup.py
+tests/shlib_test/test_hello.py \ No newline at end of file
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/dependency_links.txt b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/dependency_links.txt
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/entry_points.txt b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/entry_points.txt
new file mode 100644
index 00000000..9fd41758
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/entry_points.txt
@@ -0,0 +1,61 @@
+[distutils.commands]
+bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm
+rotate = setuptools.command.rotate:rotate
+develop = setuptools.command.develop:develop
+setopt = setuptools.command.setopt:setopt
+build_py = setuptools.command.build_py:build_py
+saveopts = setuptools.command.saveopts:saveopts
+egg_info = setuptools.command.egg_info:egg_info
+register = setuptools.command.register:register
+upload_docs = setuptools.command.upload_docs:upload_docs
+install_egg_info = setuptools.command.install_egg_info:install_egg_info
+alias = setuptools.command.alias:alias
+easy_install = setuptools.command.easy_install:easy_install
+install_scripts = setuptools.command.install_scripts:install_scripts
+bdist_wininst = setuptools.command.bdist_wininst:bdist_wininst
+bdist_egg = setuptools.command.bdist_egg:bdist_egg
+install = setuptools.command.install:install
+test = setuptools.command.test:test
+install_lib = setuptools.command.install_lib:install_lib
+build_ext = setuptools.command.build_ext:build_ext
+sdist = setuptools.command.sdist:sdist
+
+[egg_info.writers]
+dependency_links.txt = setuptools.command.egg_info:overwrite_arg
+requires.txt = setuptools.command.egg_info:write_requirements
+PKG-INFO = setuptools.command.egg_info:write_pkg_info
+eager_resources.txt = setuptools.command.egg_info:overwrite_arg
+top_level.txt = setuptools.command.egg_info:write_toplevel_names
+namespace_packages.txt = setuptools.command.egg_info:overwrite_arg
+entry_points.txt = setuptools.command.egg_info:write_entries
+depends.txt = setuptools.command.egg_info:warn_depends_obsolete
+
+[console_scripts]
+easy_install = setuptools.command.easy_install:main
+easy_install-2.7 = setuptools.command.easy_install:main
+
+[setuptools.file_finders]
+svn_cvs = setuptools.command.sdist:_default_revctrl
+
+[distutils.setup_keywords]
+dependency_links = setuptools.dist:assert_string_list
+entry_points = setuptools.dist:check_entry_points
+extras_require = setuptools.dist:check_extras
+package_data = setuptools.dist:check_package_data
+install_requires = setuptools.dist:check_requirements
+use_2to3 = setuptools.dist:assert_bool
+use_2to3_fixers = setuptools.dist:assert_string_list
+include_package_data = setuptools.dist:assert_bool
+exclude_package_data = setuptools.dist:check_package_data
+namespace_packages = setuptools.dist:check_nsp
+test_suite = setuptools.dist:check_test_suite
+eager_resources = setuptools.dist:assert_string_list
+zip_safe = setuptools.dist:assert_bool
+test_loader = setuptools.dist:check_importable
+packages = setuptools.dist:check_packages
+convert_2to3_doctests = setuptools.dist:assert_string_list
+tests_require = setuptools.dist:check_requirements
+
+[setuptools.installation]
+eggsecutable = setuptools.command.easy_install:bootstrap
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/top_level.txt b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/top_level.txt
new file mode 100644
index 00000000..ef77c7c1
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/top_level.txt
@@ -0,0 +1,4 @@
+easy_install
+pkg_resources
+setuptools
+site
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/zip-safe b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/zip-safe
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/EGG-INFO/zip-safe
@@ -0,0 +1 @@
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/easy_install.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/easy_install.py
new file mode 100755
index 00000000..d87e9840
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/easy_install.py
@@ -0,0 +1,5 @@
+"""Run the EasyInstall command"""
+
+if __name__ == '__main__':
+ from setuptools.command.easy_install import main
+ main()
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/pkg_resources.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/pkg_resources.py
new file mode 100755
index 00000000..30dbc188
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/pkg_resources.py
@@ -0,0 +1,2693 @@
+"""Package resource API
+--------------------
+
+A resource is a logical file contained within a package, or a logical
+subdirectory thereof. The package resource API expects resource names
+to have their path parts separated with ``/``, *not* whatever the local
+path separator is. Do not use os.path operations to manipulate resource
+names being passed into the API.
+
+The package resource API is designed to work with normal filesystem packages,
+.egg files, and unpacked .egg files. It can also work in a limited way with
+.zip files and with custom PEP 302 loaders that support the ``get_data()``
+method.
+"""
+
+import sys, os, zipimport, time, re, imp, types
+from urlparse import urlparse, urlunparse
+
+try:
+ frozenset
+except NameError:
+ from sets import ImmutableSet as frozenset
+
+# capture these to bypass sandboxing
+from os import utime
+try:
+ from os import mkdir, rename, unlink
+ WRITE_SUPPORT = True
+except ImportError:
+ # no write support, probably under GAE
+ WRITE_SUPPORT = False
+
+from os import open as os_open
+from os.path import isdir, split
+
+# This marker is used to simplify the process that checks is the
+# setuptools package was installed by the Setuptools project
+# or by the Distribute project, in case Setuptools creates
+# a distribution with the same version.
+#
+# The bootstrapping script for instance, will check if this
+# attribute is present to decide wether to reinstall the package
+_distribute = True
+
+def _bypass_ensure_directory(name, mode=0777):
+ # Sandbox-bypassing version of ensure_directory()
+ if not WRITE_SUPPORT:
+ raise IOError('"os.mkdir" not supported on this platform.')
+ dirname, filename = split(name)
+ if dirname and filename and not isdir(dirname):
+ _bypass_ensure_directory(dirname)
+ mkdir(dirname, mode)
+
+
+
+
+
+
+
+
+def get_supported_platform():
+ """Return this platform's maximum compatible version.
+
+ distutils.util.get_platform() normally reports the minimum version
+ of Mac OS X that would be required to *use* extensions produced by
+ distutils. But what we want when checking compatibility is to know the
+ version of Mac OS X that we are *running*. To allow usage of packages that
+ explicitly require a newer version of Mac OS X, we must also know the
+ current version of the OS.
+
+ If this condition occurs for any other platform with a version in its
+ platform strings, this function should be extended accordingly.
+ """
+ plat = get_build_platform(); m = macosVersionString.match(plat)
+ if m is not None and sys.platform == "darwin":
+ try:
+ plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
+ except ValueError:
+ pass # not Mac OS X
+ return plat
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+__all__ = [
+ # Basic resource access and distribution/entry point discovery
+ 'require', 'run_script', 'get_provider', 'get_distribution',
+ 'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points',
+ 'resource_string', 'resource_stream', 'resource_filename',
+ 'resource_listdir', 'resource_exists', 'resource_isdir',
+
+ # Environmental control
+ 'declare_namespace', 'working_set', 'add_activation_listener',
+ 'find_distributions', 'set_extraction_path', 'cleanup_resources',
+ 'get_default_cache',
+
+ # Primary implementation classes
+ 'Environment', 'WorkingSet', 'ResourceManager',
+ 'Distribution', 'Requirement', 'EntryPoint',
+
+ # Exceptions
+ 'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra',
+ 'ExtractionError',
+
+ # Parsing functions and string utilities
+ 'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
+ 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
+ 'safe_extra', 'to_filename',
+
+ # filesystem utilities
+ 'ensure_directory', 'normalize_path',
+
+ # Distribution "precedence" constants
+ 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
+
+ # "Provider" interfaces, implementations, and registration/lookup APIs
+ 'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
+ 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
+ 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
+ 'register_finder', 'register_namespace_handler', 'register_loader_type',
+ 'fixup_namespace_packages', 'get_importer',
+
+ # Deprecated/backward compatibility only
+ 'run_main', 'AvailableDistributions',
+]
+class ResolutionError(Exception):
+ """Abstract base for dependency resolution errors"""
+ def __repr__(self):
+ return self.__class__.__name__+repr(self.args)
+
+class VersionConflict(ResolutionError):
+ """An already-installed version conflicts with the requested version"""
+
+class DistributionNotFound(ResolutionError):
+ """A requested distribution was not found"""
+
+class UnknownExtra(ResolutionError):
+ """Distribution doesn't have an "extra feature" of the given name"""
+_provider_factories = {}
+
+PY_MAJOR = sys.version[:3]
+EGG_DIST = 3
+BINARY_DIST = 2
+SOURCE_DIST = 1
+CHECKOUT_DIST = 0
+DEVELOP_DIST = -1
+
+def register_loader_type(loader_type, provider_factory):
+ """Register `provider_factory` to make providers for `loader_type`
+
+ `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
+ and `provider_factory` is a function that, passed a *module* object,
+ returns an ``IResourceProvider`` for that module.
+ """
+ _provider_factories[loader_type] = provider_factory
+
+def get_provider(moduleOrReq):
+ """Return an IResourceProvider for the named module or requirement"""
+ if isinstance(moduleOrReq,Requirement):
+ return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
+ try:
+ module = sys.modules[moduleOrReq]
+ except KeyError:
+ __import__(moduleOrReq)
+ module = sys.modules[moduleOrReq]
+ loader = getattr(module, '__loader__', None)
+ return _find_adapter(_provider_factories, loader)(module)
+
+def _macosx_vers(_cache=[]):
+ if not _cache:
+ import platform
+ version = platform.mac_ver()[0]
+ # fallback for MacPorts
+ if version == '':
+ import plistlib
+ plist = '/System/Library/CoreServices/SystemVersion.plist'
+ if os.path.exists(plist):
+ if hasattr(plistlib, 'readPlist'):
+ plist_content = plistlib.readPlist(plist)
+ if 'ProductVersion' in plist_content:
+ version = plist_content['ProductVersion']
+
+ _cache.append(version.split('.'))
+ return _cache[0]
+
+def _macosx_arch(machine):
+ return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine)
+
def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but
    it needs some hacks for Linux and Mac OS X.
    """
    try:
        from distutils.util import get_platform
    except ImportError:
        # distutils is unavailable on newer interpreters; sysconfig
        # provides the equivalent function.
        from sysconfig import get_platform

    plat = get_platform()
    if sys.platform == "darwin" and not plat.startswith('macosx-'):
        try:
            version = _macosx_vers()
            machine = os.uname()[4].replace(" ", "_")
            return "macosx-%d.%d-%s" % (
                int(version[0]), int(version[1]), _macosx_arch(machine)
            )
        except ValueError:
            # a non-Mac darwin system will fail the int() conversion and
            # fall through to the default implementation
            pass
    return plat


macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")

# XXX backward compat
get_platform = get_build_platform
+
def compatible_platforms(provided, required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.
    Beyond that, only Mac OS X designations (``macosx-M.m-arch`` and the
    pre-setuptools-0.6 ``darwin-x.y.z-arch`` form) are special-cased.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    if provided is None or required is None or provided == required:
        return True  # easy case

    # Mac OS X special cases
    reqMac = macosVersionString.match(required)
    if reqMac:
        provMac = macosVersionString.match(provided)

        # is this a Mac package?
        if not provMac:
            # backwards compatibility for packages built before setuptools
            # 0.6; those used the darwin-x.y.z-arch designation.
            provDarwin = darwinVersionString.match(provided)
            if provDarwin:
                dversion = int(provDarwin.group(1))
                # BUGFIX: compare version numbers numerically.  The old code
                # compared the string "major.minor" lexically, so a required
                # version like "10.10" sorted *before* "10.3" and legacy
                # darwin eggs were wrongly rejected on minor versions >= 10.
                macosversion = (int(reqMac.group(1)), int(reqMac.group(2)))
                if dversion == 7 and macosversion >= (10, 3) or \
                   dversion == 8 and macosversion >= (10, 4):
                    return True
            return False  # egg isn't macosx or legacy darwin

        # are they the same major version and machine type?
        if provMac.group(1) != reqMac.group(1) or \
           provMac.group(3) != reqMac.group(3):
            return False

        # is the required OS major update >= the provided one?
        if int(provMac.group(2)) > int(reqMac.group(2)):
            return False

        return True

    # XXX Linux and other platforms' special cases should go here
    return False
+
+
def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script

    The *caller's* global namespace is cleared (keeping only ``__name__``)
    and reused as the script's namespace.
    """
    caller_globals = sys._getframe(1).f_globals
    saved_name = caller_globals['__name__']
    caller_globals.clear()
    caller_globals['__name__'] = saved_name
    require(dist_spec)[0].run_script(script_name, caller_globals)


# backward compatibility
run_main = run_script
+
def get_distribution(dist):
    """Return a current distribution object for a Requirement or string

    Strings are parsed into Requirements, Requirements are resolved to a
    provider; anything that doesn't end up a Distribution is a TypeError.
    """
    if isinstance(dist, basestring):
        dist = Requirement.parse(dist)
    if isinstance(dist, Requirement):
        dist = get_provider(dist)
    if not isinstance(dist, Distribution):
        raise TypeError("Expected string, Requirement, or Distribution", dist)
    return dist
+
def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    # Pure delegation: resolve `dist` (string/Requirement/Distribution) and
    # let the Distribution object do the actual loading.
    return get_distribution(dist).load_entry_point(group, name)
+
def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map

    With ``group=None`` the Distribution returns its complete map.
    """
    return get_distribution(dist).get_entry_map(group)
+
def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    # Pure delegation to the resolved Distribution.
    return get_distribution(dist).get_entry_info(group, name)
+
+
class IMetadataProvider:
    # NOTE: this class is an interface *specification* only.  Its methods are
    # documentation stubs -- they intentionally omit `self` and have no body,
    # and the class is never instantiated directly.

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory?  (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources"""

    # NOTE: like IMetadataProvider, these are documentation stubs (no `self`,
    # no body); concrete behavior lives in NullProvider and its subclasses.

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory?  (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""
class WorkingSet(object):
    """A collection of active distributions on sys.path (or a similar list)"""

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        self.entries = []      # path entries in insertion order (dups kept)
        self.entry_keys = {}   # path entry -> list of project keys found there
        self.by_key = {}       # project key -> active Distribution
        self.callbacks = []    # subscribe() callbacks, told of each new dist

        if entries is None:
            entries = sys.path

        for entry in entries:
            self.add_entry(entry)

    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry,True)`` is used to find distributions
        corresponding to the path entry, and they are added.  `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            self.add(dist, entry, False)

    def __contains__(self,dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist

    def find(self, req):
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`.  But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)
        if dist is not None and dist not in req:
            raise VersionConflict(dist,req)     # XXX add more info
        else:
            return dist

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        for dist in self:
            entries = dist.get_entry_map(group)
            if name is None:
                for ep in entries.values():
                    yield ep
            elif name in entries:
                yield entries[name]

    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        # The *caller's* globals are cleared (keeping only __name__) and
        # reused as the script's namespace.
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        ns.clear()
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)

    def __iter__(self):
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        seen = {}
        for item in self.entries:
            for key in self.entry_keys[item]:
                if key not in seen:
                    seen[key]=1
                    yield self.by_key[key]

    def add(self, dist, entry=None, insert=True):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set.  If it's added, any
        callbacks registered with the ``subscribe()`` method will be called.
        """
        if insert:
            dist.insert_on(self.entries, entry)

        if entry is None:
            entry = dist.location
        # Track the dist's key under both the given entry and its location.
        keys = self.entry_keys.setdefault(entry,[])
        keys2 = self.entry_keys.setdefault(dist.location,[])
        if dist.key in self.by_key:
            return      # ignore hidden distros

        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        self._added_new(dist)

    def resolve(self, requirements, env=None, installer=None, replacement=True):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
        if supplied, should be an ``Environment`` instance.  If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set.  `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.
        """

        requirements = list(requirements)[::-1]  # set up the stack
        processed = {}  # set of processed requirements
        best = {}  # key -> dist
        to_activate = []

        # NOTE(review): the list is reversed above, yet pop(0) takes from the
        # front -- so the *last* caller-supplied requirement is processed
        # first, while discovered dependencies (extended below, reversed) are
        # consumed FIFO.  Verify against upstream before changing.
        while requirements:
            req = requirements.pop(0)   # process dependencies breadth-first
            # distribute-specific: optionally substitute a 'distribute'
            # requirement (see _override_setuptools for the match criteria).
            if _override_setuptools(req) and replacement:
                req = Requirement.parse('distribute')

            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue
            dist = best.get(req.key)
            if dist is None:
                # Find the best distribution and add it to the map
                dist = self.by_key.get(req.key)
                if dist is None:
                    if env is None:
                        env = Environment(self.entries)
                    dist = best[req.key] = env.best_match(req, self, installer)
                    if dist is None:
                        #msg = ("The '%s' distribution was not found on this "
                        #       "system, and is required by this application.")
                        #raise DistributionNotFound(msg % req)

                        # unfortunately, zc.buildout uses a str(err)
                        # to get the name of the distribution here..
                        raise DistributionNotFound(req)
                to_activate.append(dist)
            if dist not in req:
                # Oops, the "best" so far conflicts with a dependency
                raise VersionConflict(dist,req) # XXX put more info here
            requirements.extend(dist.requires(req.extras)[::-1])
            processed[req] = True

        return to_activate    # return list of distros to activate

    def find_plugins(self,
        plugin_env, full_env=None, installer=None, fallback=True
    ):
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            map(working_set.add, distributions)  # add plugins+libs to sys.path
            print 'Could not load', errors        # display errors

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories.  The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions.  If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method.  The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies.  `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred.  Usually this will be a ``DistributionNotFound``
        or ``VersionConflict`` instance.
        """

        plugin_projects = list(plugin_env)
        plugin_projects.sort()  # scan project names in alphabetic order

        error_info = {}
        distributions = {}

        if full_env is None:
            env = Environment(self.entries)
            env += plugin_env
        else:
            env = full_env + plugin_env

        # Resolve against a scratch copy so failed plugins don't pollute us.
        shadow_set = self.__class__([])
        # Python 2 map() is eager, so this adds every dist immediately.
        map(shadow_set.add, self)   # put all our entries in shadow_set

        for project_name in plugin_projects:

            # plugin_env[project_name] is newest-first, so the first success
            # wins and older versions are only tried when `fallback` is set.
            for dist in plugin_env[project_name]:

                req = [dist.as_requirement()]

                try:
                    resolvees = shadow_set.resolve(req, env, installer)

                except ResolutionError,v:
                    error_info[dist] = v    # save error info
                    if fallback:
                        continue    # try the next older version of project
                    else:
                        break       # give up on this project, keep going

                else:
                    map(shadow_set.add, resolvees)
                    distributions.update(dict.fromkeys(resolvees))

                    # success, no need to try any more versions of this project
                    break

        distributions = list(distributions)
        distributions.sort()

        return distributions, error_info

    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required.  The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """

        needed = self.resolve(parse_requirements(requirements))

        for dist in needed:
            self.add(dist)

        return needed

    def subscribe(self, callback):
        """Invoke `callback` for all distributions (including existing ones)"""
        if callback in self.callbacks:
            return
        self.callbacks.append(callback)
        # Replay already-active distributions to the new subscriber.
        for dist in self:
            callback(dist)

    def _added_new(self, dist):
        # Notify every subscriber about a newly-activated distribution.
        for callback in self.callbacks:
            callback(dist)
class Environment(object):
    """Searchable snapshot of distributions on a search path"""

    def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with.  If
        unspecified, it defaults to the current platform.  `python` is an
        optional string naming the desired version of Python (e.g. ``'2.4'``);
        it defaults to the current version.

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        """
        # NOTE(review): the `platform`/`python` defaults above are evaluated
        # once, when the class statement executes -- not per call.
        self._distmap = {}   # project key -> [Distribution, ...] (unsorted)
        self._cache = {}     # project key -> same list, sorted newest-first
        self.platform = platform
        self.python = python
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        """
        return (self.python is None or dist.py_version is None
            or dist.py_version==self.python) \
           and compatible_platforms(dist.platform,self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        # Raises KeyError if dist.key was never added, and ValueError if
        # this exact dist isn't in the list.
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.  Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        if search_path is None:
            search_path = sys.path

        for item in search_path:
            for dist in find_distributions(item):
                self.add(dist)

    def __getitem__(self,project_name):
        """Return a newest-to-oldest list of distributions for `project_name`
        """
        try:
            # fast path: exact (usually already-lowercased) key is cached
            return self._cache[project_name]
        except KeyError:
            # normalize the key only on a cache miss
            project_name = project_name.lower()
            if project_name not in self._distmap:
                return []

        if project_name not in self._cache:
            # cache shares the list object with _distmap; sort it in place
            dists = self._cache[project_name] = self._distmap[project_name]
            _sort_dists(dists)

        return self._cache[project_name]

    def add(self,dist):
        """Add `dist` if we ``can_add()`` it and it isn't already added"""
        if self.can_add(dist) and dist.has_version():
            dists = self._distmap.setdefault(dist.key,[])
            if dist not in dists:
                dists.append(dist)
                # keep any cached, sorted view consistent
                if dist.key in self._cache:
                    _sort_dists(self._cache[dist.key])

    def best_match(self, req, working_set, installer=None):
        """Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active.  (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.)  If a suitable distribution
        isn't active, this method returns the newest distribution in the
        environment that meets the ``Requirement`` in `req`.  If no suitable
        distribution is found, and `installer` is supplied, then the result of
        calling the environment's ``obtain(req, installer)`` method will be
        returned.
        """
        dist = working_set.find(req)
        if dist is not None:
            return dist
        # self[req.key] is newest-first, so the first match is the best one
        for dist in self[req.key]:
            if dist in req:
                return dist
        return self.obtain(req, installer) # try and download/install

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download).  In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead.  This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument."""
        if installer is not None:
            return installer(requirement)

    def __iter__(self):
        """Yield the unique project names of the available distributions"""
        for key in self._distmap.keys():
            # skip keys whose distribution lists are empty (e.g. after remove)
            if self[key]: yield key

    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other,Distribution):
            self.add(other)
        elif isinstance(other,Environment):
            for project in other:
                for dist in other[project]:
                    self.add(dist)
        else:
            raise TypeError("Can't add %r to environment" % (other,))
        return self

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        # start from an unfiltered environment so nothing gets dropped
        new = self.__class__([], platform=None, python=None)
        for env in self, other:
            new += env
        return new


# XXX backward compatibility
AvailableDistributions = Environment
+
+
class ExtractionError(RuntimeError):
    """An error occurred extracting a resource

    Instances of this exception carry three extra attributes, filled in by
    ``ResourceManager.extraction_error()``:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """
+
+
+
+
class ResourceManager:
    """Manage resource extraction and packages"""

    # Class-level default: None means "use get_default_cache()".
    # set_extraction_path() shadows this with an instance attribute.
    extraction_path = None

    def __init__(self):
        # paths handed out by get_cache_path(), tracked for cleanup
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(
            resource_name
        )

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            self, resource_name
        )

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            self, resource_name
        )

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            self, resource_name
        )

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(
            resource_name
        )

    def extraction_error(self):
        """Give an error message for problems extracting file(s)

        Wraps the exception currently being handled in an ``ExtractionError``
        (annotated with manager/cache_path/original_error) and raises it.
        """

        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        err = ExtractionError("""Can't extract file(s) to egg cache

The following error occurred while trying to extract file(s) to the Python egg
cache:

  %s

The Python egg cache directory is currently set to:

  %s

Perhaps your account does not have write access to this directory? You can
change the cache directory by setting the PYTHON_EGG_CACHE environment
variable to point to an accessible directory.
""" % (old_exc, cache_path)
        )
        err.manager = self
        err.cache_path = cache_path
        err.original_error = old_exc
        raise err

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist.  `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension.  `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
        try:
            _bypass_ensure_directory(target_path)
        except:
            # bare except is deliberate: any failure creating the directory
            # is reported (and re-raised as ExtractionError) by
            # extraction_error() using the in-flight exception.
            self.extraction_error()

        self.cached_files[target_path] = 1
        return target_path

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource.  They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """

        if os.name == 'posix':
            # Make the resource executable: add r-x for everyone (0555),
            # then mask to the permission bits (07777).  Python 2 octals.
            mode = ((os.stat(tempname).st_mode) | 0555) & 07777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``.  (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks.  See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``.  You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done.  There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            raise ValueError(
                "Can't change extraction path, files already extracted"
            )

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process.  This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # XXX not implemented: currently a no-op that returns None
+
def get_default_cache():
    """Determine the default cache location

    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
    "Application Data" directory.  On all other systems, it's "~/.python-eggs".
    """
    egg_cache = os.environ.get('PYTHON_EGG_CACHE')
    if egg_cache is not None:
        return egg_cache

    if os.name != 'nt':
        return os.path.expanduser('~/.python-eggs')

    app_data = 'Application Data'   # XXX this may be locale-specific!
    # (env-var names to join, subdirectory to append) -- best options first
    app_homes = [
        (('APPDATA',), None),           # best option, should be locale-safe
        (('USERPROFILE',), app_data),
        (('HOMEDRIVE','HOMEPATH'), app_data),
        (('HOMEPATH',), app_data),
        (('HOME',), None),
        (('WINDIR',), app_data),        # 95/98/ME
    ]

    for keys, subdir in app_homes:
        if not all(key in os.environ for key in keys):
            continue    # a needed variable is missing; try the next option
        dirname = ''
        for key in keys:
            dirname = os.path.join(dirname, os.environ[key])
        if subdir:
            dirname = os.path.join(dirname, subdir)
        return os.path.join(dirname, 'Python-Eggs')

    # (sic: "enviroment" -- message preserved byte-for-byte from upstream)
    raise RuntimeError(
        "Please set the PYTHON_EGG_CACHE enviroment variable"
    )
+
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any run of characters other than letters, digits, and '.' collapses to
    a single '-'.
    """
    invalid_run = re.compile('[^A-Za-z0-9.]+')
    return invalid_run.sub('-', name)
+
+
def safe_version(version):
    """Convert an arbitrary string to a standard version string

    Spaces become dots, and all other non-alphanumeric characters become
    dashes, with runs of multiple dashes condensed to a single dash.
    """
    dotted = version.replace(' ', '.')
    return re.compile('[^A-Za-z0-9.]+').sub('-', dotted)
+
+
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any run of characters other than letters, digits, and '.' is replaced
    with a single '_', and the result is always lowercased.
    """
    normalized = re.compile('[^A-Za-z0-9.]+').sub('_', extra)
    return normalized.lower()
+
+
def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return name.replace('-', '_')
+
+
+
+
+
+
+
+
class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    # Defaults for non-egg loaders; EggProvider._setup_prefix() fills these in
    # for egg-based modules.
    egg_name = None
    egg_info = None
    loader = None

    def __init__(self, module):
        self.loader = getattr(module, '__loader__', None)
        # directory containing the module's source (or '' if no __file__)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        return StringIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))

    def has_metadata(self, name):
        # NB: returns the falsy egg_info value (None) rather than False when
        # there is no metadata directory.
        return self.egg_info and self._has(self._fn(self.egg_info,name))

    # (2,7,...) < (3,) but (3,x,...) > (3,), so this selects the plain-bytes
    # variant on Python 2 and the utf-8-decoding variant on Python 3.
    if sys.version_info <= (3,):
        def get_metadata(self, name):
            if not self.egg_info:
                return ""
            return self._get(self._fn(self.egg_info,name))
    else:
        def get_metadata(self, name):
            if not self.egg_info:
                return ""
            return self._get(self._fn(self.egg_info,name)).decode("utf-8")

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self,resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self,name):
        return self.egg_info and self._isdir(self._fn(self.egg_info,name))

    def resource_listdir(self,resource_name):
        return self._listdir(self._fn(self.module_path,resource_name))

    def metadata_listdir(self,name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info,name))
        return []

    def run_script(self,script_name,namespace):
        # Scripts live under EGG-INFO/scripts/ in the metadata tree.
        script = 'scripts/'+script_name
        if not self.has_metadata(script):
            raise ResolutionError("No script named %r" % script_name)
        # normalize line endings so compile() is happy
        script_text = self.get_metadata(script).replace('\r\n','\n')
        script_text = script_text.replace('\r','\n')
        script_filename = self._fn(self.egg_info,script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            # real file on disk: execfile (Python 2 only)
            execfile(script_filename, namespace, namespace)
        else:
            # zipped egg: seed linecache so tracebacks show source lines
            from linecache import cache
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            )
            script_code = compile(script_text,script_filename,'exec')
            # Python 2 exec-in-namespace statement syntax
            exec script_code in namespace, namespace

    def _has(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        # resource names use '/' separators; convert to native path segments
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    def _get(self, path):
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )

# catch-all fallback: any loader type not more specifically registered
register_loader_type(object, NullProvider)
+
+
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        NullProvider.__init__(self, module)
        self._setup_prefix()

    def _setup_prefix(self):
        # we assume here that our metadata may be nested inside a "basket"
        # of multiple eggs; that's why we use module_path instead of .archive
        # -- climb toward the filesystem root until a '.egg' component is
        # found (or os.path.split stops making progress).
        path = self.module_path
        previous = None
        while path != previous:
            if path.lower().endswith('.egg'):
                self.egg_name = os.path.basename(path)
                self.egg_info = os.path.join(path, 'EGG-INFO')
                self.egg_root = path
                break
            previous = path
            path, _base = os.path.split(path)
+
+
+
+
+
+
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        # binary mode, matching _get() below
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        # read the whole file; the with-block guarantees the handle closes
        with open(path, 'rb') as stream:
            return stream.read()

# modules loaded from the plain filesystem have __loader__ = None
register_loader_type(type(None), DefaultProvider)
+
+
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    # There is no backing module, hence no module path.
    module_path = None

    def _has(self, path):
        return False

    # Directory test is identical to the existence test: always False.
    _isdir = _has

    def _get(self, path):
        return ''

    def _listdir(self, path):
        return []

    def __init__(self):
        # Deliberately skip NullProvider.__init__: there is no module.
        pass

# Shared singleton used wherever "no metadata" is needed.
empty_provider = EmptyProvider()
+
+
+
+
class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    # Lazily-built list of eagerly-extracted resource names
    # (see _get_eager_resources); None means "not loaded yet".
    eagers = None

    def __init__(self, module):
        EggProvider.__init__(self,module)
        # Reuse zipimport's parsed table-of-contents for the archive rather
        # than re-reading the zip directory ourselves.
        self.zipinfo = zipimport._zip_directory_cache[self.loader.archive]
        self.zip_pre = self.loader.archive+os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath,self.zip_pre)
        )

    def _parts(self,zip_path):
        # Convert a zipfile subpath into an egg-relative path part list
        fspath = self.zip_pre+zip_path  # pseudo-fs path
        if fspath.startswith(self.egg_root+os.sep):
            return fspath[len(self.egg_root)+1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath,self.egg_root)
        )

    def get_resource_filename(self, manager, resource_name):
        """Extract the resource to the cache and return its real filename."""
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
            )
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        if '/'.join(self._parts(zip_path)) in eagers:
            # Eager resources (e.g. native libs) must all be extracted
            # together before any one of them is used.
            for name in eagers:
                self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    def _extract_resource(self, manager, zip_path):
        # Directories extract recursively, one member at a time.
        if zip_path in self._index():
            for name in self._index()[zip_path]:
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
                )
            return os.path.dirname(last)  # return the extracted directory name

        # Decode size plus DOS-encoded date/time from the cached TOC entry.
        zip_stat = self.zipinfo[zip_path]
        t,d,size = zip_stat[5], zip_stat[6], zip_stat[3]
        date_time = (
            (d>>9)+1980, (d>>5)&0xF, d&0x1F,                    # ymd
            (t&0xFFFF)>>11, (t>>5)&0x3F, (t&0x1F) * 2, 0, 0, -1 # hms, etc.
        )
        timestamp = time.mktime(date_time)

        try:
            if not WRITE_SUPPORT:
                raise IOError('"os.rename" and "os.unlink" are not supported '
                              'on this platform')
            # NOTE(review): if the IOError above is raised, `real_path` is
            # never bound; the final `return real_path` below relies on
            # manager.extraction_error() raising -- confirm it always does.

            real_path = manager.get_cache_path(
                self.egg_name, self._parts(zip_path)
            )

            if os.path.isfile(real_path):
                stat = os.stat(real_path)
                if stat.st_size==size and stat.st_mtime==timestamp:
                    # size and stamp match, don't bother extracting
                    return real_path

            # Write to a temp name in the target directory, then rename into
            # place, so concurrent extractors never observe a partial file.
            outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
            os.write(outf, self.loader.get_data(zip_path))
            os.close(outf)
            utime(tmpnam, (timestamp,timestamp))
            manager.postprocess(tmpnam, real_path)

            try:
                rename(tmpnam, real_path)

            except os.error:
                if os.path.isfile(real_path):
                    stat = os.stat(real_path)

                    if stat.st_size==size and stat.st_mtime==timestamp:
                        # size and stamp match, somebody did it just ahead of
                        # us, so we're done
                        return real_path
                    elif os.name=='nt':  # Windows, del old file and retry
                        unlink(real_path)
                        rename(tmpnam, real_path)
                        return real_path
                raise

        except os.error:
            manager.extraction_error()  # report a user-friendly error

        return real_path

    def _get_eager_resources(self):
        # Eager resource names are listed one per line in these metadata files.
        if self.eagers is None:
            eagers = []
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers
        return self.eagers

    def _index(self):
        # Map each directory subpath in the archive to its member names,
        # computed once from the zipimport TOC and cached on the instance.
        try:
            return self._dirindex
        except AttributeError:
            ind = {}
            for path in self.zipinfo:
                parts = path.split(os.sep)
                while parts:
                    parent = os.sep.join(parts[:-1])
                    if parent in ind:
                        ind[parent].append(parts[-1])
                        break
                    else:
                        ind[parent] = [parts.pop()]
            self._dirindex = ind
            return ind

    def _has(self, fspath):
        zip_path = self._zipinfo_name(fspath)
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self,fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self,fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self,resource_name):
        return self._zipinfo_name(self._fn(self.egg_root,resource_name))

    def _resource_to_zip(self,resource_name):
        return self._zipinfo_name(self._fn(self.module_path,resource_name))

register_loader_type(zipimport.zipimporter, ZipProvider)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self, path):
        # Filesystem location of the PKG-INFO file.
        self.path = path

    def has_metadata(self, name):
        # PKG-INFO is the only metadata this provider ever has.
        return name=='PKG-INFO'

    def get_metadata(self, name):
        if name!='PKG-INFO':
            raise KeyError("No metadata except PKG-INFO is available")
        handle = open(self.path,'rU')
        content = handle.read()
        handle.close()
        return content

    def get_metadata_lines(self, name):
        # Same blank/comment-line filtering as every other provider.
        return yield_lines(self.get_metadata(name))
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(base_dir,project_name=dist_name,metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # `path` is the sys.path entry the code lives on; `egg_info` is the
        # directory (or file) holding this distribution's metadata.
        self.module_path = path
        self.egg_info = egg_info
+
+
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""
        # Mirrors ZipProvider.__init__, but starts from an existing
        # zipimporter instead of an already-imported module.
        self.zipinfo = zipimport._zip_directory_cache[importer.archive]
        self.zip_pre = importer.archive+os.sep
        self.loader = importer
        if importer.prefix:
            # Importer rooted at a subdirectory inside the archive.
            self.module_path = os.path.join(importer.archive, importer.prefix)
        else:
            self.module_path = importer.archive
        self._setup_prefix()
+
+
class ImpWrapper:
    """PEP 302 Importer that wraps Python's "normal" import algorithm"""

    def __init__(self, path=None):
        # `path` is a single sys.path entry, or None for the importer that
        # searches the default module path.
        self.path = path

    def find_module(self, fullname, path=None):
        subname = fullname.split(".")[-1]
        if subname != fullname and self.path is None:
            # Dotted submodules must be located via their package's importer.
            return None
        if self.path is None:
            path = None
        else:
            path = [self.path]
        try:
            file, filename, etc = imp.find_module(subname, path)
        except ImportError:
            return None
        return ImpLoader(file, filename, etc)
+
+
class ImpLoader:
    """PEP 302 Loader that wraps Python's "normal" import algorithm"""

    def __init__(self, file, filename, etc):
        # Exactly the (file, filename, etc) triple from imp.find_module().
        self.file = file
        self.filename = filename
        self.etc = etc

    def load_module(self, fullname):
        try:
            mod = imp.load_module(fullname, self.file, self.filename, self.etc)
        finally:
            # imp.load_module does not close the file for us.
            if self.file: self.file.close()
        # Note: we don't set __loader__ because we want the module to look
        # normal; i.e. this is just a wrapper for standard import machinery
        return mod
+
+
+
+
def get_importer(path_item):
    """Retrieve a PEP 302 "importer" for the given path item

    If there is no importer, this returns a wrapper around the builtin import
    machinery. The returned importer is only cached if it was created by a
    path hook.
    """
    try:
        importer = sys.path_importer_cache[path_item]
    except KeyError:
        for hook in sys.path_hooks:
            try:
                importer = hook(path_item)
            except ImportError:
                pass
            else:
                break
        else:
            # no hook claimed this path item
            importer = None

        sys.path_importer_cache.setdefault(path_item,importer)
    if importer is None:
        # fall back to (an uncached) wrapper around builtin import machinery
        try:
            importer = ImpWrapper(path_item)
        except ImportError:
            pass
    return importer

# Prefer the stdlib implementations when available (Python 2.5+).
try:
    from pkgutil import get_importer, ImpImporter
except ImportError:
    pass  # Python 2.3 or 2.4, use our own implementation
else:
    ImpWrapper = ImpImporter   # Python 2.5, use pkgutil's implementation
    del ImpLoader, ImpImporter
+
+
+
+
+
+
# Maps importer type -> callable(importer, path_item, only) yielding dists.
_distribution_finders = {}

def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item. See ``pkg_resources.find_on_path`` for an example."""
    _distribution_finders[importer_type] = distribution_finder
+
+
def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    importer = get_importer(path_item)
    # _find_adapter (defined elsewhere in this module) selects the finder
    # registered for the importer's type.
    finder = _find_adapter(_distribution_finders, importer)
    return finder(importer, path_item, only)
+
def find_in_zip(importer, path_item, only=False):
    """Finder for eggs in zip archives, including nested "basket" eggs."""
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        # the archive itself is a distribution
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        return  # don't yield nested distros
    for subitem in metadata.resource_listdir('/'):
        if subitem.endswith('.egg'):
            subpath = os.path.join(path_item, subitem)
            for dist in find_in_zip(zipimport.zipimporter(subpath), subpath):
                yield dist

register_finder(zipimport.zipimporter, find_in_zip)
+
def StringIO(*args, **kw):
    """Thunk to load the real StringIO on demand"""
    # The first call rebinds this module-level name to the real class,
    # so the import cost is paid at most once.
    global StringIO
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    return StringIO(*args,**kw)
+
def find_nothing(importer, path_item, only=False):
    # Default finder: importer types we know nothing about yield no dists.
    return ()
register_finder(object,find_nothing)
+
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
        if path_item.lower().endswith('.egg'):
            # unpacked egg
            yield Distribution.from_filename(
                path_item, metadata=PathMetadata(
                    path_item, os.path.join(path_item,'EGG-INFO')
                )
            )
        else:
            # scan for .egg and .egg-info in directory
            for entry in os.listdir(path_item):
                lower = entry.lower()
                if lower.endswith('.egg-info'):
                    fullpath = os.path.join(path_item, entry)
                    if os.path.isdir(fullpath):
                        # egg-info directory, allow getting metadata
                        metadata = PathMetadata(path_item, fullpath)
                    else:
                        # single PKG-INFO-style metadata file
                        metadata = FileMetadata(fullpath)
                    yield Distribution.from_location(
                        path_item,entry,metadata,precedence=DEVELOP_DIST
                    )
                elif not only and lower.endswith('.egg'):
                    # nested egg file/dir: recurse through the finder table
                    for dist in find_distributions(os.path.join(path_item, entry)):
                        yield dist
                elif not only and lower.endswith('.egg-link'):
                    # first non-blank line names the linked project directory
                    for line in open(os.path.join(path_item, entry)):
                        if not line.strip(): continue
                        for item in find_distributions(os.path.join(path_item,line.rstrip())):
                            yield item
                        break
register_finder(ImpWrapper,find_on_path)
+
# Maps importer type -> namespace handler callable (see below).
_namespace_handlers = {}
# Maps package name -> list of declared namespace child package names.
_namespace_packages = {}

def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer,path_entry,moduleName,module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath. For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    _namespace_handlers[importer_type] = namespace_handler
+
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""
    importer = get_importer(path_item)
    if importer is None:
        return None
    loader = importer.find_module(packageName)
    if loader is None:
        # this path item has no module by that name; nothing to do
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # create an (initially empty) namespace package in sys.modules
        module = sys.modules[packageName] = types.ModuleType(packageName)
        module.__path__ = []; _set_parent_ns(packageName)
    elif not hasattr(module,'__path__'):
        raise TypeError("Not a package:", packageName)
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer,path_item,packageName,module)
    if subpath is not None:
        # let the loader (re)initialize the package, then restore the
        # combined __path__, which the load may have replaced
        path = module.__path__; path.append(subpath)
        loader.load_module(packageName); module.__path__ = path
    return subpath
+
def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""

    imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            return  # already declared

        path, parent = sys.path, None
        if '.' in packageName:
            # recursively declare all parent namespace packages first
            parent = '.'.join(packageName.split('.')[:-1])
            declare_namespace(parent)
            __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError:
                raise TypeError("Not a package:", parent)

        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent,[]).append(packageName)
        _namespace_packages.setdefault(packageName,[])

        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)

    finally:
        imp.release_lock()
+
def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    imp.acquire_lock()
    try:
        # depth-first over the declared namespace children of `parent`
        for package in _namespace_packages.get(parent,()):
            subpath = _handle_ns(package, path_item)
            if subpath: fixup_namespace_packages(subpath,package)
    finally:
        imp.release_lock()
+
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""

    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    for item in module.__path__:
        if _normalize_cached(item)==normalized:
            break
    else:
        # Only return the path if it's not already there
        return subpath
    # implicit None: an equivalent subpath is already on __path__

register_namespace_handler(ImpWrapper,file_ns_handler)
register_namespace_handler(zipimport.zipimporter,file_ns_handler)
+
+
def null_ns_handler(importer, path_item, packageName, module):
    # Unknown importer types contribute no namespace subpaths.
    return None

register_namespace_handler(object,null_ns_handler)
+
+
+def normalize_path(filename):
+ """Normalize a file/dir name for comparison purposes"""
+ return os.path.normcase(os.path.realpath(filename))
+
+def _normalize_cached(filename,_cache={}):
+ try:
+ return _cache[filename]
+ except KeyError:
+ _cache[filename] = result = normalize_path(filename)
+ return result
+
+def _set_parent_ns(packageName):
+ parts = packageName.split('.')
+ name = parts.pop()
+ if parts:
+ parent = '.'.join(parts)
+ setattr(sys.modules[parent], name, sys.modules[packageName])
+
+
def yield_lines(strs):
    """Yield non-empty/non-comment lines of a ``basestring`` or sequence"""
    if isinstance(strs,basestring):
        for s in strs.splitlines():
            s = s.strip()
            if s and not s.startswith('#'):  # skip blank lines/comments
                yield s
    else:
        # any other iterable: flatten recursively
        for ss in strs:
            for s in yield_lines(ss):
                yield s
+
# Tokenizer building blocks for requirement strings and egg names.  Each
# name is a bound `match` method, usable as TOKEN(line, pos).
LINE_END = re.compile(r"\s*(#.*)?$").match         # whitespace and comment
CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match    # line continuation
DISTRO = re.compile(r"\s*((\w|[-.])+)").match    # Distribution or extra
VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match  # ver. info
COMMA = re.compile(r"\s*,").match                # comma between items
OBRACKET = re.compile(r"\s*\[").match
CBRACKET = re.compile(r"\s*\]").match
MODULE = re.compile(r"\w+(\.\w+)*$").match
EGG_NAME = re.compile(
    # name, then optional -version, -pyversion and -platform segments
    r"(?P<name>[^-]+)"
    r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?",
    re.VERBOSE | re.IGNORECASE
).match
+
+component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
+replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
+
+def _parse_version_parts(s):
+ for part in component_re.split(s):
+ part = replace(part,part)
+ if not part or part=='.':
+ continue
+ if part[:1] in '0123456789':
+ yield part.zfill(8) # pad for numeric comparison
+ else:
+ yield '*'+part
+
+ yield '*final' # ensure that alpha/beta/candidate are before final
+
def parse_version(s):
    """Convert a version string to a chronologically-sortable key

    This is a rough cross between distutils' StrictVersion and LooseVersion;
    if you give it versions that would work with StrictVersion, then it behaves
    the same; otherwise it acts like a slightly-smarter LooseVersion. It is
    *possible* to create pathological version coding schemes that will fool
    this parser, but they should be very rare in practice.

    The returned value will be a tuple of strings.  Numeric portions of the
    version are padded to 8 digits so they will compare numerically, but
    without relying on how numbers compare relative to strings.  Dots are
    dropped, but dashes are retained.  Trailing zeros between alpha segments
    or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
    "2.4". Alphanumeric parts are lower-cased.

    The algorithm assumes that strings like "-" and any alpha string that
    alphabetically follows "final"  represents a "patch level".  So, "2.4-1"
    is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
    considered newer than "2.4-1", which in turn is newer than "2.4".

    Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
    come before "final" alphabetically) are assumed to be pre-release versions,
    so that the version "2.4" is considered newer than "2.4a1".

    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
    "rc" are treated as if they were "c", i.e. as though they were release
    candidates, and therefore are not as new as a version string that does not
    contain them, and "dev" is replaced with an '@' so that it sorts lower than
    than any other pre-release tag.
    """
    parts = []
    for part in _parse_version_parts(s.lower()):
        if part.startswith('*'):
            if part<'*final':   # remove '-' before a prerelease tag
                while parts and parts[-1]=='*final-': parts.pop()
            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1]=='00000000':
                parts.pop()
        parts.append(part)
    return tuple(parts)
+
class EntryPoint(object):
    """Object representing an advertised importable object"""

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        self.module_name = module_name
        self.attrs = tuple(attrs)
        # validate/normalize extras by round-tripping through the
        # requirement parser
        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
        self.dist = dist

    def __str__(self):
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s

    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, env=None, installer=None):
        """Import and return the advertised object, requiring extras first."""
        if require: self.require(env, installer)
        entry = __import__(self.module_name, globals(),globals(), ['__name__'])
        for attr in self.attrs:
            try:
                entry = getattr(entry,attr)
            except AttributeError:
                raise ImportError("%r has no %r attribute" % (entry,attr))
        return entry

    def require(self, env=None, installer=None):
        """Ensure this entry point's dist and its extras are importable."""
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)
        # NOTE(review): relies on eager map() (Python 2) for the
        # working_set.add side effect.
        map(working_set.add,
            working_set.resolve(self.dist.requires(self.extras),env,installer))

    #@classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1,extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        try:
            attrs = extras = ()
            name,value = src.split('=',1)
            if '[' in value:
                value,extras = value.split('[',1)
                req = Requirement.parse("x["+extras)
                if req.specs: raise ValueError
                extras = req.extras
            if ':' in value:
                value,attrs = value.split(':',1)
                if not MODULE(attrs.rstrip()):
                    raise ValueError
                attrs = attrs.rstrip().split('.')
        except ValueError:
            raise ValueError(
                "EntryPoint must be in 'name=module:attrs [extras]' format",
                src
            )
        else:
            return cls(name.strip(), value.strip(), attrs, extras, dist)

    parse = classmethod(parse)

    #@classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name]=ep
        return this

    parse_group = classmethod(parse_group)

    #@classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        if isinstance(data,dict):
            data = data.items()
        else:
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                # lines before any [section] header are only legal if blank
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps

    parse_map = classmethod(parse_map)
+
+
+def _remove_md5_fragment(location):
+ if not location:
+ return ''
+ parsed = urlparse(location)
+ if parsed[-1].startswith('md5='):
+ return urlunparse(parsed[:-1] + ('',))
+ return location
+
+
class Distribution(object):
    """Wrap an actual or potential sys.path entry w/metadata"""

    def __init__(self,
        location=None, metadata=None, project_name=None, version=None,
        py_version=PY_MAJOR, platform=None, precedence = EGG_DIST
    ):
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            # explicit version overrides lazy PKG-INFO discovery
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        self._provider = metadata or empty_provider

    #@classmethod
    def from_location(cls,location,basename,metadata=None,**kw):
        """Build a Distribution, parsing name/version info from `basename`."""
        project_name, version, py_version, platform = [None]*4
        basename, ext = os.path.splitext(basename)
        if ext.lower() in (".egg",".egg-info"):
            match = EGG_NAME(basename)
            if match:
                project_name, version, py_version, platform = match.group(
                    'name','ver','pyver','plat'
                )
        return cls(
            location, metadata, project_name=project_name, version=version,
            py_version=py_version, platform=platform, **kw
        )
    from_location = classmethod(from_location)

    # Composite sort/equality key; parsed_version may be absent until the
    # metadata is loaded, hence the getattr default.
    hashcmp = property(
        lambda self: (
            getattr(self,'parsed_version',()),
            self.precedence,
            self.key,
            _remove_md5_fragment(self.location),
            self.py_version,
            self.platform
        )
    )
    def __hash__(self): return hash(self.hashcmp)
    def __lt__(self, other):
        return self.hashcmp < other.hashcmp
    def __le__(self, other):
        return self.hashcmp <= other.hashcmp
    def __gt__(self, other):
        return self.hashcmp > other.hashcmp
    def __ge__(self, other):
        return self.hashcmp >= other.hashcmp
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            # It's not a Distribution, so they are not equal
            return False
        return self.hashcmp == other.hashcmp
    def __ne__(self, other):
        return not self == other

    # These properties have to be lazy so that we don't have to load any
    # metadata until/unless it's actually needed.  (i.e., some distributions
    # may not know their name or version without loading PKG-INFO)

    #@property
    def key(self):
        try:
            return self._key
        except AttributeError:
            self._key = key = self.project_name.lower()
            return key
    key = property(key)

    #@property
    def parsed_version(self):
        try:
            return self._parsed_version
        except AttributeError:
            self._parsed_version = pv = parse_version(self.version)
            return pv
    parsed_version = property(parsed_version)

    #@property
    def version(self):
        try:
            return self._version
        except AttributeError:
            # fall back to the Version: header of PKG-INFO
            for line in self._get_metadata('PKG-INFO'):
                if line.lower().startswith('version:'):
                    self._version = safe_version(line.split(':',1)[1].strip())
                    return self._version
            else:
                raise ValueError(
                    "Missing 'Version:' header and/or PKG-INFO file", self
                )
    version = property(version)

    #@property
    def _dep_map(self):
        # lazily parse requires.txt/depends.txt into
        # {extra_name_or_None: [Requirement, ...]}
        try:
            return self.__dep_map
        except AttributeError:
            dm = self.__dep_map = {None: []}
            for name in 'requires.txt', 'depends.txt':
                for extra,reqs in split_sections(self._get_metadata(name)):
                    if extra: extra = safe_extra(extra)
                    dm.setdefault(extra,[]).extend(parse_requirements(reqs))
            return dm
    _dep_map = property(_dep_map)

    def requires(self,extras=()):
        """List of Requirements needed for this distro if `extras` are used"""
        dm = self._dep_map
        deps = []
        deps.extend(dm.get(None,()))
        for ext in extras:
            try:
                deps.extend(dm[safe_extra(ext)])
            except KeyError:
                raise UnknownExtra(
                    "%s has no such extra feature %r" % (self, ext)
                )
        return deps

    def _get_metadata(self,name):
        # empty iterator when the metadata file is absent
        if self.has_metadata(name):
            for line in self.get_metadata_lines(name):
                yield line

    def activate(self,path=None):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None: path = sys.path
        self.insert_on(path)
        if path is sys.path:
            fixup_namespace_packages(self.location)
            # NOTE(review): relies on eager map() (Python 2) to perform
            # the declare_namespace side effects.
            map(declare_namespace, self._get_metadata('namespace_packages.txt'))

    def egg_name(self):
        """Return what this distribution's standard .egg filename should be"""
        filename = "%s-%s-py%s" % (
            to_filename(self.project_name), to_filename(self.version),
            self.py_version or PY_MAJOR
        )

        if self.platform:
            filename += '-'+self.platform
        return filename

    def __repr__(self):
        if self.location:
            return "%s (%s)" % (self,self.location)
        else:
            return str(self)

    def __str__(self):
        try: version = getattr(self,'version',None)
        except ValueError: version = None
        version = version or "[unknown version]"
        return "%s %s" % (self.project_name,version)

    def __getattr__(self,attr):
        """Delegate all unrecognized public attributes to .metadata provider"""
        if attr.startswith('_'):
            raise AttributeError,attr
        return getattr(self._provider, attr)

    #@classmethod
    def from_filename(cls,filename,metadata=None, **kw):
        return cls.from_location(
            _normalize_cached(filename), os.path.basename(filename), metadata,
            **kw
        )
    from_filename = classmethod(from_filename)

    def as_requirement(self):
        """Return a ``Requirement`` that matches this distribution exactly"""
        return Requirement.parse('%s==%s' % (self.project_name, self.version))

    def load_entry_point(self, group, name):
        """Return the `name` entry point of `group` or raise ImportError"""
        ep = self.get_entry_info(group,name)
        if ep is None:
            raise ImportError("Entry point %r not found" % ((group,name),))
        return ep.load()

    def get_entry_map(self, group=None):
        """Return the entry point map for `group`, or the full entry map"""
        try:
            ep_map = self._ep_map
        except AttributeError:
            ep_map = self._ep_map = EntryPoint.parse_map(
                self._get_metadata('entry_points.txt'), self
            )
        if group is not None:
            return ep_map.get(group,{})
        return ep_map

    def get_entry_info(self, group, name):
        """Return the EntryPoint object for `group`+`name`, or ``None``"""
        return self.get_entry_map(group).get(name)

    def insert_on(self, path, loc = None):
        """Insert self.location in path before its nearest parent directory"""

        loc = loc or self.location

        # refuse to mix a 0.7-series setuptools with this distribute fork
        if self.project_name == 'setuptools':
            try:
                version = self.version
            except ValueError:
                version = ''
            if '0.7' in version:
                raise ValueError(
                    "A 0.7-series setuptools cannot be installed "
                    "with distribute. Found one at %s" % str(self.location))

        if not loc:
            return

        if path is sys.path:
            self.check_version_conflict()

        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        # NOTE(review): relies on map() returning a list (Python 2);
        # npath is mutated below.
        npath= map(_normalize_cached, path)

        bp = None
        for p, item in enumerate(npath):
            if item==nloc:
                break
            elif item==bdir and self.precedence==EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                path.insert(p, loc)
                npath.insert(p, nloc)
                break
        else:
            # not found anywhere: append and we're done
            path.append(loc)
            return

        # p is the spot where we found or inserted loc; now remove duplicates
        while 1:
            try:
                np = npath.index(nloc, p+1)
            except ValueError:
                break
            else:
                del npath[np], path[np]
                p = np  # ha!

        return

    def check_version_conflict(self):
        """Warn if a top-level module of this dist was already imported
        from somewhere else."""
        if self.key=='distribute':
            return  # ignore the inevitable setuptools self-conflicts :(

        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
        loc = normalize_path(self.location)
        for modname in self._get_metadata('top_level.txt'):
            if (modname not in sys.modules or modname in nsp
                or modname in _namespace_packages
            ):
                continue
            if modname in ('pkg_resources', 'setuptools', 'site'):
                continue
            fn = getattr(sys.modules[modname], '__file__', None)
            if fn and (normalize_path(fn).startswith(loc) or
                       fn.startswith(self.location)):
                continue
            issue_warning(
                "Module %s was already imported from %s, but %s is being added"
                " to sys.path" % (modname, fn, self.location),
            )

    def has_version(self):
        # a missing/unparseable PKG-INFO makes .version raise ValueError
        try:
            self.version
        except ValueError:
            issue_warning("Unbuilt egg for "+repr(self))
            return False
        return True

    def clone(self,**kw):
        """Copy this distribution, substituting in any changed keyword args"""
        for attr in (
            'project_name', 'version', 'py_version', 'platform', 'location',
            'precedence'
        ):
            kw.setdefault(attr, getattr(self,attr,None))
        kw.setdefault('metadata', self._provider)
        return self.__class__(**kw)

    #@property
    def extras(self):
        # names of all optional feature sets declared by this distribution
        return [dep for dep in self._dep_map if dep]
    extras = property(extras)
+
+
def issue_warning(*args, **kw):
    """Emit a warning attributed to the first caller outside this module."""
    stack_level = 1
    module_globals = globals()
    try:
        # find the first stack frame that is *not* code in
        # the pkg_resources module, to use for the warning
        while sys._getframe(stack_level).f_globals is module_globals:
            stack_level += 1
    except ValueError:
        # ran off the top of the stack; use whatever level we reached
        pass
    from warnings import warn
    warn(stacklevel=stack_level + 1, *args, **kw)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be an instance of ``basestring``, or a (possibly-nested)
    iterable thereof.
    """
    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))

    def scan_list(ITEM,TERMINATOR,line,p,groups,item_name):
        # Scan a comma-separated list of ITEM tokens ending at TERMINATOR,
        # consuming continuation lines as needed; returns the (possibly
        # advanced) line, the new position, and the collected items.

        items = []

        while not TERMINATOR(line,p):
            if CONTINUE(line,p):
                try:
                    line = lines.next(); p = 0
                except StopIteration:
                    raise ValueError(
                        "\\ must not appear on the last nonblank line"
                    )

            match = ITEM(line,p)
            if not match:
                raise ValueError("Expected "+item_name+" in",line,"at",line[p:])

            items.append(match.group(*groups))
            p = match.end()

            match = COMMA(line,p)
            if match:
                p = match.end()  # skip the comma
            elif not TERMINATOR(line,p):
                raise ValueError(
                    "Expected ',' or end-of-list in",line,"at",line[p:]
                )

        match = TERMINATOR(line,p)
        if match: p = match.end()  # skip the terminator, if any
        return line, p, items

    for line in lines:
        match = DISTRO(line)
        if not match:
            raise ValueError("Missing distribution spec", line)
        project_name = match.group(1)
        p = match.end()
        extras = []

        match = OBRACKET(line,p)
        if match:
            # optional [extra1,extra2] part
            p = match.end()
            line, p, extras = scan_list(
                DISTRO, CBRACKET, line, p, (1,), "'extra' name"
            )

        # remaining version specifiers run to end-of-line
        line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec")
        specs = [(op,safe_version(val)) for op,val in specs]
        yield Requirement(project_name, specs, extras)
+
+
+def _sort_dists(dists):
+ """Sort `dists` in place so the highest-precedence entry comes first."""
+ tmp = [(dist.hashcmp,dist) for dist in dists]
+ tmp.sort()
+ # assign the ascending sort back reversed: best/newest first
+ dists[::-1] = [d for hc,d in tmp]
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+class Requirement:
+ def __init__(self, project_name, specs, extras):
+ """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
+ self.unsafe_name, project_name = project_name, safe_name(project_name)
+ self.project_name, self.key = project_name, project_name.lower()
+ # Each spec becomes (parsed_version, transition_row, op, raw_version);
+ # transition rows come from the module-level `state_machine` table.
+ index = [(parse_version(v),state_machine[op],op,v) for op,v in specs]
+ index.sort()
+ self.specs = [(op,ver) for parsed,trans,op,ver in index]
+ self.index, self.extras = index, tuple(map(safe_extra,extras))
+ # Equality/hash key: name, parsed constraints, and extras.
+ self.hashCmp = (
+ self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]),
+ frozenset(self.extras)
+ )
+ self.__hash = hash(self.hashCmp)
+
+ def __str__(self):
+ specs = ','.join([''.join(s) for s in self.specs])
+ extras = ','.join(self.extras)
+ if extras: extras = '[%s]' % extras
+ return '%s%s%s' % (self.project_name, extras, specs)
+
+ def __eq__(self,other):
+ return isinstance(other,Requirement) and self.hashCmp==other.hashCmp
+
+ def __contains__(self,item):
+ # `item` may be a Distribution, a version string, or a parsed version.
+ if isinstance(item,Distribution):
+ if item.key <> self.key: return False
+ if self.index: item = item.parsed_version # only get if we need it
+ elif isinstance(item,basestring):
+ item = parse_version(item)
+ last = None
+ compare = lambda a, b: (a > b) - (a < b) # -1, 0, 1
+ for parsed,trans,op,ver in self.index:
+ # Run the operator's state-machine row; see `state_machine` below.
+ action = trans[compare(item,parsed)] # Indexing: 0, 1, -1
+ if action=='F': return False
+ elif action=='T': return True
+ elif action=='+': last = True
+ elif action=='-' or last is None: last = False
+ if last is None: last = True # no rules encountered
+ return last
+
+
+ def __hash__(self):
+ return self.__hash
+
+ def __repr__(self): return "Requirement.parse(%r)" % str(self)
+
+ #@staticmethod
+ def parse(s, replacement=True):
+ reqs = list(parse_requirements(s))
+ if reqs:
+ if len(reqs) == 1:
+ founded_req = reqs[0]
+ # if asked for setuptools distribution
+ # and if distribute is installed, we want to give
+ # distribute instead
+ if _override_setuptools(founded_req) and replacement:
+ distribute = list(parse_requirements('distribute'))
+ if len(distribute) == 1:
+ return distribute[0]
+ return founded_req
+ else:
+ return founded_req
+
+ raise ValueError("Expected only one requirement", s)
+ raise ValueError("No requirements found", s)
+
+ parse = staticmethod(parse)
+
+state_machine = {
+ # Transition table for Requirement.__contains__.  For each comparison
+ # operator, a 3-character string indexed by cmp(candidate, spec):
+ # position 0 = equal, 1 = greater, -1 = less.  'T'/'F' decide the
+ # membership immediately, '+'/'-' set a tentative result, and '.'
+ # leaves the previous tentative result untouched.
+ # =><
+ '<' : '--T',
+ '<=': 'T-T',
+ '>' : 'F+F',
+ '>=': 'T+F',
+ '==': 'T..',
+ '!=': 'F++',
+}
+
+
+def _override_setuptools(req):
+ """Return True when distribute wants to override a setuptools dependency.
+
+ We want to override when the requirement is setuptools and the version is
+ a variant of 0.6.
+
+ """
+ if req.project_name == 'setuptools':
+ if not len(req.specs):
+ # Just setuptools: ok
+ return True
+ for comparator, version in req.specs:
+ if comparator in ['==', '>=', '>']:
+ if '0.7' in version:
+ # We want some setuptools not from the 0.6 series.
+ # NOTE(review): substring match is crude ('10.7' would
+ # also hit); acceptable for the 0.6-era versions this
+ # check targets, but confirm before reusing elsewhere.
+ return False
+ return True
+ return False
+
+
+def _get_mro(cls):
+ """Get an mro for a type or classic class"""
+ if not isinstance(cls,type):
+ # Classic (old-style) class: graft `object` onto it so it gains an
+ # __mro__, then drop the synthetic subclass itself from the result.
+ class cls(cls,object): pass
+ return cls.__mro__[1:]
+ return cls.__mro__
+
+def _find_adapter(registry, ob):
+ """Return an adapter factory for `ob` from `registry`"""
+ # Walk the MRO so the most specific registered type wins; falls off
+ # the end (returning None implicitly) when nothing is registered.
+ for t in _get_mro(getattr(ob, '__class__', type(ob))):
+ if t in registry:
+ return registry[t]
+
+
+def ensure_directory(path):
+ """Ensure that the parent directory of `path` exists"""
+ dirname = os.path.dirname(path)
+ # NOTE(review): a bare filename gives dirname == '' and
+ # os.makedirs('') would raise OSError; callers appear to always pass
+ # nested paths -- confirm before relying on bare names here.
+ if not os.path.isdir(dirname):
+ os.makedirs(dirname)
+
+def split_sections(s):
+ """Split a string or iterable thereof into (section,content) pairs
+
+ Each ``section`` is a stripped version of the section header ("[section]")
+ and each ``content`` is a list of stripped lines excluding blank lines and
+ comment-only lines. If there are any such lines before the first section
+ header, they're returned in a first ``section`` of ``None``.
+ """
+ section = None
+ content = []
+ for line in yield_lines(s):
+ if line.startswith("["):
+ if line.endswith("]"):
+ # flush the previous section, but skip an empty prologue
+ if section or content:
+ yield section, content
+ section = line[1:-1].strip()
+ content = []
+ else:
+ raise ValueError("Invalid section heading", line)
+ else:
+ content.append(line)
+
+ # wrap up last segment
+ yield section, content
+
+def _mkstemp(*args,**kw):
+ """mkstemp() variant that bypasses the sandboxed os.open, if active."""
+ from tempfile import mkstemp
+ old_open = os.open
+ try:
+ os.open = os_open # temporarily bypass sandboxing
+ return mkstemp(*args,**kw)
+ finally:
+ os.open = old_open # and then put it back
+
+
+# Set up global resource manager
+_manager = ResourceManager()
+def _initialize(g):
+ # Re-export every public ResourceManager method as a module-level name.
+ for name in dir(_manager):
+ if not name.startswith('_'):
+ g[name] = getattr(_manager, name)
+_initialize(globals())
+
+# Prepare the master working set and make the ``require()`` API available
+working_set = WorkingSet()
+try:
+ # Does the main program list any requirements?
+ from __main__ import __requires__
+except ImportError:
+ pass # No: just use the default working set based on sys.path
+else:
+ # Yes: ensure the requirements are met, by prefixing sys.path if necessary
+ try:
+ working_set.require(__requires__)
+ except VersionConflict: # try it without defaults already on sys.path
+ working_set = WorkingSet([]) # by starting with an empty path
+ for dist in working_set.resolve(
+ parse_requirements(__requires__), Environment()
+ ):
+ working_set.add(dist)
+ for entry in sys.path: # add any missing entries from sys.path
+ if entry not in working_set.entries:
+ working_set.add_entry(entry)
+ sys.path[:] = working_set.entries # then copy back to sys.path
+
+# Convenience module-level aliases for the master working set's API
+require = working_set.require
+iter_entry_points = working_set.iter_entry_points
+add_activation_listener = working_set.subscribe
+run_script = working_set.run_script
+run_main = run_script # backward compatibility
+# Activate all distributions already on sys.path, and ensure that
+# all distributions added to the working set in the future (e.g. by
+# calling ``require()``) will get activated as well.
+add_activation_listener(lambda dist: dist.activate())
+working_set.entries=[]; map(working_set.add_entry,sys.path) # match order
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/__init__.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/__init__.py
new file mode 100755
index 00000000..9de373f9
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/__init__.py
@@ -0,0 +1,104 @@
+"""Extensions to the 'distutils' for large or complex distributions"""
+from setuptools.extension import Extension, Library
+from setuptools.dist import Distribution, Feature, _get_unpatched
+import distutils.core, setuptools.command
+from setuptools.depends import Require
+from distutils.core import Command as _Command
+from distutils.util import convert_path
+import os
+import sys
+
+__version__ = '0.6'
+__all__ = [
+ 'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require',
+ 'find_packages'
+]
+
+# This marker is used to simplify the process that checks is the
+# setuptools package was installed by the Setuptools project
+# or by the Distribute project, in case Setuptools creates
+# a distribution with the same version.
+#
+# The distribute_setup script for instance, will check if this
+# attribute is present to decide whether to reinstall the package
+# or not.
+_distribute = True
+
+bootstrap_install_from = None
+
+# If we run 2to3 on .py files, should we also convert docstrings?
+# Default: yes; assume that we can detect doctests reliably
+run_2to3_on_doctests = True
+# Standard package names for fixer packages
+lib2to3_fixer_packages = ['lib2to3.fixes']
+
+def find_packages(where='.', exclude=()):
+ """Return a list all Python packages found within directory 'where'
+
+ 'where' should be supplied as a "cross-platform" (i.e. URL-style) path; it
+ will be converted to the appropriate local path syntax. 'exclude' is a
+ sequence of package names to exclude; '*' can be used as a wildcard in the
+ names, such that 'foo.*' will exclude all subpackages of 'foo' (but not
+ 'foo' itself).
+ """
+ out = []
+ # breadth-first walk; `prefix` is the dotted package path built so far
+ stack=[(convert_path(where), '')]
+ while stack:
+ where,prefix = stack.pop(0)
+ for name in os.listdir(where):
+ fn = os.path.join(where,name)
+ # a package is a dot-free directory containing __init__.py
+ if ('.' not in name and os.path.isdir(fn) and
+ os.path.isfile(os.path.join(fn,'__init__.py'))
+ ):
+ out.append(prefix+name); stack.append((fn,prefix+name+'.'))
+ # filter out excluded patterns plus the bootstrap helper modules
+ for pat in list(exclude)+['ez_setup', 'distribute_setup']:
+ from fnmatch import fnmatchcase
+ out = [item for item in out if not fnmatchcase(item,pat)]
+ return out
+
+setup = distutils.core.setup
+
+_Command = _get_unpatched(_Command)
+
+class Command(_Command):
+ __doc__ = _Command.__doc__
+
+ # when True, distutils hands leftover command-line args to the command
+ command_consumes_arguments = False
+
+ def __init__(self, dist, **kw):
+ # Add support for keyword arguments
+ _Command.__init__(self,dist)
+ for k,v in kw.items():
+ setattr(self,k,v)
+
+ def reinitialize_command(self, command, reinit_subcommands=0, **kw):
+ # Like the distutils version, but applies keyword overrides to the
+ # freshly reinitialized command object before returning it.
+ cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
+ for k,v in kw.items():
+ setattr(cmd,k,v) # update command with keywords
+ return cmd
+
+import distutils.core
+distutils.core.Command = Command # we can't patch distutils.cmd, alas
+
+def findall(dir = os.curdir):
+ """Find all files under 'dir' and return the list of full filenames
+ (relative to 'dir').
+ """
+ all_files = []
+ for base, dirs, files in os.walk(dir):
+ # strip a leading './' so results come back relative to `dir`
+ if base==os.curdir or base.startswith(os.curdir+os.sep):
+ base = base[2:]
+ if base:
+ files = [os.path.join(base, f) for f in files]
+ all_files.extend(filter(os.path.isfile, files))
+ return all_files
+
+import distutils.filelist
+distutils.filelist.findall = findall # fix findall bug in distutils.
+
+# sys.dont_write_bytecode was introduced in Python 2.6.
+if ((hasattr(sys, "dont_write_bytecode") and sys.dont_write_bytecode) or
+ (not hasattr(sys, "dont_write_bytecode") and os.environ.get("PYTHONDONTWRITEBYTECODE"))):
+ _dont_write_bytecode = True
+else:
+ _dont_write_bytecode = False
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/archive_util.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/archive_util.py
new file mode 100755
index 00000000..ab786f3d
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/archive_util.py
@@ -0,0 +1,208 @@
+"""Utilities for extracting common archive formats"""
+
+
+__all__ = [
+ "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
+ "UnrecognizedFormat", "extraction_drivers", "unpack_directory",
+]
+
+import zipfile, tarfile, os, shutil
+from pkg_resources import ensure_directory
+from distutils.errors import DistutilsError
+
+class UnrecognizedFormat(DistutilsError):
+ """Couldn't recognize the archive type"""
+
+def default_filter(src,dst):
+ """The default progress/filter callback; accepts every file by
+ returning the destination path `dst` unchanged."""
+ return dst
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+def unpack_archive(filename, extract_dir, progress_filter=default_filter,
+ drivers=None
+):
+ """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
+
+ `progress_filter` is a function taking two arguments: a source path
+ internal to the archive ('/'-separated), and a filesystem path where it
+ will be extracted. The callback must return the desired extract path
+ (which may be the same as the one passed in), or else ``None`` to skip
+ that file or directory. The callback can thus be used to report on the
+ progress of the extraction, as well as to filter the items extracted or
+ alter their extraction paths.
+
+ `drivers`, if supplied, must be a non-empty sequence of functions with the
+ same signature as this function (minus the `drivers` argument), that raise
+ ``UnrecognizedFormat`` if they do not support extracting the designated
+ archive type. The `drivers` are tried in sequence until one is found that
+ does not raise an error, or until all are exhausted (in which case
+ ``UnrecognizedFormat`` is raised). If you do not supply a sequence of
+ drivers, the module's ``extraction_drivers`` constant will be used, which
+ means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that
+ order.
+ """
+ for driver in drivers or extraction_drivers:
+ try:
+ driver(filename, extract_dir, progress_filter)
+ except UnrecognizedFormat:
+ continue
+ else:
+ return
+ else:
+ # for/else: every driver declined the file
+ raise UnrecognizedFormat(
+ "Not a recognized archive type: %s" % filename
+ )
+
+
+
+
+
+
+
+def unpack_directory(filename, extract_dir, progress_filter=default_filter):
+ """"Unpack" a directory, using the same interface as for archives
+
+ Raises ``UnrecognizedFormat`` if `filename` is not a directory
+ """
+ if not os.path.isdir(filename):
+ raise UnrecognizedFormat("%s is not a directory" % (filename,))
+
+ paths = {filename:('',extract_dir)}
+ for base, dirs, files in os.walk(filename):
+ src,dst = paths[base]
+ for d in dirs:
+ paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d)
+ for f in files:
+ name = src+f
+ target = os.path.join(dst,f)
+ target = progress_filter(src+f, target)
+ if not target:
+ continue # skip non-files
+ ensure_directory(target)
+ f = os.path.join(base,f)
+ shutil.copyfile(f, target)
+ shutil.copystat(f, target)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
+ """Unpack zip `filename` to `extract_dir`
+
+ Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined
+ by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation
+ of the `progress_filter` argument.
+ """
+
+ if not zipfile.is_zipfile(filename):
+ raise UnrecognizedFormat("%s is not a zip file" % (filename,))
+
+ z = zipfile.ZipFile(filename)
+ try:
+ for info in z.infolist():
+ name = info.filename
+
+ # don't extract absolute paths or ones with .. in them
+ # NOTE(review): the substring test also rejects benign names
+ # like 'a..b'; over-strict but safe against path traversal.
+ if name.startswith('/') or '..' in name:
+ continue
+
+ target = os.path.join(extract_dir, *name.split('/'))
+ target = progress_filter(name, target)
+ if not target:
+ continue
+ if name.endswith('/'):
+ # directory
+ ensure_directory(target)
+ else:
+ # file
+ ensure_directory(target)
+ data = z.read(info.filename)
+ f = open(target,'wb')
+ try:
+ f.write(data)
+ finally:
+ f.close()
+ del data
+ finally:
+ z.close()
+
+
+def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
+ """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
+
+ Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
+ by ``tarfile.open()``). See ``unpack_archive()`` for an explanation
+ of the `progress_filter` argument.
+ """
+
+ try:
+ tarobj = tarfile.open(filename)
+ except tarfile.TarError:
+ raise UnrecognizedFormat(
+ "%s is not a compressed or uncompressed tar file" % (filename,)
+ )
+
+ try:
+ tarobj.chown = lambda *args: None # don't do any chowning!
+ for member in tarobj:
+ # only regular files and directories; symlinks/devices skipped
+ if member.isfile() or member.isdir():
+ name = member.name
+ # don't extract absolute paths or ones with .. in them
+ if not name.startswith('/') and '..' not in name:
+ dst = os.path.join(extract_dir, *name.split('/'))
+ dst = progress_filter(name, dst)
+ if dst:
+ if dst.endswith(os.sep):
+ dst = dst[:-1]
+ try:
+ tarobj._extract_member(member,dst) # XXX Ugh
+ except tarfile.ExtractError:
+ pass # chown/chmod/mkfifo/mknode/makedev failed
+ return True
+ finally:
+ tarobj.close()
+
+
+
+
+extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
+
+
+
+
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/cli.exe b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/cli.exe
new file mode 100644
index 00000000..8906ff77
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/cli.exe
Binary files differ
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/__init__.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/__init__.py
new file mode 100755
index 00000000..152406b3
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/__init__.py
@@ -0,0 +1,22 @@
+__all__ = [
+ 'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop',
+ 'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts',
+ 'sdist', 'setopt', 'test', 'upload', 'install_egg_info', 'install_scripts',
+ 'register', 'bdist_wininst', 'upload_docs',
+]
+
+from setuptools.command import install_scripts
+import sys
+
+if sys.version>='2.5':
+ # In Python 2.5 and above, distutils includes its own upload command
+ __all__.remove('upload')
+
+from distutils.command.bdist import bdist
+
+
+if 'egg' not in bdist.format_commands:
+ bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
+ bdist.format_commands.append('egg')
+
+del bdist, sys
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/alias.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/alias.py
new file mode 100755
index 00000000..f5368b29
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/alias.py
@@ -0,0 +1,82 @@
+import distutils, os
+from setuptools import Command
+from distutils.util import convert_path
+from distutils import log
+from distutils.errors import *
+from setuptools.command.setopt import edit_config, option_base, config_file
+
+def shquote(arg):
+ """Quote an argument for later parsing by shlex.split()"""
+ # repr() yields a quoted Python string literal, which shlex accepts
+ for c in '"', "'", "\\", "#":
+ if c in arg: return repr(arg)
+ if arg.split()<>[arg]:
+ # contains whitespace
+ return repr(arg)
+ return arg
+
+
+class alias(option_base):
+ """Define a shortcut that invokes one or more commands"""
+
+ description = "define a shortcut to invoke one or more commands"
+ command_consumes_arguments = True
+
+ user_options = [
+ ('remove', 'r', 'remove (unset) the alias'),
+ ] + option_base.user_options
+
+ boolean_options = option_base.boolean_options + ['remove']
+
+ def initialize_options(self):
+ option_base.initialize_options(self)
+ self.args = None
+ self.remove = None
+
+ def finalize_options(self):
+ option_base.finalize_options(self)
+ if self.remove and len(self.args)<>1:
+ raise DistutilsOptionError(
+ "Must specify exactly one argument (the alias name) when "
+ "using --remove"
+ )
+
+ def run(self):
+ aliases = self.distribution.get_option_dict('aliases')
+
+ if not self.args:
+ print "Command Aliases"
+ print "---------------"
+ for alias in aliases:
+ print "setup.py alias", format_alias(alias, aliases)
+ return
+
+ elif len(self.args)==1:
+ alias, = self.args
+ if self.remove:
+ command = None
+ elif alias in aliases:
+ print "setup.py alias", format_alias(alias, aliases)
+ return
+ else:
+ print "No alias definition found for %r" % alias
+ return
+ else:
+ alias = self.args[0]
+ command = ' '.join(map(shquote,self.args[1:]))
+
+ edit_config(self.filename, {'aliases': {alias:command}}, self.dry_run)
+
+
+def format_alias(name, aliases):
+ # Render one alias definition as the command line that would recreate
+ # it, including the option selecting its source config file.
+ source, command = aliases[name]
+ if source == config_file('global'):
+ source = '--global-config '
+ elif source == config_file('user'):
+ source = '--user-config '
+ elif source == config_file('local'):
+ source = '' # local setup.cfg is the default target
+ else:
+ source = '--filename=%r' % source
+ return source+name+' '+command
+
+
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_egg.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_egg.py
new file mode 100755
index 00000000..90e1f525
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_egg.py
@@ -0,0 +1,540 @@
+"""setuptools.command.bdist_egg
+
+Build .egg distributions"""
+
+# This module should be kept compatible with Python 2.3
+import sys, os, marshal
+from setuptools import Command
+from distutils.dir_util import remove_tree, mkpath
+try:
+ from distutils.sysconfig import get_python_version, get_python_lib
+except ImportError:
+ from sysconfig import get_python_version
+ from distutils.sysconfig import get_python_lib
+
+from distutils import log
+from distutils.errors import DistutilsSetupError
+from pkg_resources import get_build_platform, Distribution, ensure_directory
+from pkg_resources import EntryPoint
+from types import CodeType
+from setuptools.extension import Library
+
+def strip_module(filename):
+ # Strip the extension and a trailing 'module' suffix to get the base
+ # name used for an extension module's stub loader.
+ if '.' in filename:
+ filename = os.path.splitext(filename)[0]
+ if filename.endswith('module'):
+ filename = filename[:-6]
+ return filename
+
+def write_stub(resource, pyfile):
+ # Write a stub loader `pyfile` that, when imported from a zipped egg,
+ # resolves the real extension module `resource` to a filesystem path
+ # and loads it with imp.load_dynamic.
+ f = open(pyfile,'w')
+ f.write('\n'.join([
+ "def __bootstrap__():",
+ " global __bootstrap__, __loader__, __file__",
+ " import sys, pkg_resources, imp",
+ " __file__ = pkg_resources.resource_filename(__name__,%r)"
+ % resource,
+ " __loader__ = None; del __bootstrap__, __loader__",
+ " imp.load_dynamic(__name__,__file__)",
+ "__bootstrap__()",
+ "" # terminal \n
+ ]))
+ f.close()
+
+# stub __init__.py for packages distributed without one
+NS_PKG_STUB = '__import__("pkg_resources").declare_namespace(__name__)'
+
+class bdist_egg(Command):
+
+ description = "create an \"egg\" distribution"
+
+ user_options = [
+ ('bdist-dir=', 'b',
+ "temporary directory for creating the distribution"),
+ ('plat-name=', 'p',
+ "platform name to embed in generated filenames "
+ "(default: %s)" % get_build_platform()),
+ ('exclude-source-files', None,
+ "remove all .py files from the generated egg"),
+ ('keep-temp', 'k',
+ "keep the pseudo-installation tree around after " +
+ "creating the distribution archive"),
+ ('dist-dir=', 'd',
+ "directory to put final built distributions in"),
+ ('skip-build', None,
+ "skip rebuilding everything (for testing/debugging)"),
+ ]
+
+ boolean_options = [
+ 'keep-temp', 'skip-build', 'exclude-source-files'
+ ]
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ def initialize_options (self):
+ self.bdist_dir = None
+ self.plat_name = None
+ self.keep_temp = 0
+ self.dist_dir = None
+ self.skip_build = 0
+ self.egg_output = None
+ self.exclude_source_files = None
+
+
+ def finalize_options(self):
+ ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
+ self.egg_info = ei_cmd.egg_info
+
+ if self.bdist_dir is None:
+ bdist_base = self.get_finalized_command('bdist').bdist_base
+ self.bdist_dir = os.path.join(bdist_base, 'egg')
+
+ if self.plat_name is None:
+ self.plat_name = get_build_platform()
+
+ self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
+
+ if self.egg_output is None:
+
+ # Compute filename of the output egg
+ basename = Distribution(
+ None, None, ei_cmd.egg_name, ei_cmd.egg_version,
+ get_python_version(),
+ self.distribution.has_ext_modules() and self.plat_name
+ ).egg_name()
+
+ self.egg_output = os.path.join(self.dist_dir, basename+'.egg')
+
+
+
+
+
+
+
+
+ def do_install_data(self):
+ # Hack for packages that install data to install's --install-lib
+ self.get_finalized_command('install').install_lib = self.bdist_dir
+
+ site_packages = os.path.normcase(os.path.realpath(get_python_lib()))
+ old, self.distribution.data_files = self.distribution.data_files,[]
+
+ for item in old:
+ if isinstance(item,tuple) and len(item)==2:
+ if os.path.isabs(item[0]):
+ realpath = os.path.realpath(item[0])
+ normalized = os.path.normcase(realpath)
+ if normalized==site_packages or normalized.startswith(
+ site_packages+os.sep
+ ):
+ item = realpath[len(site_packages)+1:], item[1]
+ # XXX else: raise ???
+ self.distribution.data_files.append(item)
+
+ try:
+ log.info("installing package data to %s" % self.bdist_dir)
+ self.call_command('install_data', force=0, root=None)
+ finally:
+ self.distribution.data_files = old
+
+
+ def get_outputs(self):
+ return [self.egg_output]
+
+
+ def call_command(self,cmdname,**kw):
+ """Invoke reinitialized command `cmdname` with keyword args"""
+ for dirname in INSTALL_DIRECTORY_ATTRS:
+ kw.setdefault(dirname,self.bdist_dir)
+ kw.setdefault('skip_build',self.skip_build)
+ kw.setdefault('dry_run', self.dry_run)
+ cmd = self.reinitialize_command(cmdname, **kw)
+ self.run_command(cmdname)
+ return cmd
+
+
+ def run(self):
+ # Generate metadata first
+ self.run_command("egg_info")
+
+ # We run install_lib before install_data, because some data hacks
+ # pull their data path from the install_lib command.
+ log.info("installing library code to %s" % self.bdist_dir)
+ instcmd = self.get_finalized_command('install')
+ old_root = instcmd.root; instcmd.root = None
+ cmd = self.call_command('install_lib', warn_dir=0)
+ instcmd.root = old_root
+
+ all_outputs, ext_outputs = self.get_ext_outputs()
+ self.stubs = []
+ to_compile = []
+ for (p,ext_name) in enumerate(ext_outputs):
+ filename,ext = os.path.splitext(ext_name)
+ pyfile = os.path.join(self.bdist_dir, strip_module(filename)+'.py')
+ self.stubs.append(pyfile)
+ log.info("creating stub loader for %s" % ext_name)
+ if not self.dry_run:
+ write_stub(os.path.basename(ext_name), pyfile)
+ to_compile.append(pyfile)
+ ext_outputs[p] = ext_name.replace(os.sep,'/')
+
+ to_compile.extend(self.make_init_files())
+ if to_compile:
+ cmd.byte_compile(to_compile)
+
+ if self.distribution.data_files:
+ self.do_install_data()
+
+ # Make the EGG-INFO directory
+ archive_root = self.bdist_dir
+ egg_info = os.path.join(archive_root,'EGG-INFO')
+ self.mkpath(egg_info)
+ if self.distribution.scripts:
+ script_dir = os.path.join(egg_info, 'scripts')
+ log.info("installing scripts to %s" % script_dir)
+ self.call_command('install_scripts',install_dir=script_dir,no_ep=1)
+
+ self.copy_metadata_to(egg_info)
+ native_libs = os.path.join(egg_info, "native_libs.txt")
+ if all_outputs:
+ log.info("writing %s" % native_libs)
+ if not self.dry_run:
+ ensure_directory(native_libs)
+ libs_file = open(native_libs, 'wt')
+ libs_file.write('\n'.join(all_outputs))
+ libs_file.write('\n')
+ libs_file.close()
+ elif os.path.isfile(native_libs):
+ log.info("removing %s" % native_libs)
+ if not self.dry_run:
+ os.unlink(native_libs)
+
+ write_safety_flag(
+ os.path.join(archive_root,'EGG-INFO'), self.zip_safe()
+ )
+
+ if os.path.exists(os.path.join(self.egg_info,'depends.txt')):
+ log.warn(
+ "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
+ "Use the install_requires/extras_require setup() args instead."
+ )
+
+ if self.exclude_source_files:
+ self.zap_pyfiles()
+
+ # Make the archive
+ make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
+ dry_run=self.dry_run, mode=self.gen_header())
+ if not self.keep_temp:
+ remove_tree(self.bdist_dir, dry_run=self.dry_run)
+
+ # Add to 'Distribution.dist_files' so that the "upload" command works
+ getattr(self.distribution,'dist_files',[]).append(
+ ('bdist_egg',get_python_version(),self.egg_output))
+
+
+
+
+ def zap_pyfiles(self):
+ log.info("Removing .py files from temporary directory")
+ for base,dirs,files in walk_egg(self.bdist_dir):
+ for name in files:
+ if name.endswith('.py'):
+ path = os.path.join(base,name)
+ log.debug("Deleting %s", path)
+ os.unlink(path)
+
+ def zip_safe(self):
+ safe = getattr(self.distribution,'zip_safe',None)
+ if safe is not None:
+ return safe
+ log.warn("zip_safe flag not set; analyzing archive contents...")
+ return analyze_egg(self.bdist_dir, self.stubs)
+
+ def make_init_files(self):
+ """Create missing package __init__ files"""
+ init_files = []
+ for base,dirs,files in walk_egg(self.bdist_dir):
+ if base==self.bdist_dir:
+ # don't put an __init__ in the root
+ continue
+ for name in files:
+ if name.endswith('.py'):
+ if '__init__.py' not in files:
+ pkg = base[len(self.bdist_dir)+1:].replace(os.sep,'.')
+ if self.distribution.has_contents_for(pkg):
+ log.warn("Creating missing __init__.py for %s",pkg)
+ filename = os.path.join(base,'__init__.py')
+ if not self.dry_run:
+ f = open(filename,'w'); f.write(NS_PKG_STUB)
+ f.close()
+ init_files.append(filename)
+ break
+ else:
+ # not a package, don't traverse to subdirectories
+ dirs[:] = []
+
+ return init_files
+
+ def gen_header(self):
+ epm = EntryPoint.parse_map(self.distribution.entry_points or '')
+ ep = epm.get('setuptools.installation',{}).get('eggsecutable')
+ if ep is None:
+ return 'w' # not an eggsecutable, do it the usual way.
+
+ if not ep.attrs or ep.extras:
+ raise DistutilsSetupError(
+ "eggsecutable entry point (%r) cannot have 'extras' "
+ "or refer to a module" % (ep,)
+ )
+
+ pyver = sys.version[:3]
+ pkg = ep.module_name
+ full = '.'.join(ep.attrs)
+ base = ep.attrs[0]
+ basename = os.path.basename(self.egg_output)
+
+ header = (
+ "#!/bin/sh\n"
+ 'if [ `basename $0` = "%(basename)s" ]\n'
+ 'then exec python%(pyver)s -c "'
+ "import sys, os; sys.path.insert(0, os.path.abspath('$0')); "
+ "from %(pkg)s import %(base)s; sys.exit(%(full)s())"
+ '" "$@"\n'
+ 'else\n'
+ ' echo $0 is not the correct name for this egg file.\n'
+ ' echo Please rename it back to %(basename)s and try again.\n'
+ ' exec false\n'
+ 'fi\n'
+
+ ) % locals()
+
+ if not self.dry_run:
+ mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run)
+ f = open(self.egg_output, 'w')
+ f.write(header)
+ f.close()
+ return 'a'
+
+
+ def copy_metadata_to(self, target_dir):
+ prefix = os.path.join(self.egg_info,'')
+ for path in self.ei_cmd.filelist.files:
+ if path.startswith(prefix):
+ target = os.path.join(target_dir, path[len(prefix):])
+ ensure_directory(target)
+ self.copy_file(path, target)
+
+ def get_ext_outputs(self):
+ """Get a list of relative paths to C extensions in the output distro"""
+
+ all_outputs = []
+ ext_outputs = []
+
+ paths = {self.bdist_dir:''}
+ for base, dirs, files in os.walk(self.bdist_dir):
+ for filename in files:
+ if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
+ all_outputs.append(paths[base]+filename)
+ for filename in dirs:
+ paths[os.path.join(base,filename)] = paths[base]+filename+'/'
+
+ if self.distribution.has_ext_modules():
+ build_cmd = self.get_finalized_command('build_ext')
+ for ext in build_cmd.extensions:
+ if isinstance(ext,Library):
+ continue
+ fullname = build_cmd.get_ext_fullname(ext.name)
+ filename = build_cmd.get_ext_filename(fullname)
+ if not os.path.basename(filename).startswith('dl-'):
+ if os.path.exists(os.path.join(self.bdist_dir,filename)):
+ ext_outputs.append(filename)
+
+ return all_outputs, ext_outputs
+
+
+NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
+
+
+
+
+def walk_egg(egg_dir):
+ """Walk an unpacked egg's contents, skipping the metadata directory"""
+ walker = os.walk(egg_dir)
+ base,dirs,files = walker.next()
+ if 'EGG-INFO' in dirs:
+ dirs.remove('EGG-INFO') # prune so os.walk never descends into it
+ yield base,dirs,files
+ for bdf in walker:
+ yield bdf
+
+def analyze_egg(egg_dir, stubs):
+ """Scan an unpacked egg's bytecode for zip-unsafe constructs."""
+ # check for existing flag in EGG-INFO
+ for flag,fn in safety_flags.items():
+ if os.path.exists(os.path.join(egg_dir,'EGG-INFO',fn)):
+ return flag
+ if not can_scan(): return False
+ safe = True
+ for base, dirs, files in walk_egg(egg_dir):
+ for name in files:
+ if name.endswith('.py') or name.endswith('.pyw'):
+ continue # sources are fine; we scan the compiled form
+ elif name.endswith('.pyc') or name.endswith('.pyo'):
+ # always scan, even if we already know we're not safe
+ safe = scan_module(egg_dir, base, name, stubs) and safe
+ return safe
+
+def write_safety_flag(egg_dir, safe):
+ # Write or remove zip safety flag file(s)
+ # Keeps exactly one of 'zip-safe'/'not-zip-safe' matching `safe`;
+ # removes both when `safe` is None (unknown).
+ for flag,fn in safety_flags.items():
+ fn = os.path.join(egg_dir, fn)
+ if os.path.exists(fn):
+ if safe is None or bool(safe)<>flag:
+ os.unlink(fn)
+ elif safe is not None and bool(safe)==flag:
+ f=open(fn,'wt'); f.write('\n'); f.close()
+
+safety_flags = {
+ True: 'zip-safe',
+ False: 'not-zip-safe',
+}
+
+def scan_module(egg_dir, base, name, stubs):
+ """Check whether module possibly uses unsafe-for-zipfile stuff"""
+
+ filename = os.path.join(base,name)
+ if filename[:-1] in stubs:
+ return True # Extension module
+ pkg = base[len(egg_dir)+1:].replace(os.sep,'.')
+ module = pkg+(pkg and '.' or '')+os.path.splitext(name)[0]
+ f = open(filename,'rb'); f.read(8) # skip magic & date
+ code = marshal.load(f); f.close()
+ safe = True
+ symbols = dict.fromkeys(iter_symbols(code))
+ for bad in ['__file__', '__path__']:
+ if bad in symbols:
+ log.warn("%s: module references %s", module, bad)
+ safe = False
+ if 'inspect' in symbols:
+ for bad in [
+ 'getsource', 'getabsfile', 'getsourcefile', 'getfile'
+ 'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
+ 'getinnerframes', 'getouterframes', 'stack', 'trace'
+ ]:
+ if bad in symbols:
+ log.warn("%s: module MAY be using inspect.%s", module, bad)
+ safe = False
+ if '__name__' in symbols and '__main__' in symbols and '.' not in module:
+ if sys.version[:3]=="2.4": # -m works w/zipfiles in 2.5
+ log.warn("%s: top-level module may be 'python -m' script", module)
+ safe = False
+ return safe
+
+def iter_symbols(code):
+ """Yield names and strings used by `code` and its nested code objects"""
+ for name in code.co_names: yield name
+ for const in code.co_consts:
+ if isinstance(const,basestring):
+ yield const
+ elif isinstance(const,CodeType):
+ # recurse into nested functions/classes/lambdas
+ for name in iter_symbols(const):
+ yield name
+
+def can_scan():
+ # Bytecode scanning relies on CPython's marshal format, so it is
+ # skipped on Jython ('java*') and IronPython ('cli').  Returns None
+ # (falsy) after warning when scanning is not possible.
+ if not sys.platform.startswith('java') and sys.platform != 'cli':
+ # CPython, PyPy, etc.
+ return True
+ log.warn("Unable to analyze compiled code on this platform.")
+ log.warn("Please ask the author to include a 'zip_safe'"
+ " setting (either True or False) in the package's setup.py")
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+# Attribute names of options for commands that might need to be convinced to
+# install to the egg build directory
+
+INSTALL_DIRECTORY_ATTRS = [
+ 'install_lib', 'install_dir', 'install_data', 'install_base'
+]
+
+def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
+ mode='w'
+):
+ """Create a zip file from all the files under 'base_dir'. The output
+ zip file will be named 'base_dir' + ".zip". Uses either the "zipfile"
+ Python module (if available) or the InfoZIP "zip" utility (if installed
+ and found on the default search path). If neither tool is available,
+ raises DistutilsExecError. Returns the name of the output zip file.
+ """
+ import zipfile
+ mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
+ log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
+
+ def visit(z, dirname, names):
+ for name in names:
+ path = os.path.normpath(os.path.join(dirname, name))
+ if os.path.isfile(path):
+ p = path[len(base_dir)+1:]
+ if not dry_run:
+ z.write(path, p)
+ log.debug("adding '%s'" % p)
+
+ if compress is None:
+ compress = (sys.version>="2.4") # avoid 2.3 zipimport bug when 64 bits
+
+ compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)]
+ if not dry_run:
+ z = zipfile.ZipFile(zip_filename, mode, compression=compression)
+ for dirname, dirs, files in os.walk(base_dir):
+ visit(z, dirname, files)
+ z.close()
+ else:
+ for dirname, dirs, files in os.walk(base_dir):
+ visit(None, dirname, file)
+ return zip_filename
+#
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_rpm.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_rpm.py
new file mode 100755
index 00000000..8c48da35
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_rpm.py
@@ -0,0 +1,82 @@
+# This is just a kludge so that bdist_rpm doesn't guess wrong about the
+# distribution name and version, if the egg_info command is going to alter
+# them, another kludge to allow you to build old-style non-egg RPMs, and
+# finally, a kludge to track .rpm files for uploading when run on Python <2.5.
+
+from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm
+import sys, os
+
+class bdist_rpm(_bdist_rpm):
+
+ def initialize_options(self):
+ _bdist_rpm.initialize_options(self)
+ self.no_egg = None
+
+ if sys.version<"2.5":
+ # Track for uploading any .rpm file(s) moved to self.dist_dir
+ def move_file(self, src, dst, level=1):
+ _bdist_rpm.move_file(self, src, dst, level)
+ if dst==self.dist_dir and src.endswith('.rpm'):
+ getattr(self.distribution,'dist_files',[]).append(
+ ('bdist_rpm',
+ src.endswith('.src.rpm') and 'any' or sys.version[:3],
+ os.path.join(dst, os.path.basename(src)))
+ )
+
+ def run(self):
+ self.run_command('egg_info') # ensure distro name is up-to-date
+ _bdist_rpm.run(self)
+
+
+
+
+
+
+
+
+
+
+
+
+
+ def _make_spec_file(self):
+ version = self.distribution.get_version()
+ rpmversion = version.replace('-','_')
+ spec = _bdist_rpm._make_spec_file(self)
+ line23 = '%define version '+version
+ line24 = '%define version '+rpmversion
+ spec = [
+ line.replace(
+ "Source0: %{name}-%{version}.tar",
+ "Source0: %{name}-%{unmangled_version}.tar"
+ ).replace(
+ "setup.py install ",
+ "setup.py install --single-version-externally-managed "
+ ).replace(
+ "%setup",
+ "%setup -n %{name}-%{unmangled_version}"
+ ).replace(line23,line24)
+ for line in spec
+ ]
+ spec.insert(spec.index(line24)+1, "%define unmangled_version "+version)
+ return spec
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_wininst.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_wininst.py
new file mode 100755
index 00000000..93e6846d
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/bdist_wininst.py
@@ -0,0 +1,41 @@
+from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst
+import os, sys
+
+class bdist_wininst(_bdist_wininst):
+
+ def create_exe(self, arcname, fullname, bitmap=None):
+ _bdist_wininst.create_exe(self, arcname, fullname, bitmap)
+ dist_files = getattr(self.distribution, 'dist_files', [])
+
+ if self.target_version:
+ installer_name = os.path.join(self.dist_dir,
+ "%s.win32-py%s.exe" %
+ (fullname, self.target_version))
+ pyversion = self.target_version
+
+ # fix 2.5 bdist_wininst ignoring --target-version spec
+ bad = ('bdist_wininst','any',installer_name)
+ if bad in dist_files:
+ dist_files.remove(bad)
+ else:
+ installer_name = os.path.join(self.dist_dir,
+ "%s.win32.exe" % fullname)
+ pyversion = 'any'
+ good = ('bdist_wininst', pyversion, installer_name)
+ if good not in dist_files:
+ dist_files.append(good)
+
+ def reinitialize_command (self, command, reinit_subcommands=0):
+ cmd = self.distribution.reinitialize_command(
+ command, reinit_subcommands)
+ if command in ('install', 'install_lib'):
+ cmd.install_lib = None # work around distutils bug
+ return cmd
+
+ def run(self):
+ self._is_running = True
+ try:
+ _bdist_wininst.run(self)
+ finally:
+ self._is_running = False
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_ext.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_ext.py
new file mode 100755
index 00000000..4a94572c
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_ext.py
@@ -0,0 +1,294 @@
+from distutils.command.build_ext import build_ext as _du_build_ext
+try:
+ # Attempt to use Pyrex for building extensions, if available
+ from Pyrex.Distutils.build_ext import build_ext as _build_ext
+except ImportError:
+ _build_ext = _du_build_ext
+
+import os, sys
+from distutils.file_util import copy_file
+from setuptools.extension import Library
+from distutils.ccompiler import new_compiler
+from distutils.sysconfig import customize_compiler, get_config_var
+get_config_var("LDSHARED") # make sure _config_vars is initialized
+from distutils.sysconfig import _config_vars
+from distutils import log
+from distutils.errors import *
+
+have_rtld = False
+use_stubs = False
+libtype = 'shared'
+
+if sys.platform == "darwin":
+ use_stubs = True
+elif os.name != 'nt':
+ try:
+ from dl import RTLD_NOW
+ have_rtld = True
+ use_stubs = True
+ except ImportError:
+ pass
+
+def if_dl(s):
+ if have_rtld:
+ return s
+ return ''
+
+
+
+
+
+
+class build_ext(_build_ext):
+ def run(self):
+ """Build extensions in build directory, then copy if --inplace"""
+ old_inplace, self.inplace = self.inplace, 0
+ _build_ext.run(self)
+ self.inplace = old_inplace
+ if old_inplace:
+ self.copy_extensions_to_source()
+
+ def copy_extensions_to_source(self):
+ build_py = self.get_finalized_command('build_py')
+ for ext in self.extensions:
+ fullname = self.get_ext_fullname(ext.name)
+ filename = self.get_ext_filename(fullname)
+ modpath = fullname.split('.')
+ package = '.'.join(modpath[:-1])
+ package_dir = build_py.get_package_dir(package)
+ dest_filename = os.path.join(package_dir,os.path.basename(filename))
+ src_filename = os.path.join(self.build_lib,filename)
+
+ # Always copy, even if source is older than destination, to ensure
+ # that the right extensions for the current Python/platform are
+ # used.
+ copy_file(
+ src_filename, dest_filename, verbose=self.verbose,
+ dry_run=self.dry_run
+ )
+ if ext._needs_stub:
+ self.write_stub(package_dir or os.curdir, ext, True)
+
+
+ if _build_ext is not _du_build_ext and not hasattr(_build_ext,'pyrex_sources'):
+ # Workaround for problems using some Pyrex versions w/SWIG and/or 2.4
+ def swig_sources(self, sources, *otherargs):
+ # first do any Pyrex processing
+ sources = _build_ext.swig_sources(self, sources) or sources
+ # Then do any actual SWIG stuff on the remainder
+ return _du_build_ext.swig_sources(self, sources, *otherargs)
+
+
+
+ def get_ext_filename(self, fullname):
+ filename = _build_ext.get_ext_filename(self,fullname)
+ if fullname not in self.ext_map:
+ return filename
+ ext = self.ext_map[fullname]
+ if isinstance(ext,Library):
+ fn, ext = os.path.splitext(filename)
+ return self.shlib_compiler.library_filename(fn,libtype)
+ elif use_stubs and ext._links_to_dynamic:
+ d,fn = os.path.split(filename)
+ return os.path.join(d,'dl-'+fn)
+ else:
+ return filename
+
+ def initialize_options(self):
+ _build_ext.initialize_options(self)
+ self.shlib_compiler = None
+ self.shlibs = []
+ self.ext_map = {}
+
+ def finalize_options(self):
+ _build_ext.finalize_options(self)
+ self.extensions = self.extensions or []
+ self.check_extensions_list(self.extensions)
+ self.shlibs = [ext for ext in self.extensions
+ if isinstance(ext,Library)]
+ if self.shlibs:
+ self.setup_shlib_compiler()
+ for ext in self.extensions:
+ ext._full_name = self.get_ext_fullname(ext.name)
+ for ext in self.extensions:
+ fullname = ext._full_name
+ self.ext_map[fullname] = ext
+
+ # distutils 3.1 will also ask for module names
+ # XXX what to do with conflicts?
+ self.ext_map[fullname.split('.')[-1]] = ext
+
+ ltd = ext._links_to_dynamic = \
+ self.shlibs and self.links_to_dynamic(ext) or False
+ ext._needs_stub = ltd and use_stubs and not isinstance(ext,Library)
+ filename = ext._file_name = self.get_ext_filename(fullname)
+ libdir = os.path.dirname(os.path.join(self.build_lib,filename))
+ if ltd and libdir not in ext.library_dirs:
+ ext.library_dirs.append(libdir)
+ if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
+ ext.runtime_library_dirs.append(os.curdir)
+
+ def setup_shlib_compiler(self):
+ compiler = self.shlib_compiler = new_compiler(
+ compiler=self.compiler, dry_run=self.dry_run, force=self.force
+ )
+ if sys.platform == "darwin":
+ tmp = _config_vars.copy()
+ try:
+ # XXX Help! I don't have any idea whether these are right...
+ _config_vars['LDSHARED'] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup"
+ _config_vars['CCSHARED'] = " -dynamiclib"
+ _config_vars['SO'] = ".dylib"
+ customize_compiler(compiler)
+ finally:
+ _config_vars.clear()
+ _config_vars.update(tmp)
+ else:
+ customize_compiler(compiler)
+
+ if self.include_dirs is not None:
+ compiler.set_include_dirs(self.include_dirs)
+ if self.define is not None:
+ # 'define' option is a list of (name,value) tuples
+ for (name,value) in self.define:
+ compiler.define_macro(name, value)
+ if self.undef is not None:
+ for macro in self.undef:
+ compiler.undefine_macro(macro)
+ if self.libraries is not None:
+ compiler.set_libraries(self.libraries)
+ if self.library_dirs is not None:
+ compiler.set_library_dirs(self.library_dirs)
+ if self.rpath is not None:
+ compiler.set_runtime_library_dirs(self.rpath)
+ if self.link_objects is not None:
+ compiler.set_link_objects(self.link_objects)
+
+ # hack so distutils' build_extension() builds a library instead
+ compiler.link_shared_object = link_shared_object.__get__(compiler)
+
+
+
+ def get_export_symbols(self, ext):
+ if isinstance(ext,Library):
+ return ext.export_symbols
+ return _build_ext.get_export_symbols(self,ext)
+
+ def build_extension(self, ext):
+ _compiler = self.compiler
+ try:
+ if isinstance(ext,Library):
+ self.compiler = self.shlib_compiler
+ _build_ext.build_extension(self,ext)
+ if ext._needs_stub:
+ self.write_stub(
+ self.get_finalized_command('build_py').build_lib, ext
+ )
+ finally:
+ self.compiler = _compiler
+
+ def links_to_dynamic(self, ext):
+ """Return true if 'ext' links to a dynamic lib in the same package"""
+ # XXX this should check to ensure the lib is actually being built
+ # XXX as dynamic, and not just using a locally-found version or a
+ # XXX static-compiled version
+ libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
+ pkg = '.'.join(ext._full_name.split('.')[:-1]+[''])
+ for libname in ext.libraries:
+ if pkg+libname in libnames: return True
+ return False
+
+ def get_outputs(self):
+ outputs = _build_ext.get_outputs(self)
+ optimize = self.get_finalized_command('build_py').optimize
+ for ext in self.extensions:
+ if ext._needs_stub:
+ base = os.path.join(self.build_lib, *ext._full_name.split('.'))
+ outputs.append(base+'.py')
+ outputs.append(base+'.pyc')
+ if optimize:
+ outputs.append(base+'.pyo')
+ return outputs
+
+ def write_stub(self, output_dir, ext, compile=False):
+ log.info("writing stub loader for %s to %s",ext._full_name, output_dir)
+ stub_file = os.path.join(output_dir, *ext._full_name.split('.'))+'.py'
+ if compile and os.path.exists(stub_file):
+ raise DistutilsError(stub_file+" already exists! Please delete.")
+ if not self.dry_run:
+ f = open(stub_file,'w')
+ f.write('\n'.join([
+ "def __bootstrap__():",
+ " global __bootstrap__, __file__, __loader__",
+ " import sys, os, pkg_resources, imp"+if_dl(", dl"),
+ " __file__ = pkg_resources.resource_filename(__name__,%r)"
+ % os.path.basename(ext._file_name),
+ " del __bootstrap__",
+ " if '__loader__' in globals():",
+ " del __loader__",
+ if_dl(" old_flags = sys.getdlopenflags()"),
+ " old_dir = os.getcwd()",
+ " try:",
+ " os.chdir(os.path.dirname(__file__))",
+ if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"),
+ " imp.load_dynamic(__name__,__file__)",
+ " finally:",
+ if_dl(" sys.setdlopenflags(old_flags)"),
+ " os.chdir(old_dir)",
+ "__bootstrap__()",
+ "" # terminal \n
+ ]))
+ f.close()
+ if compile:
+ from distutils.util import byte_compile
+ byte_compile([stub_file], optimize=0,
+ force=True, dry_run=self.dry_run)
+ optimize = self.get_finalized_command('install_lib').optimize
+ if optimize > 0:
+ byte_compile([stub_file], optimize=optimize,
+ force=True, dry_run=self.dry_run)
+ if os.path.exists(stub_file) and not self.dry_run:
+ os.unlink(stub_file)
+
+
+if use_stubs or os.name=='nt':
+ # Build shared libraries
+ #
+ def link_shared_object(self, objects, output_libname, output_dir=None,
+ libraries=None, library_dirs=None, runtime_library_dirs=None,
+ export_symbols=None, debug=0, extra_preargs=None,
+ extra_postargs=None, build_temp=None, target_lang=None
+ ): self.link(
+ self.SHARED_LIBRARY, objects, output_libname,
+ output_dir, libraries, library_dirs, runtime_library_dirs,
+ export_symbols, debug, extra_preargs, extra_postargs,
+ build_temp, target_lang
+ )
+else:
+ # Build static libraries everywhere else
+ libtype = 'static'
+
+ def link_shared_object(self, objects, output_libname, output_dir=None,
+ libraries=None, library_dirs=None, runtime_library_dirs=None,
+ export_symbols=None, debug=0, extra_preargs=None,
+ extra_postargs=None, build_temp=None, target_lang=None
+ ):
+ # XXX we need to either disallow these attrs on Library instances,
+ # or warn/abort here if set, or something...
+ #libraries=None, library_dirs=None, runtime_library_dirs=None,
+ #export_symbols=None, extra_preargs=None, extra_postargs=None,
+ #build_temp=None
+
+ assert output_dir is None # distutils build_ext doesn't pass this
+ output_dir,filename = os.path.split(output_libname)
+ basename, ext = os.path.splitext(filename)
+ if self.library_filename("x").startswith('lib'):
+ # strip 'lib' prefix; this is kludgy if some platform uses
+ # a different prefix
+ basename = basename[3:]
+
+ self.create_static_lib(
+ objects, basename, output_dir, debug, target_lang
+ )
+
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_py.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_py.py
new file mode 100755
index 00000000..a01e2843
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/build_py.py
@@ -0,0 +1,268 @@
+import os.path, sys, fnmatch
+from distutils.command.build_py import build_py as _build_py
+from distutils.util import convert_path
+from glob import glob
+
+try:
+ from distutils.util import Mixin2to3 as _Mixin2to3
+ # add support for converting doctests that is missing in 3.1 distutils
+ from distutils import log
+ from lib2to3.refactor import RefactoringTool, get_fixers_from_package
+ import setuptools
+ class DistutilsRefactoringTool(RefactoringTool):
+ def log_error(self, msg, *args, **kw):
+ log.error(msg, *args)
+
+ def log_message(self, msg, *args):
+ log.info(msg, *args)
+
+ def log_debug(self, msg, *args):
+ log.debug(msg, *args)
+
+ class Mixin2to3(_Mixin2to3):
+ def run_2to3(self, files, doctests = False):
+ # See of the distribution option has been set, otherwise check the
+ # setuptools default.
+ if self.distribution.use_2to3 is not True:
+ return
+ if not files:
+ return
+ log.info("Fixing "+" ".join(files))
+ if not self.fixer_names:
+ self.fixer_names = []
+ for p in setuptools.lib2to3_fixer_packages:
+ self.fixer_names.extend(get_fixers_from_package(p))
+ if self.distribution.use_2to3_fixers is not None:
+ for p in self.distribution.use_2to3_fixers:
+ self.fixer_names.extend(get_fixers_from_package(p))
+ if doctests:
+ if setuptools.run_2to3_on_doctests:
+ r = DistutilsRefactoringTool(self.fixer_names)
+ r.refactor(files, write=True, doctests_only=True)
+ else:
+ _Mixin2to3.run_2to3(self, files)
+
+except ImportError:
+ class Mixin2to3:
+ def run_2to3(self, files, doctests=True):
+ # Nothing done in 2.x
+ pass
+
+class build_py(_build_py, Mixin2to3):
+ """Enhanced 'build_py' command that includes data files with packages
+
+ The data files are specified via a 'package_data' argument to 'setup()'.
+ See 'setuptools.dist.Distribution' for more details.
+
+ Also, this version of the 'build_py' command allows you to specify both
+ 'py_modules' and 'packages' in the same setup operation.
+ """
+ def finalize_options(self):
+ _build_py.finalize_options(self)
+ self.package_data = self.distribution.package_data
+ self.exclude_package_data = self.distribution.exclude_package_data or {}
+ if 'data_files' in self.__dict__: del self.__dict__['data_files']
+ self.__updated_files = []
+ self.__doctests_2to3 = []
+
+ def run(self):
+ """Build modules, packages, and copy data files to build directory"""
+ if not self.py_modules and not self.packages:
+ return
+
+ if self.py_modules:
+ self.build_modules()
+
+ if self.packages:
+ self.build_packages()
+ self.build_package_data()
+
+ self.run_2to3(self.__updated_files, False)
+ self.run_2to3(self.__updated_files, True)
+ self.run_2to3(self.__doctests_2to3, True)
+
+ # Only compile actual .py files, using our base class' idea of what our
+ # output files are.
+ self.byte_compile(_build_py.get_outputs(self, include_bytecode=0))
+
+ def __getattr__(self,attr):
+ if attr=='data_files': # lazily compute data files
+ self.data_files = files = self._get_data_files(); return files
+ return _build_py.__getattr__(self,attr)
+
+ def build_module(self, module, module_file, package):
+ outfile, copied = _build_py.build_module(self, module, module_file, package)
+ if copied:
+ self.__updated_files.append(outfile)
+ return outfile, copied
+
+ def _get_data_files(self):
+ """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
+ self.analyze_manifest()
+ data = []
+ for package in self.packages or ():
+ # Locate package source directory
+ src_dir = self.get_package_dir(package)
+
+ # Compute package build directory
+ build_dir = os.path.join(*([self.build_lib] + package.split('.')))
+
+ # Length of path to strip from found files
+ plen = len(src_dir)+1
+
+ # Strip directory from globbed filenames
+ filenames = [
+ file[plen:] for file in self.find_data_files(package, src_dir)
+ ]
+ data.append( (package, src_dir, build_dir, filenames) )
+ return data
+
+ def find_data_files(self, package, src_dir):
+ """Return filenames for package's data files in 'src_dir'"""
+ globs = (self.package_data.get('', [])
+ + self.package_data.get(package, []))
+ files = self.manifest_files.get(package, [])[:]
+ for pattern in globs:
+ # Each pattern has to be converted to a platform-specific path
+ files.extend(glob(os.path.join(src_dir, convert_path(pattern))))
+ return self.exclude_data_files(package, src_dir, files)
+
+ def build_package_data(self):
+ """Copy data files into build directory"""
+ lastdir = None
+ for package, src_dir, build_dir, filenames in self.data_files:
+ for filename in filenames:
+ target = os.path.join(build_dir, filename)
+ self.mkpath(os.path.dirname(target))
+ srcfile = os.path.join(src_dir, filename)
+ outf, copied = self.copy_file(srcfile, target)
+ srcfile = os.path.abspath(srcfile)
+ if copied and srcfile in self.distribution.convert_2to3_doctests:
+ self.__doctests_2to3.append(outf)
+
+
+ def analyze_manifest(self):
+ self.manifest_files = mf = {}
+ if not self.distribution.include_package_data:
+ return
+ src_dirs = {}
+ for package in self.packages or ():
+ # Locate package source directory
+ src_dirs[assert_relative(self.get_package_dir(package))] = package
+
+ self.run_command('egg_info')
+ ei_cmd = self.get_finalized_command('egg_info')
+ for path in ei_cmd.filelist.files:
+ d,f = os.path.split(assert_relative(path))
+ prev = None
+ oldf = f
+ while d and d!=prev and d not in src_dirs:
+ prev = d
+ d, df = os.path.split(d)
+ f = os.path.join(df, f)
+ if d in src_dirs:
+ if path.endswith('.py') and f==oldf:
+ continue # it's a module, not data
+ mf.setdefault(src_dirs[d],[]).append(path)
+
+ def get_data_files(self): pass # kludge 2.4 for lazy computation
+
+ if sys.version<"2.4": # Python 2.4 already has this code
+ def get_outputs(self, include_bytecode=1):
+ """Return complete list of files copied to the build directory
+
+ This includes both '.py' files and data files, as well as '.pyc'
+ and '.pyo' files if 'include_bytecode' is true. (This method is
+ needed for the 'install_lib' command to do its job properly, and to
+ generate a correct installation manifest.)
+ """
+ return _build_py.get_outputs(self, include_bytecode) + [
+ os.path.join(build_dir, filename)
+ for package, src_dir, build_dir,filenames in self.data_files
+ for filename in filenames
+ ]
+
+ def check_package(self, package, package_dir):
+ """Check namespace packages' __init__ for declare_namespace"""
+ try:
+ return self.packages_checked[package]
+ except KeyError:
+ pass
+
+ init_py = _build_py.check_package(self, package, package_dir)
+ self.packages_checked[package] = init_py
+
+ if not init_py or not self.distribution.namespace_packages:
+ return init_py
+
+ for pkg in self.distribution.namespace_packages:
+ if pkg==package or pkg.startswith(package+'.'):
+ break
+ else:
+ return init_py
+
+ f = open(init_py,'rU')
+ if 'declare_namespace' not in f.read():
+ from distutils import log
+ log.warn(
+ "WARNING: %s is a namespace package, but its __init__.py does\n"
+ "not declare_namespace(); setuptools 0.7 will REQUIRE this!\n"
+ '(See the setuptools manual under "Namespace Packages" for '
+ "details.)\n", package
+ )
+ f.close()
+ return init_py
+
+ def initialize_options(self):
+ self.packages_checked={}
+ _build_py.initialize_options(self)
+
+
+ def get_package_dir(self, package):
+ res = _build_py.get_package_dir(self, package)
+ if self.distribution.src_root is not None:
+ return os.path.join(self.distribution.src_root, res)
+ return res
+
+
+ def exclude_data_files(self, package, src_dir, files):
+ """Filter filenames for package's data files in 'src_dir'"""
+ globs = (self.exclude_package_data.get('', [])
+ + self.exclude_package_data.get(package, []))
+ bad = []
+ for pattern in globs:
+ bad.extend(
+ fnmatch.filter(
+ files, os.path.join(src_dir, convert_path(pattern))
+ )
+ )
+ bad = dict.fromkeys(bad)
+ seen = {}
+ return [
+ f for f in files if f not in bad
+ and f not in seen and seen.setdefault(f,1) # ditch dupes
+ ]
+
+
+def assert_relative(path):
+ if not os.path.isabs(path):
+ return path
+ from distutils.errors import DistutilsSetupError
+ raise DistutilsSetupError(
+"""Error: setup script specifies an absolute path:
+
+ %s
+
+setup() arguments must *always* be /-separated paths relative to the
+setup.py directory, *never* absolute paths.
+""" % path
+ )
+
+
+
+
+
+
+
+
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/develop.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/develop.py
new file mode 100755
index 00000000..93b7773c
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/develop.py
@@ -0,0 +1,141 @@
+from setuptools.command.easy_install import easy_install
+from distutils.util import convert_path, subst_vars
+from pkg_resources import Distribution, PathMetadata, normalize_path
+from distutils import log
+from distutils.errors import DistutilsError, DistutilsOptionError
+import os, setuptools, glob
+
+class develop(easy_install):
+ """Set up package for development"""
+
+ description = "install package in 'development mode'"
+
+ user_options = easy_install.user_options + [
+ ("uninstall", "u", "Uninstall this source package"),
+ ("egg-path=", None, "Set the path to be used in the .egg-link file"),
+ ]
+
+ boolean_options = easy_install.boolean_options + ['uninstall']
+
+ command_consumes_arguments = False # override base
+
+ def run(self):
+ if self.uninstall:
+ self.multi_version = True
+ self.uninstall_link()
+ else:
+ self.install_for_development()
+ self.warn_deprecated_options()
+
+ def initialize_options(self):
+ self.uninstall = None
+ self.egg_path = None
+ easy_install.initialize_options(self)
+ self.setup_path = None
+ self.always_copy_from = '.' # always copy eggs installed in curdir
+
+
+
+ def finalize_options(self):
+ ei = self.get_finalized_command("egg_info")
+ if ei.broken_egg_info:
+ raise DistutilsError(
+ "Please rename %r to %r before using 'develop'"
+ % (ei.egg_info, ei.broken_egg_info)
+ )
+ self.args = [ei.egg_name]
+
+
+
+
+ easy_install.finalize_options(self)
+ self.expand_basedirs()
+ self.expand_dirs()
+ # pick up setup-dir .egg files only: no .egg-info
+ self.package_index.scan(glob.glob('*.egg'))
+
+ self.egg_link = os.path.join(self.install_dir, ei.egg_name+'.egg-link')
+ self.egg_base = ei.egg_base
+ if self.egg_path is None:
+ self.egg_path = os.path.abspath(ei.egg_base)
+
+ target = normalize_path(self.egg_base)
+ if normalize_path(os.path.join(self.install_dir, self.egg_path)) != target:
+ raise DistutilsOptionError(
+ "--egg-path must be a relative path from the install"
+ " directory to "+target
+ )
+
+ # Make a distribution for the package's source
+ self.dist = Distribution(
+ target,
+ PathMetadata(target, os.path.abspath(ei.egg_info)),
+ project_name = ei.egg_name
+ )
+
+ p = self.egg_base.replace(os.sep,'/')
+ if p!= os.curdir:
+ p = '../' * (p.count('/')+1)
+ self.setup_path = p
+ p = normalize_path(os.path.join(self.install_dir, self.egg_path, p))
+ if p != normalize_path(os.curdir):
+ raise DistutilsOptionError(
+ "Can't get a consistent path to setup script from"
+ " installation directory", p, normalize_path(os.curdir))
+
+ def install_for_development(self):
+ # Ensure metadata is up-to-date
+ self.run_command('egg_info')
+ # Build extensions in-place
+ self.reinitialize_command('build_ext', inplace=1)
+ self.run_command('build_ext')
+ self.install_site_py() # ensure that target dir is site-safe
+ if setuptools.bootstrap_install_from:
+ self.easy_install(setuptools.bootstrap_install_from)
+ setuptools.bootstrap_install_from = None
+
+ # create an .egg-link in the installation dir, pointing to our egg
+ log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
+ if not self.dry_run:
+ f = open(self.egg_link,"w")
+ f.write(self.egg_path + "\n" + self.setup_path)
+ f.close()
+ # postprocess the installed distro, fixing up .pth, installing scripts,
+ # and handling requirements
+ self.process_distribution(None, self.dist, not self.no_deps)
+
+
+ def uninstall_link(self):
+ if os.path.exists(self.egg_link):
+ log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
+ contents = [line.rstrip() for line in open(self.egg_link)]
+ if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
+ log.warn("Link points to %s: uninstall aborted", contents)
+ return
+ if not self.dry_run:
+ os.unlink(self.egg_link)
+ if not self.dry_run:
+ self.update_pth(self.dist) # remove any .pth link to us
+ if self.distribution.scripts:
+ # XXX should also check for entry point scripts!
+ log.warn("Note: you must uninstall or replace scripts manually!")
+
+ def install_egg_scripts(self, dist):
+ if dist is not self.dist:
+ # Installing a dependency, so fall back to normal behavior
+ return easy_install.install_egg_scripts(self,dist)
+
+ # create wrapper scripts in the script dir, pointing to dist.scripts
+
+ # new-style...
+ self.install_wrapper_scripts(dist)
+
+ # ...and old-style
+ for script_name in self.distribution.scripts or []:
+ script_path = os.path.abspath(convert_path(script_name))
+ script_name = os.path.basename(script_path)
+ f = open(script_path,'rU')
+ script_text = f.read()
+ f.close()
+ self.install_script(dist, script_name, script_text, script_path)
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/easy_install.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/easy_install.py
new file mode 100755
index 00000000..27fd00c7
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/easy_install.py
@@ -0,0 +1,1865 @@
+#!python
+"""\
+Easy Install
+------------
+
+A tool for doing automatic download/extract/build of distutils-based Python
+packages. For detailed documentation, see the accompanying EasyInstall.txt
+file, or visit the `EasyInstall home page`__.
+
+__ http://packages.python.org/distribute/easy_install.html
+
+"""
+import sys, os.path, zipimport, shutil, tempfile, zipfile, re, stat, random
+from glob import glob
+from setuptools import Command, _dont_write_bytecode
+from setuptools.sandbox import run_setup
+from distutils import log, dir_util
+from distutils.util import convert_path, subst_vars
+from distutils.sysconfig import get_python_lib, get_config_vars
+from distutils.errors import DistutilsArgError, DistutilsOptionError, \
+ DistutilsError, DistutilsPlatformError
+from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS
+from setuptools.archive_util import unpack_archive
+from setuptools.package_index import PackageIndex
+from setuptools.package_index import URL_SCHEME
+from setuptools.command import bdist_egg, egg_info
+from pkg_resources import yield_lines, normalize_path, resource_string, \
+ ensure_directory, get_distribution, find_distributions, \
+ Environment, Requirement, Distribution, \
+ PathMetadata, EggMetadata, WorkingSet, \
+ DistributionNotFound, VersionConflict, \
+ DEVELOP_DIST
+
+sys_executable = os.path.normpath(sys.executable)
+
+__all__ = [
+ 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
+ 'main', 'get_exe_prefixes',
+]
+
+import site
+HAS_USER_SITE = not sys.version < "2.6" and site.ENABLE_USER_SITE
+
+def samefile(p1,p2):
+ if hasattr(os.path,'samefile') and (
+ os.path.exists(p1) and os.path.exists(p2)
+ ):
+ return os.path.samefile(p1,p2)
+ return (
+ os.path.normpath(os.path.normcase(p1)) ==
+ os.path.normpath(os.path.normcase(p2))
+ )
+
+if sys.version_info <= (3,):
+ def _to_ascii(s):
+ return s
+ def isascii(s):
+ try:
+ unicode(s, 'ascii')
+ return True
+ except UnicodeError:
+ return False
+else:
+ def _to_ascii(s):
+ return s.encode('ascii')
+ def isascii(s):
+ try:
+ s.encode('ascii')
+ return True
+ except UnicodeError:
+ return False
+
+class easy_install(Command):
+ """Manage a download/build/install process"""
+ description = "Find/get/install Python packages"
+ command_consumes_arguments = True
+
+ user_options = [
+ ('prefix=', None, "installation prefix"),
+ ("zip-ok", "z", "install package as a zipfile"),
+ ("multi-version", "m", "make apps have to require() a version"),
+ ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"),
+ ("install-dir=", "d", "install package to DIR"),
+ ("script-dir=", "s", "install scripts to DIR"),
+ ("exclude-scripts", "x", "Don't install scripts"),
+ ("always-copy", "a", "Copy all needed packages to install dir"),
+ ("index-url=", "i", "base URL of Python Package Index"),
+ ("find-links=", "f", "additional URL(s) to search for packages"),
+ ("delete-conflicting", "D", "no longer needed; don't use this"),
+ ("ignore-conflicts-at-my-risk", None,
+ "no longer needed; don't use this"),
+ ("build-directory=", "b",
+ "download/extract/build in DIR; keep the results"),
+ ('optimize=', 'O',
+ "also compile with optimization: -O1 for \"python -O\", "
+ "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
+ ('record=', None,
+ "filename in which to record list of installed files"),
+ ('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
+ ('site-dirs=','S',"list of directories where .pth files work"),
+ ('editable', 'e', "Install specified packages in editable form"),
+ ('no-deps', 'N', "don't install dependencies"),
+ ('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
+ ('local-snapshots-ok', 'l', "allow building eggs from local checkouts"),
+ ('version', None, "print version information and exit"),
+ ('no-find-links', None,
+ "Don't load find-links defined in packages being installed")
+ ]
+ boolean_options = [
+ 'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy',
+ 'delete-conflicting', 'ignore-conflicts-at-my-risk', 'editable',
+ 'no-deps', 'local-snapshots-ok', 'version'
+ ]
+
+ if HAS_USER_SITE:
+ user_options.append(('user', None,
+ "install in user site-package '%s'" % site.USER_SITE))
+ boolean_options.append('user')
+
+
+ negative_opt = {'always-unzip': 'zip-ok'}
+ create_index = PackageIndex
+
+ def initialize_options(self):
+ if HAS_USER_SITE:
+ whereami = os.path.abspath(__file__)
+ self.user = whereami.startswith(site.USER_SITE)
+ else:
+ self.user = 0
+
+ self.zip_ok = self.local_snapshots_ok = None
+ self.install_dir = self.script_dir = self.exclude_scripts = None
+ self.index_url = None
+ self.find_links = None
+ self.build_directory = None
+ self.args = None
+ self.optimize = self.record = None
+ self.upgrade = self.always_copy = self.multi_version = None
+ self.editable = self.no_deps = self.allow_hosts = None
+ self.root = self.prefix = self.no_report = None
+ self.version = None
+ self.install_purelib = None # for pure module distributions
+ self.install_platlib = None # non-pure (dists w/ extensions)
+ self.install_headers = None # for C/C++ headers
+ self.install_lib = None # set to either purelib or platlib
+ self.install_scripts = None
+ self.install_data = None
+ self.install_base = None
+ self.install_platbase = None
+ if HAS_USER_SITE:
+ self.install_userbase = site.USER_BASE
+ self.install_usersite = site.USER_SITE
+ else:
+ self.install_userbase = None
+ self.install_usersite = None
+ self.no_find_links = None
+
+ # Options not specifiable via command line
+ self.package_index = None
+ self.pth_file = self.always_copy_from = None
+ self.delete_conflicting = None
+ self.ignore_conflicts_at_my_risk = None
+ self.site_dirs = None
+ self.installed_projects = {}
+ self.sitepy_installed = False
+ # Always read easy_install options, even if we are subclassed, or have
+ # an independent instance created. This ensures that defaults will
+ # always come from the standard configuration file(s)' "easy_install"
+ # section, even if this is a "develop" or "install" command, or some
+ # other embedding.
+ self._dry_run = None
+ self.verbose = self.distribution.verbose
+ self.distribution._set_command_options(
+ self, self.distribution.get_option_dict('easy_install')
+ )
+
+ def delete_blockers(self, blockers):
+ for filename in blockers:
+ if os.path.exists(filename) or os.path.islink(filename):
+ log.info("Deleting %s", filename)
+ if not self.dry_run:
+ if os.path.isdir(filename) and not os.path.islink(filename):
+ rmtree(filename)
+ else:
+ os.unlink(filename)
+
+ def finalize_options(self):
+ if self.version:
+ print 'distribute %s' % get_distribution('distribute').version
+ sys.exit()
+
+ py_version = sys.version.split()[0]
+ prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix')
+
+ self.config_vars = {'dist_name': self.distribution.get_name(),
+ 'dist_version': self.distribution.get_version(),
+ 'dist_fullname': self.distribution.get_fullname(),
+ 'py_version': py_version,
+ 'py_version_short': py_version[0:3],
+ 'py_version_nodot': py_version[0] + py_version[2],
+ 'sys_prefix': prefix,
+ 'prefix': prefix,
+ 'sys_exec_prefix': exec_prefix,
+ 'exec_prefix': exec_prefix,
+ }
+
+ if HAS_USER_SITE:
+ self.config_vars['userbase'] = self.install_userbase
+ self.config_vars['usersite'] = self.install_usersite
+
+ # fix the install_dir if "--user" was used
+ #XXX: duplicate of the code in the setup command
+ if self.user and HAS_USER_SITE:
+ self.create_home_path()
+ if self.install_userbase is None:
+ raise DistutilsPlatformError(
+ "User base directory is not specified")
+ self.install_base = self.install_platbase = self.install_userbase
+ if os.name == 'posix':
+ self.select_scheme("unix_user")
+ else:
+ self.select_scheme(os.name + "_user")
+
+ self.expand_basedirs()
+ self.expand_dirs()
+
+ self._expand('install_dir','script_dir','build_directory','site_dirs')
+ # If a non-default installation directory was specified, default the
+ # script directory to match it.
+ if self.script_dir is None:
+ self.script_dir = self.install_dir
+
+ if self.no_find_links is None:
+ self.no_find_links = False
+
+ # Let install_dir get set by install_lib command, which in turn
+ # gets its info from the install command, and takes into account
+ # --prefix and --home and all that other crud.
+ self.set_undefined_options('install_lib',
+ ('install_dir','install_dir')
+ )
+ # Likewise, set default script_dir from 'install_scripts.install_dir'
+ self.set_undefined_options('install_scripts',
+ ('install_dir', 'script_dir')
+ )
+
+ if self.user and self.install_purelib:
+ self.install_dir = self.install_purelib
+ self.script_dir = self.install_scripts
+ # default --record from the install command
+ self.set_undefined_options('install', ('record', 'record'))
+ normpath = map(normalize_path, sys.path)
+ self.all_site_dirs = get_site_dirs()
+ if self.site_dirs is not None:
+ site_dirs = [
+ os.path.expanduser(s.strip()) for s in self.site_dirs.split(',')
+ ]
+ for d in site_dirs:
+ if not os.path.isdir(d):
+ log.warn("%s (in --site-dirs) does not exist", d)
+ elif normalize_path(d) not in normpath:
+ raise DistutilsOptionError(
+ d+" (in --site-dirs) is not on sys.path"
+ )
+ else:
+ self.all_site_dirs.append(normalize_path(d))
+ if not self.editable: self.check_site_dir()
+ self.index_url = self.index_url or "http://pypi.python.org/simple"
+ self.shadow_path = self.all_site_dirs[:]
+ for path_item in self.install_dir, normalize_path(self.script_dir):
+ if path_item not in self.shadow_path:
+ self.shadow_path.insert(0, path_item)
+
+ if self.allow_hosts is not None:
+ hosts = [s.strip() for s in self.allow_hosts.split(',')]
+ else:
+ hosts = ['*']
+ if self.package_index is None:
+ self.package_index = self.create_index(
+ self.index_url, search_path = self.shadow_path, hosts=hosts,
+ )
+ self.local_index = Environment(self.shadow_path+sys.path)
+
+ if self.find_links is not None:
+ if isinstance(self.find_links, basestring):
+ self.find_links = self.find_links.split()
+ else:
+ self.find_links = []
+ if self.local_snapshots_ok:
+ self.package_index.scan_egg_links(self.shadow_path+sys.path)
+ if not self.no_find_links:
+ self.package_index.add_find_links(self.find_links)
+ self.set_undefined_options('install_lib', ('optimize','optimize'))
+ if not isinstance(self.optimize,int):
+ try:
+ self.optimize = int(self.optimize)
+ if not (0 <= self.optimize <= 2): raise ValueError
+ except ValueError:
+ raise DistutilsOptionError("--optimize must be 0, 1, or 2")
+
+ if self.delete_conflicting and self.ignore_conflicts_at_my_risk:
+ raise DistutilsOptionError(
+ "Can't use both --delete-conflicting and "
+ "--ignore-conflicts-at-my-risk at the same time"
+ )
+ if self.editable and not self.build_directory:
+ raise DistutilsArgError(
+ "Must specify a build directory (-b) when using --editable"
+ )
+ if not self.args:
+ raise DistutilsArgError(
+ "No urls, filenames, or requirements specified (see --help)")
+
+ self.outputs = []
+
+
+ def _expand_attrs(self, attrs):
+ for attr in attrs:
+ val = getattr(self, attr)
+ if val is not None:
+ if os.name == 'posix' or os.name == 'nt':
+ val = os.path.expanduser(val)
+ val = subst_vars(val, self.config_vars)
+ setattr(self, attr, val)
+
+ def expand_basedirs(self):
+ """Calls `os.path.expanduser` on install_base, install_platbase and
+ root."""
+ self._expand_attrs(['install_base', 'install_platbase', 'root'])
+
+ def expand_dirs(self):
+ """Calls `os.path.expanduser` on install dirs."""
+ self._expand_attrs(['install_purelib', 'install_platlib',
+ 'install_lib', 'install_headers',
+ 'install_scripts', 'install_data',])
+
+ def run(self):
+ if self.verbose != self.distribution.verbose:
+ log.set_verbosity(self.verbose)
+ try:
+ for spec in self.args:
+ self.easy_install(spec, not self.no_deps)
+ if self.record:
+ outputs = self.outputs
+ if self.root: # strip any package prefix
+ root_len = len(self.root)
+ for counter in xrange(len(outputs)):
+ outputs[counter] = outputs[counter][root_len:]
+ from distutils import file_util
+ self.execute(
+ file_util.write_file, (self.record, outputs),
+ "writing list of installed files to '%s'" %
+ self.record
+ )
+ self.warn_deprecated_options()
+ finally:
+ log.set_verbosity(self.distribution.verbose)
+
+ def pseudo_tempname(self):
+ """Return a pseudo-tempname base in the install directory.
+ This code is intentionally naive; if a malicious party can write to
+ the target directory you're already in deep doodoo.
+ """
+ try:
+ pid = os.getpid()
+ except:
+ pid = random.randint(0,sys.maxint)
+ return os.path.join(self.install_dir, "test-easy-install-%s" % pid)
+
+ def warn_deprecated_options(self):
+ if self.delete_conflicting or self.ignore_conflicts_at_my_risk:
+ log.warn(
+ "Note: The -D, --delete-conflicting and"
+ " --ignore-conflicts-at-my-risk no longer have any purpose"
+ " and should not be used."
+ )
+
+ def check_site_dir(self):
+ """Verify that self.install_dir is .pth-capable dir, if needed"""
+ print 'install_dir', self.install_dir
+ instdir = normalize_path(self.install_dir)
+ pth_file = os.path.join(instdir,'easy-install.pth')
+
+ # Is it a configured, PYTHONPATH, implicit, or explicit site dir?
+ is_site_dir = instdir in self.all_site_dirs
+
+ if not is_site_dir:
+ # No? Then directly test whether it does .pth file processing
+ is_site_dir = self.check_pth_processing()
+ else:
+ # make sure we can write to target dir
+ testfile = self.pseudo_tempname()+'.write-test'
+ test_exists = os.path.exists(testfile)
+ try:
+ if test_exists: os.unlink(testfile)
+ open(testfile,'w').close()
+ os.unlink(testfile)
+ except (OSError,IOError):
+ self.cant_write_to_target()
+
+ if not is_site_dir and not self.multi_version:
+ # Can't install non-multi to non-site dir
+ raise DistutilsError(self.no_default_version_msg())
+
+ if is_site_dir:
+ if self.pth_file is None:
+ self.pth_file = PthDistributions(pth_file, self.all_site_dirs)
+ else:
+ self.pth_file = None
+
+ PYTHONPATH = os.environ.get('PYTHONPATH','').split(os.pathsep)
+ if instdir not in map(normalize_path, filter(None,PYTHONPATH)):
+ # only PYTHONPATH dirs need a site.py, so pretend it's there
+ self.sitepy_installed = True
+ elif self.multi_version and not os.path.exists(pth_file):
+ self.sitepy_installed = True # don't need site.py in this case
+ self.pth_file = None # and don't create a .pth file
+ self.install_dir = instdir
+
+ def cant_write_to_target(self):
+ msg = """can't create or remove files in install directory
+
+The following error occurred while trying to add or remove files in the
+installation directory:
+
+ %s
+
+The installation directory you specified (via --install-dir, --prefix, or
+the distutils default setting) was:
+
+ %s
+""" % (sys.exc_info()[1], self.install_dir,)
+
+ if not os.path.exists(self.install_dir):
+ msg += """
+This directory does not currently exist. Please create it and try again, or
+choose a different installation directory (using the -d or --install-dir
+option).
+"""
+ else:
+ msg += """
+Perhaps your account does not have write access to this directory? If the
+installation directory is a system-owned directory, you may need to sign in
+as the administrator or "root" account. If you do not have administrative
+access to this machine, you may wish to choose a different installation
+directory, preferably one that is listed in your PYTHONPATH environment
+variable.
+
+For information on other options, you may wish to consult the
+documentation at:
+
+ http://packages.python.org/distribute/easy_install.html
+
+Please make the appropriate changes for your system and try again.
+"""
+ raise DistutilsError(msg)
+
+
+
+
+ def check_pth_processing(self):
+ """Empirically verify whether .pth files are supported in inst. dir"""
+ instdir = self.install_dir
+ log.info("Checking .pth file support in %s", instdir)
+ pth_file = self.pseudo_tempname()+".pth"
+ ok_file = pth_file+'.ok'
+ ok_exists = os.path.exists(ok_file)
+ try:
+ if ok_exists: os.unlink(ok_file)
+ dirname = os.path.dirname(ok_file)
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
+ f = open(pth_file,'w')
+ except (OSError,IOError):
+ self.cant_write_to_target()
+ else:
+ try:
+ f.write("import os;open(%r,'w').write('OK')\n" % (ok_file,))
+ f.close(); f=None
+ executable = sys.executable
+ if os.name=='nt':
+ dirname,basename = os.path.split(executable)
+ alt = os.path.join(dirname,'pythonw.exe')
+ if basename.lower()=='python.exe' and os.path.exists(alt):
+ # use pythonw.exe to avoid opening a console window
+ executable = alt
+
+ from distutils.spawn import spawn
+ spawn([executable,'-E','-c','pass'],0)
+
+ if os.path.exists(ok_file):
+ log.info(
+ "TEST PASSED: %s appears to support .pth files",
+ instdir
+ )
+ return True
+ finally:
+ if f: f.close()
+ if os.path.exists(ok_file): os.unlink(ok_file)
+ if os.path.exists(pth_file): os.unlink(pth_file)
+ if not self.multi_version:
+ log.warn("TEST FAILED: %s does NOT support .pth files", instdir)
+ return False
+
+ def install_egg_scripts(self, dist):
+ """Write all the scripts for `dist`, unless scripts are excluded"""
+ if not self.exclude_scripts and dist.metadata_isdir('scripts'):
+ for script_name in dist.metadata_listdir('scripts'):
+ self.install_script(
+ dist, script_name,
+ dist.get_metadata('scripts/'+script_name)
+ )
+ self.install_wrapper_scripts(dist)
+
+ def add_output(self, path):
+ if os.path.isdir(path):
+ for base, dirs, files in os.walk(path):
+ for filename in files:
+ self.outputs.append(os.path.join(base,filename))
+ else:
+ self.outputs.append(path)
+
+ def not_editable(self, spec):
+ if self.editable:
+ raise DistutilsArgError(
+ "Invalid argument %r: you can't use filenames or URLs "
+ "with --editable (except via the --find-links option)."
+ % (spec,)
+ )
+
+ def check_editable(self,spec):
+ if not self.editable:
+ return
+
+ if os.path.exists(os.path.join(self.build_directory, spec.key)):
+ raise DistutilsArgError(
+ "%r already exists in %s; can't do a checkout there" %
+ (spec.key, self.build_directory)
+ )
+
+
+
+
+
+
+ def easy_install(self, spec, deps=False):
+ tmpdir = tempfile.mkdtemp(prefix="easy_install-")
+ download = None
+ if not self.editable: self.install_site_py()
+
+ try:
+ if not isinstance(spec,Requirement):
+ if URL_SCHEME(spec):
+ # It's a url, download it to tmpdir and process
+ self.not_editable(spec)
+ download = self.package_index.download(spec, tmpdir)
+ return self.install_item(None, download, tmpdir, deps, True)
+
+ elif os.path.exists(spec):
+ # Existing file or directory, just process it directly
+ self.not_editable(spec)
+ return self.install_item(None, spec, tmpdir, deps, True)
+ else:
+ spec = parse_requirement_arg(spec)
+
+ self.check_editable(spec)
+ dist = self.package_index.fetch_distribution(
+ spec, tmpdir, self.upgrade, self.editable, not self.always_copy,
+ self.local_index
+ )
+
+ if dist is None:
+ msg = "Could not find suitable distribution for %r" % spec
+ if self.always_copy:
+ msg+=" (--always-copy skips system and development eggs)"
+ raise DistutilsError(msg)
+ elif dist.precedence==DEVELOP_DIST:
+ # .egg-info dists don't need installing, just process deps
+ self.process_distribution(spec, dist, deps, "Using")
+ return dist
+ else:
+ return self.install_item(spec, dist.location, tmpdir, deps)
+
+ finally:
+ if os.path.exists(tmpdir):
+ rmtree(tmpdir)
+
+ def install_item(self, spec, download, tmpdir, deps, install_needed=False):
+
+ # Installation is also needed if file in tmpdir or is not an egg
+ install_needed = install_needed or self.always_copy
+ install_needed = install_needed or os.path.dirname(download) == tmpdir
+ install_needed = install_needed or not download.endswith('.egg')
+ install_needed = install_needed or (
+ self.always_copy_from is not None and
+ os.path.dirname(normalize_path(download)) ==
+ normalize_path(self.always_copy_from)
+ )
+
+ if spec and not install_needed:
+ # at this point, we know it's a local .egg, we just don't know if
+ # it's already installed.
+ for dist in self.local_index[spec.project_name]:
+ if dist.location==download:
+ break
+ else:
+ install_needed = True # it's not in the local index
+
+ log.info("Processing %s", os.path.basename(download))
+
+ if install_needed:
+ dists = self.install_eggs(spec, download, tmpdir)
+ for dist in dists:
+ self.process_distribution(spec, dist, deps)
+ else:
+ dists = [self.check_conflicts(self.egg_distribution(download))]
+ self.process_distribution(spec, dists[0], deps, "Using")
+
+ if spec is not None:
+ for dist in dists:
+ if dist in spec:
+ return dist
+
+
+
+ def select_scheme(self, name):
+ """Sets the install directories by applying the install schemes."""
+ # it's the caller's problem if they supply a bad name!
+ scheme = INSTALL_SCHEMES[name]
+ for key in SCHEME_KEYS:
+ attrname = 'install_' + key
+ if getattr(self, attrname) is None:
+ setattr(self, attrname, scheme[key])
+
+
+
+
+ def process_distribution(self, requirement, dist, deps=True, *info):
+ self.update_pth(dist)
+ self.package_index.add(dist)
+ self.local_index.add(dist)
+ if not self.editable:
+ self.install_egg_scripts(dist)
+ self.installed_projects[dist.key] = dist
+ log.info(self.installation_report(requirement, dist, *info))
+ if (dist.has_metadata('dependency_links.txt') and
+ not self.no_find_links):
+ self.package_index.add_find_links(
+ dist.get_metadata_lines('dependency_links.txt')
+ )
+ if not deps and not self.always_copy:
+ return
+ elif requirement is not None and dist.key != requirement.key:
+ log.warn("Skipping dependencies for %s", dist)
+ return # XXX this is not the distribution we were looking for
+ elif requirement is None or dist not in requirement:
+ # if we wound up with a different version, resolve what we've got
+ distreq = dist.as_requirement()
+ requirement = requirement or distreq
+ requirement = Requirement(
+ distreq.project_name, distreq.specs, requirement.extras
+ )
+ log.info("Processing dependencies for %s", requirement)
+ try:
+ distros = WorkingSet([]).resolve(
+ [requirement], self.local_index, self.easy_install
+ )
+ except DistributionNotFound, e:
+ raise DistutilsError(
+ "Could not find required distribution %s" % e.args
+ )
+ except VersionConflict, e:
+ raise DistutilsError(
+ "Installed distribution %s conflicts with requirement %s"
+ % e.args
+ )
+ if self.always_copy or self.always_copy_from:
+ # Force all the relevant distros to be copied or activated
+ for dist in distros:
+ if dist.key not in self.installed_projects:
+ self.easy_install(dist.as_requirement())
+ log.info("Finished processing dependencies for %s", requirement)
+
+ def should_unzip(self, dist):
+ if self.zip_ok is not None:
+ return not self.zip_ok
+ if dist.has_metadata('not-zip-safe'):
+ return True
+ if not dist.has_metadata('zip-safe'):
+ return True
+ return True
+
+ def maybe_move(self, spec, dist_filename, setup_base):
+ dst = os.path.join(self.build_directory, spec.key)
+ if os.path.exists(dst):
+ log.warn(
+ "%r already exists in %s; build directory %s will not be kept",
+ spec.key, self.build_directory, setup_base
+ )
+ return setup_base
+ if os.path.isdir(dist_filename):
+ setup_base = dist_filename
+ else:
+ if os.path.dirname(dist_filename)==setup_base:
+ os.unlink(dist_filename) # get it out of the tmp dir
+ contents = os.listdir(setup_base)
+ if len(contents)==1:
+ dist_filename = os.path.join(setup_base,contents[0])
+ if os.path.isdir(dist_filename):
+ # if the only thing there is a directory, move it instead
+ setup_base = dist_filename
+ ensure_directory(dst); shutil.move(setup_base, dst)
+ return dst
+
+ def install_wrapper_scripts(self, dist):
+ if not self.exclude_scripts:
+ for args in get_script_args(dist):
+ self.write_script(*args)
+
+
+
+ def install_script(self, dist, script_name, script_text, dev_path=None):
+ """Generate a legacy script wrapper and install it"""
+ spec = str(dist.as_requirement())
+ is_script = is_python_script(script_text, script_name)
+
+ if is_script and dev_path:
+ script_text = get_script_header(script_text) + (
+ "# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r\n"
+ "__requires__ = %(spec)r\n"
+ "from pkg_resources import require; require(%(spec)r)\n"
+ "del require\n"
+ "__file__ = %(dev_path)r\n"
+ "execfile(__file__)\n"
+ ) % locals()
+ elif is_script:
+ script_text = get_script_header(script_text) + (
+ "# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r\n"
+ "__requires__ = %(spec)r\n"
+ "import pkg_resources\n"
+ "pkg_resources.run_script(%(spec)r, %(script_name)r)\n"
+ ) % locals()
+ self.write_script(script_name, _to_ascii(script_text), 'b')
+
+ def write_script(self, script_name, contents, mode="t", blockers=()):
+ """Write an executable file to the scripts directory"""
+ self.delete_blockers( # clean up old .py/.pyw w/o a script
+ [os.path.join(self.script_dir,x) for x in blockers])
+ log.info("Installing %s script to %s", script_name, self.script_dir)
+ target = os.path.join(self.script_dir, script_name)
+ self.add_output(target)
+
+ if not self.dry_run:
+ ensure_directory(target)
+ f = open(target,"w"+mode)
+ f.write(contents)
+ f.close()
+ chmod(target,0755)
+
+
+
+
+ def install_eggs(self, spec, dist_filename, tmpdir):
+ # .egg dirs or files are already built, so just return them
+ if dist_filename.lower().endswith('.egg'):
+ return [self.install_egg(dist_filename, tmpdir)]
+ elif dist_filename.lower().endswith('.exe'):
+ return [self.install_exe(dist_filename, tmpdir)]
+
+ # Anything else, try to extract and build
+ setup_base = tmpdir
+ if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'):
+ unpack_archive(dist_filename, tmpdir, self.unpack_progress)
+ elif os.path.isdir(dist_filename):
+ setup_base = os.path.abspath(dist_filename)
+
+ if (setup_base.startswith(tmpdir) # something we downloaded
+ and self.build_directory and spec is not None
+ ):
+ setup_base = self.maybe_move(spec, dist_filename, setup_base)
+
+ # Find the setup.py file
+ setup_script = os.path.join(setup_base, 'setup.py')
+
+ if not os.path.exists(setup_script):
+ setups = glob(os.path.join(setup_base, '*', 'setup.py'))
+ if not setups:
+ raise DistutilsError(
+ "Couldn't find a setup script in %s" % os.path.abspath(dist_filename)
+ )
+ if len(setups)>1:
+ raise DistutilsError(
+ "Multiple setup scripts in %s" % os.path.abspath(dist_filename)
+ )
+ setup_script = setups[0]
+
+ # Now run it, and return the result
+ if self.editable:
+ log.info(self.report_editable(spec, setup_script))
+ return []
+ else:
+ return self.build_and_install(setup_script, setup_base)
+
+ def egg_distribution(self, egg_path):
+ if os.path.isdir(egg_path):
+ metadata = PathMetadata(egg_path,os.path.join(egg_path,'EGG-INFO'))
+ else:
+ metadata = EggMetadata(zipimport.zipimporter(egg_path))
+ return Distribution.from_filename(egg_path,metadata=metadata)
+
+ def install_egg(self, egg_path, tmpdir):
+ destination = os.path.join(self.install_dir,os.path.basename(egg_path))
+ destination = os.path.abspath(destination)
+ if not self.dry_run:
+ ensure_directory(destination)
+
+ dist = self.egg_distribution(egg_path)
+ self.check_conflicts(dist)
+ if not samefile(egg_path, destination):
+ if os.path.isdir(destination) and not os.path.islink(destination):
+ dir_util.remove_tree(destination, dry_run=self.dry_run)
+ elif os.path.exists(destination):
+ self.execute(os.unlink,(destination,),"Removing "+destination)
+ uncache_zipdir(destination)
+ if os.path.isdir(egg_path):
+ if egg_path.startswith(tmpdir):
+ f,m = shutil.move, "Moving"
+ else:
+ f,m = shutil.copytree, "Copying"
+ elif self.should_unzip(dist):
+ self.mkpath(destination)
+ f,m = self.unpack_and_compile, "Extracting"
+ elif egg_path.startswith(tmpdir):
+ f,m = shutil.move, "Moving"
+ else:
+ f,m = shutil.copy2, "Copying"
+
+ self.execute(f, (egg_path, destination),
+ (m+" %s to %s") %
+ (os.path.basename(egg_path),os.path.dirname(destination)))
+
+ self.add_output(destination)
+ return self.egg_distribution(destination)
+
+ def install_exe(self, dist_filename, tmpdir):
+ # See if it's valid, get data
+ cfg = extract_wininst_cfg(dist_filename)
+ if cfg is None:
+ raise DistutilsError(
+ "%s is not a valid distutils Windows .exe" % dist_filename
+ )
+ # Create a dummy distribution object until we build the real distro
+ dist = Distribution(None,
+ project_name=cfg.get('metadata','name'),
+ version=cfg.get('metadata','version'), platform="win32"
+ )
+
+ # Convert the .exe to an unpacked egg
+ egg_path = dist.location = os.path.join(tmpdir, dist.egg_name()+'.egg')
+ egg_tmp = egg_path+'.tmp'
+ egg_info = os.path.join(egg_tmp, 'EGG-INFO')
+ pkg_inf = os.path.join(egg_info, 'PKG-INFO')
+ ensure_directory(pkg_inf) # make sure EGG-INFO dir exists
+ dist._provider = PathMetadata(egg_tmp, egg_info) # XXX
+ self.exe_to_egg(dist_filename, egg_tmp)
+
+ # Write EGG-INFO/PKG-INFO
+ if not os.path.exists(pkg_inf):
+ f = open(pkg_inf,'w')
+ f.write('Metadata-Version: 1.0\n')
+ for k,v in cfg.items('metadata'):
+ if k<>'target_version':
+ f.write('%s: %s\n' % (k.replace('_','-').title(), v))
+ f.close()
+ script_dir = os.path.join(egg_info,'scripts')
+ self.delete_blockers( # delete entry-point scripts to avoid duping
+ [os.path.join(script_dir,args[0]) for args in get_script_args(dist)]
+ )
+ # Build .egg file from tmpdir
+ bdist_egg.make_zipfile(
+ egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run
+ )
+ # install the .egg
+ return self.install_egg(egg_path, tmpdir)
+
+ def exe_to_egg(self, dist_filename, egg_tmp):
+ """Extract a bdist_wininst to the directories an egg would use"""
+ # Check for .pth file and set up prefix translations
+ prefixes = get_exe_prefixes(dist_filename)
+ to_compile = []
+ native_libs = []
+ top_level = {}
+ def process(src,dst):
+ s = src.lower()
+ for old,new in prefixes:
+ if s.startswith(old):
+ src = new+src[len(old):]
+ parts = src.split('/')
+ dst = os.path.join(egg_tmp, *parts)
+ dl = dst.lower()
+ if dl.endswith('.pyd') or dl.endswith('.dll'):
+ parts[-1] = bdist_egg.strip_module(parts[-1])
+ top_level[os.path.splitext(parts[0])[0]] = 1
+ native_libs.append(src)
+ elif dl.endswith('.py') and old!='SCRIPTS/':
+ top_level[os.path.splitext(parts[0])[0]] = 1
+ to_compile.append(dst)
+ return dst
+ if not src.endswith('.pth'):
+ log.warn("WARNING: can't process %s", src)
+ return None
+ # extract, tracking .pyd/.dll->native_libs and .py -> to_compile
+ unpack_archive(dist_filename, egg_tmp, process)
+ stubs = []
+ for res in native_libs:
+ if res.lower().endswith('.pyd'): # create stubs for .pyd's
+ parts = res.split('/')
+ resource = parts[-1]
+ parts[-1] = bdist_egg.strip_module(parts[-1])+'.py'
+ pyfile = os.path.join(egg_tmp, *parts)
+ to_compile.append(pyfile); stubs.append(pyfile)
+ bdist_egg.write_stub(resource, pyfile)
+ self.byte_compile(to_compile) # compile .py's
+ bdist_egg.write_safety_flag(os.path.join(egg_tmp,'EGG-INFO'),
+ bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag
+
+ for name in 'top_level','native_libs':
+ if locals()[name]:
+ txt = os.path.join(egg_tmp, 'EGG-INFO', name+'.txt')
+ if not os.path.exists(txt):
+ f = open(txt,'w')
+ f.write('\n'.join(locals()[name])+'\n')
+ f.close()
+
+    def check_conflicts(self, dist):
+        """Verify that there are no conflicting "old-style" packages"""
+
+        # NOTE: the early return below deliberately disables the whole scan;
+        # everything after it is dead code kept for a future strategy.
+        return dist # XXX temporarily disable until new strategy is stable
+        from imp import find_module, get_suffixes
+        from glob import glob
+
+        blockers = []
+        # Top-level module/package names that `dist` will provide.
+        names = dict.fromkeys(dist._get_metadata('top_level.txt')) # XXX private attr
+
+        exts = {'.pyc':1, '.pyo':1} # get_suffixes() might leave one out
+        for ext,mode,typ in get_suffixes():
+            exts[ext] = 1
+
+        # Scan the install dir and all site dirs for files shadowing `names`.
+        for path,files in expand_paths([self.install_dir]+self.all_site_dirs):
+            for filename in files:
+                base,ext = os.path.splitext(filename)
+                if base in names:
+                    if not ext:
+                        # no extension, check for package
+                        try:
+                            f, filename, descr = find_module(base, [path])
+                        except ImportError:
+                            continue
+                        else:
+                            if f: f.close()
+                            if filename not in blockers:
+                                blockers.append(filename)
+                    elif ext in exts and base!='site': # XXX ugh
+                        blockers.append(os.path.join(path,filename))
+        if blockers:
+            self.found_conflicts(dist, blockers)
+
+        return dist
+
+    def found_conflicts(self, dist, blockers):
+        # Either delete the conflicting files (--delete-conflicting) or emit
+        # a warning; aborts via DistutilsError unless conflicts are ignored.
+        if self.delete_conflicting:
+            log.warn("Attempting to delete conflicting packages:")
+            return self.delete_blockers(blockers)
+
+        msg = """\
+-------------------------------------------------------------------------
+CONFLICT WARNING:
+
+The following modules or packages have the same names as modules or
+packages being installed, and will be *before* the installed packages in
+Python's search path. You MUST remove all of the relevant files and
+directories before you will be able to use the package(s) you are
+installing:
+
+    %s
+
+""" % '\n    '.join(blockers)
+
+        if self.ignore_conflicts_at_my_risk:
+            msg += """\
+(Note: you can run EasyInstall on '%s' with the
+--delete-conflicting option to attempt deletion of the above files
+and/or directories.)
+""" % dist.project_name
+        else:
+            msg += """\
+Note: you can attempt this installation again with EasyInstall, and use
+either the --delete-conflicting (-D) option or the
+--ignore-conflicts-at-my-risk option, to either delete the above files
+and directories, or to ignore the conflicts, respectively. Note that if
+you ignore the conflicts, the installed package(s) may not work.
+"""
+        msg += """\
+-------------------------------------------------------------------------
+"""
+        sys.stderr.write(msg)
+        sys.stderr.flush()
+        if not self.ignore_conflicts_at_my_risk:
+            raise DistutilsError("Installation aborted due to conflicts")
+
+    def installation_report(self, req, dist, what="Installed"):
+        """Helpful installation message for display to package users"""
+        msg = "\n%(what)s %(eggloc)s%(extras)s"
+        if self.multi_version and not self.no_report:
+            msg += """
+
+Because this distribution was installed --multi-version, before you can
+import modules from this package in an application, you will need to
+'import pkg_resources' and then use a 'require()' call similar to one of
+these examples, in order to select the desired version:
+
+    pkg_resources.require("%(name)s") # latest installed version
+    pkg_resources.require("%(name)s==%(version)s") # this exact version
+    pkg_resources.require("%(name)s>=%(version)s") # this version or higher
+"""
+        if self.install_dir not in map(normalize_path,sys.path):
+            msg += """
+
+Note also that the installation directory must be on sys.path at runtime for
+this to work. (e.g. by being the application's script directory, by being on
+PYTHONPATH, or by being added to sys.path by your code.)
+"""
+        # Locals below feed the %-interpolation via locals() at the end.
+        eggloc = dist.location
+        name = dist.project_name
+        version = dist.version
+        extras = '' # TODO: self.report_extras(req, dist)
+        return msg % locals()
+
+    def report_editable(self, spec, setup_script):
+        # Build the user-facing message shown after extracting an editable
+        # (--editable) checkout; interpolated from the locals below.
+        dirname = os.path.dirname(setup_script)
+        python = sys.executable
+        return """\nExtracted editable version of %(spec)s to %(dirname)s
+
+If it uses setuptools in its setup script, you can activate it in
+"development" mode by going to that directory and running::
+
+    %(python)s setup.py develop
+
+See the setuptools documentation for the "develop" command for more info.
+""" % locals()
+
+    def run_setup(self, setup_script, setup_base, args):
+        # Run a package's setup script with our patched bdist_egg/egg_info
+        # commands pre-registered, translating self.verbose/dry_run into
+        # distutils -v/-q/-n flags.
+        sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg)
+        sys.modules.setdefault('distutils.command.egg_info', egg_info)
+
+        args = list(args)
+        if self.verbose>2:
+            v = 'v' * (self.verbose - 1)
+            args.insert(0,'-'+v)
+        elif self.verbose<2:
+            args.insert(0,'-q')
+        if self.dry_run:
+            args.insert(0,'-n')
+        log.info(
+            "Running %s %s", setup_script[len(setup_base)+1:], ' '.join(args)
+        )
+        try:
+            run_setup(setup_script, args)
+        # Python 2 except syntax; convert a script's sys.exit() into an error.
+        except SystemExit, v:
+            raise DistutilsError("Setup script exited with %s" % (v.args[0],))
+
+    def build_and_install(self, setup_script, setup_base):
+        # Build eggs via "setup.py bdist_egg" into a temp dir, then install
+        # every egg produced; returns the list of installed distributions.
+        args = ['bdist_egg', '--dist-dir']
+        dist_dir = tempfile.mkdtemp(
+            prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script)
+        )
+        try:
+            args.append(dist_dir)
+            self.run_setup(setup_script, setup_base, args)
+            all_eggs = Environment([dist_dir])
+            eggs = []
+            for key in all_eggs:
+                for dist in all_eggs[key]:
+                    eggs.append(self.install_egg(dist.location, setup_base))
+            if not eggs and not self.dry_run:
+                log.warn("No eggs found in %s (setup script problem?)",
+                    dist_dir)
+            return eggs
+        finally:
+            # Temp dist dir is always cleaned up, even on failure.
+            rmtree(dist_dir)
+            log.set_verbosity(self.verbose) # restore our log verbosity
+
+    def update_pth(self,dist):
+        # Keep easy-install.pth in sync with `dist`: drop stale entries for
+        # the same project key, add the new location (unless --multi-version),
+        # and mirror the changes in self.shadow_path.
+        if self.pth_file is None:
+            return
+
+        for d in self.pth_file[dist.key]: # drop old entries
+            if self.multi_version or d.location != dist.location:
+                log.info("Removing %s from easy-install.pth file", d)
+                self.pth_file.remove(d)
+                if d.location in self.shadow_path:
+                    self.shadow_path.remove(d.location)
+
+        if not self.multi_version:
+            if dist.location in self.pth_file.paths:
+                log.info(
+                    "%s is already the active version in easy-install.pth",
+                    dist
+                )
+            else:
+                log.info("Adding %s to easy-install.pth file", dist)
+                self.pth_file.add(dist) # add new entry
+                if dist.location not in self.shadow_path:
+                    self.shadow_path.append(dist.location)
+
+        if not self.dry_run:
+
+            self.pth_file.save()
+            if dist.key=='distribute':
+                # Ensure that setuptools itself never becomes unavailable!
+                # XXX should this check for latest version?
+                filename = os.path.join(self.install_dir,'setuptools.pth')
+                if os.path.islink(filename): os.unlink(filename)
+                f = open(filename, 'wt')
+                f.write(self.pth_file.make_relative(dist.location)+'\n')
+                f.close()
+
+    def unpack_progress(self, src, dst):
+        # Progress filter for unpacking
+        log.debug("Unpacking %s to %s", src, dst)
+        return dst # only unpack-and-compile skips files for dry run
+
+    def unpack_and_compile(self, egg_path, destination):
+        # Unpack an egg archive, byte-compile its .py files, and mark
+        # .py/.dll/.so files read+execute for all (unless dry-run).
+        to_compile = []; to_chmod = []
+
+        def pf(src,dst):
+            # Per-file filter for unpack_archive: collects work lists and
+            # returns None in dry-run mode so nothing is written.
+            if dst.endswith('.py') and not src.startswith('EGG-INFO/'):
+                to_compile.append(dst)
+                to_chmod.append(dst)
+            elif dst.endswith('.dll') or dst.endswith('.so'):
+                to_chmod.append(dst)
+            self.unpack_progress(src,dst)
+            return not self.dry_run and dst or None
+
+        unpack_archive(egg_path, destination, pf)
+        self.byte_compile(to_compile)
+        if not self.dry_run:
+            for f in to_chmod:
+                # Python 2 octal literals: force r-x bits, clear setgid/other-write.
+                mode = ((os.stat(f)[stat.ST_MODE]) | 0555) & 07755
+                chmod(f, mode)
+
+    def byte_compile(self, to_compile):
+        # Byte-compile the given .py files (and optionally optimize), unless
+        # bytecode writing is globally disabled.
+        if _dont_write_bytecode:
+            self.warn('byte-compiling is disabled, skipping.')
+            return
+
+        from distutils.util import byte_compile
+        try:
+            # try to make the byte compile messages quieter
+            log.set_verbosity(self.verbose - 1)
+
+            byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run)
+            if self.optimize:
+                byte_compile(
+                    to_compile, optimize=self.optimize, force=1,
+                    dry_run=self.dry_run
+                )
+        finally:
+            log.set_verbosity(self.verbose) # restore original verbosity
+
+
+
+
+
+
+
+
+    def no_default_version_msg(self):
+        # Error text shown when the install dir is neither on PYTHONPATH nor
+        # a .pth-processing directory; interpolates install_dir and PYTHONPATH.
+        return """bad install directory or PYTHONPATH
+
+You are attempting to install a package to a directory that is not
+on PYTHONPATH and which Python does not read ".pth" files from. The
+installation directory you specified (via --install-dir, --prefix, or
+the distutils default setting) was:
+
+    %s
+
+and your PYTHONPATH environment variable currently contains:
+
+    %r
+
+Here are some of your options for correcting the problem:
+
+* You can choose a different installation directory, i.e., one that is
+  on PYTHONPATH or supports .pth files
+
+* You can add the installation directory to the PYTHONPATH environment
+  variable. (It must then also be on PYTHONPATH whenever you run
+  Python and want to use the package(s) you are installing.)
+
+* You can set up the installation directory to support ".pth" files by
+  using one of the approaches described here:
+
+  http://packages.python.org/distribute/easy_install.html#custom-installation-locations
+
+Please make the appropriate changes for your system and try again.""" % (
+        self.install_dir, os.environ.get('PYTHONPATH','')
+    )
+
+
+
+
+
+
+
+
+
+
+    def install_site_py(self):
+        """Make sure there's a site.py in the target dir, if needed"""
+
+        if self.sitepy_installed:
+            return # already did it, or don't need to
+
+        sitepy = os.path.join(self.install_dir, "site.py")
+        # Canonical site.py shipped inside the distribute egg.
+        source = resource_string(Requirement.parse("distribute"), "site.py")
+        current = ""
+
+        if os.path.exists(sitepy):
+            log.debug("Checking existing site.py in %s", self.install_dir)
+            f = open(sitepy,'rb')
+            current = f.read()
+            # we want str, not bytes
+            if sys.version_info >= (3,):
+                current = current.decode()
+
+            f.close()
+            # Refuse to clobber a site.py we didn't generate ourselves.
+            if not current.startswith('def __boot():'):
+                raise DistutilsError(
+                    "%s is not a setuptools-generated site.py; please"
+                    " remove it." % sitepy
+                )
+
+        if current != source:
+            log.info("Creating %s", sitepy)
+            if not self.dry_run:
+                ensure_directory(sitepy)
+                f = open(sitepy,'wb')
+                f.write(source)
+                f.close()
+            self.byte_compile([sitepy])
+
+        self.sitepy_installed = True
+
+
+
+
+    def create_home_path(self):
+        """Create directories under ~."""
+        # Only relevant for --user installs; builds any config_vars paths
+        # rooted in the home directory that don't exist yet.
+        if not self.user:
+            return
+        home = convert_path(os.path.expanduser("~"))
+        for name, path in self.config_vars.iteritems():
+            if path.startswith(home) and not os.path.isdir(path):
+                self.debug_print("os.makedirs('%s', 0700)" % path)
+                os.makedirs(path, 0700)
+
+
+
+
+
+
+
+    # Per-OS default locations used when --prefix is given; os.name keys,
+    # with DEFAULT_SCHEME (Windows-style layout) as the fallback.
+    INSTALL_SCHEMES = dict(
+        posix = dict(
+            install_dir = '$base/lib/python$py_version_short/site-packages',
+            script_dir = '$base/bin',
+        ),
+    )
+
+    DEFAULT_SCHEME = dict(
+        install_dir = '$base/Lib/site-packages',
+        script_dir = '$base/Scripts',
+    )
+
+    def _expand(self, *attrs):
+        # Fill in unset attrs from the install scheme (when --prefix is used)
+        # and substitute $var placeholders using the install command's vars.
+        config_vars = self.get_finalized_command('install').config_vars
+
+        if self.prefix:
+            # Set default install_dir/scripts from --prefix
+            config_vars = config_vars.copy()
+            config_vars['base'] = self.prefix
+            scheme = self.INSTALL_SCHEMES.get(os.name,self.DEFAULT_SCHEME)
+            for attr,val in scheme.items():
+                if getattr(self,attr,None) is None:
+                    setattr(self,attr,val)
+
+        from distutils.util import subst_vars
+        for attr in attrs:
+            val = getattr(self, attr)
+            if val is not None:
+                val = subst_vars(val, config_vars)
+                if os.name == 'posix':
+                    val = os.path.expanduser(val)
+                setattr(self, attr, val)
+
+
+
+
+
+
+
+
+
+def get_site_dirs():
+    # return a list of 'site' dirs
+    # Combines PYTHONPATH entries, per-prefix site-packages layouts,
+    # distutils' get_python_lib() results, and (if supported) the
+    # per-user site dir, all normalized.
+    sitedirs = filter(None,os.environ.get('PYTHONPATH','').split(os.pathsep))
+    prefixes = [sys.prefix]
+    if sys.exec_prefix != sys.prefix:
+        prefixes.append(sys.exec_prefix)
+    for prefix in prefixes:
+        if prefix:
+            if sys.platform in ('os2emx', 'riscos'):
+                sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
+            elif os.sep == '/':
+                # POSIX-style layout
+                sitedirs.extend([os.path.join(prefix,
+                                              "lib",
+                                              "python" + sys.version[:3],
+                                              "site-packages"),
+                                 os.path.join(prefix, "lib", "site-python")])
+            else:
+                # Windows-style layout
+                sitedirs.extend(
+                    [prefix, os.path.join(prefix, "lib", "site-packages")]
+                )
+            if sys.platform == 'darwin':
+                # for framework builds *only* we add the standard Apple
+                # locations. Currently only per-user, but /Library and
+                # /Network/Library could be added too
+                if 'Python.framework' in prefix:
+                    home = os.environ.get('HOME')
+                    if home:
+                        sitedirs.append(
+                            os.path.join(home,
+                                         'Library',
+                                         'Python',
+                                         sys.version[:3],
+                                         'site-packages'))
+    for plat_specific in (0,1):
+        site_lib = get_python_lib(plat_specific)
+        if site_lib not in sitedirs: sitedirs.append(site_lib)
+
+    if HAS_USER_SITE:
+        sitedirs.append(site.USER_SITE)
+
+    sitedirs = map(normalize_path, sitedirs)
+
+    return sitedirs
+
+
+def expand_paths(inputs):
+    """Yield sys.path directories that might contain "old-style" packages"""
+    # Generator of (dirname, file_list) pairs: each input directory itself,
+    # plus any extra directories referenced by third-party .pth files found
+    # inside it. Duplicates are suppressed via `seen`.
+
+    seen = {}
+
+    for dirname in inputs:
+        dirname = normalize_path(dirname)
+        if dirname in seen:
+            continue
+
+        seen[dirname] = 1
+        if not os.path.isdir(dirname):
+            continue
+
+        files = os.listdir(dirname)
+        yield dirname, files
+
+        for name in files:
+            if not name.endswith('.pth'):
+                # We only care about the .pth files
+                continue
+            if name in ('easy-install.pth','setuptools.pth'):
+                # Ignore .pth files that we control
+                continue
+
+            # Read the .pth file
+            f = open(os.path.join(dirname,name))
+            lines = list(yield_lines(f))
+            f.close()
+
+            # Yield existing non-dupe, non-import directory lines from it
+            for line in lines:
+                if not line.startswith("import"):
+                    line = normalize_path(line.rstrip())
+                    if line not in seen:
+                        seen[line] = 1
+                        if not os.path.isdir(line):
+                            continue
+                        yield line, os.listdir(line)
+
+
+def extract_wininst_cfg(dist_filename):
+    """Extract configuration data from a bdist_wininst .exe
+
+    Returns a ConfigParser.RawConfigParser, or None
+    """
+    # bdist_wininst exes embed a config blob between the exe stub and the
+    # appended zip; locate it via the zip end-of-central-directory record.
+    f = open(dist_filename,'rb')
+    try:
+        endrec = zipfile._EndRecData(f)
+        if endrec is None:
+            return None
+
+        prepended = (endrec[9] - endrec[5]) - endrec[6]
+        if prepended < 12: # no wininst data here
+            return None
+        f.seek(prepended-12)
+
+        import struct, StringIO, ConfigParser
+        # 12-byte trailer: magic tag, config length, bitmap length.
+        tag, cfglen, bmlen = struct.unpack("<iii",f.read(12))
+        if tag not in (0x1234567A, 0x1234567B):
+            return None # not a valid tag
+
+        f.seek(prepended-(12+cfglen))
+        cfg = ConfigParser.RawConfigParser({'version':'','target_version':''})
+        try:
+            # Config data is NUL-terminated; parse only up to the first NUL.
+            cfg.readfp(StringIO.StringIO(f.read(cfglen).split(chr(0),1)[0]))
+        except ConfigParser.Error:
+            return None
+        if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
+            return None
+        return cfg
+
+    finally:
+        f.close()
+
+
+
+
+
+
+
+
+def get_exe_prefixes(exe_filename):
+    """Get exe->egg path translations for a given .exe file"""
+    # Returns (lowercased_prefix, replacement) pairs, longest-first, used to
+    # remap paths inside a bdist_wininst exe's zip into egg layout.
+
+    prefixes = [
+        ('PURELIB/', ''), ('PLATLIB/pywin32_system32', ''),
+        ('PLATLIB/', ''),
+        ('SCRIPTS/', 'EGG-INFO/scripts/')
+    ]
+    z = zipfile.ZipFile(exe_filename)
+    try:
+        for info in z.infolist():
+            name = info.filename
+            parts = name.split('/')
+            # A */*.egg-info/PKG-INFO entry maps that dir to EGG-INFO/.
+            if len(parts)==3 and parts[2]=='PKG-INFO':
+                if parts[1].endswith('.egg-info'):
+                    prefixes.insert(0,('/'.join(parts[:2]), 'EGG-INFO/'))
+                    break
+            # Python 2 '<>' inequality operator.
+            if len(parts)<>2 or not name.endswith('.pth'):
+                continue
+            if name.endswith('-nspkg.pth'):
+                continue
+            if parts[0].upper() in ('PURELIB','PLATLIB'):
+                # Non-import lines of top-level .pth files add extra prefixes.
+                for pth in yield_lines(z.read(name)):
+                    pth = pth.strip().replace('\\','/')
+                    if not pth.startswith('import'):
+                        prefixes.append((('%s/%s/' % (parts[0],pth)), ''))
+    finally:
+        z.close()
+    prefixes = [(x.lower(),y) for x, y in prefixes]
+    prefixes.sort(); prefixes.reverse()
+    return prefixes
+
+
+def parse_requirement_arg(spec):
+    # Parse a command-line requirement spec, converting parse failures into
+    # a user-facing DistutilsError.
+    try:
+        return Requirement.parse(spec)
+    except ValueError:
+        raise DistutilsError(
+            "Not a URL, existing file, or requirement spec: %r" % (spec,)
+        )
+
+class PthDistributions(Environment):
+    """A .pth file with Distribution paths in it"""
+
+    # True when in-memory paths differ from what's on disk.
+    dirty = False
+
+    def __init__(self, filename, sitedirs=()):
+        # `filename` is the .pth file to manage; `sitedirs` are dirs whose
+        # entries need not appear in the file (they're already on sys.path).
+        self.filename = filename; self.sitedirs=map(normalize_path, sitedirs)
+        self.basedir = normalize_path(os.path.dirname(self.filename))
+        self._load(); Environment.__init__(self, [], None, None)
+        for path in yield_lines(self.paths):
+            map(self.add, find_distributions(path, True))
+
+    def _load(self):
+        # Read the .pth file, normalizing entries relative to basedir and
+        # dropping dead or duplicate paths (which marks us dirty).
+        self.paths = []
+        saw_import = False
+        seen = dict.fromkeys(self.sitedirs)
+        if os.path.isfile(self.filename):
+            f = open(self.filename,'rt')
+            for line in f:
+                if line.startswith('import'):
+                    saw_import = True
+                    continue
+                path = line.rstrip()
+                self.paths.append(path)
+                if not path.strip() or path.strip().startswith('#'):
+                    continue
+                # skip non-existent paths, in case somebody deleted a package
+                # manually, and duplicate paths as well
+                path = self.paths[-1] = normalize_path(
+                    os.path.join(self.basedir,path)
+                )
+                if not os.path.exists(path) or path in seen:
+                    self.paths.pop() # skip it
+                    self.dirty = True # we cleaned up, so we're dirty now :)
+                    continue
+                seen[path] = 1
+            f.close()
+
+        if self.paths and not saw_import:
+            self.dirty = True # ensure anything we touch has import wrappers
+        while self.paths and not self.paths[-1].strip():
+            self.paths.pop()
+
+    def save(self):
+        """Write changed .pth file back to disk"""
+        if not self.dirty:
+            return
+
+        data = '\n'.join(map(self.make_relative,self.paths))
+        if data:
+            log.debug("Saving %s", self.filename)
+            # Wrap entries so new paths are spliced into sys.path at the
+            # position recorded in sys.__egginsert, not just appended.
+            data = (
+                "import sys; sys.__plen = len(sys.path)\n"
+                "%s\n"
+                "import sys; new=sys.path[sys.__plen:];"
+                " del sys.path[sys.__plen:];"
+                " p=getattr(sys,'__egginsert',0); sys.path[p:p]=new;"
+                " sys.__egginsert = p+len(new)\n"
+            ) % data
+
+            if os.path.islink(self.filename):
+                os.unlink(self.filename)
+            f = open(self.filename,'wt')
+            f.write(data); f.close()
+
+        elif os.path.exists(self.filename):
+            log.debug("Deleting empty %s", self.filename)
+            os.unlink(self.filename)
+
+        self.dirty = False
+
+    def add(self,dist):
+        """Add `dist` to the distribution map"""
+        if (dist.location not in self.paths and (
+            dist.location not in self.sitedirs or
+            dist.location == os.getcwd() #account for '.' being in PYTHONPATH
+            )):
+            self.paths.append(dist.location)
+            self.dirty = True
+        Environment.add(self,dist)
+
+    def remove(self,dist):
+        """Remove `dist` from the distribution map"""
+        while dist.location in self.paths:
+            self.paths.remove(dist.location); self.dirty = True
+        Environment.remove(self,dist)
+
+    def make_relative(self,path):
+        # Rewrite `path` relative to the .pth file's directory when it lies
+        # underneath it; otherwise return it unchanged (while/else form).
+        npath, last = os.path.split(normalize_path(path))
+        baselen = len(self.basedir)
+        parts = [last]
+        sep = os.altsep=='/' and '/' or os.sep
+        while len(npath)>=baselen:
+            if npath==self.basedir:
+                parts.append(os.curdir)
+                parts.reverse()
+                return sep.join(parts)
+            npath, last = os.path.split(npath)
+            parts.append(last)
+        else:
+            return path
+
+def get_script_header(script_text, executable=sys_executable, wininst=False):
+    """Create a #! line, getting options (if any) from script_text"""
+    from distutils.command.build_scripts import first_line_re
+    first = (script_text+'\n').splitlines()[0]
+    match = first_line_re.match(first)
+    options = ''
+    if match:
+        # Carry over interpreter options from an existing shebang line.
+        options = match.group(1) or ''
+        if options: options = ' '+options
+    if wininst:
+        executable = "python.exe"
+    else:
+        executable = nt_quote_arg(executable)
+    hdr = "#!%(executable)s%(options)s\n" % locals()
+    if not isascii(hdr):
+        # Non-ascii path to sys.executable, use -x to prevent warnings
+        if options:
+            if options.strip().startswith('-'):
+                options = ' -x'+options.strip()[1:]
+            # else: punt, we can't do it, let the warning happen anyway
+        else:
+            options = ' -x'
+    executable = fix_jython_executable(executable, options)
+    hdr = "#!%(executable)s%(options)s\n" % locals()
+    return hdr
+
+def auto_chmod(func, arg, exc):
+    # rmtree() onerror handler: on Windows, make a read-only file writable
+    # and retry the remove; otherwise re-raise with the failing call appended
+    # to the message (Python 2 raise syntax).
+    if func is os.remove and os.name=='nt':
+        chmod(arg, stat.S_IWRITE)
+        return func(arg)
+    exc = sys.exc_info()
+    raise exc[0], (exc[1][0], exc[1][1] + (" %s %s" % (func,arg)))
+
+def uncache_zipdir(path):
+    """Ensure that the importer caches dont have stale info for `path`"""
+    from zipimport import _zip_directory_cache as zdc
+    _uncache(path, zdc)
+    _uncache(path, sys.path_importer_cache)
+
+def _uncache(path, cache):
+    # Drop `path` from `cache`, falling back to a normalized-path comparison
+    # when the exact key isn't present.
+    if path in cache:
+        del cache[path]
+    else:
+        path = normalize_path(path)
+        for p in cache:
+            if normalize_path(p)==path:
+                del cache[p]
+                return
+
+def is_python(text, filename='<string>'):
+    "Is this string a valid Python script?"
+    # Uses compile() as the oracle; only syntax/type errors mean "no".
+    try:
+        compile(text, filename, 'exec')
+    except (SyntaxError, TypeError):
+        return False
+    else:
+        return True
+
+def is_sh(executable):
+    """Determine if the specified executable is a .sh (contains a #! line)"""
+    # NOTE(review): on read failure this returns `executable` (truthy for a
+    # non-empty path), not False — callers appear to rely on truthiness only.
+    try:
+        fp = open(executable)
+        magic = fp.read(2)
+        fp.close()
+    except (OSError,IOError): return executable
+    return magic == '#!'
+
+def nt_quote_arg(arg):
+    """Quote a command line argument according to Windows parsing rules"""
+
+    result = []
+    needquote = False
+    nb = 0  # count of pending (not yet emitted) backslashes
+
+    needquote = (" " in arg) or ("\t" in arg)
+    if needquote:
+        result.append('"')
+
+    for c in arg:
+        if c == '\\':
+            nb += 1
+        elif c == '"':
+            # double preceding backslashes, then add a \"
+            result.append('\\' * (nb*2) + '\\"')
+            nb = 0
+        else:
+            if nb:
+                result.append('\\' * nb)
+                nb = 0
+            result.append(c)
+
+    if nb:
+        result.append('\\' * nb)
+
+    if needquote:
+        result.append('\\' * nb) # double the trailing backslashes
+        result.append('"')
+
+    return ''.join(result)
+
+
+
+
+
+
+
+
+
+def is_python_script(script_text, filename):
+    """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc.
+    """
+    if filename.endswith('.py') or filename.endswith('.pyw'):
+        return True # extension says it's Python
+    if is_python(script_text, filename):
+        return True # it's syntactically valid Python
+    if script_text.startswith('#!'):
+        # It begins with a '#!' line, so check if 'python' is in it somewhere
+        return 'python' in script_text.splitlines()[0].lower()
+
+    return False # Not any Python I can recognize
+
+try:
+    from os import chmod as _chmod
+except ImportError:
+    # Jython compatibility
+    def _chmod(*args): pass
+
+def chmod(path, mode):
+    # Best-effort chmod: failures are logged at debug level, never raised.
+    log.debug("changing mode of %s to %o", path, mode)
+    try:
+        _chmod(path, mode)
+    except os.error, e:
+        log.debug("chmod failed: %s", e)
+
+def fix_jython_executable(executable, options):
+    # Returns a shebang-safe interpreter path; only alters it on Jython when
+    # sys.executable is itself a shell script.
+    if sys.platform.startswith('java') and is_sh(executable):
+        # Workaround Jython's sys.executable being a .sh (an invalid
+        # shebang line interpreter)
+        if options:
+            # Can't apply the workaround, leave it broken
+            log.warn("WARNING: Unable to adapt shebang line for Jython,"
+                     " the following script is NOT executable\n"
+                     "         see http://bugs.jython.org/issue1112 for"
+                     " more information.")
+        else:
+            return '/usr/bin/env %s' % executable
+    return executable
+
+
+def get_script_args(dist, executable=sys_executable, wininst=False):
+    """Yield write_script() argument tuples for a distribution's entrypoints"""
+    spec = str(dist.as_requirement())
+    header = get_script_header("", executable, wininst)
+    for group in 'console_scripts', 'gui_scripts':
+        for name, ep in dist.get_entry_map(group).items():
+            # Stub that resolves and runs the entry point at launch time.
+            script_text = (
+                "# EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r\n"
+                "__requires__ = %(spec)r\n"
+                "import sys\n"
+                "from pkg_resources import load_entry_point\n"
+                "\n"
+                "if __name__ == '__main__':"
+                "\n"
+                "    sys.exit(\n"
+                "        load_entry_point(%(spec)r, %(group)r, %(name)r)()\n"
+                "    )\n"
+            ) % locals()
+            if sys.platform=='win32' or wininst:
+                # On Windows/wininst, add a .py extension and an .exe launcher
+                if group=='gui_scripts':
+                    ext, launcher = '-script.pyw', 'gui.exe'
+                    old = ['.pyw']
+                    new_header = re.sub('(?i)python.exe','pythonw.exe',header)
+                else:
+                    ext, launcher = '-script.py', 'cli.exe'
+                    old = ['.py','.pyc','.pyo']
+                    new_header = re.sub('(?i)pythonw.exe','python.exe',header)
+
+                # Only use the swapped interpreter if it actually exists
+                # (header is "#!<path>\n", so [2:-1] is the bare path).
+                if os.path.exists(new_header[2:-1]) or sys.platform!='win32':
+                    hdr = new_header
+                else:
+                    hdr = header
+                yield (name+ext, hdr+script_text, 't', [name+x for x in old])
+                yield (
+                    name+'.exe', resource_string('setuptools', launcher),
+                    'b' # write in binary mode
+                )
+            else:
+                # On other platforms, we assume the right thing to do is to
+                # just write the stub with no extension.
+                yield (name, header+script_text)
+
+def rmtree(path, ignore_errors=False, onerror=auto_chmod):
+    """Recursively delete a directory tree.
+
+    This code is taken from the Python 2.4 version of 'shutil', because
+    the 2.3 version doesn't really work right.
+    """
+    # `onerror(func, path, exc_info)` is invoked for each failure; the
+    # default auto_chmod retries Windows read-only removals.
+    if ignore_errors:
+        def onerror(*args):
+            pass
+    elif onerror is None:
+        def onerror(*args):
+            raise
+    names = []
+    try:
+        names = os.listdir(path)
+    except os.error, err:
+        onerror(os.listdir, path, sys.exc_info())
+    for name in names:
+        fullname = os.path.join(path, name)
+        try:
+            mode = os.lstat(fullname).st_mode
+        except os.error:
+            mode = 0
+        if stat.S_ISDIR(mode):
+            rmtree(fullname, ignore_errors, onerror)
+        else:
+            try:
+                os.remove(fullname)
+            except os.error, err:
+                onerror(os.remove, fullname, sys.exc_info())
+    try:
+        os.rmdir(path)
+    except os.error:
+        onerror(os.rmdir, path, sys.exc_info())
+
+def bootstrap():
+    # This function is called when setuptools*.egg is run using /bin/sh
+    # Re-targets argv so main() installs the egg the code is running from.
+    import setuptools; argv0 = os.path.dirname(setuptools.__path__[0])
+    sys.argv[0] = argv0; sys.argv.append(argv0); main()
+
+def main(argv=None, **kw):
+    # Command-line entry point: runs setup() with the easy_install command,
+    # temporarily swapping distutils' usage text for easy_install's own.
+    from setuptools import setup
+    from setuptools.dist import Distribution
+    import distutils.core
+
+    USAGE = """\
+usage: %(script)s [options] requirement_or_url ...
+   or: %(script)s --help
+"""
+
+    def gen_usage (script_name):
+        script = os.path.basename(script_name)
+        return USAGE % vars()
+
+    def with_ei_usage(f):
+        # Run `f` with distutils.core.gen_usage patched, restoring it after.
+        old_gen_usage = distutils.core.gen_usage
+        try:
+            distutils.core.gen_usage = gen_usage
+            return f()
+        finally:
+            distutils.core.gen_usage = old_gen_usage
+
+    class DistributionWithoutHelpCommands(Distribution):
+        common_usage = ""
+
+        def _show_help(self,*args,**kw):
+            with_ei_usage(lambda: Distribution._show_help(self,*args,**kw))
+
+        def find_config_files(self):
+            # Ignore any setup.cfg in the current directory.
+            files = Distribution.find_config_files(self)
+            if 'setup.cfg' in files:
+                files.remove('setup.cfg')
+            return files
+
+    if argv is None:
+        argv = sys.argv[1:]
+
+    with_ei_usage(lambda:
+        setup(
+            script_args = ['-q','easy_install', '-v']+argv,
+            script_name = sys.argv[0] or 'easy_install',
+            distclass=DistributionWithoutHelpCommands, **kw
+        )
+    )
+
+
+
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/egg_info.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/egg_info.py
new file mode 100755
index 00000000..46cdf4e0
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/egg_info.py
@@ -0,0 +1,457 @@
+"""setuptools.command.egg_info
+
+Create a distribution's .egg-info directory and contents"""
+
+# This module should be kept compatible with Python 2.3
+import os, re, sys
+from setuptools import Command
+from distutils.errors import *
+from distutils import log
+from setuptools.command.sdist import sdist
+from distutils.util import convert_path
+from distutils.filelist import FileList
+from pkg_resources import parse_requirements, safe_name, parse_version, \
+ safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename
+from sdist import walk_revctrl
+
+class egg_info(Command):
+    description = "create a distribution's .egg-info directory"
+
+    user_options = [
+        ('egg-base=', 'e', "directory containing .egg-info directories"
+                           " (default: top of the source tree)"),
+        ('tag-svn-revision', 'r',
+            "Add subversion revision ID to version number"),
+        ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
+        ('tag-build=', 'b', "Specify explicit tag to add to version number"),
+        ('no-svn-revision', 'R',
+            "Don't add subversion revision ID [default]"),
+        ('no-date', 'D', "Don't include date stamp [default]"),
+    ]
+
+    boolean_options = ['tag-date', 'tag-svn-revision']
+    negative_opt = {'no-svn-revision': 'tag-svn-revision',
+                    'no-date': 'tag-date'}
+
+    def initialize_options(self):
+        self.egg_name = None
+        self.egg_version = None
+        self.egg_base = None
+        self.egg_info = None
+        self.tag_build = None
+        self.tag_svn_revision = 0
+        self.tag_date = 0
+        self.broken_egg_info = False
+        self.vtags = None
+
+    def save_version_info(self, filename):
+        # Persist the currently-computed tags into a config file so later
+        # builds reproduce the same version string.
+        from setopt import edit_config
+        edit_config(
+            filename,
+            {'egg_info':
+                {'tag_svn_revision':0, 'tag_date': 0, 'tag_build': self.tags()}
+            }
+        )
+
+    def finalize_options (self):
+        # Compute egg name/version, validate them as a requirement spec, and
+        # locate the .egg-info directory under egg_base.
+        self.egg_name = safe_name(self.distribution.get_name())
+        self.vtags = self.tags()
+        self.egg_version = self.tagged_version()
+
+        try:
+            list(
+                parse_requirements('%s==%s' % (self.egg_name,self.egg_version))
+            )
+        except ValueError:
+            raise DistutilsOptionError(
+                "Invalid distribution name or version syntax: %s-%s" %
+                (self.egg_name,self.egg_version)
+            )
+
+        if self.egg_base is None:
+            dirs = self.distribution.package_dir
+            self.egg_base = (dirs or {}).get('',os.curdir)
+
+        self.ensure_dirname('egg_base')
+        self.egg_info = to_filename(self.egg_name)+'.egg-info'
+        if self.egg_base != os.curdir:
+            self.egg_info = os.path.join(self.egg_base, self.egg_info)
+        if '-' in self.egg_name: self.check_broken_egg_info()
+
+        # Set package version for the benefit of dumber commands
+        # (e.g. sdist, bdist_wininst, etc.)
+        #
+        self.distribution.metadata.version = self.egg_version
+
+        # If we bootstrapped around the lack of a PKG-INFO, as might be the
+        # case in a fresh checkout, make sure that any special tags get added
+        # to the version info
+        #
+        pd = self.distribution._patched_dist
+        if pd is not None and pd.key==self.egg_name.lower():
+            pd._version = self.egg_version
+            pd._parsed_version = parse_version(self.egg_version)
+            self.distribution._patched_dist = None
+
+    def write_or_delete_file(self, what, filename, data, force=False):
+        """Write `data` to `filename` or delete if empty
+
+        If `data` is non-empty, this routine is the same as ``write_file()``.
+        If `data` is empty but not ``None``, this is the same as calling
+        ``delete_file(filename)`. If `data` is ``None``, then this is a no-op
+        unless `filename` exists, in which case a warning is issued about the
+        orphaned file (if `force` is false), or deleted (if `force` is true).
+        """
+        if data:
+            self.write_file(what, filename, data)
+        elif os.path.exists(filename):
+            if data is None and not force:
+                log.warn(
+                    "%s not set in setup(), but %s exists", what, filename
+                )
+                return
+            else:
+                self.delete_file(filename)
+
+    def write_file(self, what, filename, data):
+        """Write `data` to `filename` (if not a dry run) after announcing it
+
+        `what` is used in a log message to identify what is being written
+        to the file.
+        """
+        log.info("writing %s to %s", what, filename)
+        if sys.version_info >= (3,):
+            data = data.encode("utf-8")
+        if not self.dry_run:
+            f = open(filename, 'wb')
+            f.write(data)
+            f.close()
+
+    def delete_file(self, filename):
+        """Delete `filename` (if not a dry run) after announcing it"""
+        log.info("deleting %s", filename)
+        if not self.dry_run:
+            os.unlink(filename)
+
+    def tagged_version(self):
+        # Base version plus the tag suffix computed in finalize_options().
+        return safe_version(self.distribution.get_version() + self.vtags)
+
+    def run(self):
+        # Create .egg-info and run every registered egg_info.writers entry
+        # point to populate its metadata files.
+        self.mkpath(self.egg_info)
+        installer = self.distribution.fetch_build_egg
+        for ep in iter_entry_points('egg_info.writers'):
+            writer = ep.load(installer=installer)
+            writer(self, ep.name, os.path.join(self.egg_info,ep.name))
+
+        # Get rid of native_libs.txt if it was put there by older bdist_egg
+        nl = os.path.join(self.egg_info, "native_libs.txt")
+        if os.path.exists(nl):
+            self.delete_file(nl)
+
+        self.find_sources()
+
+    def tags(self):
+        # Build the version suffix from --tag-build, --tag-svn-revision
+        # and --tag-date, in that order.
+        version = ''
+        if self.tag_build:
+            version+=self.tag_build
+        if self.tag_svn_revision and (
+            os.path.exists('.svn') or os.path.exists('PKG-INFO')
+        ): version += '-r%s' % self.get_svn_revision()
+        if self.tag_date:
+            import time; version += time.strftime("-%Y%m%d")
+        return version
+
+    def get_svn_revision(self):
+        # Walk the working copy and return the highest committed revision
+        # found in any .svn/entries file (as a string); falls back to
+        # get_pkg_info_revision() when no revision is found.
+        revision = 0
+        urlre = re.compile('url="([^"]+)"')
+        revre = re.compile('committed-rev="(\d+)"')
+
+        for base,dirs,files in os.walk(os.curdir):
+            if '.svn' not in dirs:
+                dirs[:] = []
+                continue # no sense walking uncontrolled subdirs
+            dirs.remove('.svn')
+            f = open(os.path.join(base,'.svn','entries'))
+            data = f.read()
+            f.close()
+
+            # svn >= 1.4 uses a plain-text entries format; older versions XML.
+            if data.startswith('10') or data.startswith('9') or data.startswith('8'):
+                data = map(str.splitlines,data.split('\n\x0c\n'))
+                del data[0][0] # get rid of the '8' or '9' or '10'
+                dirurl = data[0][3]
+                localrev = max([int(d[9]) for d in data if len(d)>9 and d[9]]+[0])
+            elif data.startswith('<?xml'):
+                dirurl = urlre.search(data).group(1) # get repository URL
+                localrev = max([int(m.group(1)) for m in revre.finditer(data)]+[0])
+            else:
+                log.warn("unrecognized .svn/entries format; skipping %s", base)
+                dirs[:] = []
+                continue
+            if base==os.curdir:
+                base_url = dirurl+'/' # save the root url
+            elif not dirurl.startswith(base_url):
+                dirs[:] = []
+                continue # not part of the same svn tree, skip it
+            revision = max(revision, localrev)
+
+        return str(revision or get_pkg_info_revision())
+
+    def find_sources(self):
+        """Generate SOURCES.txt manifest file"""
+        manifest_filename = os.path.join(self.egg_info,"SOURCES.txt")
+        mm = manifest_maker(self.distribution)
+        mm.manifest = manifest_filename
+        mm.run()
+        self.filelist = mm.filelist
+
+    def check_broken_egg_info(self):
+        # Warn when a pre-existing .egg-info dir uses the unescaped name
+        # (with '-'), which breaks "setup.py develop"; keep using it anyway.
+        bei = self.egg_name+'.egg-info'
+        if self.egg_base != os.curdir:
+            bei = os.path.join(self.egg_base, bei)
+        if os.path.exists(bei):
+            log.warn(
+                "-"*78+'\n'
+                "Note: Your current .egg-info directory has a '-' in its name;"
+                '\nthis will not work correctly with "setup.py develop".\n\n'
+                'Please rename %s to %s to correct this problem.\n'+'-'*78,
+                bei, self.egg_info
+            )
+            self.broken_egg_info = self.egg_info
+            self.egg_info = bei # make it work for now
+
+class FileList(FileList):
+ """File list that accepts only existing, platform-independent paths"""
+
+ def append(self, item):
+ if item.endswith('\r'): # Fix older sdists built on Windows
+ item = item[:-1]
+ path = convert_path(item)
+ if os.path.exists(path):
+ self.files.append(path)
+
+
+
+
+
+
+
+
+
+class manifest_maker(sdist):
+
+ template = "MANIFEST.in"
+
+ def initialize_options (self):
+ self.use_defaults = 1
+ self.prune = 1
+ self.manifest_only = 1
+ self.force_manifest = 1
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ self.filelist = FileList()
+ if not os.path.exists(self.manifest):
+ self.write_manifest() # it must exist so it'll get in the list
+ self.filelist.findall()
+ self.add_defaults()
+ if os.path.exists(self.template):
+ self.read_template()
+ self.prune_file_list()
+ self.filelist.sort()
+ self.filelist.remove_duplicates()
+ self.write_manifest()
+
+ def write_manifest (self):
+ """Write the file list in 'self.filelist' (presumably as filled in
+ by 'add_defaults()' and 'read_template()') to the manifest file
+ named by 'self.manifest'.
+ """
+ files = self.filelist.files
+ if os.sep!='/':
+ files = [f.replace(os.sep,'/') for f in files]
+ self.execute(write_file, (self.manifest, files),
+ "writing manifest file '%s'" % self.manifest)
+
+ def warn(self, msg): # suppress missing-file warnings from sdist
+ if not msg.startswith("standard file not found:"):
+ sdist.warn(self, msg)
+
+ def add_defaults(self):
+ sdist.add_defaults(self)
+ self.filelist.append(self.template)
+ self.filelist.append(self.manifest)
+ rcfiles = list(walk_revctrl())
+ if rcfiles:
+ self.filelist.extend(rcfiles)
+ elif os.path.exists(self.manifest):
+ self.read_manifest()
+ ei_cmd = self.get_finalized_command('egg_info')
+ self.filelist.include_pattern("*", prefix=ei_cmd.egg_info)
+
+ def prune_file_list (self):
+ build = self.get_finalized_command('build')
+ base_dir = self.distribution.get_fullname()
+ self.filelist.exclude_pattern(None, prefix=build.build_base)
+ self.filelist.exclude_pattern(None, prefix=base_dir)
+ sep = re.escape(os.sep)
+ self.filelist.exclude_pattern(sep+r'(RCS|CVS|\.svn)'+sep, is_regex=1)
+
+
+def write_file (filename, contents):
+ """Create a file with the specified name and write 'contents' (a
+ sequence of strings without line terminators) to it.
+ """
+ contents = "\n".join(contents)
+ if sys.version_info >= (3,):
+ contents = contents.encode("utf-8")
+ f = open(filename, "wb") # always write POSIX-style manifest
+ f.write(contents)
+ f.close()
+
+
+
+
+
+
+
+
+
+
+
+
+
+def write_pkg_info(cmd, basename, filename):
+ log.info("writing %s", filename)
+ if not cmd.dry_run:
+ metadata = cmd.distribution.metadata
+ metadata.version, oldver = cmd.egg_version, metadata.version
+ metadata.name, oldname = cmd.egg_name, metadata.name
+ try:
+ # write unescaped data to PKG-INFO, so older pkg_resources
+ # can still parse it
+ metadata.write_pkg_info(cmd.egg_info)
+ finally:
+ metadata.name, metadata.version = oldname, oldver
+
+ safe = getattr(cmd.distribution,'zip_safe',None)
+ import bdist_egg; bdist_egg.write_safety_flag(cmd.egg_info, safe)
+
+def warn_depends_obsolete(cmd, basename, filename):
+ if os.path.exists(filename):
+ log.warn(
+ "WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
+ "Use the install_requires/extras_require setup() args instead."
+ )
+
+
+def write_requirements(cmd, basename, filename):
+ dist = cmd.distribution
+ data = ['\n'.join(yield_lines(dist.install_requires or ()))]
+ for extra,reqs in (dist.extras_require or {}).items():
+ data.append('\n\n[%s]\n%s' % (extra, '\n'.join(yield_lines(reqs))))
+ cmd.write_or_delete_file("requirements", filename, ''.join(data))
+
+def write_toplevel_names(cmd, basename, filename):
+ pkgs = dict.fromkeys(
+ [k.split('.',1)[0]
+ for k in cmd.distribution.iter_distribution_names()
+ ]
+ )
+ cmd.write_file("top-level names", filename, '\n'.join(pkgs)+'\n')
+
+
+
+def overwrite_arg(cmd, basename, filename):
+ write_arg(cmd, basename, filename, True)
+
+def write_arg(cmd, basename, filename, force=False):
+ argname = os.path.splitext(basename)[0]
+ value = getattr(cmd.distribution, argname, None)
+ if value is not None:
+ value = '\n'.join(value)+'\n'
+ cmd.write_or_delete_file(argname, filename, value, force)
+
+def write_entries(cmd, basename, filename):
+ ep = cmd.distribution.entry_points
+
+ if isinstance(ep,basestring) or ep is None:
+ data = ep
+ elif ep is not None:
+ data = []
+ for section, contents in ep.items():
+ if not isinstance(contents,basestring):
+ contents = EntryPoint.parse_group(section, contents)
+ contents = '\n'.join(map(str,contents.values()))
+ data.append('[%s]\n%s\n\n' % (section,contents))
+ data = ''.join(data)
+
+ cmd.write_or_delete_file('entry points', filename, data, True)
+
+def get_pkg_info_revision():
+ # See if we can get a -r### off of PKG-INFO, in case this is an sdist of
+ # a subversion revision
+ #
+ if os.path.exists('PKG-INFO'):
+ f = open('PKG-INFO','rU')
+ for line in f:
+ match = re.match(r"Version:.*-r(\d+)\s*$", line)
+ if match:
+ return int(match.group(1))
+ f.close()
+ return 0
+
+
+
+#
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install.py
new file mode 100755
index 00000000..247c4f25
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install.py
@@ -0,0 +1,124 @@
+import setuptools, sys, glob
+from distutils.command.install import install as _install
+from distutils.errors import DistutilsArgError
+
+class install(_install):
+ """Use easy_install to install the package, w/dependencies"""
+
+ user_options = _install.user_options + [
+ ('old-and-unmanageable', None, "Try not to use this!"),
+ ('single-version-externally-managed', None,
+ "used by system package builders to create 'flat' eggs"),
+ ]
+ boolean_options = _install.boolean_options + [
+ 'old-and-unmanageable', 'single-version-externally-managed',
+ ]
+ new_commands = [
+ ('install_egg_info', lambda self: True),
+ ('install_scripts', lambda self: True),
+ ]
+ _nc = dict(new_commands)
+
+ def initialize_options(self):
+ _install.initialize_options(self)
+ self.old_and_unmanageable = None
+ self.single_version_externally_managed = None
+ self.no_compile = None # make DISTUTILS_DEBUG work right!
+
+ def finalize_options(self):
+ _install.finalize_options(self)
+ if self.root:
+ self.single_version_externally_managed = True
+ elif self.single_version_externally_managed:
+ if not self.root and not self.record:
+ raise DistutilsArgError(
+ "You must specify --record or --root when building system"
+ " packages"
+ )
+
+ def handle_extra_path(self):
+ if self.root or self.single_version_externally_managed:
+ # explicit backward-compatibility mode, allow extra_path to work
+ return _install.handle_extra_path(self)
+
+ # Ignore extra_path when installing an egg (or being run by another
+ # command without --root or --single-version-externally-managed
+ self.path_file = None
+ self.extra_dirs = ''
+
+
+ def run(self):
+ # Explicit request for old-style install? Just do it
+ if self.old_and_unmanageable or self.single_version_externally_managed:
+ return _install.run(self)
+
+ # Attempt to detect whether we were called from setup() or by another
+ # command. If we were called by setup(), our caller will be the
+ # 'run_command' method in 'distutils.dist', and *its* caller will be
+ # the 'run_commands' method. If we were called any other way, our
+ # immediate caller *might* be 'run_command', but it won't have been
+ # called by 'run_commands'. This is slightly kludgy, but seems to
+ # work.
+ #
+ caller = sys._getframe(2)
+ caller_module = caller.f_globals.get('__name__','')
+ caller_name = caller.f_code.co_name
+
+ if caller_module != 'distutils.dist' or caller_name!='run_commands':
+ # We weren't called from the command line or setup(), so we
+ # should run in backward-compatibility mode to support bdist_*
+ # commands.
+ _install.run(self)
+ else:
+ self.do_egg_install()
+
+
+
+
+
+
+ def do_egg_install(self):
+
+ easy_install = self.distribution.get_command_class('easy_install')
+
+ cmd = easy_install(
+ self.distribution, args="x", root=self.root, record=self.record,
+ )
+ cmd.ensure_finalized() # finalize before bdist_egg munges install cmd
+ cmd.always_copy_from = '.' # make sure local-dir eggs get installed
+
+ # pick up setup-dir .egg files only: no .egg-info
+ cmd.package_index.scan(glob.glob('*.egg'))
+
+ self.run_command('bdist_egg')
+ args = [self.distribution.get_command_obj('bdist_egg').egg_output]
+
+ if setuptools.bootstrap_install_from:
+ # Bootstrap self-installation of setuptools
+ args.insert(0, setuptools.bootstrap_install_from)
+
+ cmd.args = args
+ cmd.run()
+ setuptools.bootstrap_install_from = None
+
+# XXX Python 3.1 doesn't see _nc if this is inside the class
+install.sub_commands = [
+ cmd for cmd in _install.sub_commands if cmd[0] not in install._nc
+ ] + install.new_commands
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+#
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_egg_info.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_egg_info.py
new file mode 100755
index 00000000..dd95552e
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_egg_info.py
@@ -0,0 +1,123 @@
+from setuptools import Command
+from setuptools.archive_util import unpack_archive
+from distutils import log, dir_util
+import os, shutil, pkg_resources
+
+class install_egg_info(Command):
+ """Install an .egg-info directory for the package"""
+
+ description = "Install an .egg-info directory for the package"
+
+ user_options = [
+ ('install-dir=', 'd', "directory to install to"),
+ ]
+
+ def initialize_options(self):
+ self.install_dir = None
+
+ def finalize_options(self):
+ self.set_undefined_options('install_lib',('install_dir','install_dir'))
+ ei_cmd = self.get_finalized_command("egg_info")
+ basename = pkg_resources.Distribution(
+ None, None, ei_cmd.egg_name, ei_cmd.egg_version
+ ).egg_name()+'.egg-info'
+ self.source = ei_cmd.egg_info
+ self.target = os.path.join(self.install_dir, basename)
+ self.outputs = [self.target]
+
+ def run(self):
+ self.run_command('egg_info')
+ target = self.target
+ if os.path.isdir(self.target) and not os.path.islink(self.target):
+ dir_util.remove_tree(self.target, dry_run=self.dry_run)
+ elif os.path.exists(self.target):
+ self.execute(os.unlink,(self.target,),"Removing "+self.target)
+ if not self.dry_run:
+ pkg_resources.ensure_directory(self.target)
+ self.execute(self.copytree, (),
+ "Copying %s to %s" % (self.source, self.target)
+ )
+ self.install_namespaces()
+
+ def get_outputs(self):
+ return self.outputs
+
+ def copytree(self):
+ # Copy the .egg-info tree to site-packages
+ def skimmer(src,dst):
+ # filter out source-control directories; note that 'src' is always
+ # a '/'-separated path, regardless of platform. 'dst' is a
+ # platform-specific path.
+ for skip in '.svn/','CVS/':
+ if src.startswith(skip) or '/'+skip in src:
+ return None
+ self.outputs.append(dst)
+ log.debug("Copying %s to %s", src, dst)
+ return dst
+ unpack_archive(self.source, self.target, skimmer)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ def install_namespaces(self):
+ nsp = self._get_all_ns_packages()
+ if not nsp: return
+ filename,ext = os.path.splitext(self.target)
+ filename += '-nspkg.pth'; self.outputs.append(filename)
+ log.info("Installing %s",filename)
+ if not self.dry_run:
+ f = open(filename,'wt')
+ for pkg in nsp:
+ pth = tuple(pkg.split('.'))
+ trailer = '\n'
+ if '.' in pkg:
+ trailer = (
+ "; m and setattr(sys.modules[%r], %r, m)\n"
+ % ('.'.join(pth[:-1]), pth[-1])
+ )
+ f.write(
+ "import sys,types,os; "
+ "p = os.path.join(sys._getframe(1).f_locals['sitedir'], "
+ "*%(pth)r); "
+ "ie = os.path.exists(os.path.join(p,'__init__.py')); "
+ "m = not ie and "
+ "sys.modules.setdefault(%(pkg)r,types.ModuleType(%(pkg)r)); "
+ "mp = (m or []) and m.__dict__.setdefault('__path__',[]); "
+ "(p not in mp) and mp.append(p)%(trailer)s"
+ % locals()
+ )
+ f.close()
+
+ def _get_all_ns_packages(self):
+ nsp = {}
+ for pkg in self.distribution.namespace_packages or []:
+ pkg = pkg.split('.')
+ while pkg:
+ nsp['.'.join(pkg)] = 1
+ pkg.pop()
+ nsp=list(nsp)
+ nsp.sort() # set up shorter names first
+ return nsp
+
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_lib.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_lib.py
new file mode 100755
index 00000000..82afa142
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_lib.py
@@ -0,0 +1,82 @@
+from distutils.command.install_lib import install_lib as _install_lib
+import os
+
+class install_lib(_install_lib):
+ """Don't add compiled flags to filenames of non-Python files"""
+
+ def _bytecode_filenames (self, py_filenames):
+ bytecode_files = []
+ for py_file in py_filenames:
+ if not py_file.endswith('.py'):
+ continue
+ if self.compile:
+ bytecode_files.append(py_file + "c")
+ if self.optimize > 0:
+ bytecode_files.append(py_file + "o")
+
+ return bytecode_files
+
+ def run(self):
+ self.build()
+ outfiles = self.install()
+ if outfiles is not None:
+ # always compile, in case we have any extension stubs to deal with
+ self.byte_compile(outfiles)
+
+ def get_exclusions(self):
+ exclude = {}
+ nsp = self.distribution.namespace_packages
+
+ if (nsp and self.get_finalized_command('install')
+ .single_version_externally_managed
+ ):
+ for pkg in nsp:
+ parts = pkg.split('.')
+ while parts:
+ pkgdir = os.path.join(self.install_dir, *parts)
+ for f in '__init__.py', '__init__.pyc', '__init__.pyo':
+ exclude[os.path.join(pkgdir,f)] = 1
+ parts.pop()
+ return exclude
+
+ def copy_tree(
+ self, infile, outfile,
+ preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
+ ):
+ assert preserve_mode and preserve_times and not preserve_symlinks
+ exclude = self.get_exclusions()
+
+ if not exclude:
+ return _install_lib.copy_tree(self, infile, outfile)
+
+ # Exclude namespace package __init__.py* files from the output
+
+ from setuptools.archive_util import unpack_directory
+ from distutils import log
+
+ outfiles = []
+
+ def pf(src, dst):
+ if dst in exclude:
+ log.warn("Skipping installation of %s (namespace package)",dst)
+ return False
+
+ log.info("copying %s -> %s", src, os.path.dirname(dst))
+ outfiles.append(dst)
+ return dst
+
+ unpack_directory(infile, outfile, pf)
+ return outfiles
+
+ def get_outputs(self):
+ outputs = _install_lib.get_outputs(self)
+ exclude = self.get_exclusions()
+ if exclude:
+ return [f for f in outputs if f not in exclude]
+ return outputs
+
+
+
+
+
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_scripts.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_scripts.py
new file mode 100755
index 00000000..6ce1b993
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/install_scripts.py
@@ -0,0 +1,53 @@
+from distutils.command.install_scripts import install_scripts \
+ as _install_scripts
+from pkg_resources import Distribution, PathMetadata, ensure_directory
+import os
+from distutils import log
+
+class install_scripts(_install_scripts):
+ """Do normal script install, plus any egg_info wrapper scripts"""
+
+ def initialize_options(self):
+ _install_scripts.initialize_options(self)
+ self.no_ep = False
+
+ def run(self):
+ from setuptools.command.easy_install import get_script_args
+ from setuptools.command.easy_install import sys_executable
+
+ self.run_command("egg_info")
+ if self.distribution.scripts:
+ _install_scripts.run(self) # run first to set up self.outfiles
+ else:
+ self.outfiles = []
+ if self.no_ep:
+ # don't install entry point scripts into .egg file!
+ return
+
+ ei_cmd = self.get_finalized_command("egg_info")
+ dist = Distribution(
+ ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
+ ei_cmd.egg_name, ei_cmd.egg_version,
+ )
+ bs_cmd = self.get_finalized_command('build_scripts')
+ executable = getattr(bs_cmd,'executable',sys_executable)
+ is_wininst = getattr(
+ self.get_finalized_command("bdist_wininst"), '_is_running', False
+ )
+ for args in get_script_args(dist, executable, is_wininst):
+ self.write_script(*args)
+
+ def write_script(self, script_name, contents, mode="t", *ignored):
+ """Write an executable file to the scripts directory"""
+ from setuptools.command.easy_install import chmod
+ log.info("Installing %s script to %s", script_name, self.install_dir)
+ target = os.path.join(self.install_dir, script_name)
+ self.outfiles.append(target)
+
+ if not self.dry_run:
+ ensure_directory(target)
+ f = open(target,"w"+mode)
+ f.write(contents)
+ f.close()
+ chmod(target,0755)
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/register.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/register.py
new file mode 100755
index 00000000..3b2e0859
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/register.py
@@ -0,0 +1,10 @@
+from distutils.command.register import register as _register
+
+class register(_register):
+ __doc__ = _register.__doc__
+
+ def run(self):
+ # Make sure that we are using valid current name/version info
+ self.run_command('egg_info')
+ _register.run(self)
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/rotate.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/rotate.py
new file mode 100755
index 00000000..11b6eae8
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/rotate.py
@@ -0,0 +1,82 @@
+import distutils, os
+from setuptools import Command
+from distutils.util import convert_path
+from distutils import log
+from distutils.errors import *
+
+class rotate(Command):
+ """Delete older distributions"""
+
+ description = "delete older distributions, keeping N newest files"
+ user_options = [
+ ('match=', 'm', "patterns to match (required)"),
+ ('dist-dir=', 'd', "directory where the distributions are"),
+ ('keep=', 'k', "number of matching distributions to keep"),
+ ]
+
+ boolean_options = []
+
+ def initialize_options(self):
+ self.match = None
+ self.dist_dir = None
+ self.keep = None
+
+ def finalize_options(self):
+ if self.match is None:
+ raise DistutilsOptionError(
+ "Must specify one or more (comma-separated) match patterns "
+ "(e.g. '.zip' or '.egg')"
+ )
+ if self.keep is None:
+ raise DistutilsOptionError("Must specify number of files to keep")
+ try:
+ self.keep = int(self.keep)
+ except ValueError:
+ raise DistutilsOptionError("--keep must be an integer")
+ if isinstance(self.match, basestring):
+ self.match = [
+ convert_path(p.strip()) for p in self.match.split(',')
+ ]
+ self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
+
+ def run(self):
+ self.run_command("egg_info")
+ from glob import glob
+ for pattern in self.match:
+ pattern = self.distribution.get_name()+'*'+pattern
+ files = glob(os.path.join(self.dist_dir,pattern))
+ files = [(os.path.getmtime(f),f) for f in files]
+ files.sort()
+ files.reverse()
+
+ log.info("%d file(s) matching %s", len(files), pattern)
+ files = files[self.keep:]
+ for (t,f) in files:
+ log.info("Deleting %s", f)
+ if not self.dry_run:
+ os.unlink(f)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/saveopts.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/saveopts.py
new file mode 100755
index 00000000..1180a440
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/saveopts.py
@@ -0,0 +1,25 @@
+import distutils, os
+from setuptools import Command
+from setuptools.command.setopt import edit_config, option_base
+
+class saveopts(option_base):
+ """Save command-line options to a file"""
+
+ description = "save supplied options to setup.cfg or other config file"
+
+ def run(self):
+ dist = self.distribution
+ commands = dist.command_options.keys()
+ settings = {}
+
+ for cmd in commands:
+
+ if cmd=='saveopts':
+ continue # don't save our own options!
+
+ for opt,(src,val) in dist.get_option_dict(cmd).items():
+ if src=="command line":
+ settings.setdefault(cmd,{})[opt] = val
+
+ edit_config(self.filename, settings, self.dry_run)
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/sdist.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/sdist.py
new file mode 100755
index 00000000..3442fe4b
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/sdist.py
@@ -0,0 +1,252 @@
+from distutils.command.sdist import sdist as _sdist
+from distutils.util import convert_path
+from distutils import log
+import os, re, sys, pkg_resources
+from glob import glob
+
+entities = [
+ ("&lt;","<"), ("&gt;", ">"), ("&quot;", '"'), ("&apos;", "'"),
+ ("&amp;", "&")
+]
+
+def unescape(data):
+ for old,new in entities:
+ data = data.replace(old,new)
+ return data
+
+def re_finder(pattern, postproc=None):
+ def find(dirname, filename):
+ f = open(filename,'rU')
+ data = f.read()
+ f.close()
+ for match in pattern.finditer(data):
+ path = match.group(1)
+ if postproc:
+ path = postproc(path)
+ yield joinpath(dirname,path)
+ return find
+
+def joinpath(prefix,suffix):
+ if not prefix:
+ return suffix
+ return os.path.join(prefix,suffix)
+
+
+
+
+
+
+
+
+
+
+def walk_revctrl(dirname=''):
+ """Find all files under revision control"""
+ for ep in pkg_resources.iter_entry_points('setuptools.file_finders'):
+ for item in ep.load()(dirname):
+ yield item
+
+def _default_revctrl(dirname=''):
+ for path, finder in finders:
+ path = joinpath(dirname,path)
+ if os.path.isfile(path):
+ for path in finder(dirname,path):
+ if os.path.isfile(path):
+ yield path
+ elif os.path.isdir(path):
+ for item in _default_revctrl(path):
+ yield item
+
+def externals_finder(dirname, filename):
+ """Find any 'svn:externals' directories"""
+ found = False
+ f = open(filename,'rt')
+ for line in iter(f.readline, ''): # can't use direct iter!
+ parts = line.split()
+ if len(parts)==2:
+ kind,length = parts
+ data = f.read(int(length))
+ if kind=='K' and data=='svn:externals':
+ found = True
+ elif kind=='V' and found:
+ f.close()
+ break
+ else:
+ f.close()
+ return
+
+ for line in data.splitlines():
+ parts = line.split()
+ if parts:
+ yield joinpath(dirname, parts[0])
+
+
+entries_pattern = re.compile(r'name="([^"]+)"(?![^>]+deleted="true")', re.I)
+
+def entries_finder(dirname, filename):
+ f = open(filename,'rU')
+ data = f.read()
+ f.close()
+ if data.startswith('10') or data.startswith('9') or data.startswith('8'):
+ for record in map(str.splitlines, data.split('\n\x0c\n')[1:]):
+ # subversion 1.6/1.5/1.4
+ if not record or len(record)>=6 and record[5]=="delete":
+ continue # skip deleted
+ yield joinpath(dirname, record[0])
+ elif data.startswith('<?xml'):
+ for match in entries_pattern.finditer(data):
+ yield joinpath(dirname,unescape(match.group(1)))
+ else:
+ log.warn("unrecognized .svn/entries format in %s", dirname)
+
+
+finders = [
+ (convert_path('CVS/Entries'),
+ re_finder(re.compile(r"^\w?/([^/]+)/", re.M))),
+ (convert_path('.svn/entries'), entries_finder),
+ (convert_path('.svn/dir-props'), externals_finder),
+ (convert_path('.svn/dir-prop-base'), externals_finder), # svn 1.4
+]
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+class sdist(_sdist):
+ """Smart sdist that finds anything supported by revision control"""
+
+ user_options = [
+ ('formats=', None,
+ "formats for source distribution (comma-separated list)"),
+ ('keep-temp', 'k',
+ "keep the distribution tree around after creating " +
+ "archive file(s)"),
+ ('dist-dir=', 'd',
+ "directory to put the source distribution archive(s) in "
+ "[default: dist]"),
+ ]
+
+ negative_opt = {}
+
+ def run(self):
+ self.run_command('egg_info')
+ ei_cmd = self.get_finalized_command('egg_info')
+ self.filelist = ei_cmd.filelist
+ self.filelist.append(os.path.join(ei_cmd.egg_info,'SOURCES.txt'))
+ self.check_readme()
+ self.check_metadata()
+ self.make_distribution()
+
+ dist_files = getattr(self.distribution,'dist_files',[])
+ for file in self.archive_files:
+ data = ('sdist', '', file)
+ if data not in dist_files:
+ dist_files.append(data)
+
+ def add_defaults(self):
+ standards = [('README', 'README.txt'),
+ self.distribution.script_name]
+ for fn in standards:
+ if isinstance(fn, tuple):
+ alts = fn
+ got_it = 0
+ for fn in alts:
+ if os.path.exists(fn):
+ got_it = 1
+ self.filelist.append(fn)
+ break
+
+ if not got_it:
+ self.warn("standard file not found: should have one of " +
+ ', '.join(alts))
+ else:
+ if os.path.exists(fn):
+ self.filelist.append(fn)
+ else:
+ self.warn("standard file '%s' not found" % fn)
+
+ optional = ['test/test*.py', 'setup.cfg']
+ for pattern in optional:
+ files = filter(os.path.isfile, glob(pattern))
+ if files:
+ self.filelist.extend(files)
+
+ # getting python files
+ if self.distribution.has_pure_modules():
+ build_py = self.get_finalized_command('build_py')
+ self.filelist.extend(build_py.get_source_files())
+
+ if self.distribution.has_ext_modules():
+ build_ext = self.get_finalized_command('build_ext')
+ self.filelist.extend(build_ext.get_source_files())
+
+ if self.distribution.has_c_libraries():
+ build_clib = self.get_finalized_command('build_clib')
+ self.filelist.extend(build_clib.get_source_files())
+
+ if self.distribution.has_scripts():
+ build_scripts = self.get_finalized_command('build_scripts')
+ self.filelist.extend(build_scripts.get_source_files())
+
+ def read_template(self):
+ try:
+ _sdist.read_template(self)
+ except:
+ # grody hack to close the template file (MANIFEST.in)
+ # this prevents easy_install's attempt at deleting the file from
+ # dying and thus masking the real error
+ sys.exc_info()[2].tb_next.tb_frame.f_locals['template'].close()
+ raise
+
+ def check_readme(self):
+ alts = ("README", "README.txt")
+ for f in alts:
+ if os.path.exists(f):
+ return
+ else:
+ self.warn(
+ "standard file not found: should have one of " +', '.join(alts)
+ )
+
+
+ def make_release_tree(self, base_dir, files):
+ _sdist.make_release_tree(self, base_dir, files)
+
+ # Save any egg_info command line options used to create this sdist
+ dest = os.path.join(base_dir, 'setup.cfg')
+ if hasattr(os,'link') and os.path.exists(dest):
+ # unlink and re-copy, since it might be hard-linked, and
+ # we don't want to change the source version
+ os.unlink(dest)
+ self.copy_file('setup.cfg', dest)
+
+ self.get_finalized_command('egg_info').save_version_info(dest)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+#
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/setopt.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/setopt.py
new file mode 100755
index 00000000..dbf3a94e
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/setopt.py
@@ -0,0 +1,164 @@
+import distutils, os
+from setuptools import Command
+from distutils.util import convert_path
+from distutils import log
+from distutils.errors import *
+
+__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
+
+
+def config_file(kind="local"):
+ """Get the filename of the distutils, local, global, or per-user config
+
+ `kind` must be one of "local", "global", or "user"
+ """
+ if kind=='local':
+ return 'setup.cfg'
+ if kind=='global':
+ return os.path.join(
+ os.path.dirname(distutils.__file__),'distutils.cfg'
+ )
+ if kind=='user':
+ dot = os.name=='posix' and '.' or ''
+ return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
+ raise ValueError(
+ "config_file() type must be 'local', 'global', or 'user'", kind
+ )
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+def edit_config(filename, settings, dry_run=False):
+ """Edit a configuration file to include `settings`
+
+ `settings` is a dictionary of dictionaries or ``None`` values, keyed by
+ command/section name. A ``None`` value means to delete the entire section,
+ while a dictionary lists settings to be changed or deleted in that section.
+ A setting of ``None`` means to delete that setting.
+ """
+ from ConfigParser import RawConfigParser
+ log.debug("Reading configuration from %s", filename)
+ opts = RawConfigParser()
+ opts.read([filename])
+ for section, options in settings.items():
+ if options is None:
+ log.info("Deleting section [%s] from %s", section, filename)
+ opts.remove_section(section)
+ else:
+ if not opts.has_section(section):
+ log.debug("Adding new section [%s] to %s", section, filename)
+ opts.add_section(section)
+ for option,value in options.items():
+ if value is None:
+ log.debug("Deleting %s.%s from %s",
+ section, option, filename
+ )
+ opts.remove_option(section,option)
+ if not opts.options(section):
+ log.info("Deleting empty [%s] section from %s",
+ section, filename)
+ opts.remove_section(section)
+ else:
+ log.debug(
+ "Setting %s.%s to %r in %s",
+ section, option, value, filename
+ )
+ opts.set(section,option,value)
+
+ log.info("Writing %s", filename)
+ if not dry_run:
+ f = open(filename,'w'); opts.write(f); f.close()
+
+class option_base(Command):
+ """Abstract base class for commands that mess with config files"""
+
+ user_options = [
+ ('global-config', 'g',
+ "save options to the site-wide distutils.cfg file"),
+ ('user-config', 'u',
+ "save options to the current user's pydistutils.cfg file"),
+ ('filename=', 'f',
+ "configuration file to use (default=setup.cfg)"),
+ ]
+
+ boolean_options = [
+ 'global-config', 'user-config',
+ ]
+
+ def initialize_options(self):
+ self.global_config = None
+ self.user_config = None
+ self.filename = None
+
+ def finalize_options(self):
+ filenames = []
+ if self.global_config:
+ filenames.append(config_file('global'))
+ if self.user_config:
+ filenames.append(config_file('user'))
+ if self.filename is not None:
+ filenames.append(self.filename)
+ if not filenames:
+ filenames.append(config_file('local'))
+ if len(filenames)>1:
+ raise DistutilsOptionError(
+ "Must specify only one configuration file option",
+ filenames
+ )
+ self.filename, = filenames
+
+
+
+
+class setopt(option_base):
+ """Save command-line options to a file"""
+
+ description = "set an option in setup.cfg or another config file"
+
+ user_options = [
+ ('command=', 'c', 'command to set an option for'),
+ ('option=', 'o', 'option to set'),
+ ('set-value=', 's', 'value of the option'),
+ ('remove', 'r', 'remove (unset) the value'),
+ ] + option_base.user_options
+
+ boolean_options = option_base.boolean_options + ['remove']
+
+ def initialize_options(self):
+ option_base.initialize_options(self)
+ self.command = None
+ self.option = None
+ self.set_value = None
+ self.remove = None
+
+ def finalize_options(self):
+ option_base.finalize_options(self)
+ if self.command is None or self.option is None:
+ raise DistutilsOptionError("Must specify --command *and* --option")
+ if self.set_value is None and not self.remove:
+ raise DistutilsOptionError("Must specify --set-value or --remove")
+
+ def run(self):
+ edit_config(
+ self.filename, {
+ self.command: {self.option.replace('-','_'):self.set_value}
+ },
+ self.dry_run
+ )
+
+
+
+
+
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/test.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/test.py
new file mode 100755
index 00000000..b7aef969
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/test.py
@@ -0,0 +1,180 @@
+from setuptools import Command
+from distutils.errors import DistutilsOptionError
+import sys
+from pkg_resources import *
+from unittest import TestLoader, main
+
+class ScanningLoader(TestLoader):
+
+ def loadTestsFromModule(self, module):
+ """Return a suite of all tests cases contained in the given module
+
+ If the module is a package, load tests from all the modules in it.
+ If the module has an ``additional_tests`` function, call it and add
+ the return value to the tests.
+ """
+ tests = []
+ if module.__name__!='setuptools.tests.doctest': # ugh
+ tests.append(TestLoader.loadTestsFromModule(self,module))
+
+ if hasattr(module, "additional_tests"):
+ tests.append(module.additional_tests())
+
+ if hasattr(module, '__path__'):
+ for file in resource_listdir(module.__name__, ''):
+ if file.endswith('.py') and file!='__init__.py':
+ submodule = module.__name__+'.'+file[:-3]
+ else:
+ if resource_exists(
+ module.__name__, file+'/__init__.py'
+ ):
+ submodule = module.__name__+'.'+file
+ else:
+ continue
+ tests.append(self.loadTestsFromName(submodule))
+
+ if len(tests)!=1:
+ return self.suiteClass(tests)
+ else:
+ return tests[0] # don't create a nested suite for only one return
+
+
+class test(Command):
+
+ """Command to run unit tests after in-place build"""
+
+ description = "run unit tests after in-place build"
+
+ user_options = [
+ ('test-module=','m', "Run 'test_suite' in specified module"),
+ ('test-suite=','s',
+ "Test suite to run (e.g. 'some_module.test_suite')"),
+ ]
+
+ def initialize_options(self):
+ self.test_suite = None
+ self.test_module = None
+ self.test_loader = None
+
+
+ def finalize_options(self):
+
+ if self.test_suite is None:
+ if self.test_module is None:
+ self.test_suite = self.distribution.test_suite
+ else:
+ self.test_suite = self.test_module+".test_suite"
+ elif self.test_module:
+ raise DistutilsOptionError(
+ "You may specify a module or a suite, but not both"
+ )
+
+ self.test_args = [self.test_suite]
+
+ if self.verbose:
+ self.test_args.insert(0,'--verbose')
+ if self.test_loader is None:
+ self.test_loader = getattr(self.distribution,'test_loader',None)
+ if self.test_loader is None:
+ self.test_loader = "setuptools.command.test:ScanningLoader"
+
+
+
+ def with_project_on_sys_path(self, func):
+ if getattr(self.distribution, 'use_2to3', False):
+ # If we run 2to3 we can not do this inplace:
+
+ # Ensure metadata is up-to-date
+ self.reinitialize_command('build_py', inplace=0)
+ self.run_command('build_py')
+ bpy_cmd = self.get_finalized_command("build_py")
+ build_path = normalize_path(bpy_cmd.build_lib)
+
+ # Build extensions
+ self.reinitialize_command('egg_info', egg_base=build_path)
+ self.run_command('egg_info')
+
+ self.reinitialize_command('build_ext', inplace=0)
+ self.run_command('build_ext')
+ else:
+ # Without 2to3 inplace works fine:
+ self.run_command('egg_info')
+
+ # Build extensions in-place
+ self.reinitialize_command('build_ext', inplace=1)
+ self.run_command('build_ext')
+
+ ei_cmd = self.get_finalized_command("egg_info")
+
+ old_path = sys.path[:]
+ old_modules = sys.modules.copy()
+
+ try:
+ sys.path.insert(0, normalize_path(ei_cmd.egg_base))
+ working_set.__init__()
+ add_activation_listener(lambda dist: dist.activate())
+ require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version))
+ func()
+ finally:
+ sys.path[:] = old_path
+ sys.modules.clear()
+ sys.modules.update(old_modules)
+ working_set.__init__()
+
+
+ def run(self):
+ if self.distribution.install_requires:
+ self.distribution.fetch_build_eggs(self.distribution.install_requires)
+ if self.distribution.tests_require:
+ self.distribution.fetch_build_eggs(self.distribution.tests_require)
+
+ if self.test_suite:
+ cmd = ' '.join(self.test_args)
+ if self.dry_run:
+ self.announce('skipping "unittest %s" (dry run)' % cmd)
+ else:
+ self.announce('running "unittest %s"' % cmd)
+ self.with_project_on_sys_path(self.run_tests)
+
+
+ def run_tests(self):
+ import unittest
+ loader_ep = EntryPoint.parse("x="+self.test_loader)
+ loader_class = loader_ep.load(require=False)
+ unittest.main(
+ None, None, [unittest.__file__]+self.test_args,
+ testLoader = loader_class()
+ )
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload.py
new file mode 100755
index 00000000..1f49745e
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload.py
@@ -0,0 +1,183 @@
+"""distutils.command.upload
+
+Implements the Distutils 'upload' subcommand (upload package to PyPI)."""
+
+from distutils.errors import *
+from distutils.core import Command
+from distutils.spawn import spawn
+from distutils import log
+try:
+ from hashlib import md5
+except ImportError:
+ from md5 import md5
+import os
+import socket
+import platform
+import ConfigParser
+import httplib
+import base64
+import urlparse
+import cStringIO as StringIO
+
+class upload(Command):
+
+ description = "upload binary package to PyPI"
+
+ DEFAULT_REPOSITORY = 'http://pypi.python.org/pypi'
+
+ user_options = [
+ ('repository=', 'r',
+ "url of repository [default: %s]" % DEFAULT_REPOSITORY),
+ ('show-response', None,
+ 'display full response text from server'),
+ ('sign', 's',
+ 'sign files to upload using gpg'),
+ ('identity=', 'i', 'GPG identity used to sign files'),
+ ]
+ boolean_options = ['show-response', 'sign']
+
+ def initialize_options(self):
+ self.username = ''
+ self.password = ''
+ self.repository = ''
+ self.show_response = 0
+ self.sign = False
+ self.identity = None
+
+ def finalize_options(self):
+ if self.identity and not self.sign:
+ raise DistutilsOptionError(
+ "Must use --sign for --identity to have meaning"
+ )
+ if os.environ.has_key('HOME'):
+ rc = os.path.join(os.environ['HOME'], '.pypirc')
+ if os.path.exists(rc):
+ self.announce('Using PyPI login from %s' % rc)
+ config = ConfigParser.ConfigParser({
+ 'username':'',
+ 'password':'',
+ 'repository':''})
+ config.read(rc)
+ if not self.repository:
+ self.repository = config.get('server-login', 'repository')
+ if not self.username:
+ self.username = config.get('server-login', 'username')
+ if not self.password:
+ self.password = config.get('server-login', 'password')
+ if not self.repository:
+ self.repository = self.DEFAULT_REPOSITORY
+
+ def run(self):
+ if not self.distribution.dist_files:
+ raise DistutilsOptionError("No dist file created in earlier command")
+ for command, pyversion, filename in self.distribution.dist_files:
+ self.upload_file(command, pyversion, filename)
+
+ def upload_file(self, command, pyversion, filename):
+ # Sign if requested
+ if self.sign:
+ gpg_args = ["gpg", "--detach-sign", "-a", filename]
+ if self.identity:
+ gpg_args[2:2] = ["--local-user", self.identity]
+ spawn(gpg_args,
+ dry_run=self.dry_run)
+
+ # Fill in the data
+ f = open(filename,'rb')
+ content = f.read()
+ f.close()
+ basename = os.path.basename(filename)
+ comment = ''
+ if command=='bdist_egg' and self.distribution.has_ext_modules():
+ comment = "built on %s" % platform.platform(terse=1)
+ data = {
+ ':action':'file_upload',
+ 'protcol_version':'1',
+ 'name':self.distribution.get_name(),
+ 'version':self.distribution.get_version(),
+ 'content':(basename,content),
+ 'filetype':command,
+ 'pyversion':pyversion,
+ 'md5_digest':md5(content).hexdigest(),
+ }
+ if command == 'bdist_rpm':
+ dist, version, id = platform.dist()
+ if dist:
+ comment = 'built for %s %s' % (dist, version)
+ elif command == 'bdist_dumb':
+ comment = 'built for %s' % platform.platform(terse=1)
+ data['comment'] = comment
+
+ if self.sign:
+ data['gpg_signature'] = (os.path.basename(filename) + ".asc",
+ open(filename+".asc").read())
+
+ # set up the authentication
+ auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip()
+
+ # Build up the MIME payload for the POST data
+ boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
+ sep_boundary = '\n--' + boundary
+ end_boundary = sep_boundary + '--'
+ body = StringIO.StringIO()
+ for key, value in data.items():
+ # handle multiple entries for the same name
+ if type(value) != type([]):
+ value = [value]
+ for value in value:
+ if type(value) is tuple:
+ fn = ';filename="%s"' % value[0]
+ value = value[1]
+ else:
+ fn = ""
+ value = str(value)
+ body.write(sep_boundary)
+ body.write('\nContent-Disposition: form-data; name="%s"'%key)
+ body.write(fn)
+ body.write("\n\n")
+ body.write(value)
+ if value and value[-1] == '\r':
+ body.write('\n') # write an extra newline (lurve Macs)
+ body.write(end_boundary)
+ body.write("\n")
+ body = body.getvalue()
+
+ self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)
+
+ # build the Request
+ # We can't use urllib2 since we need to send the Basic
+ # auth right with the first request
+ schema, netloc, url, params, query, fragments = \
+ urlparse.urlparse(self.repository)
+ assert not params and not query and not fragments
+ if schema == 'http':
+ http = httplib.HTTPConnection(netloc)
+ elif schema == 'https':
+ http = httplib.HTTPSConnection(netloc)
+ else:
+ raise AssertionError, "unsupported schema "+schema
+
+ data = ''
+ loglevel = log.INFO
+ try:
+ http.connect()
+ http.putrequest("POST", url)
+ http.putheader('Content-type',
+ 'multipart/form-data; boundary=%s'%boundary)
+ http.putheader('Content-length', str(len(body)))
+ http.putheader('Authorization', auth)
+ http.endheaders()
+ http.send(body)
+ except socket.error, e:
+ self.announce(str(e), log.ERROR)
+ return
+
+ r = http.getresponse()
+ if r.status == 200:
+ self.announce('Server response (%s): %s' % (r.status, r.reason),
+ log.INFO)
+ else:
+ self.announce('Upload failed (%s): %s' % (r.status, r.reason),
+ log.ERROR)
+ if self.show_response:
+ print '-'*75, r.read(), '-'*75
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload_docs.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload_docs.py
new file mode 100755
index 00000000..213f7b58
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/command/upload_docs.py
@@ -0,0 +1,178 @@
+# -*- coding: utf-8 -*-
+"""upload_docs
+
+Implements a Distutils 'upload_docs' subcommand (upload documentation to
+PyPI's packages.python.org).
+"""
+
+import os
+import socket
+import zipfile
+import httplib
+import base64
+import urlparse
+import tempfile
+import sys
+
+from distutils import log
+from distutils.errors import DistutilsOptionError
+
+try:
+ from distutils.command.upload import upload
+except ImportError:
+ from setuptools.command.upload import upload
+
+_IS_PYTHON3 = sys.version > '3'
+
+try:
+ bytes
+except NameError:
+ bytes = str
+
+def b(str_or_bytes):
+ """Return bytes by either encoding the argument as ASCII or simply return
+ the argument as-is."""
+ if not isinstance(str_or_bytes, bytes):
+ return str_or_bytes.encode('ascii')
+ else:
+ return str_or_bytes
+
+
+class upload_docs(upload):
+
+ description = 'Upload documentation to PyPI'
+
+ user_options = [
+ ('repository=', 'r',
+ "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY),
+ ('show-response', None,
+ 'display full response text from server'),
+ ('upload-dir=', None, 'directory to upload'),
+ ]
+ boolean_options = upload.boolean_options
+
+ def initialize_options(self):
+ upload.initialize_options(self)
+ self.upload_dir = None
+
+ def finalize_options(self):
+ upload.finalize_options(self)
+ if self.upload_dir is None:
+ build = self.get_finalized_command('build')
+ self.upload_dir = os.path.join(build.build_base, 'docs')
+ self.mkpath(self.upload_dir)
+ self.ensure_dirname('upload_dir')
+ self.announce('Using upload directory %s' % self.upload_dir)
+
+ def create_zipfile(self):
+ name = self.distribution.metadata.get_name()
+ tmp_dir = tempfile.mkdtemp()
+ tmp_file = os.path.join(tmp_dir, "%s.zip" % name)
+ zip_file = zipfile.ZipFile(tmp_file, "w")
+ for root, dirs, files in os.walk(self.upload_dir):
+ if root == self.upload_dir and not files:
+ raise DistutilsOptionError(
+ "no files found in upload directory '%s'"
+ % self.upload_dir)
+ for name in files:
+ full = os.path.join(root, name)
+ relative = root[len(self.upload_dir):].lstrip(os.path.sep)
+ dest = os.path.join(relative, name)
+ zip_file.write(full, dest)
+ zip_file.close()
+ return tmp_file
+
+ def run(self):
+ zip_file = self.create_zipfile()
+ self.upload_file(zip_file)
+
+ def upload_file(self, filename):
+ content = open(filename, 'rb').read()
+ meta = self.distribution.metadata
+ data = {
+ ':action': 'doc_upload',
+ 'name': meta.get_name(),
+ 'content': (os.path.basename(filename), content),
+ }
+ # set up the authentication
+ credentials = self.username + ':' + self.password
+ if _IS_PYTHON3: # base64 only works with bytes in Python 3.
+ encoded_creds = base64.encodebytes(credentials.encode('utf8'))
+ auth = bytes("Basic ")
+ else:
+ encoded_creds = base64.encodestring(credentials)
+ auth = "Basic "
+ auth += encoded_creds.strip()
+
+ # Build up the MIME payload for the POST data
+ boundary = b('--------------GHSKFJDLGDS7543FJKLFHRE75642756743254')
+ sep_boundary = b('\n--') + boundary
+ end_boundary = sep_boundary + b('--')
+ body = []
+ for key, values in data.items():
+ # handle multiple entries for the same name
+ if type(values) != type([]):
+ values = [values]
+ for value in values:
+ if type(value) is tuple:
+ fn = b(';filename="%s"' % value[0])
+ value = value[1]
+ else:
+ fn = b("")
+ body.append(sep_boundary)
+ body.append(b('\nContent-Disposition: form-data; name="%s"'%key))
+ body.append(fn)
+ body.append(b("\n\n"))
+ body.append(b(value))
+ if value and value[-1] == b('\r'):
+ body.append(b('\n')) # write an extra newline (lurve Macs)
+ body.append(end_boundary)
+ body.append(b("\n"))
+ body = b('').join(body)
+
+ self.announce("Submitting documentation to %s" % (self.repository),
+ log.INFO)
+
+ # build the Request
+ # We can't use urllib2 since we need to send the Basic
+ # auth right with the first request
+ schema, netloc, url, params, query, fragments = \
+ urlparse.urlparse(self.repository)
+ assert not params and not query and not fragments
+ if schema == 'http':
+ conn = httplib.HTTPConnection(netloc)
+ elif schema == 'https':
+ conn = httplib.HTTPSConnection(netloc)
+ else:
+ raise AssertionError("unsupported schema "+schema)
+
+ data = ''
+ loglevel = log.INFO
+ try:
+ conn.connect()
+ conn.putrequest("POST", url)
+ conn.putheader('Content-type',
+ 'multipart/form-data; boundary=%s'%boundary)
+ conn.putheader('Content-length', str(len(body)))
+ conn.putheader('Authorization', auth)
+ conn.endheaders()
+ conn.send(body)
+ except socket.error, e:
+ self.announce(str(e), log.ERROR)
+ return
+
+ r = conn.getresponse()
+ if r.status == 200:
+ self.announce('Server response (%s): %s' % (r.status, r.reason),
+ log.INFO)
+ elif r.status == 301:
+ location = r.getheader('Location')
+ if location is None:
+ location = 'http://packages.python.org/%s/' % meta.get_name()
+ self.announce('Upload successful. Visit %s' % location,
+ log.INFO)
+ else:
+ self.announce('Upload failed (%s): %s' % (r.status, r.reason),
+ log.ERROR)
+ if self.show_response:
+ print '-'*75, r.read(), '-'*75
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/depends.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/depends.py
new file mode 100755
index 00000000..4b7b3437
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/depends.py
@@ -0,0 +1,246 @@
+from __future__ import generators
+import sys, imp, marshal
+from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
+from distutils.version import StrictVersion, LooseVersion
+
+__all__ = [
+ 'Require', 'find_module', 'get_module_constant', 'extract_constant'
+]
+
+class Require:
+ """A prerequisite to building or installing a distribution"""
+
+ def __init__(self,name,requested_version,module,homepage='',
+ attribute=None,format=None
+ ):
+
+ if format is None and requested_version is not None:
+ format = StrictVersion
+
+ if format is not None:
+ requested_version = format(requested_version)
+ if attribute is None:
+ attribute = '__version__'
+
+ self.__dict__.update(locals())
+ del self.self
+
+
+ def full_name(self):
+ """Return full package/distribution name, w/version"""
+ if self.requested_version is not None:
+ return '%s-%s' % (self.name,self.requested_version)
+ return self.name
+
+
+ def version_ok(self,version):
+ """Is 'version' sufficiently up-to-date?"""
+ return self.attribute is None or self.format is None or \
+ str(version)<>"unknown" and version >= self.requested_version
+
+
+ def get_version(self, paths=None, default="unknown"):
+
+ """Get version number of installed module, 'None', or 'default'
+
+ Search 'paths' for module. If not found, return 'None'. If found,
+ return the extracted version attribute, or 'default' if no version
+ attribute was specified, or the value cannot be determined without
+ importing the module. The version is formatted according to the
+ requirement's version format (if any), unless it is 'None' or the
+ supplied 'default'.
+ """
+
+ if self.attribute is None:
+ try:
+ f,p,i = find_module(self.module,paths)
+ if f: f.close()
+ return default
+ except ImportError:
+ return None
+
+ v = get_module_constant(self.module,self.attribute,default,paths)
+
+ if v is not None and v is not default and self.format is not None:
+ return self.format(v)
+
+ return v
+
+
+ def is_present(self,paths=None):
+ """Return true if dependency is present on 'paths'"""
+ return self.get_version(paths) is not None
+
+
+ def is_current(self,paths=None):
+ """Return true if dependency is present and up-to-date on 'paths'"""
+ version = self.get_version(paths)
+ if version is None:
+ return False
+ return self.version_ok(version)
+
+
+def _iter_code(code):
+
+ """Yield '(op,arg)' pair for each operation in code object 'code'"""
+
+ from array import array
+ from dis import HAVE_ARGUMENT, EXTENDED_ARG
+
+ bytes = array('b',code.co_code)
+ eof = len(code.co_code)
+
+ ptr = 0
+ extended_arg = 0
+
+ while ptr<eof:
+
+ op = bytes[ptr]
+
+ if op>=HAVE_ARGUMENT:
+
+ arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg
+ ptr += 3
+
+ if op==EXTENDED_ARG:
+ extended_arg = arg * 65536L
+ continue
+
+ else:
+ arg = None
+ ptr += 1
+
+ yield op,arg
+
+
+
+
+
+
+
+
+
+
+def find_module(module, paths=None):
+ """Just like 'imp.find_module()', but with package support"""
+
+ parts = module.split('.')
+
+ while parts:
+ part = parts.pop(0)
+ f, path, (suffix,mode,kind) = info = imp.find_module(part, paths)
+
+ if kind==PKG_DIRECTORY:
+ parts = parts or ['__init__']
+ paths = [path]
+
+ elif parts:
+ raise ImportError("Can't find %r in %s" % (parts,module))
+
+ return info
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+def get_module_constant(module, symbol, default=-1, paths=None):
+
+ """Find 'module' by searching 'paths', and extract 'symbol'
+
+ Return 'None' if 'module' does not exist on 'paths', or it does not define
+ 'symbol'. If the module defines 'symbol' as a constant, return the
+ constant. Otherwise, return 'default'."""
+
+ try:
+ f, path, (suffix,mode,kind) = find_module(module,paths)
+ except ImportError:
+ # Module doesn't exist
+ return None
+
+ try:
+ if kind==PY_COMPILED:
+ f.read(8) # skip magic & date
+ code = marshal.load(f)
+ elif kind==PY_FROZEN:
+ code = imp.get_frozen_object(module)
+ elif kind==PY_SOURCE:
+ code = compile(f.read(), path, 'exec')
+ else:
+ # Not something we can parse; we'll have to import it. :(
+ if module not in sys.modules:
+ imp.load_module(module,f,path,(suffix,mode,kind))
+ return getattr(sys.modules[module],symbol,None)
+
+ finally:
+ if f:
+ f.close()
+
+ return extract_constant(code,symbol,default)
+
+
+
+
+
+
+
+
+def extract_constant(code,symbol,default=-1):
+ """Extract the constant value of 'symbol' from 'code'
+
+ If the name 'symbol' is bound to a constant value by the Python code
+ object 'code', return that value. If 'symbol' is bound to an expression,
+ return 'default'. Otherwise, return 'None'.
+
+ Return value is based on the first assignment to 'symbol'. 'symbol' must
+ be a global, or at least a non-"fast" local in the code block. That is,
+ only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
+ must be present in 'code.co_names'.
+ """
+
+ if symbol not in code.co_names:
+ # name's not there, can't possibly be an assigment
+ return None
+
+ name_idx = list(code.co_names).index(symbol)
+
+ STORE_NAME = 90
+ STORE_GLOBAL = 97
+ LOAD_CONST = 100
+
+ const = default
+
+ for op, arg in _iter_code(code):
+
+ if op==LOAD_CONST:
+ const = code.co_consts[arg]
+ elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL):
+ return const
+ else:
+ const = default
+
+if sys.platform.startswith('java') or sys.platform == 'cli':
+ # XXX it'd be better to test assertions about bytecode instead...
+ del extract_constant, get_module_constant
+ __all__.remove('extract_constant')
+ __all__.remove('get_module_constant')
+
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/dist.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/dist.py
new file mode 100755
index 00000000..fd4ca66b
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/dist.py
@@ -0,0 +1,816 @@
+__all__ = ['Distribution']
+
+import re
+from distutils.core import Distribution as _Distribution
+from setuptools.depends import Require
+from setuptools.command.install import install
+from setuptools.command.sdist import sdist
+from setuptools.command.install_lib import install_lib
+from distutils.errors import DistutilsOptionError, DistutilsPlatformError
+from distutils.errors import DistutilsSetupError
+import setuptools, pkg_resources, distutils.core, distutils.dist, distutils.cmd
+import os, distutils.log
+
+def _get_unpatched(cls):
+ """Protect against re-patching the distutils if reloaded
+
+ Also ensures that no other distutils extension monkeypatched the distutils
+ first.
+ """
+ while cls.__module__.startswith('setuptools'):
+ cls, = cls.__bases__
+ if not cls.__module__.startswith('distutils'):
+ raise AssertionError(
+ "distutils has already been patched by %r" % cls
+ )
+ return cls
+
+_Distribution = _get_unpatched(_Distribution)
+
+sequence = tuple, list
+
+def check_importable(dist, attr, value):
+ try:
+ ep = pkg_resources.EntryPoint.parse('x='+value)
+ assert not ep.extras
+ except (TypeError,ValueError,AttributeError,AssertionError):
+ raise DistutilsSetupError(
+ "%r must be importable 'module:attrs' string (got %r)"
+ % (attr,value)
+ )
+
+
+def assert_string_list(dist, attr, value):
+ """Verify that value is a string list or None"""
+ try:
+ assert ''.join(value)!=value
+ except (TypeError,ValueError,AttributeError,AssertionError):
+ raise DistutilsSetupError(
+ "%r must be a list of strings (got %r)" % (attr,value)
+ )
+
+def check_nsp(dist, attr, value):
+ """Verify that namespace packages are valid"""
+ assert_string_list(dist,attr,value)
+ for nsp in value:
+ if not dist.has_contents_for(nsp):
+ raise DistutilsSetupError(
+ "Distribution contains no modules or packages for " +
+ "namespace package %r" % nsp
+ )
+ if '.' in nsp:
+ parent = '.'.join(nsp.split('.')[:-1])
+ if parent not in value:
+ distutils.log.warn(
+ "%r is declared as a package namespace, but %r is not:"
+ " please correct this in setup.py", nsp, parent
+ )
+
+def check_extras(dist, attr, value):
+ """Verify that extras_require mapping is valid"""
+ try:
+ for k,v in value.items():
+ list(pkg_resources.parse_requirements(v))
+ except (TypeError,ValueError,AttributeError):
+ raise DistutilsSetupError(
+ "'extras_require' must be a dictionary whose values are "
+ "strings or lists of strings containing valid project/version "
+ "requirement specifiers."
+ )
+
+
+
+
+def assert_bool(dist, attr, value):
+ """Verify that value is True, False, 0, or 1"""
+ if bool(value) != value:
+ raise DistutilsSetupError(
+ "%r must be a boolean value (got %r)" % (attr,value)
+ )
+def check_requirements(dist, attr, value):
+ """Verify that install_requires is a valid requirements list"""
+ try:
+ list(pkg_resources.parse_requirements(value))
+ except (TypeError,ValueError):
+ raise DistutilsSetupError(
+ "%r must be a string or list of strings "
+ "containing valid project/version requirement specifiers" % (attr,)
+ )
+def check_entry_points(dist, attr, value):
+ """Verify that entry_points map is parseable"""
+ try:
+ pkg_resources.EntryPoint.parse_map(value)
+ except ValueError, e:
+ raise DistutilsSetupError(e)
+
+def check_test_suite(dist, attr, value):
+ if not isinstance(value,basestring):
+ raise DistutilsSetupError("test_suite must be a string")
+
+def check_package_data(dist, attr, value):
+ """Verify that value is a dictionary of package names to glob lists"""
+ if isinstance(value,dict):
+ for k,v in value.items():
+ if not isinstance(k,str): break
+ try: iter(v)
+ except TypeError:
+ break
+ else:
+ return
+ raise DistutilsSetupError(
+ attr+" must be a dictionary mapping package names to lists of "
+ "wildcard patterns"
+ )
+
+class Distribution(_Distribution):
+ """Distribution with support for features, tests, and package data
+
+ This is an enhanced version of 'distutils.dist.Distribution' that
+ effectively adds the following new optional keyword arguments to 'setup()':
+
+ 'install_requires' -- a string or sequence of strings specifying project
+ versions that the distribution requires when installed, in the format
+ used by 'pkg_resources.require()'. They will be installed
+ automatically when the package is installed. If you wish to use
+ packages that are not available in PyPI, or want to give your users an
+ alternate download location, you can add a 'find_links' option to the
+ '[easy_install]' section of your project's 'setup.cfg' file, and then
+ setuptools will scan the listed web pages for links that satisfy the
+ requirements.
+
+ 'extras_require' -- a dictionary mapping names of optional "extras" to the
+ additional requirement(s) that using those extras incurs. For example,
+ this::
+
+ extras_require = dict(reST = ["docutils>=0.3", "reSTedit"])
+
+ indicates that the distribution can optionally provide an extra
+ capability called "reST", but it can only be used if docutils and
+ reSTedit are installed. If the user installs your package using
+ EasyInstall and requests one of your extras, the corresponding
+ additional requirements will be installed if needed.
+
+ 'features' -- a dictionary mapping option names to 'setuptools.Feature'
+ objects. Features are a portion of the distribution that can be
+ included or excluded based on user options, inter-feature dependencies,
+ and availability on the current system. Excluded features are omitted
+ from all setup commands, including source and binary distributions, so
+ you can create multiple distributions from the same source tree.
+ Feature names should be valid Python identifiers, except that they may
+ contain the '-' (minus) sign. Features can be included or excluded
+ via the command line options '--with-X' and '--without-X', where 'X' is
+ the name of the feature. Whether a feature is included by default, and
+ whether you are allowed to control this from the command line, is
+ determined by the Feature object. See the 'Feature' class for more
+ information.
+
+ 'test_suite' -- the name of a test suite to run for the 'test' command.
+ If the user runs 'python setup.py test', the package will be installed,
+ and the named test suite will be run. The format is the same as
+ would be used on a 'unittest.py' command line. That is, it is the
+ dotted name of an object to import and call to generate a test suite.
+
+ 'package_data' -- a dictionary mapping package names to lists of filenames
+ or globs to use to find data files contained in the named packages.
+ If the dictionary has filenames or globs listed under '""' (the empty
+ string), those names will be searched for in every package, in addition
+ to any names for the specific package. Data files found using these
+ names/globs will be installed along with the package, in the same
+ location as the package. Note that globs are allowed to reference
+ the contents of non-package subdirectories, as long as you use '/' as
+ a path separator. (Globs are automatically converted to
+ platform-specific paths at runtime.)
+
+ In addition to these new keywords, this class also has several new methods
+ for manipulating the distribution's contents. For example, the 'include()'
+ and 'exclude()' methods can be thought of as in-place add and subtract
+ commands that add or remove packages, modules, extensions, and so on from
+ the distribution. They are used by the feature subsystem to configure the
+ distribution for the included and excluded features.
+ """
+
+ _patched_dist = None
+
+ def patch_missing_pkg_info(self, attrs):
+ # Fake up a replacement for the data that would normally come from
+ # PKG-INFO, but which might not yet be built if this is a fresh
+ # checkout.
+ #
+ if not attrs or 'name' not in attrs or 'version' not in attrs:
+ return
+ key = pkg_resources.safe_name(str(attrs['name'])).lower()
+ dist = pkg_resources.working_set.by_key.get(key)
+ if dist is not None and not dist.has_metadata('PKG-INFO'):
+ dist._version = pkg_resources.safe_version(str(attrs['version']))
+ self._patched_dist = dist
+
+ def __init__ (self, attrs=None):
+ have_package_data = hasattr(self, "package_data")
+ if not have_package_data:
+ self.package_data = {}
+ self.require_features = []
+ self.features = {}
+ self.dist_files = []
+ self.src_root = attrs and attrs.pop("src_root", None)
+ self.patch_missing_pkg_info(attrs)
+ # Make sure we have any eggs needed to interpret 'attrs'
+ if attrs is not None:
+ self.dependency_links = attrs.pop('dependency_links', [])
+ assert_string_list(self,'dependency_links',self.dependency_links)
+ if attrs and 'setup_requires' in attrs:
+ self.fetch_build_eggs(attrs.pop('setup_requires'))
+ for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
+ if not hasattr(self,ep.name):
+ setattr(self,ep.name,None)
+ _Distribution.__init__(self,attrs)
+ if isinstance(self.metadata.version, (int,long,float)):
+ # Some people apparently take "version number" too literally :)
+ self.metadata.version = str(self.metadata.version)
+
+ def parse_command_line(self):
+ """Process features after parsing command line options"""
+ result = _Distribution.parse_command_line(self)
+ if self.features:
+ self._finalize_features()
+ return result
+
+ def _feature_attrname(self,name):
+ """Convert feature name to corresponding option attribute name"""
+ return 'with_'+name.replace('-','_')
+
+ def fetch_build_eggs(self, requires):
+ """Resolve pre-setup requirements"""
+ from pkg_resources import working_set, parse_requirements
+ for dist in working_set.resolve(
+ parse_requirements(requires), installer=self.fetch_build_egg
+ ):
+ working_set.add(dist)
+
+ def finalize_options(self):
+ _Distribution.finalize_options(self)
+ if self.features:
+ self._set_global_opts_from_features()
+
+ for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
+ value = getattr(self,ep.name,None)
+ if value is not None:
+ ep.require(installer=self.fetch_build_egg)
+ ep.load()(self, ep.name, value)
+ if getattr(self, 'convert_2to3_doctests', None):
+ # XXX may convert to set here when we can rely on set being builtin
+ self.convert_2to3_doctests = [os.path.abspath(p) for p in self.convert_2to3_doctests]
+ else:
+ self.convert_2to3_doctests = []
+
+ def fetch_build_egg(self, req):
+ """Fetch an egg needed for building"""
+ try:
+ cmd = self._egg_fetcher
+ except AttributeError:
+ from setuptools.command.easy_install import easy_install
+ dist = self.__class__({'script_args':['easy_install']})
+ dist.parse_config_files()
+ opts = dist.get_option_dict('easy_install')
+ keep = (
+ 'find_links', 'site_dirs', 'index_url', 'optimize',
+ 'site_dirs', 'allow_hosts'
+ )
+ for key in opts.keys():
+ if key not in keep:
+ del opts[key] # don't use any other settings
+ if self.dependency_links:
+ links = self.dependency_links[:]
+ if 'find_links' in opts:
+ links = opts['find_links'][1].split() + links
+ opts['find_links'] = ('setup', links)
+ cmd = easy_install(
+ dist, args=["x"], install_dir=os.curdir, exclude_scripts=True,
+ always_copy=False, build_directory=None, editable=False,
+ upgrade=False, multi_version=True, no_report = True
+ )
+ cmd.ensure_finalized()
+ self._egg_fetcher = cmd
+ return cmd.easy_install(req)
+
    def _set_global_opts_from_features(self):
        """Add --with-X/--without-X options based on optional features"""

        go = []
        no = self.negative_opt.copy()

        for name,feature in self.features.items():
            # Start every feature in the "unknown" state, then let the
            # feature validate itself against this distribution.
            self._set_feature(name,None)
            feature.validate(self)

            if feature.optional:
                descr = feature.description
                incdef = ' (default)'
                excdef=''
                # The ' (default)' marker follows whichever option matches
                # the feature's include_by_default() answer.
                if not feature.include_by_default():
                    excdef, incdef = incdef, excdef

                go.append(('with-'+name, None, 'include '+descr+incdef))
                go.append(('without-'+name, None, 'exclude '+descr+excdef))
                # 'without-X' is registered as the negative of 'with-X'.
                no['without-'+name] = 'with-'+name

        # Keep the feature-specific options separately retrievable, and
        # prepend them to the global option table.
        self.global_options = self.feature_options = go + self.global_options
        self.negative_opt = self.feature_negopt = no
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ def _finalize_features(self):
+ """Add/remove features and resolve dependencies between them"""
+
+ # First, flag all the enabled items (and thus their dependencies)
+ for name,feature in self.features.items():
+ enabled = self.feature_is_included(name)
+ if enabled or (enabled is None and feature.include_by_default()):
+ feature.include_in(self)
+ self._set_feature(name,1)
+
+ # Then disable the rest, so that off-by-default features don't
+ # get flagged as errors when they're required by an enabled feature
+ for name,feature in self.features.items():
+ if not self.feature_is_included(name):
+ feature.exclude_from(self)
+ self._set_feature(name,0)
+
+
    def get_command_class(self, command):
        """Pluggable version of get_command_class()"""
        # Explicitly registered command classes win outright.
        if command in self.cmdclass:
            return self.cmdclass[command]

        # Otherwise look for a matching 'distutils.commands' entry point:
        # the first match is loaded (installing its requirements if needed)
        # and cached.  Note the for/else: the else branch -- the plain
        # distutils lookup -- only runs when NO entry point matched, since
        # the loop body returns on its first iteration.
        for ep in pkg_resources.iter_entry_points('distutils.commands',command):
            ep.require(installer=self.fetch_build_egg)
            self.cmdclass[command] = cmdclass = ep.load()
            return cmdclass
        else:
            return _Distribution.get_command_class(self, command)
+
    def print_commands(self):
        """List commands, including those advertised via entry points."""
        # Pre-register every plugin-provided command so the base class
        # help listing can see it.
        for ep in pkg_resources.iter_entry_points('distutils.commands'):
            if ep.name not in self.cmdclass:
                cmdclass = ep.load(False) # don't require extras, we're not running
                self.cmdclass[ep.name] = cmdclass
        return _Distribution.print_commands(self)
+
+
+
+
+
+ def _set_feature(self,name,status):
+ """Set feature's inclusion status"""
+ setattr(self,self._feature_attrname(name),status)
+
+ def feature_is_included(self,name):
+ """Return 1 if feature is included, 0 if excluded, 'None' if unknown"""
+ return getattr(self,self._feature_attrname(name))
+
+ def include_feature(self,name):
+ """Request inclusion of feature named 'name'"""
+
+ if self.feature_is_included(name)==0:
+ descr = self.features[name].description
+ raise DistutilsOptionError(
+ descr + " is required, but was excluded or is not available"
+ )
+ self.features[name].include_in(self)
+ self._set_feature(name,1)
+
    def include(self,**attrs):
        """Add items to distribution that are named in keyword arguments

        For example, 'dist.include(py_modules=["x"])' would add 'x' to
        the distribution's 'py_modules' attribute, if it was not already
        there.

        Currently, this method only supports inclusion for attributes that are
        lists or tuples. If you need to add support for adding to other
        attributes in this or a subclass, you can add an '_include_X' method,
        where 'X' is the name of the attribute. The method will be called with
        the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})'
        will try to call 'dist._include_foo({"bar":"baz"})', which can then
        handle whatever special inclusion logic is needed.
        """
        # Dispatch each keyword to a specialized _include_<name> handler
        # when one exists; otherwise fall back to generic list handling.
        for k,v in attrs.items():
            include = getattr(self, '_include_'+k, None)
            if include:
                include(v)
            else:
                self._include_misc(k,v)
+
+ def exclude_package(self,package):
+ """Remove packages, modules, and extensions in named package"""
+
+ pfx = package+'.'
+ if self.packages:
+ self.packages = [
+ p for p in self.packages
+ if p != package and not p.startswith(pfx)
+ ]
+
+ if self.py_modules:
+ self.py_modules = [
+ p for p in self.py_modules
+ if p != package and not p.startswith(pfx)
+ ]
+
+ if self.ext_modules:
+ self.ext_modules = [
+ p for p in self.ext_modules
+ if p.name != package and not p.name.startswith(pfx)
+ ]
+
+
+ def has_contents_for(self,package):
+ """Return true if 'exclude_package(package)' would do something"""
+
+ pfx = package+'.'
+
+ for p in self.iter_distribution_names():
+ if p==package or p.startswith(pfx):
+ return True
+
+
+
+
+
+
+
+
+
+
    def _exclude_misc(self,name,value):
        """Handle 'exclude()' for list/tuple attrs without a special handler"""
        # `sequence` is a module-level constant defined outside this view;
        # presumably (tuple, list) -- both the incoming value and the
        # existing setting must pass this check for exclusion to apply.
        if not isinstance(value,sequence):
            raise DistutilsSetupError(
                "%s: setting must be a list or tuple (%r)" % (name, value)
            )
        try:
            old = getattr(self,name)
        except AttributeError:
            raise DistutilsSetupError(
                "%s: No such distribution setting" % name
            )
        if old is not None and not isinstance(old,sequence):
            raise DistutilsSetupError(
                name+": this setting cannot be changed via include/exclude"
            )
        elif old:
            # Drop every item that appears in `value`; order is preserved.
            setattr(self,name,[item for item in old if item not in value])
+
    def _include_misc(self,name,value):
        """Handle 'include()' for list/tuple attrs without a special handler"""

        # See _exclude_misc: `sequence` is a module-level constant.
        if not isinstance(value,sequence):
            raise DistutilsSetupError(
                "%s: setting must be a list (%r)" % (name, value)
            )
        try:
            old = getattr(self,name)
        except AttributeError:
            raise DistutilsSetupError(
                "%s: No such distribution setting" % name
            )
        if old is None:
            # Nothing set yet: adopt the new value wholesale.
            setattr(self,name,value)
        elif not isinstance(old,sequence):
            raise DistutilsSetupError(
                name+": this setting cannot be changed via include/exclude"
            )
        else:
            # Append only items not already present, keeping stable order.
            setattr(self,name,old+[item for item in value if item not in old])
+
+ def exclude(self,**attrs):
+ """Remove items from distribution that are named in keyword arguments
+
+ For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from
+ the distribution's 'py_modules' attribute. Excluding packages uses
+ the 'exclude_package()' method, so all of the package's contained
+ packages, modules, and extensions are also excluded.
+
+ Currently, this method only supports exclusion from attributes that are
+ lists or tuples. If you need to add support for excluding from other
+ attributes in this or a subclass, you can add an '_exclude_X' method,
+ where 'X' is the name of the attribute. The method will be called with
+ the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})'
+ will try to call 'dist._exclude_foo({"bar":"baz"})', which can then
+ handle whatever special exclusion logic is needed.
+ """
+ for k,v in attrs.items():
+ exclude = getattr(self, '_exclude_'+k, None)
+ if exclude:
+ exclude(v)
+ else:
+ self._exclude_misc(k,v)
+
+ def _exclude_packages(self,packages):
+ if not isinstance(packages,sequence):
+ raise DistutilsSetupError(
+ "packages: setting must be a list or tuple (%r)" % (packages,)
+ )
+ map(self.exclude_package, packages)
+
+
+
+
+
+
+
+
+
+
+
+
    def _parse_command_opts(self, parser, args):
        """Parse one command's options, expanding aliases first."""
        # Remove --with-X/--without-X options when processing command args
        # (restore the class-level tables that lack the feature options).
        self.global_options = self.__class__.global_options
        self.negative_opt = self.__class__.negative_opt

        # First, expand any aliases
        command = args[0]
        aliases = self.get_option_dict('aliases')
        while command in aliases:
            src,alias = aliases[command]
            del aliases[command] # ensure each alias can expand only once!
            import shlex
            # True enables comment stripping in shlex.split.
            args[:1] = shlex.split(alias,True)
            command = args[0]

        nargs = _Distribution._parse_command_opts(self, parser, args)

        # Handle commands that want to consume all remaining arguments
        cmd_class = self.get_command_class(command)
        if getattr(cmd_class,'command_consumes_arguments',None):
            # Stash the leftover args as if they came from the command line,
            # and report that nothing is left for further commands.
            self.get_option_dict(command)['args'] = ("command line", nargs)
            if nargs is not None:
                return []

        return nargs
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
    def get_cmdline_options(self):
        """Return a '{cmd: {opt:val}}' map of all command-line options

        Option names are all long, but do not include the leading '--', and
        contain dashes rather than underscores. If the option doesn't take
        an argument (e.g. '--quiet'), the 'val' is 'None'.

        Note that options provided by config files are intentionally excluded.
        """

        d = {}

        for cmd,opts in self.command_options.items():

            for opt,(src,val) in opts.items():

                # Each stored option remembers its source; only keep the
                # ones the user actually typed.
                if src != "command line":
                    continue

                opt = opt.replace('_','-')

                if val==0:
                    # A zero means a negative option was used; recover the
                    # '--without'/'--no' spelling from the negative_opt map
                    # (command-specific entries override the global ones).
                    cmdobj = self.get_command_obj(cmd)
                    neg_opt = self.negative_opt.copy()
                    neg_opt.update(getattr(cmdobj,'negative_opt',{}))
                    for neg,pos in neg_opt.items():
                        if pos==opt:
                            opt=neg
                            val=None
                            break
                    else:
                        # for/else: no negative form found -- a 0 value with
                        # no matching negative option should be impossible.
                        raise AssertionError("Shouldn't be able to get here")

                elif val==1:
                    # Boolean flags are reported with no argument.
                    val = None

                d.setdefault(cmd,{})[opt] = val

        return d
+
+
+ def iter_distribution_names(self):
+ """Yield all packages, modules, and extension names in distribution"""
+
+ for pkg in self.packages or ():
+ yield pkg
+
+ for module in self.py_modules or ():
+ yield module
+
+ for ext in self.ext_modules or ():
+ if isinstance(ext,tuple):
+ name, buildinfo = ext
+ else:
+ name = ext.name
+ if name.endswith('module'):
+ name = name[:-6]
+ yield name
+
# Install it throughout the distutils
# Monkeypatch distutils.dist, distutils.core and distutils.cmd so any code
# asking those modules for `Distribution` gets this subclass instead.
for module in distutils.dist, distutils.core, distutils.cmd:
    module.Distribution = Distribution
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
class Feature:
    """A subset of the distribution that can be excluded if unneeded/wanted

    Features are created using these keyword arguments:

    'description' -- a short, human readable description of the feature, to
       be used in error messages, and option help messages.

    'standard' -- if true, the feature is included by default if it is
       available on the current system.  Otherwise, the feature is only
       included if requested via a command line '--with-X' option, or if
       another included feature requires it.  The default setting is 'False'.

    'available' -- if true, the feature is available for installation on the
       current system.  The default setting is 'True'.

    'optional' -- if true, the feature's inclusion can be controlled from the
       command line, using the '--with-X' or '--without-X' options.  If
       false, the feature's inclusion status is determined automatically,
       based on 'available', 'standard', and whether any other feature
       requires it.  The default setting is 'True'.

    'require_features' -- a string or sequence of strings naming features
       that should also be included if this feature is included.  Defaults to
       empty list.  May also contain 'Require' objects that should be
       added/removed from the distribution.

    'remove' -- a string or list of strings naming packages to be removed
       from the distribution if this feature is *not* included.  If the
       feature *is* included, this argument is ignored.  This argument exists
       to support removing features that "crosscut" a distribution, such as
       defining a 'tests' feature that removes all the 'tests' subpackages
       provided by other features.  The default for this argument is an empty
       list.  (Note: the named package(s) or modules must exist in the base
       distribution when the 'setup()' function is initially called.)

    other keywords -- any other keyword arguments are saved, and passed to
       the distribution's 'include()' and 'exclude()' methods when the
       feature is included or excluded, respectively.  So, for example, you
       could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be
       added or removed from the distribution as appropriate.

    A feature must include at least one 'requires', 'remove', or other
    keyword argument.  Otherwise, it can't affect the distribution in any way.
    Note also that you can subclass 'Feature' to create your own specialized
    feature types that modify the distribution in other ways when included or
    excluded.  See the docstrings for the various methods here for more detail.
    Aside from the methods, the only feature attributes that distributions look
    at are 'description' and 'optional'.
    """

    def __init__(self, description, standard=False, available=True,
        optional=True, require_features=(), remove=(), **extras
    ):
        self.description = description
        self.standard = standard
        self.available = available
        self.optional = optional
        # Accept a single string or Require object in place of a sequence.
        if isinstance(require_features,(str,Require)):
            require_features = require_features,

        # Plain strings name other features; Require objects (anything
        # that isn't a string) are forwarded to include()/exclude() via
        # the extras mechanism.
        self.require_features = [
            r for r in require_features if isinstance(r,str)
        ]
        er = [r for r in require_features if not isinstance(r,str)]
        if er: extras['require_features'] = er

        if isinstance(remove,str):
            remove = remove,
        self.remove = remove
        self.extras = extras

        if not remove and not require_features and not extras:
            # FIX: the "%s" placeholder was never interpolated, so the
            # message literally printed "Feature %s: ...".
            raise DistutilsSetupError(
                "Feature %s: must define 'require_features', 'remove', or at least one"
                " of 'packages', 'py_modules', etc." % self.description
            )

    def include_by_default(self):
        """Should this feature be included by default?"""
        return self.available and self.standard

    def include_in(self,dist):

        """Ensure feature and its requirements are included in distribution

        You may override this in a subclass to perform additional operations on
        the distribution.  Note that this method may be called more than once
        per feature, and so should be idempotent.

        """

        if not self.available:
            # FIX: adjacent string literals were concatenated without a
            # space, yielding "...is required,but is not available...".
            raise DistutilsPlatformError(
                self.description + " is required, "
                "but is not available on this platform"
            )

        dist.include(**self.extras)

        for f in self.require_features:
            dist.include_feature(f)

    def exclude_from(self,dist):

        """Ensure feature is excluded from distribution

        You may override this in a subclass to perform additional operations on
        the distribution.  This method will be called at most once per
        feature, and only after all included features have been asked to
        include themselves.
        """

        dist.exclude(**self.extras)

        if self.remove:
            for item in self.remove:
                dist.exclude_package(item)

    def validate(self,dist):

        """Verify that feature makes sense in context of distribution

        This method is called by the distribution just before it parses its
        command line.  It checks to ensure that the 'remove' attribute, if any,
        contains only valid package/module names that are present in the base
        distribution when 'setup()' is called.  You may override it in a
        subclass to perform any other required validation of the feature
        against a target distribution.
        """

        for item in self.remove:
            if not dist.has_contents_for(item):
                raise DistutilsSetupError(
                    "%s wants to be able to remove %s, but the distribution"
                    " doesn't contain any packages or modules under %s"
                    % (self.description, item, item)
                )
+
+
+
def check_packages(dist, attr, value):
    """Verify that `value` is a list of valid dotted package names.

    Used as a setup-keyword validator; invalid names only produce a
    warning and do not abort the setup run.
    """
    for pkgname in value:
        # FIX: anchor the pattern with '$' -- unanchored, a name like
        # 'foo-bar' passed because re.match() matched the 'foo' prefix.
        if not re.match(r'\w+(\.\w+)*$', pkgname):
            # FIX: the original message concatenated "only"+".-separated"
            # without a space.
            distutils.log.warn(
                "WARNING: %r not a valid package name; please use only "
                ".-separated package names in setup.py", pkgname
            )
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/extension.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/extension.py
new file mode 100755
index 00000000..d186c7a2
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/extension.py
@@ -0,0 +1,36 @@
+from distutils.core import Extension as _Extension
+from setuptools.dist import _get_unpatched
+_Extension = _get_unpatched(_Extension)
+
# Probe for Pyrex: if its build_ext is importable we could compile .pyx
# sources directly; otherwise Extension swaps in the shipped .c files.
have_pyrex = False
try:
    from Pyrex.Distutils.build_ext import build_ext
    have_pyrex = True
except ImportError:
    pass
+
+
class Extension(_Extension):
    """Extension that uses '.c' files in place of '.pyx' files"""

    if not have_pyrex:
        # Without Pyrex installed, .pyx sources cannot be compiled, so
        # replace each one with its pre-generated .c counterpart.
        def __init__(self, *args, **kw):
            _Extension.__init__(self, *args, **kw)
            self.sources = [
                source[:-4] + '.c' if source.endswith('.pyx') else source
                for source in self.sources
            ]
+
class Library(Extension):
    """Just like a regular Extension, but built as a library instead"""
    # No behavior of its own here -- presumably the build commands
    # distinguish it via isinstance checks (not visible in this file).
+
# Replace distutils' Extension class everywhere it is commonly imported
# from, so 'from distutils.core import Extension' yields this subclass.
import sys, distutils.core, distutils.extension
distutils.core.Extension = Extension
distutils.extension.Extension = Extension
# build_ext may already hold a reference if it was imported first.
if 'distutils.command.build_ext' in sys.modules:
    sys.modules['distutils.command.build_ext'].Extension = Extension
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/gui.exe b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/gui.exe
new file mode 100644
index 00000000..474838d5
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/gui.exe
Binary files differ
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/package_index.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/package_index.py
new file mode 100755
index 00000000..1d467f78
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/package_index.py
@@ -0,0 +1,830 @@
+"""PyPI and direct package downloading"""
+import sys, os.path, re, urlparse, urllib2, shutil, random, socket, cStringIO
+import httplib
+from pkg_resources import *
+from distutils import log
+from distutils.errors import DistutilsError
+try:
+ from hashlib import md5
+except ImportError:
+ from md5 import md5
+from fnmatch import translate
+
# Matches the '#egg=name-version' URL fragment used to pin a project name.
EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$')
# Extracts the target of any href attribute in scraped HTML.
HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)
# this is here to fix emacs' cruddy broken syntax highlighting
# Scrapes PyPI's "md5" anchor markup to recover (url, text, digest).
PYPI_MD5 = re.compile(
    '<a href="([^"#]+)">([^<]+)</a>\n\s+\\(<a (?:title="MD5 hash"\n\s+)'
    'href="[^?]+\?:action=show_md5&amp;digest=([0-9a-f]{32})">md5</a>\\)'
)
# Recognizes an absolute URL scheme prefix ('http:', 'svn+ssh:', ...).
URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match
# Source-archive extensions tried, in order, by distros_for_location().
EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()

__all__ = [
    'PackageIndex', 'distros_for_url', 'parse_bdist_wininst',
    'interpret_distro_name',
]

# Socket timeout in seconds.  NOTE(review): defined here but not used in
# the visible portion of this file -- confirm where it is consumed.
_SOCKET_TIMEOUT = 15
+
def parse_bdist_wininst(name):
    """Return (base,pyversion) or (None,None) for possible .exe name"""
    lowered = name.lower()
    if not lowered.endswith('.exe'):
        return None, None
    if lowered.endswith('.win32.exe'):
        # 'pkg-1.0.win32.exe' -> base 'pkg-1.0', python version unknown
        return name[:-10], None
    if lowered.startswith('.win32-py', -16):
        # 'pkg-1.0.win32-py2.7.exe' -> base 'pkg-1.0', version '2.7'
        return name[:-16], name[-7:-4]
    return None, None
+
def egg_info_for_url(url):
    """Return (unquoted basename, fragment) for a download URL."""
    parts = urlparse.urlparse(url)
    fragment = parts[5]
    base = urllib2.unquote(parts[2].split('/')[-1])
    # A '#' surviving inside the basename (it was %23-quoted in the path)
    # also acts as a fragment separator.
    if '#' in base:
        base, fragment = base.split('#', 1)
    return base, fragment
+
def distros_for_url(url, metadata=None):
    """Yield egg or source distribution objects that might be found at a URL"""
    base, fragment = egg_info_for_url(url)
    for dist in distros_for_location(url, base, metadata): yield dist
    if fragment:
        # An '#egg=name-version' fragment identifies a checkout/direct
        # link even when the basename itself isn't recognizable.
        match = EGG_FRAGMENT.match(fragment)
        if match:
            for dist in interpret_distro_name(
                url, match.group(1), metadata, precedence = CHECKOUT_DIST
            ):
                yield dist
+
def distros_for_location(location, basename, metadata=None):
    """Yield egg or source distribution objects based on basename"""
    if basename.endswith('.egg.zip'):
        basename = basename[:-4]    # strip the .zip
    if basename.endswith('.egg') and '-' in basename:
        # only one, unambiguous interpretation
        return [Distribution.from_location(location, basename, metadata)]

    if basename.endswith('.exe'):
        # Possibly a bdist_wininst installer; recover name/version from it.
        win_base, py_ver = parse_bdist_wininst(basename)
        if win_base is not None:
            return interpret_distro_name(
                location, win_base, metadata, py_ver, BINARY_DIST, "win32"
            )

    # Try source distro extensions (.zip, .tgz, etc.)
    #
    for ext in EXTENSIONS:
        if basename.endswith(ext):
            basename = basename[:-len(ext)]
            return interpret_distro_name(location, basename, metadata)
    return []  # no extension matched
+
def distros_for_filename(filename, metadata=None):
    """Yield possible egg or source distribution objects based on a filename"""
    location = normalize_path(filename)
    basename = os.path.basename(filename)
    return distros_for_location(location, basename, metadata)
+
+
def interpret_distro_name(location, basename, metadata,
    py_version=None, precedence=SOURCE_DIST, platform=None
):
    """Generate alternative interpretations of a source distro name

    Note: if `location` is a filesystem filename, you should call
    ``pkg_resources.normalize_path()`` on it before passing it to this
    routine!
    """
    # Generate alternative interpretations of a source distro name
    # Because some packages are ambiguous as to name/versions split
    # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc.
    # So, we generate each possible interepretation (e.g. "adns, python-1.1.0"
    # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice,
    # the spurious interpretations should be ignored, because in the event
    # there's also an "adns" package, the spurious "python-1.1.0" version will
    # compare lower than any numeric version number, and is therefore unlikely
    # to match a request for it. It's still a potential problem, though, and
    # in the long run PyPI and the distutils should go for "safe" names and
    # versions in distribution archive names (sdist and bdist).

    parts = basename.split('-')
    if not py_version:
        for i,p in enumerate(parts[2:]):
            # A 5-char 'py2.x' part marks a bdist_dumb archive, not an
            # sdist.  NOTE(review): only 'py2.' is checked -- py3 markers
            # would slip through; confirm whether that is intentional.
            if len(p)==5 and p.startswith('py2.'):
                return # It's a bdist_dumb, not an sdist -- bail out

    # Yield every possible name/version split point.
    for p in range(1,len(parts)+1):
        yield Distribution(
            location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]),
            py_version=py_version, precedence = precedence,
            platform = platform
        )
+
+REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)
+# this line is here to fix emacs' cruddy broken syntax highlighting
+
def find_external_links(url, page):
    """Find rel="homepage" and rel="download" links in `page`, yielding URLs"""

    for match in REL.finditer(page):
        tag, rel = match.groups()
        # rel may hold a comma-separated list of tokens.
        rels = map(str.strip, rel.lower().split(','))
        if 'homepage' in rels or 'download' in rels:
            for match in HREF.finditer(tag):
                yield urlparse.urljoin(url, htmldecode(match.group(1)))

    # Also scrape PyPI's "Home Page" / "Download URL" table rows, which
    # don't carry rel attributes.
    for tag in ("<th>Home Page", "<th>Download URL"):
        pos = page.find(tag)
        if pos!=-1:
            match = HREF.search(page,pos)
            if match:
                yield urlparse.urljoin(url, htmldecode(match.group(1)))
+
+user_agent = "Python-urllib/%s distribute/%s" % (
+ sys.version[:3], require('distribute')[0].version
+)
+
+
+class PackageIndex(Environment):
+ """A distribution index that scans web pages for download URLs"""
+
    def __init__(self, index_url="http://pypi.python.org/simple", hosts=('*',),
        *args, **kw
    ):
        """Create an index rooted at `index_url`.

        `hosts` is a sequence of fnmatch-style hostname patterns that
        downloads are allowed from; remaining args go to Environment.
        """
        Environment.__init__(self,*args,**kw)
        # Append '/' only when missing: the slice is "/" if the URL doesn't
        # already end with one, "" otherwise.
        self.index_url = index_url + "/"[:not index_url.endswith('/')]
        self.scanned_urls = {}
        self.fetched_urls = {}
        self.package_pages = {}
        # Compile the host patterns into a single alternation matcher.
        self.allows = re.compile('|'.join(map(translate,hosts))).match
        self.to_scan = []
+
+
+
    def process_url(self, url, retrieve=False):
        """Evaluate a URL as a possible download, and maybe retrieve it"""
        if url in self.scanned_urls and not retrieve:
            return
        self.scanned_urls[url] = True
        if not URL_SCHEME(url):
            # No scheme: treat it as a local file or directory.
            self.process_filename(url)
            return
        else:
            dists = list(distros_for_url(url))
            if dists:
                if not self.url_ok(url):
                    return
                self.debug("Found link: %s", url)

        # If the URL itself named distributions, or we weren't asked to
        # retrieve, or the page was already fetched, register and stop.
        if dists or not retrieve or url in self.fetched_urls:
            map(self.add, dists)
            return # don't need the actual page

        if not self.url_ok(url):
            self.fetched_urls[url] = True
            return

        self.info("Reading %s", url)
        f = self.open_url(url, "Download error: %s -- Some packages may not be found!")
        if f is None: return
        # Record both the requested and the final (post-redirect) URL.
        self.fetched_urls[url] = self.fetched_urls[f.url] = True

        if 'html' not in f.headers.get('content-type', '').lower():
            f.close()   # not html, we can't process it
            return

        base = f.url     # handle redirects
        page = f.read()
        if sys.version_info >= (3,):
            charset = f.headers.get_param('charset') or 'latin-1'
            page = page.decode(charset, "ignore")
        f.close()
        # Recursively scan every link found on the page.
        for match in HREF.finditer(page):
            link = urlparse.urljoin(base, htmldecode(match.group(1)))
            self.process_url(link)
        # Index pages get extra processing (package-page bookkeeping).
        if url.startswith(self.index_url) and getattr(f,'code',None)!=404:
            page = self.process_index(url, page)
+
    def process_filename(self, fn, nested=False):
        # process filenames or directories
        if not os.path.exists(fn):
            self.warn("Not found: %s", fn)
            return

        if os.path.isdir(fn) and not nested:
            # Scan exactly one directory level (nested=True stops recursion).
            path = os.path.realpath(fn)
            for item in os.listdir(path):
                self.process_filename(os.path.join(path,item), True)

        dists = distros_for_filename(fn)
        if dists:
            self.debug("Found: %s", fn)
            # NOTE(review): map() used for its side effect -- fine on
            # Python 2 where map is eager, a silent no-op on Python 3.
            map(self.add, dists)
+
+ def url_ok(self, url, fatal=False):
+ s = URL_SCHEME(url)
+ if (s and s.group(1).lower()=='file') or self.allows(urlparse.urlparse(url)[1]):
+ return True
+ msg = "\nLink to % s ***BLOCKED*** by --allow-hosts\n"
+ if fatal:
+ raise DistutilsError(msg % url)
+ else:
+ self.warn(msg, url)
+
+ def scan_egg_links(self, search_path):
+ for item in search_path:
+ if os.path.isdir(item):
+ for entry in os.listdir(item):
+ if entry.endswith('.egg-link'):
+ self.scan_egg_link(item, entry)
+
    def scan_egg_link(self, path, entry):
        # An .egg-link file holds a project directory (line 1) and an
        # optional relative subpath (line 2); exactly two non-blank lines
        # are expected.  NOTE(review): relies on Python 2 semantics --
        # filter/map return lists and the open file is iterated by line.
        lines = filter(None, map(str.strip, open(os.path.join(path, entry))))
        if len(lines)==2:
            for dist in find_distributions(os.path.join(path, lines[0])):
                # Point the dist at the linked location and register it as
                # a source (development) distribution.
                dist.location = os.path.join(path, *lines)
                dist.precedence = SOURCE_DIST
                self.add(dist)
+
    def process_index(self,url,page):
        """Process the contents of a PyPI page"""
        def scan(link):
            # Process a URL to see if it's for a package page
            if link.startswith(self.index_url):
                parts = map(
                    urllib2.unquote, link[len(self.index_url):].split('/')
                )
                if len(parts)==2 and '#' not in parts[1]:
                    # it's a package page, sanitize and index it
                    pkg = safe_name(parts[0])
                    ver = safe_version(parts[1])
                    self.package_pages.setdefault(pkg.lower(),{})[link] = True
                    return to_filename(pkg), to_filename(ver)
            return None, None

        # process an index page into the package-page index
        for match in HREF.finditer(page):
            try:
                scan( urlparse.urljoin(url, htmldecode(match.group(1))) )
            except ValueError:
                pass

        pkg, ver = scan(url)   # ensure this page is in the page index
        if pkg:
            # process individual package page
            for new_url in find_external_links(url, page):
                # Process the found URL
                base, frag = egg_info_for_url(new_url)
                if base.endswith('.py') and not frag:
                    # Bare .py downloads need an #egg tag so their
                    # name/version can be identified later.
                    if ver:
                        new_url+='#egg=%s-%s' % (pkg,ver)
                    else:
                        self.need_version_info(url)
                self.scan_url(new_url)

            # Rewrite PyPI's md5 anchors into direct '#md5=' fragments.
            return PYPI_MD5.sub(
                lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1,3,2), page
            )
        else:
            return ""   # no sense double-scanning non-package pages
+
+
+
+ def need_version_info(self, url):
+ self.scan_all(
+ "Page at %s links to .py file(s) without version info; an index "
+ "scan is required.", url
+ )
+
+ def scan_all(self, msg=None, *args):
+ if self.index_url not in self.fetched_urls:
+ if msg: self.warn(msg,*args)
+ self.info(
+ "Scanning index of all packages (this may take a while)"
+ )
+ self.scan_url(self.index_url)
+
    def find_packages(self, requirement):
        """Scan the index pages that might list `requirement`'s project."""
        # Try the raw (unsafe) project name first...
        self.scan_url(self.index_url + requirement.unsafe_name+'/')

        if not self.package_pages.get(requirement.key):
            # Fall back to safe version of the name
            self.scan_url(self.index_url + requirement.project_name+'/')

        if not self.package_pages.get(requirement.key):
            # We couldn't find the target package, so search the index page too
            self.not_found_in_index(requirement)

        for url in list(self.package_pages.get(requirement.key,())):
            # scan each page that might be related to the desired package
            self.scan_url(url)
+
    def obtain(self, requirement, installer=None):
        """Locate a distribution matching `requirement`, scanning the
        index first and deferring to the base Environment on failure."""
        self.prescan(); self.find_packages(requirement)
        for dist in self[requirement.key]:
            if dist in requirement:
                return dist
            # Log candidates that were seen but didn't satisfy the spec.
            self.debug("%s does not match %s", requirement, dist)
        return super(PackageIndex, self).obtain(requirement,installer)
+
+
+
+
+
+ def check_md5(self, cs, info, filename, tfp):
+ if re.match('md5=[0-9a-f]{32}$', info):
+ self.debug("Validating md5 checksum for %s", filename)
+ if cs.hexdigest()<>info[4:]:
+ tfp.close()
+ os.unlink(filename)
+ raise DistutilsError(
+ "MD5 validation failed for "+os.path.basename(filename)+
+ "; possible download problem?"
+ )
+
    def add_find_links(self, urls):
        """Add `urls` to the list that will be prescanned for searches"""
        for url in urls:
            if (
                self.to_scan is None        # if we have already "gone online"
                or not URL_SCHEME(url)      # or it's a local file/directory
                or url.startswith('file:')
                or list(distros_for_url(url))  # or a direct package link
            ):
                # then go ahead and process it now
                self.scan_url(url)
            else:
                # otherwise, defer retrieval till later
                self.to_scan.append(url)
+
+ def prescan(self):
+ """Scan urls scheduled for prescanning (e.g. --find-links)"""
+ if self.to_scan:
+ map(self.scan_url, self.to_scan)
+ self.to_scan = None # from now on, go ahead and process immediately
+
+ def not_found_in_index(self, requirement):
+ if self[requirement.key]: # we've seen at least one distro
+ meth, msg = self.info, "Couldn't retrieve index page for %r"
+ else: # no distros seen for this name, might be misspelled
+ meth, msg = (self.warn,
+ "Couldn't find index page for %r (maybe misspelled?)")
+ meth(msg, requirement.unsafe_name)
+ self.scan_all()
+
    def download(self, spec, tmpdir):
        """Locate and/or download `spec` to `tmpdir`, returning a local path

        `spec` may be a ``Requirement`` object, or a string containing a URL,
        an existing local filename, or a project/version requirement spec
        (i.e. the string form of a ``Requirement`` object).  If it is the URL
        of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one
        that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is
        automatically created alongside the downloaded file.

        If `spec` is a ``Requirement`` object or a string containing a
        project/version requirement spec, this method returns the location of
        a matching distribution (possibly after downloading it to `tmpdir`).
        If `spec` is a locally existing file or directory name, it is simply
        returned unchanged.  If `spec` is a URL, it is downloaded to a subpath
        of `tmpdir`, and the local filename is returned.  Various errors may be
        raised if a problem occurs during downloading.
        """
        if not isinstance(spec,Requirement):
            scheme = URL_SCHEME(spec)
            if scheme:
                # It's a url, download it to tmpdir
                found = self._download_url(scheme.group(1), spec, tmpdir)
                base, fragment = egg_info_for_url(spec)
                if base.endswith('.py'):
                    # Synthesize a setup.py for a bare .py download.
                    found = self.gen_setup(found,fragment,tmpdir)
                return found
            elif os.path.exists(spec):
                # Existing file or directory, just return it
                return spec
            else:
                # Not a URL or a file: it must parse as a requirement spec.
                try:
                    spec = Requirement.parse(spec)
                except ValueError:
                    raise DistutilsError(
                        "Not a URL, existing file, or requirement spec: %r" %
                        (spec,)
                    )
        return getattr(self.fetch_distribution(spec, tmpdir),'location',None)
+
+
    def fetch_distribution(self,
        requirement, tmpdir, force_scan=False, source=False, develop_ok=False,
        local_index=None
    ):
        """Obtain a distribution suitable for fulfilling `requirement`

        `requirement` must be a ``pkg_resources.Requirement`` instance.
        If necessary, or if the `force_scan` flag is set, the requirement is
        searched for in the (online) package index as well as the locally
        installed packages.  If a distribution matching `requirement` is found,
        the returned distribution's ``location`` is the value you would have
        gotten from calling the ``download()`` method with the matching
        distribution's URL or filename.  If no matching distribution is found,
        ``None`` is returned.

        If the `source` flag is set, only source distributions and source
        checkout links will be considered.  Unless the `develop_ok` flag is
        set, development and system eggs (i.e., those using the ``.egg-info``
        format) will be ignored.
        """

        # process a Requirement
        self.info("Searching for %s", requirement)
        skipped = {}
        dist = None

        def find(req, env=None):
            if env is None:
                env = self
            # Find a matching distribution; may be called more than once

            for dist in env[req.key]:

                # Development/system eggs are skipped unless develop_ok;
                # `skipped` ensures the warning fires once per dist.
                if dist.precedence==DEVELOP_DIST and not develop_ok:
                    if dist not in skipped:
                        self.warn("Skipping development or system egg: %s",dist)
                        skipped[dist] = 1
                    continue

                if dist in req and (dist.precedence<=SOURCE_DIST or not source):
                    self.info("Best match: %s", dist)
                    # Return a clone whose location is the local download.
                    return dist.clone(
                        location=self.download(dist.location, tmpdir)
                    )

        # Search strategy: optional forced scan, then the local index,
        # then any deferred --find-links, then a last-resort page scan.
        if force_scan:
            self.prescan()
            self.find_packages(requirement)
            dist = find(requirement)

        if local_index is not None:
            dist = dist or find(requirement, local_index)

        if dist is None and self.to_scan is not None:
            self.prescan()
            dist = find(requirement)

        if dist is None and not force_scan:
            self.find_packages(requirement)
            dist = find(requirement)

        if dist is None:
            self.warn(
                "No local packages or download links found for %s%s",
                (source and "a source distribution of " or ""),
                requirement,
            )
        return dist
+
+ def fetch(self, requirement, tmpdir, force_scan=False, source=False):
+ """Obtain a file suitable for fulfilling `requirement`
+
+ DEPRECATED; use the ``fetch_distribution()`` method now instead. For
+ backward compatibility, this routine is identical but returns the
+ ``location`` of the downloaded distribution instead of a distribution
+ object.
+ """
+ dist = self.fetch_distribution(requirement,tmpdir,force_scan,source)
+ if dist is not None:
+ return dist.location
+ return None
+
+
+
+
+
+
+
+
+ def gen_setup(self, filename, fragment, tmpdir):
+ match = EGG_FRAGMENT.match(fragment)
+ dists = match and [d for d in
+ interpret_distro_name(filename, match.group(1), None) if d.version
+ ] or []
+
+ if len(dists)==1: # unambiguous ``#egg`` fragment
+ basename = os.path.basename(filename)
+
+ # Make sure the file has been downloaded to the temp dir.
+ if os.path.dirname(filename) != tmpdir:
+ dst = os.path.join(tmpdir, basename)
+ from setuptools.command.easy_install import samefile
+ if not samefile(filename, dst):
+ shutil.copy2(filename, dst)
+ filename=dst
+
+ file = open(os.path.join(tmpdir, 'setup.py'), 'w')
+ file.write(
+ "from setuptools import setup\n"
+ "setup(name=%r, version=%r, py_modules=[%r])\n"
+ % (
+ dists[0].project_name, dists[0].version,
+ os.path.splitext(basename)[0]
+ )
+ )
+ file.close()
+ return filename
+
+ elif match:
+ raise DistutilsError(
+ "Can't unambiguously interpret project/version identifier %r; "
+ "any dashes in the name or version should be escaped using "
+ "underscores. %r" % (fragment,dists)
+ )
+ else:
+ raise DistutilsError(
+ "Can't process plain .py files without an '#egg=name-version'"
+ " suffix to enable automatic setup script generation."
+ )
+
+ dl_blocksize = 8192
+ def _download_to(self, url, filename):
+ self.info("Downloading %s", url)
+ # Download the file
+ fp, tfp, info = None, None, None
+ try:
+ if '#' in url:
+ url, info = url.split('#', 1)
+ fp = self.open_url(url)
+ if isinstance(fp, urllib2.HTTPError):
+ raise DistutilsError(
+ "Can't download %s: %s %s" % (url, fp.code,fp.msg)
+ )
+ cs = md5()
+ headers = fp.info()
+ blocknum = 0
+ bs = self.dl_blocksize
+ size = -1
+ if "content-length" in headers:
+ size = int(headers["Content-Length"])
+ self.reporthook(url, filename, blocknum, bs, size)
+ tfp = open(filename,'wb')
+ while True:
+ block = fp.read(bs)
+ if block:
+ cs.update(block)
+ tfp.write(block)
+ blocknum += 1
+ self.reporthook(url, filename, blocknum, bs, size)
+ else:
+ break
+ if info: self.check_md5(cs, info, filename, tfp)
+ return headers
+ finally:
+ if fp: fp.close()
+ if tfp: tfp.close()
+
+ def reporthook(self, url, filename, blocknum, blksize, size):
+ pass # no-op
+
+
+ def open_url(self, url, warning=None):
+ if url.startswith('file:'):
+ return local_open(url)
+ try:
+ return open_with_auth(url)
+ except (ValueError, httplib.InvalidURL), v:
+ msg = ' '.join([str(arg) for arg in v.args])
+ if warning:
+ self.warn(warning, msg)
+ else:
+ raise DistutilsError('%s %s' % (url, msg))
+ except urllib2.HTTPError, v:
+ return v
+ except urllib2.URLError, v:
+ if warning:
+ self.warn(warning, v.reason)
+ else:
+ raise DistutilsError("Download error for %s: %s"
+ % (url, v.reason))
+ except httplib.BadStatusLine, v:
+ if warning:
+ self.warn(warning, v.line)
+ else:
+ raise DistutilsError('%s returned a bad status line. '
+ 'The server might be down, %s' % \
+ (url, v.line))
+ except httplib.HTTPException, v:
+ if warning:
+ self.warn(warning, v)
+ else:
+ raise DistutilsError("Download error for %s: %s"
+ % (url, v))
+
+ def _download_url(self, scheme, url, tmpdir):
+ # Determine download filename
+ #
+ name = filter(None,urlparse.urlparse(url)[2].split('/'))
+ if name:
+ name = name[-1]
+ while '..' in name:
+ name = name.replace('..','.').replace('\\','_')
+ else:
+ name = "__downloaded__" # default if URL has no path contents
+
+ if name.endswith('.egg.zip'):
+ name = name[:-4] # strip the extra .zip before download
+
+ filename = os.path.join(tmpdir,name)
+
+ # Download the file
+ #
+ if scheme=='svn' or scheme.startswith('svn+'):
+ return self._download_svn(url, filename)
+ elif scheme=='file':
+ return urllib2.url2pathname(urlparse.urlparse(url)[2])
+ else:
+ self.url_ok(url, True) # raises error if not allowed
+ return self._attempt_download(url, filename)
+
+
+
+ def scan_url(self, url):
+ self.process_url(url, True)
+
+
+ def _attempt_download(self, url, filename):
+ headers = self._download_to(url, filename)
+ if 'html' in headers.get('content-type','').lower():
+ return self._download_html(url, headers, filename)
+ else:
+ return filename
+
+ def _download_html(self, url, headers, filename):
+ file = open(filename)
+ for line in file:
+ if line.strip():
+ # Check for a subversion index page
+ if re.search(r'<title>([^- ]+ - )?Revision \d+:', line):
+ # it's a subversion index page:
+ file.close()
+ os.unlink(filename)
+ return self._download_svn(url, filename)
+ break # not an index page
+ file.close()
+ os.unlink(filename)
+ raise DistutilsError("Unexpected HTML page found at "+url)
+
+ def _download_svn(self, url, filename):
+ url = url.split('#',1)[0] # remove any fragment for svn's sake
+ self.info("Doing subversion checkout from %s to %s", url, filename)
+ os.system("svn checkout -q %s %s" % (url, filename))
+ return filename
+
+ def debug(self, msg, *args):
+ log.debug(msg, *args)
+
+ def info(self, msg, *args):
+ log.info(msg, *args)
+
+ def warn(self, msg, *args):
+ log.warn(msg, *args)
+
+# This pattern matches a character entity reference (a decimal numeric
+# references, a hexadecimal numeric reference, or a named reference).
+entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub
+
+def uchr(c):
+ if not isinstance(c, int):
+ return c
+ if c>255: return unichr(c)
+ return chr(c)
+
+def decode_entity(match):
+ what = match.group(1)
+ if what.startswith('#x'):
+ what = int(what[2:], 16)
+ elif what.startswith('#'):
+ what = int(what[1:])
+ else:
+ from htmlentitydefs import name2codepoint
+ what = name2codepoint.get(what, match.group(0))
+ return uchr(what)
+
+def htmldecode(text):
+ """Decode HTML entities in the given text."""
+ return entity_sub(decode_entity, text)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+def socket_timeout(timeout=15):
+ def _socket_timeout(func):
+ def _socket_timeout(*args, **kwargs):
+ old_timeout = socket.getdefaulttimeout()
+ socket.setdefaulttimeout(timeout)
+ try:
+ return func(*args, **kwargs)
+ finally:
+ socket.setdefaulttimeout(old_timeout)
+ return _socket_timeout
+ return _socket_timeout
+
+
+def open_with_auth(url):
+ """Open a urllib2 request, handling HTTP authentication"""
+
+ scheme, netloc, path, params, query, frag = urlparse.urlparse(url)
+
+ if scheme in ('http', 'https'):
+ auth, host = urllib2.splituser(netloc)
+ else:
+ auth = None
+
+ if auth:
+ auth = "Basic " + urllib2.unquote(auth).encode('base64').strip()
+ new_url = urlparse.urlunparse((scheme,host,path,params,query,frag))
+ request = urllib2.Request(new_url)
+ request.add_header("Authorization", auth)
+ else:
+ request = urllib2.Request(url)
+
+ request.add_header('User-Agent', user_agent)
+ fp = urllib2.urlopen(request)
+
+ if auth:
+ # Put authentication info back into request URL if same host,
+ # so that links found on the page will work
+ s2, h2, path2, param2, query2, frag2 = urlparse.urlparse(fp.url)
+ if s2==scheme and h2==host:
+ fp.url = urlparse.urlunparse((s2,netloc,path2,param2,query2,frag2))
+
+ return fp
+
+# adding a timeout to avoid freezing package_index
+open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth)
+
+
+
+
+
+
+
+
+
+
+
+def fix_sf_url(url):
+ return url # backward compatibility
+
+def local_open(url):
+ """Read a local path, with special support for directories"""
+ scheme, server, path, param, query, frag = urlparse.urlparse(url)
+ filename = urllib2.url2pathname(path)
+ if os.path.isfile(filename):
+ return urllib2.urlopen(url)
+ elif path.endswith('/') and os.path.isdir(filename):
+ files = []
+ for f in os.listdir(filename):
+ if f=='index.html':
+ fp = open(os.path.join(filename,f),'rb')
+ body = fp.read()
+ fp.close()
+ break
+ elif os.path.isdir(os.path.join(filename,f)):
+ f+='/'
+ files.append("<a href=%r>%s</a>" % (f,f))
+ else:
+ body = ("<html><head><title>%s</title>" % url) + \
+ "</head><body>%s</body></html>" % '\n'.join(files)
+ status, message = 200, "OK"
+ else:
+ status, message, body = 404, "Path not found", "Not found"
+
+ return urllib2.HTTPError(url, status, message,
+ {'content-type':'text/html'}, cStringIO.StringIO(body))
+
+
+
+
+
+
+
+
+
+
+
+
+
+# this line is a kludge to keep the trailing blank lines for pje's editor
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/sandbox.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/sandbox.py
new file mode 100755
index 00000000..a06d4483
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/sandbox.py
@@ -0,0 +1,282 @@
+import os, sys, __builtin__, tempfile, operator
+_os = sys.modules[os.name]
+try:
+ _file = file
+except NameError:
+ _file = None
+_open = open
+from distutils.errors import DistutilsError
+__all__ = [
+ "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup",
+]
+def run_setup(setup_script, args):
+ """Run a distutils setup script, sandboxed in its directory"""
+ old_dir = os.getcwd()
+ save_argv = sys.argv[:]
+ save_path = sys.path[:]
+ setup_dir = os.path.abspath(os.path.dirname(setup_script))
+ temp_dir = os.path.join(setup_dir,'temp')
+ if not os.path.isdir(temp_dir): os.makedirs(temp_dir)
+ save_tmp = tempfile.tempdir
+ save_modules = sys.modules.copy()
+ try:
+ tempfile.tempdir = temp_dir
+ os.chdir(setup_dir)
+ try:
+ sys.argv[:] = [setup_script]+list(args)
+ sys.path.insert(0, setup_dir)
+ DirectorySandbox(setup_dir).run(
+ lambda: execfile(
+ "setup.py",
+ {'__file__':setup_script, '__name__':'__main__'}
+ )
+ )
+ except SystemExit, v:
+ if v.args and v.args[0]:
+ raise
+ # Normal exit, just return
+ finally:
+ sys.modules.update(save_modules)
+ for key in list(sys.modules):
+ if key not in save_modules: del sys.modules[key]
+ os.chdir(old_dir)
+ sys.path[:] = save_path
+ sys.argv[:] = save_argv
+ tempfile.tempdir = save_tmp
+
+class AbstractSandbox:
+ """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts"""
+
+ _active = False
+
+ def __init__(self):
+ self._attrs = [
+ name for name in dir(_os)
+ if not name.startswith('_') and hasattr(self,name)
+ ]
+
+ def _copy(self, source):
+ for name in self._attrs:
+ setattr(os, name, getattr(source,name))
+
+ def run(self, func):
+ """Run 'func' under os sandboxing"""
+ try:
+ self._copy(self)
+ if _file:
+ __builtin__.file = self._file
+ __builtin__.open = self._open
+ self._active = True
+ return func()
+ finally:
+ self._active = False
+ if _file:
+ __builtin__.file = _file
+ __builtin__.open = _open
+ self._copy(_os)
+
+
+ def _mk_dual_path_wrapper(name):
+ original = getattr(_os,name)
+ def wrap(self,src,dst,*args,**kw):
+ if self._active:
+ src,dst = self._remap_pair(name,src,dst,*args,**kw)
+ return original(src,dst,*args,**kw)
+ return wrap
+
+
+ for name in ["rename", "link", "symlink"]:
+ if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name)
+
+
+ def _mk_single_path_wrapper(name, original=None):
+ original = original or getattr(_os,name)
+ def wrap(self,path,*args,**kw):
+ if self._active:
+ path = self._remap_input(name,path,*args,**kw)
+ return original(path,*args,**kw)
+ return wrap
+
+ if _file:
+ _file = _mk_single_path_wrapper('file', _file)
+ _open = _mk_single_path_wrapper('open', _open)
+ for name in [
+ "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir",
+ "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat",
+ "startfile", "mkfifo", "mknod", "pathconf", "access"
+ ]:
+ if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name)
+
+
+ def _mk_single_with_return(name):
+ original = getattr(_os,name)
+ def wrap(self,path,*args,**kw):
+ if self._active:
+ path = self._remap_input(name,path,*args,**kw)
+ return self._remap_output(name, original(path,*args,**kw))
+ return original(path,*args,**kw)
+ return wrap
+
+ for name in ['readlink', 'tempnam']:
+ if hasattr(_os,name): locals()[name] = _mk_single_with_return(name)
+
+ def _mk_query(name):
+ original = getattr(_os,name)
+ def wrap(self,*args,**kw):
+ retval = original(*args,**kw)
+ if self._active:
+ return self._remap_output(name, retval)
+ return retval
+ return wrap
+
+ for name in ['getcwd', 'tmpnam']:
+ if hasattr(_os,name): locals()[name] = _mk_query(name)
+
+ def _validate_path(self,path):
+ """Called to remap or validate any path, whether input or output"""
+ return path
+
+ def _remap_input(self,operation,path,*args,**kw):
+ """Called for path inputs"""
+ return self._validate_path(path)
+
+ def _remap_output(self,operation,path):
+ """Called for path outputs"""
+ return self._validate_path(path)
+
+ def _remap_pair(self,operation,src,dst,*args,**kw):
+ """Called for path pairs like rename, link, and symlink operations"""
+ return (
+ self._remap_input(operation+'-from',src,*args,**kw),
+ self._remap_input(operation+'-to',dst,*args,**kw)
+ )
+
+
+if hasattr(os, 'devnull'):
+ _EXCEPTIONS = [os.devnull,]
+else:
+ _EXCEPTIONS = []
+
+try:
+ from win32com.client.gencache import GetGeneratePath
+ _EXCEPTIONS.append(GetGeneratePath())
+ del GetGeneratePath
+except ImportError:
+ # it appears pywin32 is not installed, so no need to exclude.
+ pass
+
+class DirectorySandbox(AbstractSandbox):
+ """Restrict operations to a single subdirectory - pseudo-chroot"""
+
+ write_ops = dict.fromkeys([
+ "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir",
+ "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam",
+ ])
+
+ def __init__(self, sandbox, exceptions=_EXCEPTIONS):
+ self._sandbox = os.path.normcase(os.path.realpath(sandbox))
+ self._prefix = os.path.join(self._sandbox,'')
+ self._exceptions = [os.path.normcase(os.path.realpath(path)) for path in exceptions]
+ AbstractSandbox.__init__(self)
+
+ def _violation(self, operation, *args, **kw):
+ raise SandboxViolation(operation, args, kw)
+
+ if _file:
+ def _file(self, path, mode='r', *args, **kw):
+ if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
+ self._violation("file", path, mode, *args, **kw)
+ return _file(path,mode,*args,**kw)
+
+ def _open(self, path, mode='r', *args, **kw):
+ if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
+ self._violation("open", path, mode, *args, **kw)
+ return _open(path,mode,*args,**kw)
+
+ def tmpnam(self):
+ self._violation("tmpnam")
+
+ def _ok(self,path):
+ active = self._active
+ try:
+ self._active = False
+ realpath = os.path.normcase(os.path.realpath(path))
+ if (self._exempted(realpath) or realpath == self._sandbox
+ or realpath.startswith(self._prefix)):
+ return True
+ finally:
+ self._active = active
+
+ def _exempted(self, filepath):
+ exception_matches = map(filepath.startswith, self._exceptions)
+ return True in exception_matches
+
+ def _remap_input(self,operation,path,*args,**kw):
+ """Called for path inputs"""
+ if operation in self.write_ops and not self._ok(path):
+ self._violation(operation, os.path.realpath(path), *args, **kw)
+ return path
+
+ def _remap_pair(self,operation,src,dst,*args,**kw):
+ """Called for path pairs like rename, link, and symlink operations"""
+ if not self._ok(src) or not self._ok(dst):
+ self._violation(operation, src, dst, *args, **kw)
+ return (src,dst)
+
+ def open(self, file, flags, mode=0777):
+ """Called for low-level os.open()"""
+ if flags & WRITE_FLAGS and not self._ok(file):
+ self._violation("os.open", file, flags, mode)
+ return _os.open(file,flags,mode)
+
+
+WRITE_FLAGS = reduce(
+ operator.or_,
+ [getattr(_os, a, 0) for a in
+ "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()]
+)
+
+
+
+
+class SandboxViolation(DistutilsError):
+ """A setup script attempted to modify the filesystem outside the sandbox"""
+
+ def __str__(self):
+ return """SandboxViolation: %s%r %s
+
+The package setup script has attempted to modify files on your system
+that are not within the EasyInstall build area, and has been aborted.
+
+This package cannot be safely installed by EasyInstall, and may not
+support alternate installation locations even if you run its setup
+script by hand. Please inform the package's author and the EasyInstall
+maintainers to find out if a fix or workaround is available.""" % self.args
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+#
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/__init__.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/__init__.py
new file mode 100755
index 00000000..9af44a88
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/__init__.py
@@ -0,0 +1,370 @@
+"""Tests for the 'setuptools' package"""
+from unittest import TestSuite, TestCase, makeSuite, defaultTestLoader
+import distutils.core, distutils.cmd
+from distutils.errors import DistutilsOptionError, DistutilsPlatformError
+from distutils.errors import DistutilsSetupError
+import setuptools, setuptools.dist
+from setuptools import Feature
+from distutils.core import Extension
+extract_constant, get_module_constant = None, None
+from setuptools.depends import *
+from distutils.version import StrictVersion, LooseVersion
+from distutils.util import convert_path
+import sys, os.path
+
+def additional_tests():
+ import doctest, unittest
+ suite = unittest.TestSuite((
+ doctest.DocFileSuite(
+ os.path.join('tests', 'api_tests.txt'),
+ optionflags=doctest.ELLIPSIS, package='pkg_resources',
+ ),
+ ))
+ if sys.platform == 'win32':
+ suite.addTest(doctest.DocFileSuite('win_script_wrapper.txt'))
+ return suite
+
+def makeSetup(**args):
+ """Return distribution from 'setup(**args)', without executing commands"""
+
+ distutils.core._setup_stop_after = "commandline"
+
+ # Don't let system command line leak into tests!
+ args.setdefault('script_args',['install'])
+
+ try:
+ return setuptools.setup(**args)
+ finally:
+ distutils.core_setup_stop_after = None
+
+
+
+
+class DependsTests(TestCase):
+
+ def testExtractConst(self):
+ if not extract_constant: return # skip on non-bytecode platforms
+
+ def f1():
+ global x,y,z
+ x = "test"
+ y = z
+
+ # unrecognized name
+ self.assertEqual(extract_constant(f1.func_code,'q', -1), None)
+
+ # constant assigned
+ self.assertEqual(extract_constant(f1.func_code,'x', -1), "test")
+
+ # expression assigned
+ self.assertEqual(extract_constant(f1.func_code,'y', -1), -1)
+
+ # recognized name, not assigned
+ self.assertEqual(extract_constant(f1.func_code,'z', -1), None)
+
+
+ def testFindModule(self):
+ self.assertRaises(ImportError, find_module, 'no-such.-thing')
+ self.assertRaises(ImportError, find_module, 'setuptools.non-existent')
+ f,p,i = find_module('setuptools.tests'); f.close()
+
+ def testModuleExtract(self):
+ if not get_module_constant: return # skip on non-bytecode platforms
+ from email import __version__
+ self.assertEqual(
+ get_module_constant('email','__version__'), __version__
+ )
+ self.assertEqual(
+ get_module_constant('sys','version'), sys.version
+ )
+ self.assertEqual(
+ get_module_constant('setuptools.tests','__doc__'),__doc__
+ )
+
+ def testRequire(self):
+ if not extract_constant: return # skip on non-bytecode platforms
+
+ req = Require('Email','1.0.3','email')
+
+ self.assertEqual(req.name, 'Email')
+ self.assertEqual(req.module, 'email')
+ self.assertEqual(req.requested_version, '1.0.3')
+ self.assertEqual(req.attribute, '__version__')
+ self.assertEqual(req.full_name(), 'Email-1.0.3')
+
+ from email import __version__
+ self.assertEqual(req.get_version(), __version__)
+ self.assert_(req.version_ok('1.0.9'))
+ self.assert_(not req.version_ok('0.9.1'))
+ self.assert_(not req.version_ok('unknown'))
+
+ self.assert_(req.is_present())
+ self.assert_(req.is_current())
+
+ req = Require('Email 3000','03000','email',format=LooseVersion)
+ self.assert_(req.is_present())
+ self.assert_(not req.is_current())
+ self.assert_(not req.version_ok('unknown'))
+
+ req = Require('Do-what-I-mean','1.0','d-w-i-m')
+ self.assert_(not req.is_present())
+ self.assert_(not req.is_current())
+
+ req = Require('Tests', None, 'tests', homepage="http://example.com")
+ self.assertEqual(req.format, None)
+ self.assertEqual(req.attribute, None)
+ self.assertEqual(req.requested_version, None)
+ self.assertEqual(req.full_name(), 'Tests')
+ self.assertEqual(req.homepage, 'http://example.com')
+
+ paths = [os.path.dirname(p) for p in __path__]
+ self.assert_(req.is_present(paths))
+ self.assert_(req.is_current(paths))
+
+
+class DistroTests(TestCase):
+
+ def setUp(self):
+ self.e1 = Extension('bar.ext',['bar.c'])
+ self.e2 = Extension('c.y', ['y.c'])
+
+ self.dist = makeSetup(
+ packages=['a', 'a.b', 'a.b.c', 'b', 'c'],
+ py_modules=['b.d','x'],
+ ext_modules = (self.e1, self.e2),
+ package_dir = {},
+ )
+
+
+ def testDistroType(self):
+ self.assert_(isinstance(self.dist,setuptools.dist.Distribution))
+
+
+ def testExcludePackage(self):
+ self.dist.exclude_package('a')
+ self.assertEqual(self.dist.packages, ['b','c'])
+
+ self.dist.exclude_package('b')
+ self.assertEqual(self.dist.packages, ['c'])
+ self.assertEqual(self.dist.py_modules, ['x'])
+ self.assertEqual(self.dist.ext_modules, [self.e1, self.e2])
+
+ self.dist.exclude_package('c')
+ self.assertEqual(self.dist.packages, [])
+ self.assertEqual(self.dist.py_modules, ['x'])
+ self.assertEqual(self.dist.ext_modules, [self.e1])
+
+ # test removals from unspecified options
+ makeSetup().exclude_package('x')
+
+
+
+
+
+
+
+ def testIncludeExclude(self):
+ # remove an extension
+ self.dist.exclude(ext_modules=[self.e1])
+ self.assertEqual(self.dist.ext_modules, [self.e2])
+
+ # add it back in
+ self.dist.include(ext_modules=[self.e1])
+ self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])
+
+ # should not add duplicate
+ self.dist.include(ext_modules=[self.e1])
+ self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])
+
+ def testExcludePackages(self):
+ self.dist.exclude(packages=['c','b','a'])
+ self.assertEqual(self.dist.packages, [])
+ self.assertEqual(self.dist.py_modules, ['x'])
+ self.assertEqual(self.dist.ext_modules, [self.e1])
+
+ def testEmpty(self):
+ dist = makeSetup()
+ dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
+ dist = makeSetup()
+ dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
+
+ def testContents(self):
+ self.assert_(self.dist.has_contents_for('a'))
+ self.dist.exclude_package('a')
+ self.assert_(not self.dist.has_contents_for('a'))
+
+ self.assert_(self.dist.has_contents_for('b'))
+ self.dist.exclude_package('b')
+ self.assert_(not self.dist.has_contents_for('b'))
+
+ self.assert_(self.dist.has_contents_for('c'))
+ self.dist.exclude_package('c')
+ self.assert_(not self.dist.has_contents_for('c'))
+
+
+
+
+ def testInvalidIncludeExclude(self):
+ self.assertRaises(DistutilsSetupError,
+ self.dist.include, nonexistent_option='x'
+ )
+ self.assertRaises(DistutilsSetupError,
+ self.dist.exclude, nonexistent_option='x'
+ )
+ self.assertRaises(DistutilsSetupError,
+ self.dist.include, packages={'x':'y'}
+ )
+ self.assertRaises(DistutilsSetupError,
+ self.dist.exclude, packages={'x':'y'}
+ )
+ self.assertRaises(DistutilsSetupError,
+ self.dist.include, ext_modules={'x':'y'}
+ )
+ self.assertRaises(DistutilsSetupError,
+ self.dist.exclude, ext_modules={'x':'y'}
+ )
+
+ self.assertRaises(DistutilsSetupError,
+ self.dist.include, package_dir=['q']
+ )
+ self.assertRaises(DistutilsSetupError,
+ self.dist.exclude, package_dir=['q']
+ )
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+class FeatureTests(TestCase):
+
+ def setUp(self):
+ self.req = Require('Distutils','1.0.3','distutils')
+ self.dist = makeSetup(
+ features={
+ 'foo': Feature("foo",standard=True,require_features=['baz',self.req]),
+ 'bar': Feature("bar", standard=True, packages=['pkg.bar'],
+ py_modules=['bar_et'], remove=['bar.ext'],
+ ),
+ 'baz': Feature(
+ "baz", optional=False, packages=['pkg.baz'],
+ scripts = ['scripts/baz_it'],
+ libraries=[('libfoo','foo/foofoo.c')]
+ ),
+ 'dwim': Feature("DWIM", available=False, remove='bazish'),
+ },
+ script_args=['--without-bar', 'install'],
+ packages = ['pkg.bar', 'pkg.foo'],
+ py_modules = ['bar_et', 'bazish'],
+ ext_modules = [Extension('bar.ext',['bar.c'])]
+ )
+
+ def testDefaults(self):
+ self.assert_(not
+ Feature(
+ "test",standard=True,remove='x',available=False
+ ).include_by_default()
+ )
+ self.assert_(
+ Feature("test",standard=True,remove='x').include_by_default()
+ )
+ # Feature must have either kwargs, removes, or require_features
+ self.assertRaises(DistutilsSetupError, Feature, "test")
+
+ def testAvailability(self):
+ self.assertRaises(
+ DistutilsPlatformError,
+ self.dist.features['dwim'].include_in, self.dist
+ )
+
+ def testFeatureOptions(self):
+ dist = self.dist
+ self.assert_(
+ ('with-dwim',None,'include DWIM') in dist.feature_options
+ )
+ self.assert_(
+ ('without-dwim',None,'exclude DWIM (default)') in dist.feature_options
+ )
+ self.assert_(
+ ('with-bar',None,'include bar (default)') in dist.feature_options
+ )
+ self.assert_(
+ ('without-bar',None,'exclude bar') in dist.feature_options
+ )
+ self.assertEqual(dist.feature_negopt['without-foo'],'with-foo')
+ self.assertEqual(dist.feature_negopt['without-bar'],'with-bar')
+ self.assertEqual(dist.feature_negopt['without-dwim'],'with-dwim')
+ self.assert_(not 'without-baz' in dist.feature_negopt)
+
+ def testUseFeatures(self):
+ dist = self.dist
+ self.assertEqual(dist.with_foo,1)
+ self.assertEqual(dist.with_bar,0)
+ self.assertEqual(dist.with_baz,1)
+ self.assert_(not 'bar_et' in dist.py_modules)
+ self.assert_(not 'pkg.bar' in dist.packages)
+ self.assert_('pkg.baz' in dist.packages)
+ self.assert_('scripts/baz_it' in dist.scripts)
+ self.assert_(('libfoo','foo/foofoo.c') in dist.libraries)
+ self.assertEqual(dist.ext_modules,[])
+ self.assertEqual(dist.require_features, [self.req])
+
+ # If we ask for bar, it should fail because we explicitly disabled
+ # it on the command line
+ self.assertRaises(DistutilsOptionError, dist.include_feature, 'bar')
+
+ def testFeatureWithInvalidRemove(self):
+ self.assertRaises(
+ SystemExit, makeSetup, features = {'x':Feature('x', remove='y')}
+ )
+
+class TestCommandTests(TestCase):
+
+ def testTestIsCommand(self):
+ test_cmd = makeSetup().get_command_obj('test')
+ self.assert_(isinstance(test_cmd, distutils.cmd.Command))
+
+ def testLongOptSuiteWNoDefault(self):
+ ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite'])
+ ts1 = ts1.get_command_obj('test')
+ ts1.ensure_finalized()
+ self.assertEqual(ts1.test_suite, 'foo.tests.suite')
+
+ def testDefaultSuite(self):
+ ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test')
+ ts2.ensure_finalized()
+ self.assertEqual(ts2.test_suite, 'bar.tests.suite')
+
+ def testDefaultWModuleOnCmdLine(self):
+ ts3 = makeSetup(
+ test_suite='bar.tests',
+ script_args=['test','-m','foo.tests']
+ ).get_command_obj('test')
+ ts3.ensure_finalized()
+ self.assertEqual(ts3.test_module, 'foo.tests')
+ self.assertEqual(ts3.test_suite, 'foo.tests.test_suite')
+
+ def testConflictingOptions(self):
+ ts4 = makeSetup(
+ script_args=['test','-m','bar.tests', '-s','foo.tests.suite']
+ ).get_command_obj('test')
+ self.assertRaises(DistutilsOptionError, ts4.ensure_finalized)
+
+ def testNoSuite(self):
+ ts5 = makeSetup().get_command_obj('test')
+ ts5.ensure_finalized()
+ self.assertEqual(ts5.test_suite, None)
+
+
+
+
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/doctest.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/doctest.py
new file mode 100755
index 00000000..be399a9d
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/doctest.py
@@ -0,0 +1,2679 @@
+# Module doctest.
+# Released to the public domain 16-Jan-2001, by Tim Peters (tim@python.org).
+# Major enhancements and refactoring by:
+# Jim Fulton
+# Edward Loper
+
+# Provided as-is; use at your own risk; no warranty; no promises; enjoy!
+
+try:
+ basestring
+except NameError:
+ basestring = str,unicode
+
+try:
+ enumerate
+except NameError:
+ def enumerate(seq):
+ return zip(range(len(seq)),seq)
+
+r"""Module doctest -- a framework for running examples in docstrings.
+
+In simplest use, end each module M to be tested with:
+
+def _test():
+ import doctest
+ doctest.testmod()
+
+if __name__ == "__main__":
+ _test()
+
+Then running the module as a script will cause the examples in the
+docstrings to get executed and verified:
+
+python M.py
+
+This won't display anything unless an example fails, in which case the
+failing example(s) and the cause(s) of the failure(s) are printed to stdout
+(why not stderr? because stderr is a lame hack <0.2 wink>), and the final
+line of output is "Test failed.".
+
+Run it with the -v switch instead:
+
+python M.py -v
+
+and a detailed report of all examples tried is printed to stdout, along
+with assorted summaries at the end.
+
+You can force verbose mode by passing "verbose=True" to testmod, or prohibit
+it by passing "verbose=False". In either of those cases, sys.argv is not
+examined by testmod.
+
+There are a variety of other ways to run doctests, including integration
+with the unittest framework, and support for running non-Python text
+files containing doctests. There are also many ways to override parts
+of doctest's default behaviors. See the Library Reference Manual for
+details.
+"""
+
+__docformat__ = 'reStructuredText en'
+
+__all__ = [
+    # 0. Option Flags
+ 'register_optionflag',
+ 'DONT_ACCEPT_TRUE_FOR_1',
+ 'DONT_ACCEPT_BLANKLINE',
+ 'NORMALIZE_WHITESPACE',
+ 'ELLIPSIS',
+ 'IGNORE_EXCEPTION_DETAIL',
+ 'COMPARISON_FLAGS',
+ 'REPORT_UDIFF',
+ 'REPORT_CDIFF',
+ 'REPORT_NDIFF',
+ 'REPORT_ONLY_FIRST_FAILURE',
+ 'REPORTING_FLAGS',
+ # 1. Utility Functions
+ 'is_private',
+ # 2. Example & DocTest
+ 'Example',
+ 'DocTest',
+ # 3. Doctest Parser
+ 'DocTestParser',
+ # 4. Doctest Finder
+ 'DocTestFinder',
+ # 5. Doctest Runner
+ 'DocTestRunner',
+ 'OutputChecker',
+ 'DocTestFailure',
+ 'UnexpectedException',
+ 'DebugRunner',
+ # 6. Test Functions
+ 'testmod',
+ 'testfile',
+ 'run_docstring_examples',
+ # 7. Tester
+ 'Tester',
+ # 8. Unittest Support
+ 'DocTestSuite',
+ 'DocFileSuite',
+ 'set_unittest_reportflags',
+ # 9. Debugging Support
+ 'script_from_examples',
+ 'testsource',
+ 'debug_src',
+ 'debug',
+]
+
+import __future__
+
+import sys, traceback, inspect, linecache, os, re, types
+import unittest, difflib, pdb, tempfile
+import warnings
+from StringIO import StringIO
+
+# Don't whine about the deprecated is_private function in this
+# module's tests.
+warnings.filterwarnings("ignore", "is_private", DeprecationWarning,
+ __name__, 0)
+
+# There are 4 basic classes:
+# - Example: a <source, want> pair, plus an intra-docstring line number.
+# - DocTest: a collection of examples, parsed from a docstring, plus
+# info about where the docstring came from (name, filename, lineno).
+# - DocTestFinder: extracts DocTests from a given object's docstring and
+# its contained objects' docstrings.
+# - DocTestRunner: runs DocTest cases, and accumulates statistics.
+#
+# So the basic picture is:
+#
+# list of:
+# +------+ +---------+ +-------+
+# |object| --DocTestFinder-> | DocTest | --DocTestRunner-> |results|
+# +------+ +---------+ +-------+
+# | Example |
+# | ... |
+# | Example |
+# +---------+
+
+# Option constants.
+
+OPTIONFLAGS_BY_NAME = {}
+def register_optionflag(name):
+ flag = 1 << len(OPTIONFLAGS_BY_NAME)
+ OPTIONFLAGS_BY_NAME[name] = flag
+ return flag
+
+DONT_ACCEPT_TRUE_FOR_1 = register_optionflag('DONT_ACCEPT_TRUE_FOR_1')
+DONT_ACCEPT_BLANKLINE = register_optionflag('DONT_ACCEPT_BLANKLINE')
+NORMALIZE_WHITESPACE = register_optionflag('NORMALIZE_WHITESPACE')
+ELLIPSIS = register_optionflag('ELLIPSIS')
+IGNORE_EXCEPTION_DETAIL = register_optionflag('IGNORE_EXCEPTION_DETAIL')
+
+COMPARISON_FLAGS = (DONT_ACCEPT_TRUE_FOR_1 |
+ DONT_ACCEPT_BLANKLINE |
+ NORMALIZE_WHITESPACE |
+ ELLIPSIS |
+ IGNORE_EXCEPTION_DETAIL)
+
+REPORT_UDIFF = register_optionflag('REPORT_UDIFF')
+REPORT_CDIFF = register_optionflag('REPORT_CDIFF')
+REPORT_NDIFF = register_optionflag('REPORT_NDIFF')
+REPORT_ONLY_FIRST_FAILURE = register_optionflag('REPORT_ONLY_FIRST_FAILURE')
+
+REPORTING_FLAGS = (REPORT_UDIFF |
+ REPORT_CDIFF |
+ REPORT_NDIFF |
+ REPORT_ONLY_FIRST_FAILURE)
+
+# Special string markers for use in `want` strings:
+BLANKLINE_MARKER = '<BLANKLINE>'
+ELLIPSIS_MARKER = '...'
+
+######################################################################
+## Table of Contents
+######################################################################
+# 1. Utility Functions
+# 2. Example & DocTest -- store test cases
+# 3. DocTest Parser -- extracts examples from strings
+# 4. DocTest Finder -- extracts test cases from objects
+# 5. DocTest Runner -- runs test cases
+# 6. Test Functions -- convenient wrappers for testing
+# 7. Tester Class -- for backwards compatibility
+# 8. Unittest Support
+# 9. Debugging Support
+# 10. Example Usage
+
+######################################################################
+## 1. Utility Functions
+######################################################################
+
+def is_private(prefix, base):
+ """prefix, base -> true iff name prefix + "." + base is "private".
+
+ Prefix may be an empty string, and base does not contain a period.
+ Prefix is ignored (although functions you write conforming to this
+ protocol may make use of it).
+ Return true iff base begins with an (at least one) underscore, but
+ does not both begin and end with (at least) two underscores.
+
+ >>> is_private("a.b", "my_func")
+ False
+ >>> is_private("____", "_my_func")
+ True
+ >>> is_private("someclass", "__init__")
+ False
+ >>> is_private("sometypo", "__init_")
+ True
+ >>> is_private("x.y.z", "_")
+ True
+ >>> is_private("_x.y.z", "__")
+ False
+ >>> is_private("", "") # senseless but consistent
+ False
+ """
+ warnings.warn("is_private is deprecated; it wasn't useful; "
+ "examine DocTestFinder.find() lists instead",
+ DeprecationWarning, stacklevel=2)
+ return base[:1] == "_" and not base[:2] == "__" == base[-2:]
+
+def _extract_future_flags(globs):
+ """
+ Return the compiler-flags associated with the future features that
+ have been imported into the given namespace (globs).
+ """
+ flags = 0
+ for fname in __future__.all_feature_names:
+ feature = globs.get(fname, None)
+ if feature is getattr(__future__, fname):
+ flags |= feature.compiler_flag
+ return flags
+
+def _normalize_module(module, depth=2):
+ """
+ Return the module specified by `module`. In particular:
+ - If `module` is a module, then return module.
+ - If `module` is a string, then import and return the
+ module with that name.
+ - If `module` is None, then return the calling module.
+ The calling module is assumed to be the module of
+ the stack frame at the given depth in the call stack.
+ """
+ if inspect.ismodule(module):
+ return module
+ elif isinstance(module, (str, unicode)):
+ return __import__(module, globals(), locals(), ["*"])
+ elif module is None:
+ return sys.modules[sys._getframe(depth).f_globals['__name__']]
+ else:
+ raise TypeError("Expected a module, string, or None")
+
+def _indent(s, indent=4):
+ """
+    Add the given number of space characters to the beginning of every
+ non-blank line in `s`, and return the result.
+ """
+ # This regexp matches the start of non-blank lines:
+ return re.sub('(?m)^(?!$)', indent*' ', s)
+
+def _exception_traceback(exc_info):
+ """
+ Return a string containing a traceback message for the given
+ exc_info tuple (as returned by sys.exc_info()).
+ """
+ # Get a traceback message.
+ excout = StringIO()
+ exc_type, exc_val, exc_tb = exc_info
+ traceback.print_exception(exc_type, exc_val, exc_tb, file=excout)
+ return excout.getvalue()
+
+# Override some StringIO methods.
+class _SpoofOut(StringIO):
+ def getvalue(self):
+ result = StringIO.getvalue(self)
+ # If anything at all was written, make sure there's a trailing
+ # newline. There's no way for the expected output to indicate
+ # that a trailing newline is missing.
+ if result and not result.endswith("\n"):
+ result += "\n"
+ # Prevent softspace from screwing up the next test case, in
+ # case they used print with a trailing comma in an example.
+ if hasattr(self, "softspace"):
+ del self.softspace
+ return result
+
+ def truncate(self, size=None):
+ StringIO.truncate(self, size)
+ if hasattr(self, "softspace"):
+ del self.softspace
+
+# Worst-case linear-time ellipsis matching.
+def _ellipsis_match(want, got):
+ """
+ Essentially the only subtle case:
+ >>> _ellipsis_match('aa...aa', 'aaa')
+ False
+ """
+ if want.find(ELLIPSIS_MARKER)==-1:
+ return want == got
+
+ # Find "the real" strings.
+ ws = want.split(ELLIPSIS_MARKER)
+ assert len(ws) >= 2
+
+ # Deal with exact matches possibly needed at one or both ends.
+ startpos, endpos = 0, len(got)
+ w = ws[0]
+ if w: # starts with exact match
+ if got.startswith(w):
+ startpos = len(w)
+ del ws[0]
+ else:
+ return False
+ w = ws[-1]
+ if w: # ends with exact match
+ if got.endswith(w):
+ endpos -= len(w)
+ del ws[-1]
+ else:
+ return False
+
+ if startpos > endpos:
+ # Exact end matches required more characters than we have, as in
+ # _ellipsis_match('aa...aa', 'aaa')
+ return False
+
+ # For the rest, we only need to find the leftmost non-overlapping
+ # match for each piece. If there's no overall match that way alone,
+ # there's no overall match period.
+ for w in ws:
+ # w may be '' at times, if there are consecutive ellipses, or
+ # due to an ellipsis at the start or end of `want`. That's OK.
+ # Search for an empty string succeeds, and doesn't change startpos.
+ startpos = got.find(w, startpos, endpos)
+ if startpos < 0:
+ return False
+ startpos += len(w)
+
+ return True
+
+def _comment_line(line):
+ "Return a commented form of the given line"
+ line = line.rstrip()
+ if line:
+ return '# '+line
+ else:
+ return '#'
+
+class _OutputRedirectingPdb(pdb.Pdb):
+ """
+ A specialized version of the python debugger that redirects stdout
+ to a given stream when interacting with the user. Stdout is *not*
+ redirected when traced code is executed.
+ """
+ def __init__(self, out):
+ self.__out = out
+ pdb.Pdb.__init__(self)
+
+ def trace_dispatch(self, *args):
+ # Redirect stdout to the given stream.
+ save_stdout = sys.stdout
+ sys.stdout = self.__out
+ # Call Pdb's trace dispatch method.
+ try:
+ return pdb.Pdb.trace_dispatch(self, *args)
+ finally:
+ sys.stdout = save_stdout
+
+# [XX] Normalize with respect to os.path.pardir?
+def _module_relative_path(module, path):
+ if not inspect.ismodule(module):
+ raise TypeError, 'Expected a module: %r' % module
+ if path.startswith('/'):
+ raise ValueError, 'Module-relative files may not have absolute paths'
+
+ # Find the base directory for the path.
+ if hasattr(module, '__file__'):
+ # A normal module/package
+ basedir = os.path.split(module.__file__)[0]
+ elif module.__name__ == '__main__':
+ # An interactive session.
+ if len(sys.argv)>0 and sys.argv[0] != '':
+ basedir = os.path.split(sys.argv[0])[0]
+ else:
+ basedir = os.curdir
+ else:
+ # A module w/o __file__ (this includes builtins)
+ raise ValueError("Can't resolve paths relative to the module " +
+ module + " (it has no __file__)")
+
+ # Combine the base directory and the path.
+ return os.path.join(basedir, *(path.split('/')))
+
+######################################################################
+## 2. Example & DocTest
+######################################################################
+## - An "example" is a <source, want> pair, where "source" is a
+## fragment of source code, and "want" is the expected output for
+## "source." The Example class also includes information about
+## where the example was extracted from.
+##
+## - A "doctest" is a collection of examples, typically extracted from
+## a string (such as an object's docstring). The DocTest class also
+## includes information about where the string was extracted from.
+
+class Example:
+ """
+ A single doctest example, consisting of source code and expected
+ output. `Example` defines the following attributes:
+
+ - source: A single Python statement, always ending with a newline.
+ The constructor adds a newline if needed.
+
+ - want: The expected output from running the source code (either
+ from stdout, or a traceback in case of exception). `want` ends
+ with a newline unless it's empty, in which case it's an empty
+ string. The constructor adds a newline if needed.
+
+ - exc_msg: The exception message generated by the example, if
+ the example is expected to generate an exception; or `None` if
+ it is not expected to generate an exception. This exception
+ message is compared against the return value of
+ `traceback.format_exception_only()`. `exc_msg` ends with a
+ newline unless it's `None`. The constructor adds a newline
+ if needed.
+
+ - lineno: The line number within the DocTest string containing
+ this Example where the Example begins. This line number is
+ zero-based, with respect to the beginning of the DocTest.
+
+ - indent: The example's indentation in the DocTest string.
+      I.e., the number of space characters that precede the
+ example's first prompt.
+
+ - options: A dictionary mapping from option flags to True or
+ False, which is used to override default options for this
+ example. Any option flags not contained in this dictionary
+ are left at their default value (as specified by the
+ DocTestRunner's optionflags). By default, no options are set.
+ """
+ def __init__(self, source, want, exc_msg=None, lineno=0, indent=0,
+ options=None):
+ # Normalize inputs.
+ if not source.endswith('\n'):
+ source += '\n'
+ if want and not want.endswith('\n'):
+ want += '\n'
+ if exc_msg is not None and not exc_msg.endswith('\n'):
+ exc_msg += '\n'
+ # Store properties.
+ self.source = source
+ self.want = want
+ self.lineno = lineno
+ self.indent = indent
+ if options is None: options = {}
+ self.options = options
+ self.exc_msg = exc_msg
+
+class DocTest:
+ """
+ A collection of doctest examples that should be run in a single
+ namespace. Each `DocTest` defines the following attributes:
+
+ - examples: the list of examples.
+
+ - globs: The namespace (aka globals) that the examples should
+ be run in.
+
+ - name: A name identifying the DocTest (typically, the name of
+ the object whose docstring this DocTest was extracted from).
+
+ - filename: The name of the file that this DocTest was extracted
+ from, or `None` if the filename is unknown.
+
+ - lineno: The line number within filename where this DocTest
+ begins, or `None` if the line number is unavailable. This
+ line number is zero-based, with respect to the beginning of
+ the file.
+
+ - docstring: The string that the examples were extracted from,
+ or `None` if the string is unavailable.
+ """
+ def __init__(self, examples, globs, name, filename, lineno, docstring):
+ """
+ Create a new DocTest containing the given examples. The
+ DocTest's globals are initialized with a copy of `globs`.
+ """
+ assert not isinstance(examples, basestring), \
+ "DocTest no longer accepts str; use DocTestParser instead"
+ self.examples = examples
+ self.docstring = docstring
+ self.globs = globs.copy()
+ self.name = name
+ self.filename = filename
+ self.lineno = lineno
+
+ def __repr__(self):
+ if len(self.examples) == 0:
+ examples = 'no examples'
+ elif len(self.examples) == 1:
+ examples = '1 example'
+ else:
+ examples = '%d examples' % len(self.examples)
+ return ('<DocTest %s from %s:%s (%s)>' %
+ (self.name, self.filename, self.lineno, examples))
+
+
+ # This lets us sort tests by name:
+ def __cmp__(self, other):
+ if not isinstance(other, DocTest):
+ return -1
+ return cmp((self.name, self.filename, self.lineno, id(self)),
+ (other.name, other.filename, other.lineno, id(other)))
+
+######################################################################
+## 3. DocTestParser
+######################################################################
+
+class DocTestParser:
+ """
+ A class used to parse strings containing doctest examples.
+ """
+ # This regular expression is used to find doctest examples in a
+ # string. It defines three groups: `source` is the source code
+ # (including leading indentation and prompts); `indent` is the
+ # indentation of the first (PS1) line of the source code; and
+ # `want` is the expected output (including leading indentation).
+ _EXAMPLE_RE = re.compile(r'''
+ # Source consists of a PS1 line followed by zero or more PS2 lines.
+ (?P<source>
+ (?:^(?P<indent> [ ]*) >>> .*) # PS1 line
+ (?:\n [ ]* \.\.\. .*)*) # PS2 lines
+ \n?
+ # Want consists of any non-blank lines that do not start with PS1.
+ (?P<want> (?:(?![ ]*$) # Not a blank line
+ (?![ ]*>>>) # Not a line starting with PS1
+ .*$\n? # But any other line
+ )*)
+ ''', re.MULTILINE | re.VERBOSE)
+
+ # A regular expression for handling `want` strings that contain
+ # expected exceptions. It divides `want` into three pieces:
+ # - the traceback header line (`hdr`)
+ # - the traceback stack (`stack`)
+ # - the exception message (`msg`), as generated by
+ # traceback.format_exception_only()
+ # `msg` may have multiple lines. We assume/require that the
+ # exception message is the first non-indented line starting with a word
+ # character following the traceback header line.
+ _EXCEPTION_RE = re.compile(r"""
+ # Grab the traceback header. Different versions of Python have
+ # said different things on the first traceback line.
+ ^(?P<hdr> Traceback\ \(
+ (?: most\ recent\ call\ last
+ | innermost\ last
+ ) \) :
+ )
+ \s* $ # toss trailing whitespace on the header.
+ (?P<stack> .*?) # don't blink: absorb stuff until...
+ ^ (?P<msg> \w+ .*) # a line *starts* with alphanum.
+ """, re.VERBOSE | re.MULTILINE | re.DOTALL)
+
+ # A callable returning a true value iff its argument is a blank line
+ # or contains a single comment.
+ _IS_BLANK_OR_COMMENT = re.compile(r'^[ ]*(#.*)?$').match
+
+ def parse(self, string, name='<string>'):
+ """
+ Divide the given string into examples and intervening text,
+ and return them as a list of alternating Examples and strings.
+ Line numbers for the Examples are 0-based. The optional
+ argument `name` is a name identifying this string, and is only
+ used for error messages.
+ """
+ string = string.expandtabs()
+ # If all lines begin with the same indentation, then strip it.
+ min_indent = self._min_indent(string)
+ if min_indent > 0:
+ string = '\n'.join([l[min_indent:] for l in string.split('\n')])
+
+ output = []
+ charno, lineno = 0, 0
+ # Find all doctest examples in the string:
+ for m in self._EXAMPLE_RE.finditer(string):
+ # Add the pre-example text to `output`.
+ output.append(string[charno:m.start()])
+ # Update lineno (lines before this example)
+ lineno += string.count('\n', charno, m.start())
+ # Extract info from the regexp match.
+ (source, options, want, exc_msg) = \
+ self._parse_example(m, name, lineno)
+ # Create an Example, and add it to the list.
+ if not self._IS_BLANK_OR_COMMENT(source):
+ output.append( Example(source, want, exc_msg,
+ lineno=lineno,
+ indent=min_indent+len(m.group('indent')),
+ options=options) )
+ # Update lineno (lines inside this example)
+ lineno += string.count('\n', m.start(), m.end())
+ # Update charno.
+ charno = m.end()
+ # Add any remaining post-example text to `output`.
+ output.append(string[charno:])
+ return output
+
+ def get_doctest(self, string, globs, name, filename, lineno):
+ """
+ Extract all doctest examples from the given string, and
+ collect them into a `DocTest` object.
+
+ `globs`, `name`, `filename`, and `lineno` are attributes for
+ the new `DocTest` object. See the documentation for `DocTest`
+ for more information.
+ """
+ return DocTest(self.get_examples(string, name), globs,
+ name, filename, lineno, string)
+
+ def get_examples(self, string, name='<string>'):
+ """
+ Extract all doctest examples from the given string, and return
+ them as a list of `Example` objects. Line numbers are
+ 0-based, because it's most common in doctests that nothing
+ interesting appears on the same line as opening triple-quote,
+ and so the first interesting line is called \"line 1\" then.
+
+ The optional argument `name` is a name identifying this
+ string, and is only used for error messages.
+ """
+ return [x for x in self.parse(string, name)
+ if isinstance(x, Example)]
+
+ def _parse_example(self, m, name, lineno):
+ """
+ Given a regular expression match from `_EXAMPLE_RE` (`m`),
+ return a pair `(source, want)`, where `source` is the matched
+ example's source code (with prompts and indentation stripped);
+ and `want` is the example's expected output (with indentation
+ stripped).
+
+ `name` is the string's name, and `lineno` is the line number
+ where the example starts; both are used for error messages.
+ """
+ # Get the example's indentation level.
+ indent = len(m.group('indent'))
+
+ # Divide source into lines; check that they're properly
+ # indented; and then strip their indentation & prompts.
+ source_lines = m.group('source').split('\n')
+ self._check_prompt_blank(source_lines, indent, name, lineno)
+ self._check_prefix(source_lines[1:], ' '*indent + '.', name, lineno)
+ source = '\n'.join([sl[indent+4:] for sl in source_lines])
+
+ # Divide want into lines; check that it's properly indented; and
+ # then strip the indentation. Spaces before the last newline should
+ # be preserved, so plain rstrip() isn't good enough.
+ want = m.group('want')
+ want_lines = want.split('\n')
+ if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]):
+ del want_lines[-1] # forget final newline & spaces after it
+ self._check_prefix(want_lines, ' '*indent, name,
+ lineno + len(source_lines))
+ want = '\n'.join([wl[indent:] for wl in want_lines])
+
+ # If `want` contains a traceback message, then extract it.
+ m = self._EXCEPTION_RE.match(want)
+ if m:
+ exc_msg = m.group('msg')
+ else:
+ exc_msg = None
+
+ # Extract options from the source.
+ options = self._find_options(source, name, lineno)
+
+ return source, options, want, exc_msg
+
+ # This regular expression looks for option directives in the
+ # source code of an example. Option directives are comments
+ # starting with "doctest:". Warning: this may give false
+ # positives for string-literals that contain the string
+ # "#doctest:". Eliminating these false positives would require
+ # actually parsing the string; but we limit them by ignoring any
+ # line containing "#doctest:" that is *followed* by a quote mark.
+ _OPTION_DIRECTIVE_RE = re.compile(r'#\s*doctest:\s*([^\n\'"]*)$',
+ re.MULTILINE)
+
+ def _find_options(self, source, name, lineno):
+ """
+ Return a dictionary containing option overrides extracted from
+ option directives in the given source string.
+
+ `name` is the string's name, and `lineno` is the line number
+ where the example starts; both are used for error messages.
+ """
+ options = {}
+ # (note: with the current regexp, this will match at most once:)
+ for m in self._OPTION_DIRECTIVE_RE.finditer(source):
+ option_strings = m.group(1).replace(',', ' ').split()
+ for option in option_strings:
+ if (option[0] not in '+-' or
+ option[1:] not in OPTIONFLAGS_BY_NAME):
+ raise ValueError('line %r of the doctest for %s '
+ 'has an invalid option: %r' %
+ (lineno+1, name, option))
+ flag = OPTIONFLAGS_BY_NAME[option[1:]]
+ options[flag] = (option[0] == '+')
+ if options and self._IS_BLANK_OR_COMMENT(source):
+ raise ValueError('line %r of the doctest for %s has an option '
+ 'directive on a line with no example: %r' %
+ (lineno, name, source))
+ return options
+
+ # This regular expression finds the indentation of every non-blank
+ # line in a string.
+ _INDENT_RE = re.compile('^([ ]*)(?=\S)', re.MULTILINE)
+
+ def _min_indent(self, s):
+ "Return the minimum indentation of any non-blank line in `s`"
+ indents = [len(indent) for indent in self._INDENT_RE.findall(s)]
+ if len(indents) > 0:
+ return min(indents)
+ else:
+ return 0
+
+ def _check_prompt_blank(self, lines, indent, name, lineno):
+ """
+ Given the lines of a source string (including prompts and
+ leading indentation), check to make sure that every prompt is
+ followed by a space character. If any line is not followed by
+ a space character, then raise ValueError.
+ """
+ for i, line in enumerate(lines):
+ if len(line) >= indent+4 and line[indent+3] != ' ':
+ raise ValueError('line %r of the docstring for %s '
+ 'lacks blank after %s: %r' %
+ (lineno+i+1, name,
+ line[indent:indent+3], line))
+
+ def _check_prefix(self, lines, prefix, name, lineno):
+ """
+ Check that every line in the given list starts with the given
+ prefix; if any line does not, then raise a ValueError.
+ """
+ for i, line in enumerate(lines):
+ if line and not line.startswith(prefix):
+ raise ValueError('line %r of the docstring for %s has '
+ 'inconsistent leading whitespace: %r' %
+ (lineno+i+1, name, line))
+
+
+######################################################################
+## 4. DocTest Finder
+######################################################################
+
+class DocTestFinder:
+ """
+ A class used to extract the DocTests that are relevant to a given
+ object, from its docstring and the docstrings of its contained
+ objects. Doctests can currently be extracted from the following
+ object types: modules, functions, classes, methods, staticmethods,
+ classmethods, and properties.
+ """
+
+ def __init__(self, verbose=False, parser=DocTestParser(),
+ recurse=True, _namefilter=None, exclude_empty=True):
+ """
+ Create a new doctest finder.
+
+ The optional argument `parser` specifies a class or
+ function that should be used to create new DocTest objects (or
+ objects that implement the same interface as DocTest). The
+ signature for this factory function should match the signature
+ of the DocTest constructor.
+
+ If the optional argument `recurse` is false, then `find` will
+ only examine the given object, and not any contained objects.
+
+ If the optional argument `exclude_empty` is false, then `find`
+ will include tests for objects with empty docstrings.
+ """
+ self._parser = parser
+ self._verbose = verbose
+ self._recurse = recurse
+ self._exclude_empty = exclude_empty
+ # _namefilter is undocumented, and exists only for temporary backward-
+ # compatibility support of testmod's deprecated isprivate mess.
+ self._namefilter = _namefilter
+
+ def find(self, obj, name=None, module=None, globs=None,
+ extraglobs=None):
+ """
+ Return a list of the DocTests that are defined by the given
+ object's docstring, or by any of its contained objects'
+ docstrings.
+
+ The optional parameter `module` is the module that contains
+ the given object. If the module is not specified or is None, then
+ the test finder will attempt to automatically determine the
+ correct module. The object's module is used:
+
+ - As a default namespace, if `globs` is not specified.
+ - To prevent the DocTestFinder from extracting DocTests
+ from objects that are imported from other modules.
+ - To find the name of the file containing the object.
+ - To help find the line number of the object within its
+ file.
+
+ Contained objects whose module does not match `module` are ignored.
+
+ If `module` is False, no attempt to find the module will be made.
+ This is obscure, of use mostly in tests: if `module` is False, or
+ is None but cannot be found automatically, then all objects are
+ considered to belong to the (non-existent) module, so all contained
+ objects will (recursively) be searched for doctests.
+
+ The globals for each DocTest is formed by combining `globs`
+ and `extraglobs` (bindings in `extraglobs` override bindings
+ in `globs`). A new copy of the globals dictionary is created
+ for each DocTest. If `globs` is not specified, then it
+ defaults to the module's `__dict__`, if specified, or {}
+ otherwise. If `extraglobs` is not specified, then it defaults
+ to {}.
+
+ """
+ # If name was not specified, then extract it from the object.
+ if name is None:
+ name = getattr(obj, '__name__', None)
+ if name is None:
+ raise ValueError("DocTestFinder.find: name must be given "
+ "when obj.__name__ doesn't exist: %r" %
+ (type(obj),))
+
+ # Find the module that contains the given object (if obj is
+ # a module, then module=obj.). Note: this may fail, in which
+ # case module will be None.
+ if module is False:
+ module = None
+ elif module is None:
+ module = inspect.getmodule(obj)
+
+ # Read the module's source code. This is used by
+ # DocTestFinder._find_lineno to find the line number for a
+ # given object's docstring.
+ try:
+ file = inspect.getsourcefile(obj) or inspect.getfile(obj)
+ source_lines = linecache.getlines(file)
+ if not source_lines:
+ source_lines = None
+ except TypeError:
+ source_lines = None
+
+ # Initialize globals, and merge in extraglobs.
+ if globs is None:
+ if module is None:
+ globs = {}
+ else:
+ globs = module.__dict__.copy()
+ else:
+ globs = globs.copy()
+ if extraglobs is not None:
+ globs.update(extraglobs)
+
+        # Recursively explore `obj`, extracting DocTests.
+ tests = []
+ self._find(tests, obj, name, module, source_lines, globs, {})
+ return tests
+
+ def _filter(self, obj, prefix, base):
+ """
+ Return true if the given object should not be examined.
+ """
+ return (self._namefilter is not None and
+ self._namefilter(prefix, base))
+
+ def _from_module(self, module, object):
+ """
+ Return true if the given object is defined in the given
+ module.
+ """
+ if module is None:
+ return True
+ elif inspect.isfunction(object):
+ return module.__dict__ is object.func_globals
+ elif inspect.isclass(object):
+ return module.__name__ == object.__module__
+ elif inspect.getmodule(object) is not None:
+ return module is inspect.getmodule(object)
+ elif hasattr(object, '__module__'):
+ return module.__name__ == object.__module__
+ elif isinstance(object, property):
+            return True # [XX] no way to be sure.
+ else:
+ raise ValueError("object must be a class or function")
+
    def _find(self, tests, obj, name, module, source_lines, globs, seen):
        """
        Find tests for the given object and any contained objects, and
        add them to `tests`.

        `seen` maps id(obj) -> 1 for objects already visited, so objects
        reachable by more than one path are processed only once.
        """
        if self._verbose:
            print 'Finding tests in %s' % name

        # If we've already processed this object, then ignore it.
        if id(obj) in seen:
            return
        seen[id(obj)] = 1

        # Find a test for this object, and add it to the list of tests.
        test = self._get_test(obj, name, module, globs, source_lines)
        if test is not None:
            tests.append(test)

        # Look for tests in a module's contained objects.
        if inspect.ismodule(obj) and self._recurse:
            for valname, val in obj.__dict__.items():
                # Check if this contained object should be ignored.
                if self._filter(val, name, valname):
                    continue
                valname = '%s.%s' % (name, valname)
                # Recurse to functions & classes.
                if ((inspect.isfunction(val) or inspect.isclass(val)) and
                    self._from_module(module, val)):
                    self._find(tests, val, valname, module, source_lines,
                               globs, seen)

        # Look for tests in a module's __test__ dictionary.  Its keys
        # must be strings; its values may be strings (tested directly),
        # functions, classes, methods, or modules.
        if inspect.ismodule(obj) and self._recurse:
            for valname, val in getattr(obj, '__test__', {}).items():
                if not isinstance(valname, basestring):
                    raise ValueError("DocTestFinder.find: __test__ keys "
                                     "must be strings: %r" %
                                     (type(valname),))
                if not (inspect.isfunction(val) or inspect.isclass(val) or
                        inspect.ismethod(val) or inspect.ismodule(val) or
                        isinstance(val, basestring)):
                    raise ValueError("DocTestFinder.find: __test__ values "
                                     "must be strings, functions, methods, "
                                     "classes, or modules: %r" %
                                     (type(val),))
                valname = '%s.__test__.%s' % (name, valname)
                self._find(tests, val, valname, module, source_lines,
                           globs, seen)

        # Look for tests in a class's contained objects.
        if inspect.isclass(obj) and self._recurse:
            for valname, val in obj.__dict__.items():
                # Check if this contained object should be ignored.
                if self._filter(val, name, valname):
                    continue
                # Special handling for staticmethod/classmethod: unwrap
                # the descriptor to reach the underlying function.
                if isinstance(val, staticmethod):
                    val = getattr(obj, valname)
                if isinstance(val, classmethod):
                    val = getattr(obj, valname).im_func

                # Recurse to methods, properties, and nested classes.
                if ((inspect.isfunction(val) or inspect.isclass(val) or
                     isinstance(val, property)) and
                    self._from_module(module, val)):
                    valname = '%s.%s' % (name, valname)
                    self._find(tests, val, valname, module, source_lines,
                               globs, seen)
+
    def _get_test(self, obj, name, module, globs, source_lines):
        """
        Return a DocTest for the given object, if it defines a docstring;
        otherwise, return None.

        Strings are treated as their own docstring.  When `module` is
        known, the reported filename is the module's source file (with
        .pyc/.pyo mapped back to .py).
        """
        # Extract the object's docstring.  If it doesn't have one,
        # then return None (no test for this object).
        if isinstance(obj, basestring):
            docstring = obj
        else:
            try:
                if obj.__doc__ is None:
                    docstring = ''
                else:
                    docstring = obj.__doc__
                    if not isinstance(docstring, basestring):
                        docstring = str(docstring)
            except (TypeError, AttributeError):
                docstring = ''

        # Find the docstring's location in the file.
        lineno = self._find_lineno(obj, source_lines)

        # Don't bother if the docstring is empty.
        if self._exclude_empty and not docstring:
            return None

        # Return a DocTest for this object.
        if module is None:
            filename = None
        else:
            filename = getattr(module, '__file__', module.__name__)
            # Map compiled-file names back to the source file.
            if filename[-4:] in (".pyc", ".pyo"):
                filename = filename[:-1]
        return self._parser.get_doctest(docstring, globs, name,
                                        filename, lineno)
+
    def _find_lineno(self, obj, source_lines):
        """
        Return a line number of the given object's docstring.  Note:
        this method assumes that the object has a docstring.

        Returns a 0-based index into `source_lines`, or None if the
        line number cannot be determined.
        """
        lineno = None

        # Find the line number for modules.
        if inspect.ismodule(obj):
            lineno = 0

        # Find the line number for classes.
        # Note: this could be fooled if a class is defined multiple
        # times in a single file.
        if inspect.isclass(obj):
            if source_lines is None:
                return None
            pat = re.compile(r'^\s*class\s*%s\b' %
                             getattr(obj, '__name__', '-'))
            for i, line in enumerate(source_lines):
                if pat.match(line):
                    lineno = i
                    break

        # Find the line number for functions & methods by unwrapping
        # down to the underlying code object.
        if inspect.ismethod(obj): obj = obj.im_func
        if inspect.isfunction(obj): obj = obj.func_code
        if inspect.istraceback(obj): obj = obj.tb_frame
        if inspect.isframe(obj): obj = obj.f_code
        if inspect.iscode(obj):
            # NOTE(review): the getattr default of None would make the
            # subtraction raise TypeError; code objects always carry
            # co_firstlineno, so this appears unreachable — confirm.
            lineno = getattr(obj, 'co_firstlineno', None)-1

        # Find the line number where the docstring starts.  Assume
        # that it's the first line that begins with a quote mark.
        # Note: this could be fooled by a multiline function
        # signature, where a continuation line begins with a quote
        # mark.
        if lineno is not None:
            if source_lines is None:
                return lineno+1
            pat = re.compile('(^|.*:)\s*\w*("|\')')
            for lineno in range(lineno, len(source_lines)):
                if pat.match(source_lines[lineno]):
                    return lineno

        # We couldn't find the line number.
        return None
+
+######################################################################
+## 5. DocTest Runner
+######################################################################
+
class DocTestRunner:
    """
    A class used to run DocTest test cases, and accumulate statistics.
    The `run` method is used to process a single DocTest case.  It
    returns a tuple `(f, t)`, where `t` is the number of test cases
    tried, and `f` is the number of test cases that failed.

        >>> tests = DocTestFinder().find(_TestClass)
        >>> runner = DocTestRunner(verbose=False)
        >>> for test in tests:
        ...     print runner.run(test)
        (0, 2)
        (0, 1)
        (0, 2)
        (0, 2)

    The `summarize` method prints a summary of all the test cases that
    have been run by the runner, and returns an aggregated `(f, t)`
    tuple:

        >>> runner.summarize(verbose=1)
        4 items passed all tests:
           2 tests in _TestClass
           2 tests in _TestClass.__init__
           2 tests in _TestClass.get
           1 tests in _TestClass.square
        7 tests in 4 items.
        7 passed and 0 failed.
        Test passed.
        (0, 7)

    The aggregated number of tried examples and failed examples is
    also available via the `tries` and `failures` attributes:

        >>> runner.tries
        7
        >>> runner.failures
        0

    The comparison between expected outputs and actual outputs is done
    by an `OutputChecker`.  This comparison may be customized with a
    number of option flags; see the documentation for `testmod` for
    more information.  If the option flags are insufficient, then the
    comparison may also be customized by passing a subclass of
    `OutputChecker` to the constructor.

    The test runner's display output can be controlled in two ways.
    First, an output function (`out`) can be passed to
    `TestRunner.run`; this function will be called with strings that
    should be displayed.  It defaults to `sys.stdout.write`.  If
    capturing the output is not sufficient, then the display output
    can be also customized by subclassing DocTestRunner, and
    overriding the methods `report_start`, `report_success`,
    `report_unexpected_exception`, and `report_failure`.
    """
    # This divider string is used to separate failure messages, and to
    # separate sections of the summary.
    DIVIDER = "*" * 70

    def __init__(self, checker=None, verbose=None, optionflags=0):
        """
        Create a new test runner.

        Optional keyword arg `checker` is the `OutputChecker` that
        should be used to compare the expected outputs and actual
        outputs of doctest examples.

        Optional keyword arg 'verbose' prints lots of stuff if true,
        only failures if false; by default, it's true iff '-v' is in
        sys.argv.

        Optional argument `optionflags` can be used to control how the
        test runner compares expected output to actual output, and how
        it displays failures.  See the documentation for `testmod` for
        more information.
        """
        self._checker = checker or OutputChecker()
        if verbose is None:
            verbose = '-v' in sys.argv
        self._verbose = verbose
        self.optionflags = optionflags
        self.original_optionflags = optionflags

        # Keep track of the examples we've run.
        self.tries = 0
        self.failures = 0
        # Maps test name -> (#failures, #tries), aggregated by
        # __record_outcome and reported by summarize().
        self._name2ft = {}

        # Create a fake output target for capturing doctest output.
        self._fakeout = _SpoofOut()

    #/////////////////////////////////////////////////////////////////
    # Reporting methods
    #/////////////////////////////////////////////////////////////////

    def report_start(self, out, test, example):
        """
        Report that the test runner is about to process the given
        example.  (Only displays a message if verbose=True)
        """
        if self._verbose:
            if example.want:
                out('Trying:\n' + _indent(example.source) +
                    'Expecting:\n' + _indent(example.want))
            else:
                out('Trying:\n' + _indent(example.source) +
                    'Expecting nothing\n')

    def report_success(self, out, test, example, got):
        """
        Report that the given example ran successfully.  (Only
        displays a message if verbose=True)
        """
        if self._verbose:
            out("ok\n")

    def report_failure(self, out, test, example, got):
        """
        Report that the given example failed.
        """
        out(self._failure_header(test, example) +
            self._checker.output_difference(example, got, self.optionflags))

    def report_unexpected_exception(self, out, test, example, exc_info):
        """
        Report that the given example raised an unexpected exception.
        """
        out(self._failure_header(test, example) +
            'Exception raised:\n' + _indent(_exception_traceback(exc_info)))

    def _failure_header(self, test, example):
        # Build the standard "File ..., line ..., in ..." header that
        # precedes every failure report.
        out = [self.DIVIDER]
        if test.filename:
            if test.lineno is not None and example.lineno is not None:
                lineno = test.lineno + example.lineno + 1
            else:
                lineno = '?'
            out.append('File "%s", line %s, in %s' %
                       (test.filename, lineno, test.name))
        else:
            out.append('Line %s, in %s' % (example.lineno+1, test.name))
        out.append('Failed example:')
        source = example.source
        out.append(_indent(source))
        return '\n'.join(out)

    #/////////////////////////////////////////////////////////////////
    # DocTest Running
    #/////////////////////////////////////////////////////////////////

    def __run(self, test, compileflags, out):
        """
        Run the examples in `test`.  Write the outcome of each example
        with one of the `DocTestRunner.report_*` methods, using the
        writer function `out`.  `compileflags` is the set of compiler
        flags that should be used to execute examples.  Return a tuple
        `(f, t)`, where `t` is the number of examples tried, and `f`
        is the number of examples that failed.  The examples are run
        in the namespace `test.globs`.

        Note: `self.debugger` and `self._fakeout` are installed by
        `run()`, which is the only caller of this method.
        """
        # Keep track of the number of failures and tries.
        failures = tries = 0

        # Save the option flags (since option directives can be used
        # to modify them).
        original_optionflags = self.optionflags

        SUCCESS, FAILURE, BOOM = range(3) # `outcome` state

        check = self._checker.check_output

        # Process each example.
        for examplenum, example in enumerate(test.examples):

            # If REPORT_ONLY_FIRST_FAILURE is set, then suppress
            # reporting after the first failure.
            quiet = (self.optionflags & REPORT_ONLY_FIRST_FAILURE and
                     failures > 0)

            # Merge in the example's options.
            self.optionflags = original_optionflags
            if example.options:
                for (optionflag, val) in example.options.items():
                    if val:
                        self.optionflags |= optionflag
                    else:
                        self.optionflags &= ~optionflag

            # Record that we started this example.
            tries += 1
            if not quiet:
                self.report_start(out, test, example)

            # Use a special filename for compile(), so we can retrieve
            # the source code during interactive debugging (see
            # __patched_linecache_getlines).
            filename = '<doctest %s[%d]>' % (test.name, examplenum)

            # Run the example in the given context (globs), and record
            # any exception that gets raised.  (But don't intercept
            # keyboard interrupts.)
            try:
                # Don't blink!  This is where the user's code gets run.
                exec compile(example.source, filename, "single",
                             compileflags, 1) in test.globs
                self.debugger.set_continue() # ==== Example Finished ====
                exception = None
            except KeyboardInterrupt:
                raise
            except:
                exception = sys.exc_info()
                self.debugger.set_continue() # ==== Example Finished ====

            got = self._fakeout.getvalue()  # the actual output
            self._fakeout.truncate(0)
            outcome = FAILURE   # guilty until proved innocent or insane

            # If the example executed without raising any exceptions,
            # verify its output.
            if exception is None:
                if check(example.want, got, self.optionflags):
                    outcome = SUCCESS

            # The example raised an exception:  check if it was expected.
            else:
                exc_info = sys.exc_info()
                exc_msg = traceback.format_exception_only(*exc_info[:2])[-1]
                if not quiet:
                    got += _exception_traceback(exc_info)

                # If `example.exc_msg` is None, then we weren't expecting
                # an exception.
                if example.exc_msg is None:
                    outcome = BOOM

                # We expected an exception:  see whether it matches.
                elif check(example.exc_msg, exc_msg, self.optionflags):
                    outcome = SUCCESS

                # Another chance if they didn't care about the detail.
                elif self.optionflags & IGNORE_EXCEPTION_DETAIL:
                    # Compare only the exception type prefix (up to the
                    # first colon), ignoring the message detail.
                    m1 = re.match(r'[^:]*:', example.exc_msg)
                    m2 = re.match(r'[^:]*:', exc_msg)
                    if m1 and m2 and check(m1.group(0), m2.group(0),
                                           self.optionflags):
                        outcome = SUCCESS

            # Report the outcome.
            if outcome is SUCCESS:
                if not quiet:
                    self.report_success(out, test, example, got)
            elif outcome is FAILURE:
                if not quiet:
                    self.report_failure(out, test, example, got)
                failures += 1
            elif outcome is BOOM:
                if not quiet:
                    self.report_unexpected_exception(out, test, example,
                                                     exc_info)
                failures += 1
            else:
                assert False, ("unknown outcome", outcome)

        # Restore the option flags (in case they were modified)
        self.optionflags = original_optionflags

        # Record and return the number of failures and tries.
        self.__record_outcome(test, failures, tries)
        return failures, tries

    def __record_outcome(self, test, f, t):
        """
        Record the fact that the given DocTest (`test`) generated `f`
        failures out of `t` tried examples.
        """
        f2, t2 = self._name2ft.get(test.name, (0,0))
        self._name2ft[test.name] = (f+f2, t+t2)
        self.failures += f
        self.tries += t

    # Matches the special filenames produced by __run(), e.g.
    # "<doctest some.test.name[3]>".
    __LINECACHE_FILENAME_RE = re.compile(r'<doctest '
                                         r'(?P<name>[\w\.]+)'
                                         r'\[(?P<examplenum>\d+)\]>$')
    def __patched_linecache_getlines(self, filename, module_globals=None):
        # Serve example source for the fake <doctest ...> filenames;
        # delegate everything else to the saved linecache.getlines
        # (passing module_globals only if the saved function takes it).
        m = self.__LINECACHE_FILENAME_RE.match(filename)
        if m and m.group('name') == self.test.name:
            example = self.test.examples[int(m.group('examplenum'))]
            return example.source.splitlines(True)
        elif self.save_linecache_getlines.func_code.co_argcount>1:
            return self.save_linecache_getlines(filename, module_globals)
        else:
            return self.save_linecache_getlines(filename)

    def run(self, test, compileflags=None, out=None, clear_globs=True):
        """
        Run the examples in `test`, and display the results using the
        writer function `out`.

        The examples are run in the namespace `test.globs`.  If
        `clear_globs` is true (the default), then this namespace will
        be cleared after the test runs, to help with garbage
        collection.  If you would like to examine the namespace after
        the test completes, then use `clear_globs=False`.

        `compileflags` gives the set of flags that should be used by
        the Python compiler when running the examples.  If not
        specified, then it will default to the set of future-import
        flags that apply to `globs`.

        The output of each example is checked using
        `DocTestRunner.check_output`, and the results are formatted by
        the `DocTestRunner.report_*` methods.
        """
        self.test = test

        if compileflags is None:
            compileflags = _extract_future_flags(test.globs)

        save_stdout = sys.stdout
        if out is None:
            out = save_stdout.write
        sys.stdout = self._fakeout

        # Patch pdb.set_trace to restore sys.stdout during interactive
        # debugging (so it's not still redirected to self._fakeout).
        # Note that the interactive output will go to *our*
        # save_stdout, even if that's not the real sys.stdout; this
        # allows us to write test cases for the set_trace behavior.
        save_set_trace = pdb.set_trace
        self.debugger = _OutputRedirectingPdb(save_stdout)
        self.debugger.reset()
        pdb.set_trace = self.debugger.set_trace

        # Patch linecache.getlines, so we can see the example's source
        # when we're inside the debugger.
        self.save_linecache_getlines = linecache.getlines
        linecache.getlines = self.__patched_linecache_getlines

        try:
            return self.__run(test, compileflags, out)
        finally:
            # Always undo the sys.stdout / pdb / linecache patching,
            # even if an example raised.
            sys.stdout = save_stdout
            pdb.set_trace = save_set_trace
            linecache.getlines = self.save_linecache_getlines
            if clear_globs:
                test.globs.clear()

    #/////////////////////////////////////////////////////////////////
    # Summarization
    #/////////////////////////////////////////////////////////////////
    def summarize(self, verbose=None):
        """
        Print a summary of all the test cases that have been run by
        this DocTestRunner, and return a tuple `(f, t)`, where `f` is
        the total number of failed examples, and `t` is the total
        number of tried examples.

        The optional `verbose` argument controls how detailed the
        summary is.  If the verbosity is not specified, then the
        DocTestRunner's verbosity is used.
        """
        if verbose is None:
            verbose = self._verbose
        notests = []
        passed = []
        failed = []
        totalt = totalf = 0
        for x in self._name2ft.items():
            name, (f, t) = x
            assert f <= t
            totalt += t
            totalf += f
            if t == 0:
                notests.append(name)
            elif f == 0:
                passed.append( (name, t) )
            else:
                failed.append(x)
        if verbose:
            if notests:
                print len(notests), "items had no tests:"
                notests.sort()
                for thing in notests:
                    print " ", thing
            if passed:
                print len(passed), "items passed all tests:"
                passed.sort()
                for thing, count in passed:
                    print " %3d tests in %s" % (count, thing)
        if failed:
            print self.DIVIDER
            print len(failed), "items had failures:"
            failed.sort()
            for thing, (f, t) in failed:
                print " %3d of %3d in %s" % (f, t, thing)
        if verbose:
            print totalt, "tests in", len(self._name2ft), "items."
            print totalt - totalf, "passed and", totalf, "failed."
        if totalf:
            print "***Test Failed***", totalf, "failures."
        elif verbose:
            print "Test passed."
        return totalf, totalt

    #/////////////////////////////////////////////////////////////////
    # Backward compatibility cruft to maintain doctest.master.
    #/////////////////////////////////////////////////////////////////
    def merge(self, other):
        # Fold another runner's per-test (failures, tries) counts into
        # this runner's _name2ft, summing counts for shared names.
        d = self._name2ft
        for name, (f, t) in other._name2ft.items():
            if name in d:
                print "*** DocTestRunner.merge: '" + name + "' in both" \
                    " testers; summing outcomes."
                f2, t2 = d[name]
                f = f + f2
                t = t + t2
            d[name] = f, t
+
class OutputChecker:
    """
    A class used to check whether the actual output from a doctest
    example matches the expected output.  `OutputChecker` defines two
    methods: `check_output`, which compares a given pair of outputs,
    and returns true if they match; and `output_difference`, which
    returns a string describing the differences between two outputs.
    """
    def check_output(self, want, got, optionflags):
        """
        Return True iff the actual output from an example (`got`)
        matches the expected output (`want`).  These strings are
        always considered to match if they are identical; but
        depending on what option flags the test runner is using,
        several non-exact match types are also possible.  See the
        documentation for `TestRunner` for more information about
        option flags.
        """
        # Handle the common case first, for efficiency:
        # if they're string-identical, always return true.
        if got == want:
            return True

        # The values True and False replaced 1 and 0 as the return
        # value for boolean comparisons in Python 2.3.
        if not (optionflags & DONT_ACCEPT_TRUE_FOR_1):
            if (got,want) == ("True\n", "1\n"):
                return True
            if (got,want) == ("False\n", "0\n"):
                return True

        # <BLANKLINE> can be used as a special sequence to signify a
        # blank line, unless the DONT_ACCEPT_BLANKLINE flag is used.
        if not (optionflags & DONT_ACCEPT_BLANKLINE):
            # Replace <BLANKLINE> in want with a blank line.
            want = re.sub('(?m)^%s\s*?$' % re.escape(BLANKLINE_MARKER),
                          '', want)
            # If a line in got contains only spaces, then remove the
            # spaces.
            got = re.sub('(?m)^\s*?$', '', got)
            if got == want:
                return True

        # This flag causes doctest to ignore any differences in the
        # contents of whitespace strings.  Note that this can be used
        # in conjunction with the ELLIPSIS flag.
        if optionflags & NORMALIZE_WHITESPACE:
            got = ' '.join(got.split())
            want = ' '.join(want.split())
            if got == want:
                return True

        # The ELLIPSIS flag says to let the sequence "..." in `want`
        # match any substring in `got`.
        if optionflags & ELLIPSIS:
            if _ellipsis_match(want, got):
                return True

        # We didn't find any match; return false.
        return False

    # Should we do a fancy diff?
    def _do_a_fancy_diff(self, want, got, optionflags):
        # Not unless they asked for a fancy diff.
        if not optionflags & (REPORT_UDIFF |
                              REPORT_CDIFF |
                              REPORT_NDIFF):
            return False

        # If expected output uses ellipsis, a meaningful fancy diff is
        # too hard ... or maybe not.  In two real-life failures Tim saw,
        # a diff was a major help anyway, so this is commented out.
        # [todo] _ellipsis_match() knows which pieces do and don't match,
        # and could be the basis for a kick-ass diff in this case.
        ##if optionflags & ELLIPSIS and ELLIPSIS_MARKER in want:
        ##    return False

        # ndiff does intraline difference marking, so can be useful even
        # for 1-line differences.
        if optionflags & REPORT_NDIFF:
            return True

        # The other diff types need at least a few lines to be helpful.
        return want.count('\n') > 2 and got.count('\n') > 2

    def output_difference(self, example, got, optionflags):
        """
        Return a string describing the differences between the
        expected output for a given example (`example`) and the actual
        output (`got`).  `optionflags` is the set of option flags used
        to compare `want` and `got`.
        """
        want = example.want
        # If <BLANKLINE>s are being used, then replace blank lines
        # with <BLANKLINE> in the actual output string.
        if not (optionflags & DONT_ACCEPT_BLANKLINE):
            got = re.sub('(?m)^[ ]*(?=\n)', BLANKLINE_MARKER, got)

        # Check if we should use diff.
        if self._do_a_fancy_diff(want, got, optionflags):
            # Split want & got into lines.
            want_lines = want.splitlines(True)  # True == keep line ends
            got_lines = got.splitlines(True)
            # Use difflib to find their differences.
            if optionflags & REPORT_UDIFF:
                diff = difflib.unified_diff(want_lines, got_lines, n=2)
                diff = list(diff)[2:] # strip the diff header
                kind = 'unified diff with -expected +actual'
            elif optionflags & REPORT_CDIFF:
                diff = difflib.context_diff(want_lines, got_lines, n=2)
                diff = list(diff)[2:] # strip the diff header
                kind = 'context diff with expected followed by actual'
            elif optionflags & REPORT_NDIFF:
                engine = difflib.Differ(charjunk=difflib.IS_CHARACTER_JUNK)
                diff = list(engine.compare(want_lines, got_lines))
                kind = 'ndiff with -expected +actual'
            else:
                assert 0, 'Bad diff option'
            # Remove trailing whitespace on diff output.
            diff = [line.rstrip() + '\n' for line in diff]
            return 'Differences (%s):\n' % kind + _indent(''.join(diff))

        # If we're not using diff, then simply list the expected
        # output followed by the actual output.
        if want and got:
            return 'Expected:\n%sGot:\n%s' % (_indent(want), _indent(got))
        elif want:
            return 'Expected:\n%sGot nothing\n' % _indent(want)
        elif got:
            return 'Expected nothing\nGot:\n%s' % _indent(got)
        else:
            return 'Expected nothing\nGot nothing\n'
+
class DocTestFailure(Exception):
    """A DocTest example has failed in debugging mode.

    The exception instance has variables:

    - test: the DocTest object being run

    - example: the Example object that failed

    - got: the actual output
    """
    def __init__(self, test, example, got):
        self.test = test
        self.example = example
        self.got = got

    def __str__(self):
        return str(self.test)
+
class UnexpectedException(Exception):
    """A DocTest example has encountered an unexpected exception.

    The exception instance has variables:

    - test: the DocTest object being run

    - example: the Example object that failed

    - exc_info: the exception info
    """
    def __init__(self, test, example, exc_info):
        self.test = test
        self.example = example
        self.exc_info = exc_info

    def __str__(self):
        return str(self.test)
+
class DebugRunner(DocTestRunner):
    r"""Run doc tests but raise an exception as soon as there is a failure.

    If an unexpected exception occurs, an UnexpectedException is raised.
    It contains the test, the example, and the original exception:

      >>> runner = DebugRunner(verbose=False)
      >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42',
      ...                                    {}, 'foo', 'foo.py', 0)
      >>> try:
      ...     runner.run(test)
      ... except UnexpectedException, failure:
      ...     pass

      >>> failure.test is test
      True

      >>> failure.example.want
      '42\n'

      >>> exc_info = failure.exc_info
      >>> raise exc_info[0], exc_info[1], exc_info[2]
      Traceback (most recent call last):
      ...
      KeyError

    We wrap the original exception to give the calling application
    access to the test and example information.

    If the output doesn't match, then a DocTestFailure is raised:

      >>> test = DocTestParser().get_doctest('''
      ...      >>> x = 1
      ...      >>> x
      ...      2
      ...      ''', {}, 'foo', 'foo.py', 0)

      >>> try:
      ...    runner.run(test)
      ... except DocTestFailure, failure:
      ...    pass

    DocTestFailure objects provide access to the test:

      >>> failure.test is test
      True

    As well as to the example:

      >>> failure.example.want
      '2\n'

    and the actual output:

      >>> failure.got
      '1\n'

    If a failure or error occurs, the globals are left intact:

      >>> del test.globs['__builtins__']
      >>> test.globs
      {'x': 1}

      >>> test = DocTestParser().get_doctest('''
      ...      >>> x = 2
      ...      >>> raise KeyError
      ...      ''', {}, 'foo', 'foo.py', 0)

      >>> runner.run(test)
      Traceback (most recent call last):
      ...
      UnexpectedException: <DocTest foo from foo.py:0 (2 examples)>

      >>> del test.globs['__builtins__']
      >>> test.globs
      {'x': 2}

    But the globals are cleared if there is no error:

      >>> test = DocTestParser().get_doctest('''
      ...      >>> x = 2
      ...      ''', {}, 'foo', 'foo.py', 0)

      >>> runner.run(test)
      (0, 1)

      >>> test.globs
      {}

    """

    def run(self, test, compileflags=None, out=None, clear_globs=True):
        # Run with clear_globs=False so a failure leaves test.globs
        # intact for post-mortem inspection; clear only on success.
        r = DocTestRunner.run(self, test, compileflags, out, False)
        if clear_globs:
            test.globs.clear()
        return r

    def report_unexpected_exception(self, out, test, example, exc_info):
        # Abort the run immediately instead of recording the failure.
        raise UnexpectedException(test, example, exc_info)

    def report_failure(self, out, test, example, got):
        # Abort the run immediately instead of recording the failure.
        raise DocTestFailure(test, example, got)
+
######################################################################
## 6. Test Functions
######################################################################
# These should be backwards compatible.

# For backward compatibility, a global instance of a DocTestRunner
# class, updated by testmod.
master = None
+
def testmod(m=None, name=None, globs=None, verbose=None, isprivate=None,
            report=True, optionflags=0, extraglobs=None,
            raise_on_error=False, exclude_empty=False):
    """m=None, name=None, globs=None, verbose=None, isprivate=None,
       report=True, optionflags=0, extraglobs=None, raise_on_error=False,
       exclude_empty=False

    Test examples in docstrings in functions and classes reachable
    from module m (or the current module if m is not supplied), starting
    with m.__doc__.  Unless isprivate is specified, private names
    are not skipped.

    Also test examples reachable from dict m.__test__ if it exists and is
    not None.  m.__test__ maps names to functions, classes and strings;
    function and class docstrings are tested even if the name is private;
    strings are tested directly, as if they were docstrings.

    Return (#failures, #tests).

    See doctest.__doc__ for an overview.

    Optional keyword arg "name" gives the name of the module; by default
    use m.__name__.

    Optional keyword arg "globs" gives a dict to be used as the globals
    when executing examples; by default, use m.__dict__.  A copy of this
    dict is actually used for each docstring, so that each docstring's
    examples start with a clean slate.

    Optional keyword arg "extraglobs" gives a dictionary that should be
    merged into the globals that are used to execute examples.  By
    default, no extra globals are used.  This is new in 2.4.

    Optional keyword arg "verbose" prints lots of stuff if true, prints
    only failures if false; by default, it's true iff "-v" is in sys.argv.

    Optional keyword arg "report" prints a summary at the end when true,
    else prints nothing at the end.  In verbose mode, the summary is
    detailed, else very brief (in fact, empty if all tests passed).

    Optional keyword arg "optionflags" or's together module constants,
    and defaults to 0.  This is new in 2.3.  Possible values (see the
    docs for details):

        DONT_ACCEPT_TRUE_FOR_1
        DONT_ACCEPT_BLANKLINE
        NORMALIZE_WHITESPACE
        ELLIPSIS
        IGNORE_EXCEPTION_DETAIL
        REPORT_UDIFF
        REPORT_CDIFF
        REPORT_NDIFF
        REPORT_ONLY_FIRST_FAILURE

    Optional keyword arg "raise_on_error" raises an exception on the
    first unexpected exception or failure. This allows failures to be
    post-mortem debugged.

    Deprecated in Python 2.4:
    Optional keyword arg "isprivate" specifies a function used to
    determine whether a name is private.  The default function is
    to treat all functions as public.  Optionally, "isprivate" can be
    set to doctest.is_private to skip over functions marked as private
    using the underscore naming convention; see its docs for details.

    Advanced tomfoolery:  testmod runs methods of a local instance of
    class doctest.Tester, then merges the results into (or creates)
    global Tester instance doctest.master.  Methods of doctest.master
    can be called directly too, if you want to do something unusual.
    Passing report=0 to testmod is especially useful then, to delay
    displaying a summary.  Invoke doctest.master.summarize(verbose)
    when you're done fiddling.
    """
    global master

    if isprivate is not None:
        warnings.warn("the isprivate argument is deprecated; "
                      "examine DocTestFinder.find() lists instead",
                      DeprecationWarning)

    # If no module was given, then use __main__.
    if m is None:
        # DWA - m will still be None if this wasn't invoked from the command
        # line, in which case the following TypeError is about as good an error
        # as we should expect
        m = sys.modules.get('__main__')

    # Check that we were actually given a module.
    if not inspect.ismodule(m):
        raise TypeError("testmod: module required; %r" % (m,))

    # If no name was given, then use the module's name.
    if name is None:
        name = m.__name__

    # Find, parse, and run all tests in the given module.
    finder = DocTestFinder(_namefilter=isprivate, exclude_empty=exclude_empty)

    if raise_on_error:
        runner = DebugRunner(verbose=verbose, optionflags=optionflags)
    else:
        runner = DocTestRunner(verbose=verbose, optionflags=optionflags)

    for test in finder.find(m, name, globs=globs, extraglobs=extraglobs):
        runner.run(test)

    if report:
        runner.summarize()

    # Merge results into the global backward-compatibility runner.
    if master is None:
        master = runner
    else:
        master.merge(runner)

    return runner.failures, runner.tries
+
def testfile(filename, module_relative=True, name=None, package=None,
             globs=None, verbose=None, report=True, optionflags=0,
             extraglobs=None, raise_on_error=False, parser=DocTestParser()):
    """
    Test examples in the given file.  Return (#failures, #tests).

    Optional keyword arg "module_relative" specifies how filenames
    should be interpreted:

      - If "module_relative" is True (the default), then "filename"
        specifies a module-relative path.  By default, this path is
        relative to the calling module's directory; but if the
        "package" argument is specified, then it is relative to that
        package.  To ensure os-independence, "filename" should use
        "/" characters to separate path segments, and should not
        be an absolute path (i.e., it may not begin with "/").

      - If "module_relative" is False, then "filename" specifies an
        os-specific path.  The path may be absolute or relative (to
        the current working directory).

    Optional keyword arg "name" gives the name of the test; by default
    use the file's basename.

    Optional keyword argument "package" is a Python package or the
    name of a Python package whose directory should be used as the
    base directory for a module relative filename.  If no package is
    specified, then the calling module's directory is used as the base
    directory for module relative filenames.  It is an error to
    specify "package" if "module_relative" is False.

    Optional keyword arg "globs" gives a dict to be used as the globals
    when executing examples; by default, use {}.  A copy of this dict
    is actually used for each docstring, so that each docstring's
    examples start with a clean slate.

    Optional keyword arg "extraglobs" gives a dictionary that should be
    merged into the globals that are used to execute examples.  By
    default, no extra globals are used.

    Optional keyword arg "verbose" prints lots of stuff if true, prints
    only failures if false; by default, it's true iff "-v" is in sys.argv.

    Optional keyword arg "report" prints a summary at the end when true,
    else prints nothing at the end.  In verbose mode, the summary is
    detailed, else very brief (in fact, empty if all tests passed).

    Optional keyword arg "optionflags" or's together module constants,
    and defaults to 0.  Possible values (see the docs for details):

        DONT_ACCEPT_TRUE_FOR_1
        DONT_ACCEPT_BLANKLINE
        NORMALIZE_WHITESPACE
        ELLIPSIS
        IGNORE_EXCEPTION_DETAIL
        REPORT_UDIFF
        REPORT_CDIFF
        REPORT_NDIFF
        REPORT_ONLY_FIRST_FAILURE

    Optional keyword arg "raise_on_error" raises an exception on the
    first unexpected exception or failure. This allows failures to be
    post-mortem debugged.

    Optional keyword arg "parser" specifies a DocTestParser (or
    subclass) that should be used to extract tests from the files.

    Advanced tomfoolery:  testmod runs methods of a local instance of
    class doctest.Tester, then merges the results into (or creates)
    global Tester instance doctest.master.  Methods of doctest.master
    can be called directly too, if you want to do something unusual.
    Passing report=0 to testmod is especially useful then, to delay
    displaying a summary.  Invoke doctest.master.summarize(verbose)
    when you're done fiddling.
    """
    global master

    if package and not module_relative:
        raise ValueError("Package may only be specified for module-"
                         "relative paths.")

    # Relativize the path
    if module_relative:
        package = _normalize_module(package)
        filename = _module_relative_path(package, filename)

    # If no name was given, then use the file's name.
    if name is None:
        name = os.path.basename(filename)

    # Assemble the globals.
    if globs is None:
        globs = {}
    else:
        globs = globs.copy()
    if extraglobs is not None:
        globs.update(extraglobs)

    if raise_on_error:
        runner = DebugRunner(verbose=verbose, optionflags=optionflags)
    else:
        runner = DocTestRunner(verbose=verbose, optionflags=optionflags)

    # Read the file, convert it to a test, and run it.  Close the file
    # explicitly rather than relying on garbage collection, so the
    # handle is not leaked on interpreters without refcounting.
    f = open(filename)
    try:
        s = f.read()
    finally:
        f.close()
    test = parser.get_doctest(s, globs, name, filename, 0)
    runner.run(test)

    if report:
        runner.summarize()

    # Merge results into the global backward-compatibility runner.
    if master is None:
        master = runner
    else:
        master.merge(runner)

    return runner.failures, runner.tries
+
+def run_docstring_examples(f, globs, verbose=False, name="NoName",
+ compileflags=None, optionflags=0):
+ """
+ Test examples in the given object's docstring (`f`), using `globs`
+ as globals. Optional argument `name` is used in failure messages.
+ If the optional argument `verbose` is true, then generate output
+ even if there are no failures.
+
+ `compileflags` gives the set of flags that should be used by the
+ Python compiler when running the examples. If not specified, then
+ it will default to the set of future-import flags that apply to
+ `globs`.
+
+ Optional keyword arg `optionflags` specifies options for the
+ testing and output. See the documentation for `testmod` for more
+ information.
+ """
+ # Find, parse, and run all tests in the given module.
+ finder = DocTestFinder(verbose=verbose, recurse=False)
+ runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
+ for test in finder.find(f, name, globs=globs):
+ runner.run(test, compileflags=compileflags)
+
+######################################################################
+## 7. Tester
+######################################################################
+# This is provided only for backwards compatibility. It's not
+# actually used in any way.
+
+class Tester:
+ def __init__(self, mod=None, globs=None, verbose=None,
+ isprivate=None, optionflags=0):
+
+ warnings.warn("class Tester is deprecated; "
+ "use class doctest.DocTestRunner instead",
+ DeprecationWarning, stacklevel=2)
+ if mod is None and globs is None:
+ raise TypeError("Tester.__init__: must specify mod or globs")
+ if mod is not None and not inspect.ismodule(mod):
+ raise TypeError("Tester.__init__: mod must be a module; %r" %
+ (mod,))
+ if globs is None:
+ globs = mod.__dict__
+ self.globs = globs
+
+ self.verbose = verbose
+ self.isprivate = isprivate
+ self.optionflags = optionflags
+ self.testfinder = DocTestFinder(_namefilter=isprivate)
+ self.testrunner = DocTestRunner(verbose=verbose,
+ optionflags=optionflags)
+
+ def runstring(self, s, name):
+ test = DocTestParser().get_doctest(s, self.globs, name, None, None)
+ if self.verbose:
+ print "Running string", name
+ (f,t) = self.testrunner.run(test)
+ if self.verbose:
+ print f, "of", t, "examples failed in string", name
+ return (f,t)
+
+ def rundoc(self, object, name=None, module=None):
+ f = t = 0
+ tests = self.testfinder.find(object, name, module=module,
+ globs=self.globs)
+ for test in tests:
+ (f2, t2) = self.testrunner.run(test)
+ (f,t) = (f+f2, t+t2)
+ return (f,t)
+
+ def rundict(self, d, name, module=None):
+ import types
+ m = types.ModuleType(name)
+ m.__dict__.update(d)
+ if module is None:
+ module = False
+ return self.rundoc(m, name, module)
+
+ def run__test__(self, d, name):
+ import types
+ m = types.ModuleType(name)
+ m.__test__ = d
+ return self.rundoc(m, name)
+
+ def summarize(self, verbose=None):
+ return self.testrunner.summarize(verbose)
+
+ def merge(self, other):
+ self.testrunner.merge(other.testrunner)
+
+######################################################################
+## 8. Unittest Support
+######################################################################
+
+_unittest_reportflags = 0
+
+def set_unittest_reportflags(flags):
+ """Sets the unittest option flags.
+
+ The old flag is returned so that a runner could restore the old
+ value if it wished to:
+
+ >>> old = _unittest_reportflags
+ >>> set_unittest_reportflags(REPORT_NDIFF |
+ ... REPORT_ONLY_FIRST_FAILURE) == old
+ True
+
+ >>> import doctest
+ >>> doctest._unittest_reportflags == (REPORT_NDIFF |
+ ... REPORT_ONLY_FIRST_FAILURE)
+ True
+
+ Only reporting flags can be set:
+
+ >>> set_unittest_reportflags(ELLIPSIS)
+ Traceback (most recent call last):
+ ...
+ ValueError: ('Only reporting flags allowed', 8)
+
+ >>> set_unittest_reportflags(old) == (REPORT_NDIFF |
+ ... REPORT_ONLY_FIRST_FAILURE)
+ True
+ """
+ global _unittest_reportflags
+
+ if (flags & REPORTING_FLAGS) != flags:
+ raise ValueError("Only reporting flags allowed", flags)
+ old = _unittest_reportflags
+ _unittest_reportflags = flags
+ return old
+
+
+class DocTestCase(unittest.TestCase):
+
+ def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
+ checker=None):
+
+ unittest.TestCase.__init__(self)
+ self._dt_optionflags = optionflags
+ self._dt_checker = checker
+ self._dt_test = test
+ self._dt_setUp = setUp
+ self._dt_tearDown = tearDown
+
+ def setUp(self):
+ test = self._dt_test
+
+ if self._dt_setUp is not None:
+ self._dt_setUp(test)
+
+ def tearDown(self):
+ test = self._dt_test
+
+ if self._dt_tearDown is not None:
+ self._dt_tearDown(test)
+
+ test.globs.clear()
+
+ def runTest(self):
+ test = self._dt_test
+ old = sys.stdout
+ new = StringIO()
+ optionflags = self._dt_optionflags
+
+ if not (optionflags & REPORTING_FLAGS):
+ # The option flags don't include any reporting flags,
+ # so add the default reporting flags
+ optionflags |= _unittest_reportflags
+
+ runner = DocTestRunner(optionflags=optionflags,
+ checker=self._dt_checker, verbose=False)
+
+ try:
+ runner.DIVIDER = "-"*70
+ failures, tries = runner.run(
+ test, out=new.write, clear_globs=False)
+ finally:
+ sys.stdout = old
+
+ if failures:
+ raise self.failureException(self.format_failure(new.getvalue()))
+
+ def format_failure(self, err):
+ test = self._dt_test
+ if test.lineno is None:
+ lineno = 'unknown line number'
+ else:
+ lineno = '%s' % test.lineno
+ lname = '.'.join(test.name.split('.')[-1:])
+ return ('Failed doctest test for %s\n'
+ ' File "%s", line %s, in %s\n\n%s'
+ % (test.name, test.filename, lineno, lname, err)
+ )
+
+ def debug(self):
+ r"""Run the test case without results and without catching exceptions
+
+ The unit test framework includes a debug method on test cases
+ and test suites to support post-mortem debugging. The test code
+ is run in such a way that errors are not caught. This way a
+ caller can catch the errors and initiate post-mortem debugging.
+
+ The DocTestCase provides a debug method that raises
+ UnexpectedException errors if there is an unexepcted
+ exception:
+
+ >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42',
+ ... {}, 'foo', 'foo.py', 0)
+ >>> case = DocTestCase(test)
+ >>> try:
+ ... case.debug()
+ ... except UnexpectedException, failure:
+ ... pass
+
+ The UnexpectedException contains the test, the example, and
+ the original exception:
+
+ >>> failure.test is test
+ True
+
+ >>> failure.example.want
+ '42\n'
+
+ >>> exc_info = failure.exc_info
+ >>> raise exc_info[0], exc_info[1], exc_info[2]
+ Traceback (most recent call last):
+ ...
+ KeyError
+
+ If the output doesn't match, then a DocTestFailure is raised:
+
+ >>> test = DocTestParser().get_doctest('''
+ ... >>> x = 1
+ ... >>> x
+ ... 2
+ ... ''', {}, 'foo', 'foo.py', 0)
+ >>> case = DocTestCase(test)
+
+ >>> try:
+ ... case.debug()
+ ... except DocTestFailure, failure:
+ ... pass
+
+ DocTestFailure objects provide access to the test:
+
+ >>> failure.test is test
+ True
+
+ As well as to the example:
+
+ >>> failure.example.want
+ '2\n'
+
+ and the actual output:
+
+ >>> failure.got
+ '1\n'
+
+ """
+
+ self.setUp()
+ runner = DebugRunner(optionflags=self._dt_optionflags,
+ checker=self._dt_checker, verbose=False)
+ runner.run(self._dt_test)
+ self.tearDown()
+
+ def id(self):
+ return self._dt_test.name
+
+ def __repr__(self):
+ name = self._dt_test.name.split('.')
+ return "%s (%s)" % (name[-1], '.'.join(name[:-1]))
+
+ __str__ = __repr__
+
+ def shortDescription(self):
+ return "Doctest: " + self._dt_test.name
+
+def DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None,
+ **options):
+ """
+ Convert doctest tests for a module to a unittest test suite.
+
+ This converts each documentation string in a module that
+ contains doctest tests to a unittest test case. If any of the
+ tests in a doc string fail, then the test case fails. An exception
+ is raised showing the name of the file containing the test and a
+ (sometimes approximate) line number.
+
+ The `module` argument provides the module to be tested. The argument
+ can be either a module or a module name.
+
+ If no argument is given, the calling module is used.
+
+ A number of options may be provided as keyword arguments:
+
+ setUp
+ A set-up function. This is called before running the
+ tests in each file. The setUp function will be passed a DocTest
+ object. The setUp function can access the test globals as the
+ globs attribute of the test passed.
+
+ tearDown
+ A tear-down function. This is called after running the
+ tests in each file. The tearDown function will be passed a DocTest
+ object. The tearDown function can access the test globals as the
+ globs attribute of the test passed.
+
+ globs
+ A dictionary containing initial global variables for the tests.
+
+ optionflags
+ A set of doctest option flags expressed as an integer.
+ """
+
+ if test_finder is None:
+ test_finder = DocTestFinder()
+
+ module = _normalize_module(module)
+ tests = test_finder.find(module, globs=globs, extraglobs=extraglobs)
+ if globs is None:
+ globs = module.__dict__
+ if not tests:
+ # Why do we want to do this? Because it reveals a bug that might
+ # otherwise be hidden.
+ raise ValueError(module, "has no tests")
+
+ tests.sort()
+ suite = unittest.TestSuite()
+ for test in tests:
+ if len(test.examples) == 0:
+ continue
+ if not test.filename:
+ filename = module.__file__
+ if filename[-4:] in (".pyc", ".pyo"):
+ filename = filename[:-1]
+ test.filename = filename
+ suite.addTest(DocTestCase(test, **options))
+
+ return suite
+
+class DocFileCase(DocTestCase):
+
+ def id(self):
+ return '_'.join(self._dt_test.name.split('.'))
+
+ def __repr__(self):
+ return self._dt_test.filename
+ __str__ = __repr__
+
+ def format_failure(self, err):
+ return ('Failed doctest test for %s\n File "%s", line 0\n\n%s'
+ % (self._dt_test.name, self._dt_test.filename, err)
+ )
+
+def DocFileTest(path, module_relative=True, package=None,
+ globs=None, parser=DocTestParser(), **options):
+ if globs is None:
+ globs = {}
+
+ if package and not module_relative:
+ raise ValueError("Package may only be specified for module-"
+ "relative paths.")
+
+ # Relativize the path.
+ if module_relative:
+ package = _normalize_module(package)
+ path = _module_relative_path(package, path)
+
+ # Find the file and read it.
+ name = os.path.basename(path)
+ doc = open(path).read()
+
+ # Convert it to a test, and wrap it in a DocFileCase.
+ test = parser.get_doctest(doc, globs, name, path, 0)
+ return DocFileCase(test, **options)
+
+def DocFileSuite(*paths, **kw):
+ """A unittest suite for one or more doctest files.
+
+ The path to each doctest file is given as a string; the
+ interpretation of that string depends on the keyword argument
+ "module_relative".
+
+ A number of options may be provided as keyword arguments:
+
+ module_relative
+ If "module_relative" is True, then the given file paths are
+ interpreted as os-independent module-relative paths. By
+ default, these paths are relative to the calling module's
+ directory; but if the "package" argument is specified, then
+ they are relative to that package. To ensure os-independence,
+ "filename" should use "/" characters to separate path
+ segments, and may not be an absolute path (i.e., it may not
+ begin with "/").
+
+ If "module_relative" is False, then the given file paths are
+ interpreted as os-specific paths. These paths may be absolute
+ or relative (to the current working directory).
+
+ package
+ A Python package or the name of a Python package whose directory
+ should be used as the base directory for module relative paths.
+ If "package" is not specified, then the calling module's
+ directory is used as the base directory for module relative
+ filenames. It is an error to specify "package" if
+ "module_relative" is False.
+
+ setUp
+ A set-up function. This is called before running the
+ tests in each file. The setUp function will be passed a DocTest
+ object. The setUp function can access the test globals as the
+ globs attribute of the test passed.
+
+ tearDown
+ A tear-down function. This is called after running the
+ tests in each file. The tearDown function will be passed a DocTest
+ object. The tearDown function can access the test globals as the
+ globs attribute of the test passed.
+
+ globs
+ A dictionary containing initial global variables for the tests.
+
+ optionflags
+ A set of doctest option flags expressed as an integer.
+
+ parser
+ A DocTestParser (or subclass) that should be used to extract
+ tests from the files.
+ """
+ suite = unittest.TestSuite()
+
+ # We do this here so that _normalize_module is called at the right
+ # level. If it were called in DocFileTest, then this function
+ # would be the caller and we might guess the package incorrectly.
+ if kw.get('module_relative', True):
+ kw['package'] = _normalize_module(kw.get('package'))
+
+ for path in paths:
+ suite.addTest(DocFileTest(path, **kw))
+
+ return suite
+
+######################################################################
+## 9. Debugging Support
+######################################################################
+
+def script_from_examples(s):
+ r"""Extract script from text with examples.
+
+ Converts text with examples to a Python script. Example input is
+ converted to regular code. Example output and all other words
+ are converted to comments:
+
+ >>> text = '''
+ ... Here are examples of simple math.
+ ...
+ ... Python has super accurate integer addition
+ ...
+ ... >>> 2 + 2
+ ... 5
+ ...
+ ... And very friendly error messages:
+ ...
+ ... >>> 1/0
+ ... To Infinity
+ ... And
+ ... Beyond
+ ...
+ ... You can use logic if you want:
+ ...
+ ... >>> if 0:
+ ... ... blah
+ ... ... blah
+ ... ...
+ ...
+ ... Ho hum
+ ... '''
+
+ >>> print script_from_examples(text)
+ # Here are examples of simple math.
+ #
+ # Python has super accurate integer addition
+ #
+ 2 + 2
+ # Expected:
+ ## 5
+ #
+ # And very friendly error messages:
+ #
+ 1/0
+ # Expected:
+ ## To Infinity
+ ## And
+ ## Beyond
+ #
+ # You can use logic if you want:
+ #
+ if 0:
+ blah
+ blah
+ #
+ # Ho hum
+ """
+ output = []
+ for piece in DocTestParser().parse(s):
+ if isinstance(piece, Example):
+ # Add the example's source code (strip trailing NL)
+ output.append(piece.source[:-1])
+ # Add the expected output:
+ want = piece.want
+ if want:
+ output.append('# Expected:')
+ output += ['## '+l for l in want.split('\n')[:-1]]
+ else:
+ # Add non-example text.
+ output += [_comment_line(l)
+ for l in piece.split('\n')[:-1]]
+
+ # Trim junk on both ends.
+ while output and output[-1] == '#':
+ output.pop()
+ while output and output[0] == '#':
+ output.pop(0)
+ # Combine the output, and return it.
+ return '\n'.join(output)
+
+def testsource(module, name):
+ """Extract the test sources from a doctest docstring as a script.
+
+ Provide the module (or dotted name of the module) containing the
+ test to be debugged and the name (within the module) of the object
+ with the doc string with tests to be debugged.
+ """
+ module = _normalize_module(module)
+ tests = DocTestFinder().find(module)
+ test = [t for t in tests if t.name == name]
+ if not test:
+ raise ValueError(name, "not found in tests")
+ test = test[0]
+ testsrc = script_from_examples(test.docstring)
+ return testsrc
+
+def debug_src(src, pm=False, globs=None):
+ """Debug a single doctest docstring, in argument `src`'"""
+ testsrc = script_from_examples(src)
+ debug_script(testsrc, pm, globs)
+
+def debug_script(src, pm=False, globs=None):
+ "Debug a test script. `src` is the script, as a string."
+ import pdb
+
+ # Note that tempfile.NameTemporaryFile() cannot be used. As the
+ # docs say, a file so created cannot be opened by name a second time
+ # on modern Windows boxes, and execfile() needs to open it.
+ srcfilename = tempfile.mktemp(".py", "doctestdebug")
+ f = open(srcfilename, 'w')
+ f.write(src)
+ f.close()
+
+ try:
+ if globs:
+ globs = globs.copy()
+ else:
+ globs = {}
+
+ if pm:
+ try:
+ execfile(srcfilename, globs, globs)
+ except:
+ print sys.exc_info()[1]
+ pdb.post_mortem(sys.exc_info()[2])
+ else:
+ # Note that %r is vital here. '%s' instead can, e.g., cause
+ # backslashes to get treated as metacharacters on Windows.
+ pdb.run("execfile(%r)" % srcfilename, globs, globs)
+
+ finally:
+ os.remove(srcfilename)
+
+def debug(module, name, pm=False):
+ """Debug a single doctest docstring.
+
+ Provide the module (or dotted name of the module) containing the
+ test to be debugged and the name (within the module) of the object
+ with the docstring with tests to be debugged.
+ """
+ module = _normalize_module(module)
+ testsrc = testsource(module, name)
+ debug_script(testsrc, pm, module.__dict__)
+
+######################################################################
+## 10. Example Usage
+######################################################################
+class _TestClass:
+ """
+ A pointless class, for sanity-checking of docstring testing.
+
+ Methods:
+ square()
+ get()
+
+ >>> _TestClass(13).get() + _TestClass(-12).get()
+ 1
+ >>> hex(_TestClass(13).square().get())
+ '0xa9'
+ """
+
+ def __init__(self, val):
+ """val -> _TestClass object with associated value val.
+
+ >>> t = _TestClass(123)
+ >>> print t.get()
+ 123
+ """
+
+ self.val = val
+
+ def square(self):
+ """square() -> square TestClass's associated value
+
+ >>> _TestClass(13).square().get()
+ 169
+ """
+
+ self.val = self.val ** 2
+ return self
+
+ def get(self):
+ """get() -> return TestClass's associated value.
+
+ >>> x = _TestClass(-42)
+ >>> print x.get()
+ -42
+ """
+
+ return self.val
+
+__test__ = {"_TestClass": _TestClass,
+ "string": r"""
+ Example of a string object, searched as-is.
+ >>> x = 1; y = 2
+ >>> x + y, x * y
+ (3, 2)
+ """,
+
+ "bool-int equivalence": r"""
+ In 2.2, boolean expressions displayed
+ 0 or 1. By default, we still accept
+ them. This can be disabled by passing
+ DONT_ACCEPT_TRUE_FOR_1 to the new
+ optionflags argument.
+ >>> 4 == 4
+ 1
+ >>> 4 == 4
+ True
+ >>> 4 > 4
+ 0
+ >>> 4 > 4
+ False
+ """,
+
+ "blank lines": r"""
+ Blank lines can be marked with <BLANKLINE>:
+ >>> print 'foo\n\nbar\n'
+ foo
+ <BLANKLINE>
+ bar
+ <BLANKLINE>
+ """,
+
+ "ellipsis": r"""
+ If the ellipsis flag is used, then '...' can be used to
+ elide substrings in the desired output:
+ >>> print range(1000) #doctest: +ELLIPSIS
+ [0, 1, 2, ..., 999]
+ """,
+
+ "whitespace normalization": r"""
+ If the whitespace normalization flag is used, then
+ differences in whitespace are ignored.
+ >>> print range(30) #doctest: +NORMALIZE_WHITESPACE
+ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
+ 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
+ 27, 28, 29]
+ """,
+ }
+
+def _test():
+ r = unittest.TextTestRunner()
+ r.run(DocTestSuite())
+
+if __name__ == "__main__":
+ _test()
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/server.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/server.py
new file mode 100755
index 00000000..f4aaaa1c
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/server.py
@@ -0,0 +1,48 @@
+"""Basic http server for tests to simulate PyPI or custom indexes
+"""
+import urllib2
+import sys
+from threading import Thread
+from BaseHTTPServer import HTTPServer
+from SimpleHTTPServer import SimpleHTTPRequestHandler
+
+class IndexServer(HTTPServer):
+ """Basic single-threaded http server simulating a package index
+
+ You can use this server in unittest like this::
+ s = IndexServer()
+ s.start()
+ index_url = s.base_url() + 'mytestindex'
+ # do some test requests to the index
+ # The index files should be located in setuptools/tests/indexes
+ s.stop()
+ """
+ def __init__(self):
+ HTTPServer.__init__(self, ('', 0), SimpleHTTPRequestHandler)
+ self._run = True
+
+ def serve(self):
+ while True:
+ self.handle_request()
+ if not self._run: break
+
+ def start(self):
+ self.thread = Thread(target=self.serve)
+ self.thread.start()
+
+ def stop(self):
+ """self.shutdown is not supported on python < 2.6"""
+ self._run = False
+ try:
+ if sys.version > '2.6':
+ urllib2.urlopen('http://127.0.0.1:%s/' % self.server_port,
+ None, 5)
+ else:
+ urllib2.urlopen('http://127.0.0.1:%s/' % self.server_port)
+ except urllib2.URLError:
+ pass
+ self.thread.join()
+
+ def base_url(self):
+ port = self.server_port
+ return 'http://127.0.0.1:%s/setuptools/tests/indexes/' % port
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_build_ext.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_build_ext.py
new file mode 100755
index 00000000..a520ced9
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_build_ext.py
@@ -0,0 +1,20 @@
+"""build_ext tests
+"""
+import os, shutil, tempfile, unittest
+from distutils.command.build_ext import build_ext as distutils_build_ext
+from setuptools.command.build_ext import build_ext
+from setuptools.dist import Distribution
+
+class TestBuildExtTest(unittest.TestCase):
+
+ def test_get_ext_filename(self):
+ # setuptools needs to give back the same
+ # result than distutils, even if the fullname
+ # is not in ext_map
+ dist = Distribution()
+ cmd = build_ext(dist)
+ cmd.ext_map['foo/bar'] = ''
+ res = cmd.get_ext_filename('foo')
+ wanted = distutils_build_ext.get_ext_filename(cmd, 'foo')
+ assert res == wanted
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_develop.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_develop.py
new file mode 100755
index 00000000..a567dd5a
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_develop.py
@@ -0,0 +1,82 @@
+"""develop tests
+"""
+import sys
+import os, shutil, tempfile, unittest
+import tempfile
+import site
+from StringIO import StringIO
+
+from distutils.errors import DistutilsError
+from setuptools.command.develop import develop
+from setuptools.command import easy_install as easy_install_pkg
+from setuptools.dist import Distribution
+
+SETUP_PY = """\
+from setuptools import setup
+
+setup(name='foo')
+"""
+
+class TestDevelopTest(unittest.TestCase):
+
+ def setUp(self):
+ self.dir = tempfile.mkdtemp()
+ setup = os.path.join(self.dir, 'setup.py')
+ f = open(setup, 'w')
+ f.write(SETUP_PY)
+ f.close()
+ self.old_cwd = os.getcwd()
+ os.chdir(self.dir)
+ if sys.version >= "2.6":
+ self.old_base = site.USER_BASE
+ site.USER_BASE = tempfile.mkdtemp()
+ self.old_site = site.USER_SITE
+ site.USER_SITE = tempfile.mkdtemp()
+
+ def tearDown(self):
+ os.chdir(self.old_cwd)
+ shutil.rmtree(self.dir)
+ if sys.version >= "2.6":
+ shutil.rmtree(site.USER_BASE)
+ shutil.rmtree(site.USER_SITE)
+ site.USER_BASE = self.old_base
+ site.USER_SITE = self.old_site
+
+ def test_develop(self):
+ if sys.version < "2.6":
+ return
+ dist = Distribution()
+ dist.script_name = 'setup.py'
+ cmd = develop(dist)
+ cmd.user = 1
+ cmd.ensure_finalized()
+ cmd.install_dir = site.USER_SITE
+ cmd.user = 1
+ old_stdout = sys.stdout
+ sys.stdout = StringIO()
+ try:
+ cmd.run()
+ finally:
+ sys.stdout = old_stdout
+
+ # let's see if we got our egg link at the right place
+ content = os.listdir(site.USER_SITE)
+ content.sort()
+ self.assertEquals(content, ['UNKNOWN.egg-link', 'easy-install.pth'])
+
+ def test_develop_with_setup_requires(self):
+
+ wanted = ("Could not find suitable distribution for "
+ "Requirement.parse('I-DONT-EXIST')")
+ old_dir = os.getcwd()
+ os.chdir(self.dir)
+ try:
+ try:
+ dist = Distribution({'setup_requires': ['I_DONT_EXIST']})
+ except DistutilsError, e:
+ error = str(e)
+ if error == wanted:
+ pass
+ finally:
+ os.chdir(old_dir)
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_easy_install.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_easy_install.py
new file mode 100755
index 00000000..85616605
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_easy_install.py
@@ -0,0 +1,243 @@
+"""Easy install Tests
+"""
+import sys
+import os, shutil, tempfile, unittest
+import site
+from StringIO import StringIO
+from setuptools.command.easy_install import easy_install, get_script_args, main
+from setuptools.command.easy_install import PthDistributions
+from setuptools.command import easy_install as easy_install_pkg
+from setuptools.dist import Distribution
+from pkg_resources import Distribution as PRDistribution
+
+try:
+ import multiprocessing
+ import logging
+ _LOG = logging.getLogger('test_easy_install')
+ logging.basicConfig(level=logging.INFO, stream=sys.stderr)
+ _MULTIPROC = True
+except ImportError:
+ _MULTIPROC = False
+ _LOG = None
+
+class FakeDist(object):
+ def get_entry_map(self, group):
+ if group != 'console_scripts':
+ return {}
+ return {'name': 'ep'}
+
+ def as_requirement(self):
+ return 'spec'
+
+WANTED = """\
+#!%s
+# EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name'
+__requires__ = 'spec'
+import sys
+from pkg_resources import load_entry_point
+
+if __name__ == '__main__':
+ sys.exit(
+ load_entry_point('spec', 'console_scripts', 'name')()
+ )
+""" % sys.executable
+
+SETUP_PY = """\
+from setuptools import setup
+
+setup(name='foo')
+"""
+
+class TestEasyInstallTest(unittest.TestCase):
+
+ def test_install_site_py(self):
+ dist = Distribution()
+ cmd = easy_install(dist)
+ cmd.sitepy_installed = False
+ cmd.install_dir = tempfile.mkdtemp()
+ try:
+ cmd.install_site_py()
+ sitepy = os.path.join(cmd.install_dir, 'site.py')
+ self.assert_(os.path.exists(sitepy))
+ finally:
+ shutil.rmtree(cmd.install_dir)
+
+ def test_get_script_args(self):
+ dist = FakeDist()
+
+ old_platform = sys.platform
+ try:
+ name, script = get_script_args(dist).next()
+ finally:
+ sys.platform = old_platform
+
+ self.assertEquals(script, WANTED)
+
+ def test_no_setup_cfg(self):
+ # makes sure easy_install as a command (main)
+ # doesn't use a setup.cfg file that is located
+ # in the current working directory
+ dir = tempfile.mkdtemp()
+ setup_cfg = open(os.path.join(dir, 'setup.cfg'), 'w')
+ setup_cfg.write('[easy_install]\nfind_links = http://example.com')
+ setup_cfg.close()
+ setup_py = open(os.path.join(dir, 'setup.py'), 'w')
+ setup_py.write(SETUP_PY)
+ setup_py.close()
+
+ from setuptools.dist import Distribution
+
+ def _parse_command_line(self):
+ msg = 'Error: a local setup.cfg was used'
+ opts = self.command_options
+ if 'easy_install' in opts:
+ assert 'find_links' not in opts['easy_install'], msg
+ return self._old_parse_command_line
+
+ Distribution._old_parse_command_line = Distribution.parse_command_line
+ Distribution.parse_command_line = _parse_command_line
+
+ old_wd = os.getcwd()
+ try:
+ os.chdir(dir)
+ main([])
+ finally:
+ os.chdir(old_wd)
+ shutil.rmtree(dir)
+
+ def test_no_find_links(self):
+ # new option '--no-find-links', that blocks find-links added at
+ # the project level
+ dist = Distribution()
+ cmd = easy_install(dist)
+ cmd.check_pth_processing = lambda : True
+ cmd.no_find_links = True
+ cmd.find_links = ['link1', 'link2']
+ cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
+ cmd.args = ['ok']
+ cmd.ensure_finalized()
+ self.assertEquals(cmd.package_index.scanned_urls, {})
+
+ # let's try without it (default behavior)
+ cmd = easy_install(dist)
+ cmd.check_pth_processing = lambda : True
+ cmd.find_links = ['link1', 'link2']
+ cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
+ cmd.args = ['ok']
+ cmd.ensure_finalized()
+ keys = cmd.package_index.scanned_urls.keys()
+ keys.sort()
+ self.assertEquals(keys, ['link1', 'link2'])
+
+
+class TestPTHFileWriter(unittest.TestCase):
+ def test_add_from_cwd_site_sets_dirty(self):
+ '''a pth file manager should set dirty
+ if a distribution is in site but also the cwd
+ '''
+ pth = PthDistributions('does-not_exist', [os.getcwd()])
+ self.assert_(not pth.dirty)
+ pth.add(PRDistribution(os.getcwd()))
+ self.assert_(pth.dirty)
+
+ def test_add_from_site_is_ignored(self):
+ pth = PthDistributions('does-not_exist', ['/test/location/does-not-have-to-exist'])
+ self.assert_(not pth.dirty)
+ pth.add(PRDistribution('/test/location/does-not-have-to-exist'))
+ self.assert_(not pth.dirty)
+
+
+class TestUserInstallTest(unittest.TestCase):
+
+ def setUp(self):
+ self.dir = tempfile.mkdtemp()
+ setup = os.path.join(self.dir, 'setup.py')
+ f = open(setup, 'w')
+ f.write(SETUP_PY)
+ f.close()
+ self.old_cwd = os.getcwd()
+ os.chdir(self.dir)
+ if sys.version >= "2.6":
+ self.old_has_site = easy_install_pkg.HAS_USER_SITE
+ self.old_file = easy_install_pkg.__file__
+ self.old_base = site.USER_BASE
+ site.USER_BASE = tempfile.mkdtemp()
+ self.old_site = site.USER_SITE
+ site.USER_SITE = tempfile.mkdtemp()
+ easy_install_pkg.__file__ = site.USER_SITE
+
+ def tearDown(self):
+ os.chdir(self.old_cwd)
+ shutil.rmtree(self.dir)
+ if sys.version >= "2.6":
+ shutil.rmtree(site.USER_BASE)
+ shutil.rmtree(site.USER_SITE)
+ site.USER_BASE = self.old_base
+ site.USER_SITE = self.old_site
+ easy_install_pkg.HAS_USER_SITE = self.old_has_site
+ easy_install_pkg.__file__ = self.old_file
+
+ def test_user_install_implied(self):
+ easy_install_pkg.HAS_USER_SITE = True # disabled sometimes
+ #XXX: replace with something meaningfull
+ if sys.version < "2.6":
+ return #SKIP
+ dist = Distribution()
+ dist.script_name = 'setup.py'
+ cmd = easy_install(dist)
+ cmd.args = ['py']
+ cmd.ensure_finalized()
+ self.assertTrue(cmd.user, 'user should be implied')
+
+ def test_multiproc_atexit(self):
+ if not _MULTIPROC:
+ return
+ _LOG.info('this should not break')
+
+ def test_user_install_not_implied_without_usersite_enabled(self):
+ easy_install_pkg.HAS_USER_SITE = False # usually enabled
+ #XXX: replace with something meaningfull
+ if sys.version < "2.6":
+ return #SKIP
+ dist = Distribution()
+ dist.script_name = 'setup.py'
+ cmd = easy_install(dist)
+ cmd.args = ['py']
+ cmd.initialize_options()
+ self.assertFalse(cmd.user, 'NOT user should be implied')
+
+ def test_local_index(self):
+ # make sure the local index is used
+ # when easy_install looks for installed
+ # packages
+ new_location = tempfile.mkdtemp()
+ target = tempfile.mkdtemp()
+ egg_file = os.path.join(new_location, 'foo-1.0.egg-info')
+ f = open(egg_file, 'w')
+ try:
+ f.write('Name: foo\n')
+ except:
+ f.close()
+
+ sys.path.append(target)
+ old_ppath = os.environ.get('PYTHONPATH')
+ os.environ['PYTHONPATH'] = ':'.join(sys.path)
+ try:
+ dist = Distribution()
+ dist.script_name = 'setup.py'
+ cmd = easy_install(dist)
+ cmd.install_dir = target
+ cmd.args = ['foo']
+ cmd.ensure_finalized()
+ cmd.local_index.scan([new_location])
+ res = cmd.easy_install('foo')
+ self.assertEquals(res.location, new_location)
+ finally:
+ sys.path.remove(target)
+ shutil.rmtree(new_location)
+ shutil.rmtree(target)
+ if old_ppath is not None:
+ os.environ['PYTHONPATH'] = old_ppath
+ else:
+ del os.environ['PYTHONPATH']
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_packageindex.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_packageindex.py
new file mode 100755
index 00000000..42cb8c1e
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_packageindex.py
@@ -0,0 +1,112 @@
+"""Package Index Tests
+"""
+# More would be better!
+import sys
+import os, shutil, tempfile, unittest, urllib2
+import pkg_resources
+import setuptools.package_index
+from server import IndexServer
+
+class TestPackageIndex(unittest.TestCase):
+
+ def test_bad_urls(self):
+ index = setuptools.package_index.PackageIndex()
+ url = 'http://127.0.0.1:0/nonesuch/test_package_index'
+ try:
+ v = index.open_url(url)
+ except Exception, v:
+ self.assert_(url in str(v))
+ else:
+ self.assert_(isinstance(v,urllib2.HTTPError))
+
+ # issue 16
+ # easy_install inquant.contentmirror.plone breaks because of a typo
+ # in its home URL
+ index = setuptools.package_index.PackageIndex(
+ hosts=('www.example.com',)
+ )
+
+ url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk'
+ try:
+ v = index.open_url(url)
+ except Exception, v:
+ self.assert_(url in str(v))
+ else:
+ self.assert_(isinstance(v, urllib2.HTTPError))
+
+ def _urlopen(*args):
+ import httplib
+ raise httplib.BadStatusLine('line')
+
+ old_urlopen = urllib2.urlopen
+ urllib2.urlopen = _urlopen
+ url = 'http://example.com'
+ try:
+ try:
+ v = index.open_url(url)
+ except Exception, v:
+ self.assert_('line' in str(v))
+ else:
+ raise AssertionError('Should have raise here!')
+ finally:
+ urllib2.urlopen = old_urlopen
+
+ # issue 20
+ url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk'
+ try:
+ index.open_url(url)
+ except Exception, v:
+ self.assert_('nonnumeric port' in str(v))
+
+
+ # issue #160
+ if sys.version_info[0] == 2 and sys.version_info[1] == 7:
+ # this should not fail
+ url = 'http://example.com'
+ page = ('<a href="http://www.famfamfam.com]('
+ 'http://www.famfamfam.com/">')
+ index.process_index(url, page)
+
+
+ def test_url_ok(self):
+ index = setuptools.package_index.PackageIndex(
+ hosts=('www.example.com',)
+ )
+ url = 'file:///tmp/test_package_index'
+ self.assert_(index.url_ok(url, True))
+
+ def test_links_priority(self):
+ """
+ Download links from the pypi simple index should be used before
+ external download links.
+ http://bitbucket.org/tarek/distribute/issue/163/md5-validation-error
+
+ Usecase :
+ - someone uploads a package on pypi, a md5 is generated
+ - someone manually copies this link (with the md5 in the url) onto an
+ external page accessible from the package page.
+ - someone reuploads the package (with a different md5)
+ - while easy_installing, an MD5 error occurs because the external link
+ is used
+ -> Distribute should use the link from pypi, not the external one.
+ """
+ # start an index server
+ server = IndexServer()
+ server.start()
+ index_url = server.base_url() + 'test_links_priority/simple/'
+
+ # scan a test index
+ pi = setuptools.package_index.PackageIndex(index_url)
+ requirement = pkg_resources.Requirement.parse('foobar')
+ pi.find_packages(requirement)
+ server.stop()
+
+ # the distribution has been found
+ self.assert_('foobar' in pi)
+ # we have only one link, because links are compared without md5
+ self.assert_(len(pi['foobar'])==1)
+ # the link should be from the index
+ self.assert_('correct_md5' in pi['foobar'][0].location)
+
+
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_resources.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_resources.py
new file mode 100755
index 00000000..883cfad1
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_resources.py
@@ -0,0 +1,565 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# NOTE: the shebang and encoding lines are for ScriptHeaderTests; do not remove
+from unittest import TestCase, makeSuite; from pkg_resources import *
+from setuptools.command.easy_install import get_script_header, is_sh
+import os, pkg_resources, sys, StringIO
+try: frozenset
+except NameError:
+ from sets import ImmutableSet as frozenset
+
+class Metadata(EmptyProvider):
+ """Mock object to return metadata as if from an on-disk distribution"""
+
+ def __init__(self,*pairs):
+ self.metadata = dict(pairs)
+
+ def has_metadata(self,name):
+ return name in self.metadata
+
+ def get_metadata(self,name):
+ return self.metadata[name]
+
+ def get_metadata_lines(self,name):
+ return yield_lines(self.get_metadata(name))
+
+class DistroTests(TestCase):
+
+ def testCollection(self):
+ # empty path should produce no distributions
+ ad = Environment([], platform=None, python=None)
+ self.assertEqual(list(ad), [])
+ self.assertEqual(ad['FooPkg'],[])
+ ad.add(Distribution.from_filename("FooPkg-1.3_1.egg"))
+ ad.add(Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg"))
+ ad.add(Distribution.from_filename("FooPkg-1.2-py2.4.egg"))
+
+ # Name is in there now
+ self.assert_(ad['FooPkg'])
+ # But only 1 package
+ self.assertEqual(list(ad), ['foopkg'])
+
+ # Distributions sort by version
+ self.assertEqual(
+ [dist.version for dist in ad['FooPkg']], ['1.4','1.3-1','1.2']
+ )
+ # Removing a distribution leaves sequence alone
+ ad.remove(ad['FooPkg'][1])
+ self.assertEqual(
+ [dist.version for dist in ad['FooPkg']], ['1.4','1.2']
+ )
+ # And inserting adds them in order
+ ad.add(Distribution.from_filename("FooPkg-1.9.egg"))
+ self.assertEqual(
+ [dist.version for dist in ad['FooPkg']], ['1.9','1.4','1.2']
+ )
+
+ ws = WorkingSet([])
+ foo12 = Distribution.from_filename("FooPkg-1.2-py2.4.egg")
+ foo14 = Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg")
+ req, = parse_requirements("FooPkg>=1.3")
+
+ # Nominal case: no distros on path, should yield all applicable
+ self.assertEqual(ad.best_match(req,ws).version, '1.9')
+ # If a matching distro is already installed, should return only that
+ ws.add(foo14); self.assertEqual(ad.best_match(req,ws).version, '1.4')
+
+ # If the first matching distro is unsuitable, it's a version conflict
+ ws = WorkingSet([]); ws.add(foo12); ws.add(foo14)
+ self.assertRaises(VersionConflict, ad.best_match, req, ws)
+
+ # If more than one match on the path, the first one takes precedence
+ ws = WorkingSet([]); ws.add(foo14); ws.add(foo12); ws.add(foo14);
+ self.assertEqual(ad.best_match(req,ws).version, '1.4')
+
+ def checkFooPkg(self,d):
+ self.assertEqual(d.project_name, "FooPkg")
+ self.assertEqual(d.key, "foopkg")
+ self.assertEqual(d.version, "1.3-1")
+ self.assertEqual(d.py_version, "2.4")
+ self.assertEqual(d.platform, "win32")
+ self.assertEqual(d.parsed_version, parse_version("1.3-1"))
+
+ def testDistroBasics(self):
+ d = Distribution(
+ "/some/path",
+ project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32"
+ )
+ self.checkFooPkg(d)
+
+ d = Distribution("/some/path")
+ self.assertEqual(d.py_version, sys.version[:3])
+ self.assertEqual(d.platform, None)
+
+ def testDistroParse(self):
+ d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg")
+ self.checkFooPkg(d)
+ d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg-info")
+ self.checkFooPkg(d)
+
+ def testDistroMetadata(self):
+ d = Distribution(
+ "/some/path", project_name="FooPkg", py_version="2.4", platform="win32",
+ metadata = Metadata(
+ ('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n")
+ )
+ )
+ self.checkFooPkg(d)
+
+
+ def distRequires(self, txt):
+ return Distribution("/foo", metadata=Metadata(('depends.txt', txt)))
+
+ def checkRequires(self, dist, txt, extras=()):
+ self.assertEqual(
+ list(dist.requires(extras)),
+ list(parse_requirements(txt))
+ )
+
+ def testDistroDependsSimple(self):
+ for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0":
+ self.checkRequires(self.distRequires(v), v)
+
+
+ def testResolve(self):
+ ad = Environment([]); ws = WorkingSet([])
+ # Resolving no requirements -> nothing to install
+ self.assertEqual( list(ws.resolve([],ad)), [] )
+ # Request something not in the collection -> DistributionNotFound
+ self.assertRaises(
+ DistributionNotFound, ws.resolve, parse_requirements("Foo"), ad
+ )
+ Foo = Distribution.from_filename(
+ "/foo_dir/Foo-1.2.egg",
+ metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0"))
+ )
+ ad.add(Foo); ad.add(Distribution.from_filename("Foo-0.9.egg"))
+
+ # Request thing(s) that are available -> list to activate
+ for i in range(3):
+ targets = list(ws.resolve(parse_requirements("Foo"), ad))
+ self.assertEqual(targets, [Foo])
+ map(ws.add,targets)
+ self.assertRaises(VersionConflict, ws.resolve,
+ parse_requirements("Foo==0.9"), ad)
+ ws = WorkingSet([]) # reset
+
+ # Request an extra that causes an unresolved dependency for "Baz"
+ self.assertRaises(
+ DistributionNotFound, ws.resolve,parse_requirements("Foo[bar]"), ad
+ )
+ Baz = Distribution.from_filename(
+ "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo"))
+ )
+ ad.add(Baz)
+
+ # Activation list now includes resolved dependency
+ self.assertEqual(
+ list(ws.resolve(parse_requirements("Foo[bar]"), ad)), [Foo,Baz]
+ )
+ # Requests for conflicting versions produce VersionConflict
+ self.assertRaises( VersionConflict,
+ ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad
+ )
+
+ def testDistroDependsOptions(self):
+ d = self.distRequires("""
+ Twisted>=1.5
+ [docgen]
+ ZConfig>=2.0
+ docutils>=0.3
+ [fastcgi]
+ fcgiapp>=0.1""")
+ self.checkRequires(d,"Twisted>=1.5")
+ self.checkRequires(
+ d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"]
+ )
+ self.checkRequires(
+ d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"]
+ )
+ self.checkRequires(
+ d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(),
+ ["docgen","fastcgi"]
+ )
+ self.checkRequires(
+ d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(),
+ ["fastcgi", "docgen"]
+ )
+ self.assertRaises(UnknownExtra, d.requires, ["foo"])
+
+ def testSetuptoolsDistributeCombination(self):
+ # Ensure that installing a 0.7-series setuptools fails. PJE says that
+ # it will not co-exist.
+ ws = WorkingSet([])
+ d = Distribution(
+ "/some/path",
+ project_name="setuptools",
+ version="0.7a1")
+ self.assertRaises(ValueError, ws.add, d)
+ # A 0.6-series is no problem
+ d2 = Distribution(
+ "/some/path",
+ project_name="setuptools",
+ version="0.6c9")
+ ws.add(d2)
+
+        # a nonexistent version needs to work
+ ws = WorkingSet([])
+ d3 = Distribution(
+ "/some/path",
+ project_name="setuptools")
+ ws.add(d3)
+
+
+class EntryPointTests(TestCase):
+
+ def assertfields(self, ep):
+ self.assertEqual(ep.name,"foo")
+ self.assertEqual(ep.module_name,"setuptools.tests.test_resources")
+ self.assertEqual(ep.attrs, ("EntryPointTests",))
+ self.assertEqual(ep.extras, ("x",))
+ self.assert_(ep.load() is EntryPointTests)
+ self.assertEqual(
+ str(ep),
+ "foo = setuptools.tests.test_resources:EntryPointTests [x]"
+ )
+
+ def setUp(self):
+ self.dist = Distribution.from_filename(
+ "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]')))
+
+ def testBasics(self):
+ ep = EntryPoint(
+ "foo", "setuptools.tests.test_resources", ["EntryPointTests"],
+ ["x"], self.dist
+ )
+ self.assertfields(ep)
+
+ def testParse(self):
+ s = "foo = setuptools.tests.test_resources:EntryPointTests [x]"
+ ep = EntryPoint.parse(s, self.dist)
+ self.assertfields(ep)
+
+ ep = EntryPoint.parse("bar baz= spammity[PING]")
+ self.assertEqual(ep.name,"bar baz")
+ self.assertEqual(ep.module_name,"spammity")
+ self.assertEqual(ep.attrs, ())
+ self.assertEqual(ep.extras, ("ping",))
+
+ ep = EntryPoint.parse(" fizzly = wocka:foo")
+ self.assertEqual(ep.name,"fizzly")
+ self.assertEqual(ep.module_name,"wocka")
+ self.assertEqual(ep.attrs, ("foo",))
+ self.assertEqual(ep.extras, ())
+
+ def testRejects(self):
+ for ep in [
+ "foo", "x=1=2", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2",
+ ]:
+ try: EntryPoint.parse(ep)
+ except ValueError: pass
+ else: raise AssertionError("Should've been bad", ep)
+
+ def checkSubMap(self, m):
+ self.assertEqual(len(m), len(self.submap_expect))
+ for key, ep in self.submap_expect.iteritems():
+ self.assertEqual(repr(m.get(key)), repr(ep))
+
+ submap_expect = dict(
+ feature1=EntryPoint('feature1', 'somemodule', ['somefunction']),
+ feature2=EntryPoint('feature2', 'another.module', ['SomeClass'], ['extra1','extra2']),
+ feature3=EntryPoint('feature3', 'this.module', extras=['something'])
+ )
+ submap_str = """
+ # define features for blah blah
+ feature1 = somemodule:somefunction
+ feature2 = another.module:SomeClass [extra1,extra2]
+ feature3 = this.module [something]
+ """
+
+ def testParseList(self):
+ self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str))
+ self.assertRaises(ValueError, EntryPoint.parse_group, "x a", "foo=bar")
+ self.assertRaises(ValueError, EntryPoint.parse_group, "x",
+ ["foo=baz", "foo=bar"])
+
+ def testParseMap(self):
+ m = EntryPoint.parse_map({'xyz':self.submap_str})
+ self.checkSubMap(m['xyz'])
+ self.assertEqual(m.keys(),['xyz'])
+ m = EntryPoint.parse_map("[xyz]\n"+self.submap_str)
+ self.checkSubMap(m['xyz'])
+ self.assertEqual(m.keys(),['xyz'])
+ self.assertRaises(ValueError, EntryPoint.parse_map, ["[xyz]", "[xyz]"])
+ self.assertRaises(ValueError, EntryPoint.parse_map, self.submap_str)
+
+class RequirementsTests(TestCase):
+
+ def testBasics(self):
+ r = Requirement.parse("Twisted>=1.2")
+ self.assertEqual(str(r),"Twisted>=1.2")
+ self.assertEqual(repr(r),"Requirement.parse('Twisted>=1.2')")
+ self.assertEqual(r, Requirement("Twisted", [('>=','1.2')], ()))
+ self.assertEqual(r, Requirement("twisTed", [('>=','1.2')], ()))
+ self.assertNotEqual(r, Requirement("Twisted", [('>=','2.0')], ()))
+ self.assertNotEqual(r, Requirement("Zope", [('>=','1.2')], ()))
+ self.assertNotEqual(r, Requirement("Zope", [('>=','3.0')], ()))
+ self.assertNotEqual(r, Requirement.parse("Twisted[extras]>=1.2"))
+
+ def testOrdering(self):
+ r1 = Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ())
+ r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ())
+ self.assertEqual(r1,r2)
+ self.assertEqual(str(r1),str(r2))
+ self.assertEqual(str(r2),"Twisted==1.2c1,>=1.2")
+
+ def testBasicContains(self):
+ r = Requirement("Twisted", [('>=','1.2')], ())
+ foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg")
+ twist11 = Distribution.from_filename("Twisted-1.1.egg")
+ twist12 = Distribution.from_filename("Twisted-1.2.egg")
+ self.assert_(parse_version('1.2') in r)
+ self.assert_(parse_version('1.1') not in r)
+ self.assert_('1.2' in r)
+ self.assert_('1.1' not in r)
+ self.assert_(foo_dist not in r)
+ self.assert_(twist11 not in r)
+ self.assert_(twist12 in r)
+
+ def testAdvancedContains(self):
+ r, = parse_requirements("Foo>=1.2,<=1.3,==1.9,>2.0,!=2.5,<3.0,==4.5")
+ for v in ('1.2','1.2.2','1.3','1.9','2.0.1','2.3','2.6','3.0c1','4.5'):
+ self.assert_(v in r, (v,r))
+ for v in ('1.2c1','1.3.1','1.5','1.9.1','2.0','2.5','3.0','4.0'):
+ self.assert_(v not in r, (v,r))
+
+
+ def testOptionsAndHashing(self):
+ r1 = Requirement.parse("Twisted[foo,bar]>=1.2")
+ r2 = Requirement.parse("Twisted[bar,FOO]>=1.2")
+ r3 = Requirement.parse("Twisted[BAR,FOO]>=1.2.0")
+ self.assertEqual(r1,r2)
+ self.assertEqual(r1,r3)
+ self.assertEqual(r1.extras, ("foo","bar"))
+ self.assertEqual(r2.extras, ("bar","foo")) # extras are normalized
+ self.assertEqual(hash(r1), hash(r2))
+ self.assertEqual(
+ hash(r1), hash(("twisted", ((">=",parse_version("1.2")),),
+ frozenset(["foo","bar"])))
+ )
+
+ def testVersionEquality(self):
+ r1 = Requirement.parse("foo==0.3a2")
+ r2 = Requirement.parse("foo!=0.3a4")
+ d = Distribution.from_filename
+
+ self.assert_(d("foo-0.3a4.egg") not in r1)
+ self.assert_(d("foo-0.3a1.egg") not in r1)
+ self.assert_(d("foo-0.3a4.egg") not in r2)
+
+ self.assert_(d("foo-0.3a2.egg") in r1)
+ self.assert_(d("foo-0.3a2.egg") in r2)
+ self.assert_(d("foo-0.3a3.egg") in r2)
+ self.assert_(d("foo-0.3a5.egg") in r2)
+
+ def testDistributeSetuptoolsOverride(self):
+ # Plain setuptools or distribute mean we return distribute.
+ self.assertEqual(
+ Requirement.parse('setuptools').project_name, 'distribute')
+ self.assertEqual(
+ Requirement.parse('distribute').project_name, 'distribute')
+ # setuptools lower than 0.7 means distribute
+ self.assertEqual(
+ Requirement.parse('setuptools==0.6c9').project_name, 'distribute')
+ self.assertEqual(
+ Requirement.parse('setuptools==0.6c10').project_name, 'distribute')
+ self.assertEqual(
+ Requirement.parse('setuptools>=0.6').project_name, 'distribute')
+ self.assertEqual(
+ Requirement.parse('setuptools < 0.7').project_name, 'distribute')
+ # setuptools 0.7 and higher means setuptools.
+ self.assertEqual(
+ Requirement.parse('setuptools == 0.7').project_name, 'setuptools')
+ self.assertEqual(
+ Requirement.parse('setuptools == 0.7a1').project_name, 'setuptools')
+ self.assertEqual(
+ Requirement.parse('setuptools >= 0.7').project_name, 'setuptools')
+
+
+
+
+
+
+
+
+
+
+
+class ParseTests(TestCase):
+
+ def testEmptyParse(self):
+ self.assertEqual(list(parse_requirements('')), [])
+
+ def testYielding(self):
+ for inp,out in [
+ ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']),
+ (['x\n\n','y'], ['x','y']),
+ ]:
+ self.assertEqual(list(pkg_resources.yield_lines(inp)),out)
+
+ def testSplitting(self):
+ self.assertEqual(
+ list(
+ pkg_resources.split_sections("""
+ x
+ [Y]
+ z
+
+ a
+ [b ]
+ # foo
+ c
+ [ d]
+ [q]
+ v
+ """
+ )
+ ),
+ [(None,["x"]), ("Y",["z","a"]), ("b",["c"]), ("d",[]), ("q",["v"])]
+ )
+ self.assertRaises(ValueError,list,pkg_resources.split_sections("[foo"))
+
+ def testSafeName(self):
+ self.assertEqual(safe_name("adns-python"), "adns-python")
+ self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils")
+ self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils")
+ self.assertEqual(safe_name("Money$$$Maker"), "Money-Maker")
+ self.assertNotEqual(safe_name("peak.web"), "peak-web")
+
+ def testSafeVersion(self):
+ self.assertEqual(safe_version("1.2-1"), "1.2-1")
+ self.assertEqual(safe_version("1.2 alpha"), "1.2.alpha")
+ self.assertEqual(safe_version("2.3.4 20050521"), "2.3.4.20050521")
+ self.assertEqual(safe_version("Money$$$Maker"), "Money-Maker")
+ self.assertEqual(safe_version("peak.web"), "peak.web")
+
+ def testSimpleRequirements(self):
+ self.assertEqual(
+ list(parse_requirements('Twis-Ted>=1.2-1')),
+ [Requirement('Twis-Ted',[('>=','1.2-1')], ())]
+ )
+ self.assertEqual(
+ list(parse_requirements('Twisted >=1.2, \ # more\n<2.0')),
+ [Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())]
+ )
+ self.assertEqual(
+ Requirement.parse("FooBar==1.99a3"),
+ Requirement("FooBar", [('==','1.99a3')], ())
+ )
+ self.assertRaises(ValueError,Requirement.parse,">=2.3")
+ self.assertRaises(ValueError,Requirement.parse,"x\\")
+ self.assertRaises(ValueError,Requirement.parse,"x==2 q")
+ self.assertRaises(ValueError,Requirement.parse,"X==1\nY==2")
+ self.assertRaises(ValueError,Requirement.parse,"#")
+
+ def testVersionEquality(self):
+ def c(s1,s2):
+ p1, p2 = parse_version(s1),parse_version(s2)
+ self.assertEqual(p1,p2, (s1,s2,p1,p2))
+
+ c('1.2-rc1', '1.2rc1')
+ c('0.4', '0.4.0')
+ c('0.4.0.0', '0.4.0')
+ c('0.4.0-0', '0.4-0')
+ c('0pl1', '0.0pl1')
+ c('0pre1', '0.0c1')
+ c('0.0.0preview1', '0c1')
+ c('0.0c1', '0-rc1')
+ c('1.2a1', '1.2.a.1'); c('1.2...a', '1.2a')
+
+ def testVersionOrdering(self):
+ def c(s1,s2):
+ p1, p2 = parse_version(s1),parse_version(s2)
+ self.assert_(p1<p2, (s1,s2,p1,p2))
+
+ c('2.1','2.1.1')
+ c('2a1','2b0')
+ c('2a1','2.1')
+ c('2.3a1', '2.3')
+ c('2.1-1', '2.1-2')
+ c('2.1-1', '2.1.1')
+ c('2.1', '2.1pl4')
+ c('2.1a0-20040501', '2.1')
+ c('1.1', '02.1')
+ c('A56','B27')
+ c('3.2', '3.2.pl0')
+ c('3.2-1', '3.2pl1')
+ c('3.2pl1', '3.2pl1-1')
+ c('0.4', '4.0')
+ c('0.0.4', '0.4.0')
+ c('0pl1', '0.4pl1')
+ c('2.1.0-rc1','2.1.0')
+ c('2.1dev','2.1a0')
+
+ torture ="""
+ 0.80.1-3 0.80.1-2 0.80.1-1 0.79.9999+0.80.0pre4-1
+ 0.79.9999+0.80.0pre2-3 0.79.9999+0.80.0pre2-2
+ 0.77.2-1 0.77.1-1 0.77.0-1
+ """.split()
+
+ for p,v1 in enumerate(torture):
+ for v2 in torture[p+1:]:
+ c(v2,v1)
+
+
+
+
+
+
+
+
+class ScriptHeaderTests(TestCase):
+ non_ascii_exe = '/Users/José/bin/python'
+
+ def test_get_script_header(self):
+ if not sys.platform.startswith('java') or not is_sh(sys.executable):
+ # This test is for non-Jython platforms
+ self.assertEqual(get_script_header('#!/usr/local/bin/python'),
+ '#!%s\n' % os.path.normpath(sys.executable))
+ self.assertEqual(get_script_header('#!/usr/bin/python -x'),
+ '#!%s -x\n' % os.path.normpath(sys.executable))
+ self.assertEqual(get_script_header('#!/usr/bin/python',
+ executable=self.non_ascii_exe),
+ '#!%s -x\n' % self.non_ascii_exe)
+
+ def test_get_script_header_jython_workaround(self):
+ # This test doesn't work with Python 3 in some locales
+ if (sys.version_info >= (3,) and os.environ.get("LC_CTYPE")
+ in (None, "C", "POSIX")):
+ return
+ platform = sys.platform
+ sys.platform = 'java1.5.0_13'
+ stdout = sys.stdout
+ try:
+ # A mock sys.executable that uses a shebang line (this file)
+ exe = os.path.normpath(os.path.splitext(__file__)[0] + '.py')
+ self.assertEqual(
+ get_script_header('#!/usr/local/bin/python', executable=exe),
+ '#!/usr/bin/env %s\n' % exe)
+
+ # Ensure we generate what is basically a broken shebang line
+            # when there are options, with a warning emitted
+ sys.stdout = sys.stderr = StringIO.StringIO()
+ self.assertEqual(get_script_header('#!/usr/bin/python -x',
+ executable=exe),
+ '#!%s -x\n' % exe)
+ self.assert_('Unable to adapt shebang line' in sys.stdout.getvalue())
+ sys.stdout = sys.stderr = StringIO.StringIO()
+ self.assertEqual(get_script_header('#!/usr/bin/python',
+ executable=self.non_ascii_exe),
+ '#!%s -x\n' % self.non_ascii_exe)
+ self.assert_('Unable to adapt shebang line' in sys.stdout.getvalue())
+ finally:
+ sys.platform = platform
+ sys.stdout = stdout
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_sandbox.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_sandbox.py
new file mode 100755
index 00000000..1609ee86
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_sandbox.py
@@ -0,0 +1,66 @@
+"""develop tests
+"""
+import sys
+import os
+import shutil
+import unittest
+import tempfile
+
+from setuptools.sandbox import DirectorySandbox, SandboxViolation
+
+def has_win32com():
+ """
+ Run this to determine if the local machine has win32com, and if it
+ does, include additional tests.
+ """
+ if not sys.platform.startswith('win32'):
+ return False
+ try:
+ mod = __import__('win32com')
+ except ImportError:
+ return False
+ return True
+
+class TestSandbox(unittest.TestCase):
+
+ def setUp(self):
+ self.dir = tempfile.mkdtemp()
+
+ def tearDown(self):
+ shutil.rmtree(self.dir)
+
+ def test_devnull(self):
+ if sys.version < '2.4':
+ return
+ sandbox = DirectorySandbox(self.dir)
+ sandbox.run(self._file_writer(os.devnull))
+
+ def _file_writer(path):
+ def do_write():
+ f = open(path, 'w')
+ f.write('xxx')
+ f.close()
+ return do_write
+
+ _file_writer = staticmethod(_file_writer)
+
+ if has_win32com():
+ def test_win32com(self):
+ """
+ win32com should not be prevented from caching COM interfaces
+ in gen_py.
+ """
+ import win32com
+ gen_py = win32com.__gen_path__
+ target = os.path.join(gen_py, 'test_write')
+ sandbox = DirectorySandbox(self.dir)
+ try:
+ try:
+ sandbox.run(self._file_writer(target))
+ except SandboxViolation:
+ self.fail("Could not create gen_py file due to SandboxViolation")
+ finally:
+ if os.path.exists(target): os.remove(target)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_upload_docs.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_upload_docs.py
new file mode 100755
index 00000000..8b2dc892
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/setuptools/tests/test_upload_docs.py
@@ -0,0 +1,65 @@
+"""build_ext tests
+"""
+import sys, os, shutil, tempfile, unittest, site, zipfile
+from setuptools.command.upload_docs import upload_docs
+from setuptools.dist import Distribution
+
+SETUP_PY = """\
+from setuptools import setup
+
+setup(name='foo')
+"""
+
+class TestUploadDocsTest(unittest.TestCase):
+ def setUp(self):
+ self.dir = tempfile.mkdtemp()
+ setup = os.path.join(self.dir, 'setup.py')
+ f = open(setup, 'w')
+ f.write(SETUP_PY)
+ f.close()
+ self.old_cwd = os.getcwd()
+ os.chdir(self.dir)
+
+ self.upload_dir = os.path.join(self.dir, 'build')
+ os.mkdir(self.upload_dir)
+
+ # A test document.
+ f = open(os.path.join(self.upload_dir, 'index.html'), 'w')
+ f.write("Hello world.")
+ f.close()
+
+ # An empty folder.
+ os.mkdir(os.path.join(self.upload_dir, 'empty'))
+
+ if sys.version >= "2.6":
+ self.old_base = site.USER_BASE
+ site.USER_BASE = upload_docs.USER_BASE = tempfile.mkdtemp()
+ self.old_site = site.USER_SITE
+ site.USER_SITE = upload_docs.USER_SITE = tempfile.mkdtemp()
+
+ def tearDown(self):
+ os.chdir(self.old_cwd)
+ shutil.rmtree(self.dir)
+ if sys.version >= "2.6":
+ shutil.rmtree(site.USER_BASE)
+ shutil.rmtree(site.USER_SITE)
+ site.USER_BASE = self.old_base
+ site.USER_SITE = self.old_site
+
+ def test_create_zipfile(self):
+ # Test to make sure zipfile creation handles common cases.
+ # This explicitly includes a folder containing an empty folder.
+
+ dist = Distribution()
+
+ cmd = upload_docs(dist)
+ cmd.upload_dir = self.upload_dir
+ zip_file = cmd.create_zipfile()
+
+ assert zipfile.is_zipfile(zip_file)
+
+ zip_f = zipfile.ZipFile(zip_file) # woh...
+
+ assert zip_f.namelist() == ['index.html']
+
+
diff --git a/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/site.py b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/site.py
new file mode 100755
index 00000000..80e084b2
--- /dev/null
+++ b/lib/python2.7/site-packages/distribute-0.6.14-py2.7.egg/site.py
@@ -0,0 +1,82 @@
+def __boot():
+ import sys, imp, os, os.path
+ PYTHONPATH = os.environ.get('PYTHONPATH')
+ if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH):
+ PYTHONPATH = []
+ else:
+ PYTHONPATH = PYTHONPATH.split(os.pathsep)
+
+ pic = getattr(sys,'path_importer_cache',{})
+ stdpath = sys.path[len(PYTHONPATH):]
+ mydir = os.path.dirname(__file__)
+ #print "searching",stdpath,sys.path
+
+ for item in stdpath:
+ if item==mydir or not item:
+ continue # skip if current dir. on Windows, or my own directory
+ importer = pic.get(item)
+ if importer is not None:
+ loader = importer.find_module('site')
+ if loader is not None:
+ # This should actually reload the current module
+ loader.load_module('site')
+ break
+ else:
+ try:
+ stream, path, descr = imp.find_module('site',[item])
+ except ImportError:
+ continue
+ if stream is None:
+ continue
+ try:
+ # This should actually reload the current module
+ imp.load_module('site',stream,path,descr)
+ finally:
+ stream.close()
+ break
+ else:
+ raise ImportError("Couldn't find the real 'site' module")
+
+ #print "loaded", __file__
+
+ known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp
+
+ oldpos = getattr(sys,'__egginsert',0) # save old insertion position
+ sys.__egginsert = 0 # and reset the current one
+
+ for item in PYTHONPATH:
+ addsitedir(item)
+
+ sys.__egginsert += oldpos # restore effective old position
+
+ d,nd = makepath(stdpath[0])
+ insert_at = None
+ new_path = []
+
+ for item in sys.path:
+ p,np = makepath(item)
+
+ if np==nd and insert_at is None:
+ # We've hit the first 'system' path entry, so added entries go here
+ insert_at = len(new_path)
+
+ if np in known_paths or insert_at is None:
+ new_path.append(item)
+ else:
+ # new path after the insert point, back-insert it
+ new_path.insert(insert_at, item)
+ insert_at += 1
+
+ sys.path[:] = new_path
+
+if __name__=='site':
+ __boot()
+ del __boot
+
+
+
+
+
+
+
+
diff --git a/lib/python2.7/site-packages/easy-install.pth b/lib/python2.7/site-packages/easy-install.pth
new file mode 100644
index 00000000..492c156f
--- /dev/null
+++ b/lib/python2.7/site-packages/easy-install.pth
@@ -0,0 +1,4 @@
+import sys; sys.__plen = len(sys.path)
+./distribute-0.6.14-py2.7.egg
+./pip-0.8.1-py2.7.egg
+import sys; new=sys.path[sys.__plen:]; del sys.path[sys.__plen:]; p=getattr(sys,'__egginsert',0); sys.path[p:p]=new; sys.__egginsert = p+len(new)
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/PKG-INFO b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/PKG-INFO
new file mode 100644
index 00000000..fb0a3680
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/PKG-INFO
@@ -0,0 +1,348 @@
+Metadata-Version: 1.0
+Name: pip
+Version: 0.8.1
+Summary: pip installs packages. Python packages. An easy_install replacement
+Home-page: http://pip.openplans.org
+Author: Ian Bicking
+Author-email: python-virtualenv@groups.google.com
+License: MIT
+Description: The main website for pip is `pip.openplans.org
+ <http://pip.openplans.org>`_. You can also install
+ the `in-development version <http://bitbucket.org/ianb/pip/get/tip.gz#egg=pip-dev>`_
+ of pip with ``easy_install pip==dev``.
+
+
+ Introduction
+ ------------
+
+ pip installs packages. Python packages.
+
+ If you use `virtualenv <http://virtualenv.openplans.org>`__ -- a tool
+ for installing libraries in a local and isolated manner -- you'll
+ automatically get a copy of pip. Free bonus!
+
+ Once you have pip, you can use it like this::
+
+ $ pip install SomePackage
+
+ SomePackage is some package you'll find on `PyPI
+ <http://pypi.python.org/pypi/>`_. This installs the package and all
+ its dependencies.
+
+ pip does other stuff too, with packages, but install is the biggest
+ one. You can ``pip uninstall`` too.
+
+ You can also install from a URL (that points to a tar or zip file),
+ install from some version control system (use URLs like
+ ``hg+http://domain/repo`` -- or prefix ``git+``, ``svn+`` etc). pip
+ knows a bunch of stuff about revisions and stuff, so if you need to do
+ things like install a very specific revision from a repository pip can
+ do that too.
+
+ If you've ever used ``python setup.py develop``, you can do something
+ like that with ``pip install -e ./`` -- this works with packages that
+ use ``distutils`` too (usually this only works with Setuptools
+ projects).
+
+ You can use ``pip install --upgrade SomePackage`` to upgrade to a
+ newer version, or ``pip install SomePackage==1.0.4`` to install a very
+ specific version.
+
+ Pip Compared To easy_install
+ ----------------------------
+
+ pip is a replacement for `easy_install
+ <http://peak.telecommunity.com/DevCenter/EasyInstall>`_. It uses mostly the
+ same techniques for finding packages, so packages that were made
+ easy_installable should be pip-installable as well.
+
+ pip is meant to improve on easy_install. Some of the improvements:
+
+ * All packages are downloaded before installation. Partially-completed
+ installation doesn't occur as a result.
+
+ * Care is taken to present useful output on the console.
+
+ * The reasons for actions are kept track of. For instance, if a package is
+ being installed, pip keeps track of why that package was required.
+
+ * Error messages should be useful.
+
+ * The code is relatively concise and cohesive, making it easier to use
+ programmatically.
+
+ * Packages don't have to be installed as egg archives, they can be installed
+ flat (while keeping the egg metadata).
+
+ * Native support for other version control systems (Git, Mercurial and Bazaar)
+
+ * Uninstallation of packages.
+
+ * Simple to define fixed sets of requirements and reliably reproduce a
+ set of packages.
+
+ pip doesn't do everything that easy_install does. Specifically:
+
+ * It cannot install from eggs. It only installs from source. (In the
+ future it would be good if it could install binaries from Windows ``.exe``
+ or ``.msi`` -- binary install on other platforms is not a priority.)
+
+ * It doesn't understand Setuptools extras (like ``package[test]``). This should
+ be added eventually.
+
+ * It is incompatible with some packages that extensively customize distutils
+ or setuptools in their ``setup.py`` files.
+
+ pip is complementary with `virtualenv
+ <http://pypi.python.org/pypi/virtualenv>`__, and it is encouraged that you use
+ virtualenv to isolate your installation.
+
+ Community
+ ---------
+
+ The homepage for pip is temporarily located `on PyPI
+ <http://pypi.python.org/pypi/pip>`_ -- a more proper homepage will
+ follow. Bugs can go on the `pip issue tracker
+ <http://bitbucket.org/ianb/pip/issues/>`_. Discussion should happen on the
+ `virtualenv email group
+ <http://groups.google.com/group/python-virtualenv?hl=en>`_.
+
+ Uninstall
+ ---------
+
+ pip is able to uninstall most installed packages with ``pip uninstall
+ package-name``.
+
+ Known exceptions include pure-distutils packages installed with
+ ``python setup.py install`` (such packages leave behind no metadata allowing
+ determination of what files were installed), and script wrappers installed
+ by develop-installs (``python setup.py develop``).
+
+ pip also performs an automatic uninstall of an old version of a package
+ before upgrading to a newer version, so outdated files (and egg-info data)
+ from conflicting versions aren't left hanging around to cause trouble. The
+ old version of the package is automatically restored if the new version
+ fails to download or install.
+
+ .. _`requirements file`:
+
+ Requirements Files
+ ------------------
+
+ When installing software, and Python packages in particular, it's common that
+ you get a lot of libraries installed. You just did ``easy_install MyPackage``
+ and you get a dozen packages. Each of these packages has its own version.
+
+ Maybe you ran that installation and it works. Great! Will it keep working?
+ Did you have to provide special options to get it to find everything? Did you
+ have to install a bunch of other optional pieces? Most of all, will you be able
+ to do it again? Requirements files give you a way to create an *environment*:
+ a *set* of packages that work together.
+
+ If you've ever tried to setup an application on a new system, or with slightly
+ updated pieces, and had it fail, pip requirements are for you. If you
+ haven't had this problem then you will eventually, so pip requirements are
+ for you too -- requirements make explicit, repeatable installation of packages.
+
+ So what are requirements files? They are very simple: lists of packages to
+ install. Instead of running something like ``pip MyApp`` and getting
+ whatever libraries come along, you can create a requirements file something like::
+
+ MyApp
+ Framework==0.9.4
+ Library>=0.2
+
+ Then, regardless of what MyApp lists in ``setup.py``, you'll get a
+ specific version of Framework (0.9.4) and at least the 0.2 version of
+ Library. (You might think you could list these specific versions in
+ MyApp's ``setup.py`` -- but if you do that you'll have to edit MyApp
+ if you want to try a new version of Framework, or release a new
+ version of MyApp if you determine that Library 0.3 doesn't work with
+ your application.) You can also add optional libraries and support
+ tools that MyApp doesn't strictly require, giving people a set of
+ recommended libraries.
+
+ You can also include "editable" packages -- packages that are checked out from
+ Subversion, Git, Mercurial and Bazaar. These are just like using the ``-e``
+ option to pip. They look like::
+
+ -e svn+http://myrepo/svn/MyApp#egg=MyApp
+
+ You have to start the URL with ``svn+`` (``git+``, ``hg+`` or ``bzr+``), and
+ you have to include ``#egg=Package`` so pip knows what to expect at that URL.
+ You can also include ``@rev`` in the URL, e.g., ``@275`` to check out
+ revision 275.
+
+ Requirement files are mostly *flat*. Maybe ``MyApp`` requires
+ ``Framework``, and ``Framework`` requires ``Library``. I encourage
+ you to still list all these in a single requirement file; it is the
+ nature of Python programs that there are implicit bindings *directly*
+ between MyApp and Library. For instance, Framework might expose one
+ of Library's objects, and so if Library is updated it might directly
+ break MyApp. If that happens you can update the requirements file to
+ force an earlier version of Library, and you can do that without
+ having to re-release MyApp at all.
+
+ Read the `requirements file format <http://pip.openplans.org/requirement-format.html>`_ to
+ learn about other features.
+
+ Freezing Requirements
+ ---------------------
+
+ So you have a working set of packages, and you want to be able to install them
+ elsewhere. `Requirements files`_ let you install exact versions, but it won't
+ tell you what all the exact versions are.
+
+ To create a new requirements file from a known working environment, use::
+
+ $ pip freeze > stable-req.txt
+
+ This will write a listing of *all* installed libraries to ``stable-req.txt``
+ with exact versions for every library. You may want to edit the file down after
+ generating (e.g., to eliminate unnecessary libraries), but it'll give you a
+ stable starting point for constructing your requirements file.
+
+ You can also give it an existing requirements file, and it will use that as a
+ sort of template for the new file. So if you do::
+
+ $ pip freeze -r devel-req.txt > stable-req.txt
+
+ it will keep the packages listed in ``devel-req.txt`` in order and preserve
+ comments.
+
+ Bundles
+ -------
+
+ Another way to distribute a set of libraries is a bundle format (specific to
+ pip). This format is not stable at this time (there simply hasn't been
+ any feedback, nor a great deal of thought). A bundle file contains all the
+ source for your package, and you can have pip install them all together.
+ Once you have the bundle file further network access won't be necessary. To
+ build a bundle file, do::
+
+ $ pip bundle MyApp.pybundle MyApp
+
+ (Using a `requirements file`_ would be wise.) Then someone else can get the
+ file ``MyApp.pybundle`` and run::
+
+ $ pip install MyApp.pybundle
+
+ This is *not* a binary format. This only packages source. If you have binary
+ packages, then the person who installs the files will have to have a compiler,
+ any necessary headers installed, etc. Binary packages are hard, this is
+ relatively easy.
+
+ Using pip with virtualenv
+ -------------------------
+
+ pip is most nutritious when used with `virtualenv
+ <http://pypi.python.org/pypi/virtualenv>`__. One of the reasons pip
+ doesn't install "multi-version" eggs is that virtualenv removes much of the need
+ for it. Because pip is installed by virtualenv, just use
+ ``path/to/my/environment/bin/pip`` to install things into that
+ specific environment.
+
+ To tell pip to only run if there is a virtualenv currently activated,
+ and to bail if not, use::
+
+ export PIP_REQUIRE_VIRTUALENV=true
+
+ To tell pip to automatically use the currently active virtualenv::
+
+ export PIP_RESPECT_VIRTUALENV=true
+
+ Providing an environment with ``-E`` will be ignored.
+
+ Using pip with virtualenvwrapper
+ ---------------------------------
+
+ If you are using `virtualenvwrapper
+ <http://www.doughellmann.com/projects/virtualenvwrapper/>`_, you might
+ want pip to automatically create its virtualenvs in your
+ ``$WORKON_HOME``.
+
+ You can tell pip to do so by defining ``PIP_VIRTUALENV_BASE`` in your
+ environment and setting it to the same value as that of
+ ``$WORKON_HOME``.
+
+ Do so by adding the line::
+
+ export PIP_VIRTUALENV_BASE=$WORKON_HOME
+
+ in your .bashrc under the line starting with ``export WORKON_HOME``.
+
+ Using pip with buildout
+ -----------------------
+
+ If you are using `zc.buildout
+ <http://pypi.python.org/pypi/zc.buildout>`_ you should look at
+ `gp.recipe.pip <http://pypi.python.org/pypi/gp.recipe.pip>`_ as an
+ option to use pip and virtualenv in your buildouts.
+
+ Command line completion
+ -----------------------
+
+ pip comes with support for command line completion in bash and zsh and
+ allows you tab complete commands and options. To enable it you simply
+ need copy the required shell script to the your shell startup file
+ (e.g. ``.profile`` or ``.zprofile``) by running the special ``completion``
+ command, e.g. for bash::
+
+ $ pip completion --bash >> ~/.profile
+
+ And for zsh::
+
+ $ pip completion --zsh >> ~/.zprofile
+
+ Alternatively, you can use the result of the ``completion`` command
+ directly with the eval function of you shell, e.g. by adding::
+
+ eval "`pip completion --bash`"
+
+ to your startup file.
+
+ Searching for packages
+ ----------------------
+
+ pip can search the `Python Package Index <http://pypi.python.org/pypi>`_ (PyPI)
+ for packages using the ``pip search`` command. To search, run::
+
+ $ pip search "query"
+
+ The query will be used to search the names and summaries of all packages
+ indexed.
+
+ pip searches http://pypi.python.org/pypi by default but alternative indexes
+ can be searched by using the ``--index`` flag.
+
+ Mirror support
+ --------------
+
+ The `PyPI mirroring infrastructure <http://pypi.python.org/mirrors>`_ as
+ described in `PEP 381 <http://www.python.org/dev/peps/pep-0381/>`_ can be
+ used by passing the ``--use-mirrors`` option to the install command.
+ Alternatively, you can use the other ways to configure pip, e.g.::
+
+ $ export PIP_USE_MIRRORS=true
+
+ If enabled, pip will automatically query the DNS entry of the mirror index URL
+ to find the list of mirrors to use. In case you want to override this list,
+ please use the ``--mirrors`` option of the install command, or add to your pip
+ configuration file::
+
+ [install]
+ use-mirrors = true
+ mirrors =
+ http://d.pypi.python.org
+ http://b.pypi.python.org
+
+Keywords: easy_install distutils setuptools egg virtualenv
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Topic :: Software Development :: Build Tools
+Classifier: Programming Language :: Python :: 2.4
+Classifier: Programming Language :: Python :: 2.5
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/SOURCES.txt b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/SOURCES.txt
new file mode 100644
index 00000000..3a068547
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/SOURCES.txt
@@ -0,0 +1,57 @@
+MANIFEST.in
+setup.cfg
+setup.py
+docs/branches.txt
+docs/ci-server-step-by-step.txt
+docs/configuration.txt
+docs/how-to-contribute.txt
+docs/index.txt
+docs/license.txt
+docs/news.txt
+docs/requirement-format.txt
+docs/running-tests.txt
+docs/_build/branches.html
+docs/_build/ci-server-step-by-step.html
+docs/_build/configuration.html
+docs/_build/how-to-contribute.html
+docs/_build/index.html
+docs/_build/license.html
+docs/_build/news.html
+docs/_build/requirement-format.html
+docs/_build/running-tests.html
+docs/_build/search.html
+pip/__init__.py
+pip/_pkgutil.py
+pip/backwardcompat.py
+pip/basecommand.py
+pip/baseparser.py
+pip/download.py
+pip/exceptions.py
+pip/index.py
+pip/locations.py
+pip/log.py
+pip/req.py
+pip/runner.py
+pip/util.py
+pip/venv.py
+pip.egg-info/PKG-INFO
+pip.egg-info/SOURCES.txt
+pip.egg-info/dependency_links.txt
+pip.egg-info/entry_points.txt
+pip.egg-info/not-zip-safe
+pip.egg-info/top_level.txt
+pip/commands/__init__.py
+pip/commands/bundle.py
+pip/commands/completion.py
+pip/commands/freeze.py
+pip/commands/help.py
+pip/commands/install.py
+pip/commands/search.py
+pip/commands/uninstall.py
+pip/commands/unzip.py
+pip/commands/zip.py
+pip/vcs/__init__.py
+pip/vcs/bazaar.py
+pip/vcs/git.py
+pip/vcs/mercurial.py
+pip/vcs/subversion.py \ No newline at end of file
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/dependency_links.txt b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/dependency_links.txt
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/entry_points.txt b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/entry_points.txt
new file mode 100644
index 00000000..5f7b7cf9
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/entry_points.txt
@@ -0,0 +1,4 @@
+[console_scripts]
+pip = pip:main
+pip-2.7 = pip:main
+
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/not-zip-safe b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/not-zip-safe
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/not-zip-safe
@@ -0,0 +1 @@
+
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/top_level.txt b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/top_level.txt
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/EGG-INFO/top_level.txt
@@ -0,0 +1 @@
+pip
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/__init__.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/__init__.py
new file mode 100755
index 00000000..c5de5c9a
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/__init__.py
@@ -0,0 +1,261 @@
+#!/usr/bin/env python
+import os
+import optparse
+
+import subprocess
+import sys
+import re
+import difflib
+
+from pip.basecommand import command_dict, load_command, load_all_commands, command_names
+from pip.baseparser import parser
+from pip.exceptions import InstallationError
+from pip.log import logger
+from pip.util import get_installed_distributions
+from pip.backwardcompat import walk_packages
+
+
+def autocomplete():
+ """Command and option completion for the main option parser (and options)
+ and its subcommands (and options).
+
+ Enable by sourcing one of the completion shell scripts (bash or zsh).
+ """
+ # Don't complete if user hasn't sourced bash_completion file.
+ if 'PIP_AUTO_COMPLETE' not in os.environ:
+ return
+ cwords = os.environ['COMP_WORDS'].split()[1:]
+ cword = int(os.environ['COMP_CWORD'])
+ try:
+ current = cwords[cword-1]
+ except IndexError:
+ current = ''
+ load_all_commands()
+ subcommands = [cmd for cmd, cls in command_dict.items() if not cls.hidden]
+ options = []
+ # subcommand
+ try:
+ subcommand_name = [w for w in cwords if w in subcommands][0]
+ except IndexError:
+ subcommand_name = None
+ # subcommand options
+ if subcommand_name:
+ # special case: 'help' subcommand has no options
+ if subcommand_name == 'help':
+ sys.exit(1)
+ # special case: list locally installed dists for uninstall command
+ if subcommand_name == 'uninstall' and not current.startswith('-'):
+ installed = []
+ lc = current.lower()
+ for dist in get_installed_distributions(local_only=True):
+ if dist.key.startswith(lc) and dist.key not in cwords[1:]:
+ installed.append(dist.key)
+ # if there are no dists installed, fall back to option completion
+ if installed:
+ for dist in installed:
+ print dist
+ sys.exit(1)
+ subcommand = command_dict.get(subcommand_name)
+ options += [(opt.get_opt_string(), opt.nargs)
+ for opt in subcommand.parser.option_list
+ if opt.help != optparse.SUPPRESS_HELP]
+ # filter out previously specified options from available options
+ prev_opts = [x.split('=')[0] for x in cwords[1:cword-1]]
+ options = filter(lambda (x, v): x not in prev_opts, options)
+ # filter options by current input
+ options = [(k, v) for k, v in options if k.startswith(current)]
+ for option in options:
+ opt_label = option[0]
+ # append '=' to options which require args
+ if option[1]:
+ opt_label += '='
+ print opt_label
+ else:
+ # show options of main parser only when necessary
+ if current.startswith('-') or current.startswith('--'):
+ subcommands += [opt.get_opt_string()
+ for opt in parser.option_list
+ if opt.help != optparse.SUPPRESS_HELP]
+ print ' '.join(filter(lambda x: x.startswith(current), subcommands))
+ sys.exit(1)
+
+
+def version_control():
+ # Import all the version control support modules:
+ from pip import vcs
+ for importer, modname, ispkg in \
+ walk_packages(path=vcs.__path__, prefix=vcs.__name__+'.'):
+ __import__(modname)
+
+
+def main(initial_args=None):
+ if initial_args is None:
+ initial_args = sys.argv[1:]
+ autocomplete()
+ version_control()
+ options, args = parser.parse_args(initial_args)
+ if options.help and not args:
+ args = ['help']
+ if not args:
+ parser.error('You must give a command (use "pip help" to see a list of commands)')
+ command = args[0].lower()
+ load_command(command)
+ if command not in command_dict:
+ close_commands = difflib.get_close_matches(command, command_names())
+ if close_commands:
+ guess = close_commands[0]
+ if args[1:]:
+ guess = "%s %s" % (guess, " ".join(args[1:]))
+ else:
+ guess = 'install %s' % command
+ error_dict = {'arg': command, 'guess': guess,
+ 'script': os.path.basename(sys.argv[0])}
+ parser.error('No command by the name %(script)s %(arg)s\n '
+ '(maybe you meant "%(script)s %(guess)s")' % error_dict)
+ command = command_dict[command]
+ return command.main(initial_args, args[1:], options)
+
+
+############################################################
+## Writing freeze files
+
+
+class FrozenRequirement(object):
+
+ def __init__(self, name, req, editable, comments=()):
+ self.name = name
+ self.req = req
+ self.editable = editable
+ self.comments = comments
+
+ _rev_re = re.compile(r'-r(\d+)$')
+ _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')
+
+ @classmethod
+ def from_dist(cls, dist, dependency_links, find_tags=False):
+ location = os.path.normcase(os.path.abspath(dist.location))
+ comments = []
+ from pip.vcs import vcs, get_src_requirement
+ if vcs.get_backend_name(location):
+ editable = True
+ req = get_src_requirement(dist, location, find_tags)
+ if req is None:
+ logger.warn('Could not determine repository location of %s' % location)
+ comments.append('## !! Could not determine repository location')
+ req = dist.as_requirement()
+ editable = False
+ else:
+ editable = False
+ req = dist.as_requirement()
+ specs = req.specs
+ assert len(specs) == 1 and specs[0][0] == '=='
+ version = specs[0][1]
+ ver_match = cls._rev_re.search(version)
+ date_match = cls._date_re.search(version)
+ if ver_match or date_match:
+ svn_backend = vcs.get_backend('svn')
+ if svn_backend:
+ svn_location = svn_backend(
+ ).get_location(dist, dependency_links)
+ if not svn_location:
+ logger.warn(
+ 'Warning: cannot find svn location for %s' % req)
+ comments.append('## FIXME: could not find svn URL in dependency_links for this package:')
+ else:
+ comments.append('# Installing as editable to satisfy requirement %s:' % req)
+ if ver_match:
+ rev = ver_match.group(1)
+ else:
+ rev = '{%s}' % date_match.group(1)
+ editable = True
+ req = '%s@%s#egg=%s' % (svn_location, rev, cls.egg_name(dist))
+ return cls(dist.project_name, req, editable, comments)
+
+ @staticmethod
+ def egg_name(dist):
+ name = dist.egg_name()
+ match = re.search(r'-py\d\.\d$', name)
+ if match:
+ name = name[:match.start()]
+ return name
+
+ def __str__(self):
+ req = self.req
+ if self.editable:
+ req = '-e %s' % req
+ return '\n'.join(list(self.comments)+[str(req)])+'\n'
+
+############################################################
+## Requirement files
+
+
+def call_subprocess(cmd, show_stdout=True,
+ filter_stdout=None, cwd=None,
+ raise_on_returncode=True,
+ command_level=logger.DEBUG, command_desc=None,
+ extra_environ=None):
+ if command_desc is None:
+ cmd_parts = []
+ for part in cmd:
+ if ' ' in part or '\n' in part or '"' in part or "'" in part:
+ part = '"%s"' % part.replace('"', '\\"')
+ cmd_parts.append(part)
+ command_desc = ' '.join(cmd_parts)
+ if show_stdout:
+ stdout = None
+ else:
+ stdout = subprocess.PIPE
+ logger.log(command_level, "Running command %s" % command_desc)
+ env = os.environ.copy()
+ if extra_environ:
+ env.update(extra_environ)
+ try:
+ proc = subprocess.Popen(
+ cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout,
+ cwd=cwd, env=env)
+ except Exception, e:
+ logger.fatal(
+ "Error %s while executing command %s" % (e, command_desc))
+ raise
+ all_output = []
+ if stdout is not None:
+ stdout = proc.stdout
+ while 1:
+ line = stdout.readline()
+ if not line:
+ break
+ line = line.rstrip()
+ all_output.append(line + '\n')
+ if filter_stdout:
+ level = filter_stdout(line)
+ if isinstance(level, tuple):
+ level, line = level
+ logger.log(level, line)
+ if not logger.stdout_level_matches(level):
+ logger.show_progress()
+ else:
+ logger.info(line)
+ else:
+ returned_stdout, returned_stderr = proc.communicate()
+ all_output = [returned_stdout or '']
+ proc.wait()
+ if proc.returncode:
+ if raise_on_returncode:
+ if all_output:
+ logger.notify('Complete output from command %s:' % command_desc)
+ logger.notify('\n'.join(all_output) + '\n----------------------------------------')
+ raise InstallationError(
+ "Command %s failed with error code %s"
+ % (command_desc, proc.returncode))
+ else:
+ logger.warn(
+ "Command %s had error code %s"
+ % (command_desc, proc.returncode))
+ if stdout is not None:
+ return ''.join(all_output)
+
+
+if __name__ == '__main__':
+ exit = main()
+ if exit:
+ sys.exit(exit)
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/_pkgutil.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/_pkgutil.py
new file mode 100755
index 00000000..f8fb8aa6
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/_pkgutil.py
@@ -0,0 +1,589 @@
+"""Utilities to support packages."""
+
+# NOTE: This module must remain compatible with Python 2.3, as it is shared
+# by setuptools for distribution with Python 2.3 and up.
+
+import os
+import sys
+import imp
+import os.path
+from types import ModuleType
+
+__all__ = [
+ 'get_importer', 'iter_importers', 'get_loader', 'find_loader',
+ 'walk_packages', 'iter_modules',
+ 'ImpImporter', 'ImpLoader', 'read_code', 'extend_path',
+]
+
+
+def read_code(stream):
+ # This helper is needed in order for the PEP 302 emulation to
+ # correctly handle compiled files
+ import marshal
+
+ magic = stream.read(4)
+ if magic != imp.get_magic():
+ return None
+
+ stream.read(4) # Skip timestamp
+ return marshal.load(stream)
+
+
+def simplegeneric(func):
+ """Make a trivial single-dispatch generic function"""
+ registry = {}
+
+ def wrapper(*args, **kw):
+ ob = args[0]
+ try:
+ cls = ob.__class__
+ except AttributeError:
+ cls = type(ob)
+ try:
+ mro = cls.__mro__
+ except AttributeError:
+ try:
+
+ class cls(cls, object):
+ pass
+
+ mro = cls.__mro__[1:]
+ except TypeError:
+ mro = object, # must be an ExtensionClass or some such :(
+ for t in mro:
+ if t in registry:
+ return registry[t](*args, **kw)
+ else:
+ return func(*args, **kw)
+ try:
+ wrapper.__name__ = func.__name__
+ except (TypeError, AttributeError):
+ pass # Python 2.3 doesn't allow functions to be renamed
+
+ def register(typ, func=None):
+ if func is None:
+ return lambda f: register(typ, f)
+ registry[typ] = func
+ return func
+
+ wrapper.__dict__ = func.__dict__
+ wrapper.__doc__ = func.__doc__
+ wrapper.register = register
+ return wrapper
+
+
+def walk_packages(path=None, prefix='', onerror=None):
+ """Yields (module_loader, name, ispkg) for all modules recursively
+ on path, or, if path is None, all accessible modules.
+
+ 'path' should be either None or a list of paths to look for
+ modules in.
+
+ 'prefix' is a string to output on the front of every module name
+ on output.
+
+ Note that this function must import all *packages* (NOT all
+ modules!) on the given path, in order to access the __path__
+ attribute to find submodules.
+
+ 'onerror' is a function which gets called with one argument (the
+ name of the package which was being imported) if any exception
+ occurs while trying to import a package. If no onerror function is
+ supplied, ImportErrors are caught and ignored, while all other
+ exceptions are propagated, terminating the search.
+
+ Examples:
+
+ # list all modules python can access
+ walk_packages()
+
+ # list all submodules of ctypes
+ walk_packages(ctypes.__path__, ctypes.__name__+'.')
+ """
+
+ def seen(p, m={}):
+ if p in m:
+ return True
+ m[p] = True
+
+ for importer, name, ispkg in iter_modules(path, prefix):
+ yield importer, name, ispkg
+
+ if ispkg:
+ try:
+ __import__(name)
+ except ImportError:
+ if onerror is not None:
+ onerror(name)
+ except Exception:
+ if onerror is not None:
+ onerror(name)
+ else:
+ raise
+ else:
+ path = getattr(sys.modules[name], '__path__', None) or []
+
+ # don't traverse path items we've seen before
+ path = [p for p in path if not seen(p)]
+
+ for item in walk_packages(path, name+'.', onerror):
+ yield item
+
+
+def iter_modules(path=None, prefix=''):
+ """Yields (module_loader, name, ispkg) for all submodules on path,
+ or, if path is None, all top-level modules on sys.path.
+
+ 'path' should be either None or a list of paths to look for
+ modules in.
+
+ 'prefix' is a string to output on the front of every module name
+ on output.
+ """
+
+ if path is None:
+ importers = iter_importers()
+ else:
+ importers = map(get_importer, path)
+
+ yielded = {}
+ for i in importers:
+ for name, ispkg in iter_importer_modules(i, prefix):
+ if name not in yielded:
+ yielded[name] = 1
+ yield i, name, ispkg
+
+
+#@simplegeneric
+def iter_importer_modules(importer, prefix=''):
+ if not hasattr(importer, 'iter_modules'):
+ return []
+ return importer.iter_modules(prefix)
+
+iter_importer_modules = simplegeneric(iter_importer_modules)
+
+
+class ImpImporter:
+ """PEP 302 Importer that wraps Python's "classic" import algorithm
+
+ ImpImporter(dirname) produces a PEP 302 importer that searches that
+ directory. ImpImporter(None) produces a PEP 302 importer that searches
+ the current sys.path, plus any modules that are frozen or built-in.
+
+ Note that ImpImporter does not currently support being used by placement
+ on sys.meta_path.
+ """
+
+ def __init__(self, path=None):
+ self.path = path
+
+ def find_module(self, fullname, path=None):
+ # Note: we ignore 'path' argument since it is only used via meta_path
+ subname = fullname.split(".")[-1]
+ if subname != fullname and self.path is None:
+ return None
+ if self.path is None:
+ path = None
+ else:
+ path = [os.path.realpath(self.path)]
+ try:
+ file, filename, etc = imp.find_module(subname, path)
+ except ImportError:
+ return None
+ return ImpLoader(fullname, file, filename, etc)
+
+ def iter_modules(self, prefix=''):
+ if self.path is None or not os.path.isdir(self.path):
+ return
+
+ yielded = {}
+ import inspect
+
+ filenames = os.listdir(self.path)
+ filenames.sort() # handle packages before same-named modules
+
+ for fn in filenames:
+ modname = inspect.getmodulename(fn)
+ if modname=='__init__' or modname in yielded:
+ continue
+
+ path = os.path.join(self.path, fn)
+ ispkg = False
+
+ if not modname and os.path.isdir(path) and '.' not in fn:
+ modname = fn
+ for fn in os.listdir(path):
+ subname = inspect.getmodulename(fn)
+ if subname=='__init__':
+ ispkg = True
+ break
+ else:
+ continue # not a package
+
+ if modname and '.' not in modname:
+ yielded[modname] = 1
+ yield prefix + modname, ispkg
+
+
+class ImpLoader:
+ """PEP 302 Loader that wraps Python's "classic" import algorithm
+ """
+ code = source = None
+
+ def __init__(self, fullname, file, filename, etc):
+ self.file = file
+ self.filename = filename
+ self.fullname = fullname
+ self.etc = etc
+
+ def load_module(self, fullname):
+ self._reopen()
+ try:
+ mod = imp.load_module(fullname, self.file, self.filename, self.etc)
+ finally:
+ if self.file:
+ self.file.close()
+ # Note: we don't set __loader__ because we want the module to look
+ # normal; i.e. this is just a wrapper for standard import machinery
+ return mod
+
+ def get_data(self, pathname):
+ return open(pathname, "rb").read()
+
+ def _reopen(self):
+ if self.file and self.file.closed:
+ mod_type = self.etc[2]
+ if mod_type==imp.PY_SOURCE:
+ self.file = open(self.filename, 'rU')
+ elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION):
+ self.file = open(self.filename, 'rb')
+
+ def _fix_name(self, fullname):
+ if fullname is None:
+ fullname = self.fullname
+ elif fullname != self.fullname:
+ raise ImportError("Loader for module %s cannot handle "
+ "module %s" % (self.fullname, fullname))
+ return fullname
+
+ def is_package(self, fullname):
+ fullname = self._fix_name(fullname)
+ return self.etc[2]==imp.PKG_DIRECTORY
+
+ def get_code(self, fullname=None):
+ fullname = self._fix_name(fullname)
+ if self.code is None:
+ mod_type = self.etc[2]
+ if mod_type==imp.PY_SOURCE:
+ source = self.get_source(fullname)
+ self.code = compile(source, self.filename, 'exec')
+ elif mod_type==imp.PY_COMPILED:
+ self._reopen()
+ try:
+ self.code = read_code(self.file)
+ finally:
+ self.file.close()
+ elif mod_type==imp.PKG_DIRECTORY:
+ self.code = self._get_delegate().get_code()
+ return self.code
+
+ def get_source(self, fullname=None):
+ fullname = self._fix_name(fullname)
+ if self.source is None:
+ mod_type = self.etc[2]
+ if mod_type==imp.PY_SOURCE:
+ self._reopen()
+ try:
+ self.source = self.file.read()
+ finally:
+ self.file.close()
+ elif mod_type==imp.PY_COMPILED:
+ if os.path.exists(self.filename[:-1]):
+ f = open(self.filename[:-1], 'rU')
+ self.source = f.read()
+ f.close()
+ elif mod_type==imp.PKG_DIRECTORY:
+ self.source = self._get_delegate().get_source()
+ return self.source
+
+ def _get_delegate(self):
+ return ImpImporter(self.filename).find_module('__init__')
+
+ def get_filename(self, fullname=None):
+ fullname = self._fix_name(fullname)
+ mod_type = self.etc[2]
+ if self.etc[2]==imp.PKG_DIRECTORY:
+ return self._get_delegate().get_filename()
+ elif self.etc[2] in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION):
+ return self.filename
+ return None
+
+
+try:
+ import zipimport
+ from zipimport import zipimporter
+
+ def iter_zipimport_modules(importer, prefix=''):
+ dirlist = zipimport._zip_directory_cache[importer.archive].keys()
+ dirlist.sort()
+ _prefix = importer.prefix
+ plen = len(_prefix)
+ yielded = {}
+ import inspect
+ for fn in dirlist:
+ if not fn.startswith(_prefix):
+ continue
+
+ fn = fn[plen:].split(os.sep)
+
+ if len(fn)==2 and fn[1].startswith('__init__.py'):
+ if fn[0] not in yielded:
+ yielded[fn[0]] = 1
+ yield fn[0], True
+
+ if len(fn)!=1:
+ continue
+
+ modname = inspect.getmodulename(fn[0])
+ if modname=='__init__':
+ continue
+
+ if modname and '.' not in modname and modname not in yielded:
+ yielded[modname] = 1
+ yield prefix + modname, False
+
+ iter_importer_modules.register(zipimporter, iter_zipimport_modules)
+
+except ImportError:
+ pass
+
+
+def get_importer(path_item):
+ """Retrieve a PEP 302 importer for the given path item
+
+ The returned importer is cached in sys.path_importer_cache
+ if it was newly created by a path hook.
+
+ If there is no importer, a wrapper around the basic import
+ machinery is returned. This wrapper is never inserted into
+ the importer cache (None is inserted instead).
+
+ The cache (or part of it) can be cleared manually if a
+ rescan of sys.path_hooks is necessary.
+ """
+ try:
+ importer = sys.path_importer_cache[path_item]
+ except KeyError:
+ for path_hook in sys.path_hooks:
+ try:
+ importer = path_hook(path_item)
+ break
+ except ImportError:
+ pass
+ else:
+ importer = None
+ sys.path_importer_cache.setdefault(path_item, importer)
+
+ if importer is None:
+ try:
+ importer = ImpImporter(path_item)
+ except ImportError:
+ importer = None
+ return importer
+
+
+def iter_importers(fullname=""):
+ """Yield PEP 302 importers for the given module name
+
+ If fullname contains a '.', the importers will be for the package
+ containing fullname, otherwise they will be importers for sys.meta_path,
+ sys.path, and Python's "classic" import machinery, in that order. If
+ the named module is in a package, that package is imported as a side
+ effect of invoking this function.
+
+ Non PEP 302 mechanisms (e.g. the Windows registry) used by the
+ standard import machinery to find files in alternative locations
+ are partially supported, but are searched AFTER sys.path. Normally,
+ these locations are searched BEFORE sys.path, preventing sys.path
+ entries from shadowing them.
+
+ For this to cause a visible difference in behaviour, there must
+ be a module or package name that is accessible via both sys.path
+ and one of the non PEP 302 file system mechanisms. In this case,
+ the emulation will find the former version, while the builtin
+ import mechanism will find the latter.
+
+ Items of the following types can be affected by this discrepancy:
+ imp.C_EXTENSION, imp.PY_SOURCE, imp.PY_COMPILED, imp.PKG_DIRECTORY
+ """
+ if fullname.startswith('.'):
+ raise ImportError("Relative module names not supported")
+ if '.' in fullname:
+ # Get the containing package's __path__
+ pkg = '.'.join(fullname.split('.')[:-1])
+ if pkg not in sys.modules:
+ __import__(pkg)
+ path = getattr(sys.modules[pkg], '__path__', None) or []
+ else:
+ for importer in sys.meta_path:
+ yield importer
+ path = sys.path
+ for item in path:
+ yield get_importer(item)
+ if '.' not in fullname:
+ yield ImpImporter()
+
+
+def get_loader(module_or_name):
+ """Get a PEP 302 "loader" object for module_or_name
+
+ If the module or package is accessible via the normal import
+ mechanism, a wrapper around the relevant part of that machinery
+ is returned. Returns None if the module cannot be found or imported.
+ If the named module is not already imported, its containing package
+ (if any) is imported, in order to establish the package __path__.
+
+ This function uses iter_importers(), and is thus subject to the same
+ limitations regarding platform-specific special import locations such
+ as the Windows registry.
+ """
+ if module_or_name in sys.modules:
+ module_or_name = sys.modules[module_or_name]
+ if isinstance(module_or_name, ModuleType):
+ module = module_or_name
+ loader = getattr(module, '__loader__', None)
+ if loader is not None:
+ return loader
+ fullname = module.__name__
+ else:
+ fullname = module_or_name
+ return find_loader(fullname)
+
+
+def find_loader(fullname):
+ """Find a PEP 302 "loader" object for fullname
+
+ If fullname contains dots, path must be the containing package's __path__.
+ Returns None if the module cannot be found or imported. This function uses
+ iter_importers(), and is thus subject to the same limitations regarding
+ platform-specific special import locations such as the Windows registry.
+ """
+ for importer in iter_importers(fullname):
+ loader = importer.find_module(fullname)
+ if loader is not None:
+ return loader
+
+ return None
+
+
+def extend_path(path, name):
+ """Extend a package's path.
+
+ Intended use is to place the following code in a package's __init__.py:
+
+ from pkgutil import extend_path
+ __path__ = extend_path(__path__, __name__)
+
+ This will add to the package's __path__ all subdirectories of
+ directories on sys.path named after the package. This is useful
+ if one wants to distribute different parts of a single logical
+ package as multiple directories.
+
+ It also looks for *.pkg files beginning where * matches the name
+ argument. This feature is similar to *.pth files (see site.py),
+ except that it doesn't special-case lines starting with 'import'.
+ A *.pkg file is trusted at face value: apart from checking for
+ duplicates, all entries found in a *.pkg file are added to the
+ path, regardless of whether they are exist the filesystem. (This
+ is a feature.)
+
+ If the input path is not a list (as is the case for frozen
+ packages) it is returned unchanged. The input path is not
+ modified; an extended copy is returned. Items are only appended
+ to the copy at the end.
+
+ It is assumed that sys.path is a sequence. Items of sys.path that
+ are not (unicode or 8-bit) strings referring to existing
+ directories are ignored. Unicode items of sys.path that cause
+ errors when used as filenames may cause this function to raise an
+ exception (in line with os.path.isdir() behavior).
+ """
+
+ if not isinstance(path, list):
+ # This could happen e.g. when this is called from inside a
+ # frozen package. Return the path unchanged in that case.
+ return path
+
+ pname = os.path.join(*name.split('.')) # Reconstitute as relative path
+ # Just in case os.extsep != '.'
+ sname = os.extsep.join(name.split('.'))
+ sname_pkg = sname + os.extsep + "pkg"
+ init_py = "__init__" + os.extsep + "py"
+
+ path = path[:] # Start with a copy of the existing path
+
+ for dir in sys.path:
+ if not isinstance(dir, basestring) or not os.path.isdir(dir):
+ continue
+ subdir = os.path.join(dir, pname)
+ # XXX This may still add duplicate entries to path on
+ # case-insensitive filesystems
+ initfile = os.path.join(subdir, init_py)
+ if subdir not in path and os.path.isfile(initfile):
+ path.append(subdir)
+ # XXX Is this the right thing for subpackages like zope.app?
+ # It looks for a file named "zope.app.pkg"
+ pkgfile = os.path.join(dir, sname_pkg)
+ if os.path.isfile(pkgfile):
+ try:
+ f = open(pkgfile)
+ except IOError, msg:
+ sys.stderr.write("Can't open %s: %s\n" %
+ (pkgfile, msg))
+ else:
+ for line in f:
+ line = line.rstrip('\n')
+ if not line or line.startswith('#'):
+ continue
+ path.append(line) # Don't check for existence!
+ f.close()
+
+ return path
+
+
+def get_data(package, resource):
+ """Get a resource from a package.
+
+ This is a wrapper round the PEP 302 loader get_data API. The package
+ argument should be the name of a package, in standard module format
+ (foo.bar). The resource argument should be in the form of a relative
+ filename, using '/' as the path separator. The parent directory name '..'
+ is not allowed, and nor is a rooted name (starting with a '/').
+
+ The function returns a binary string, which is the contents of the
+ specified resource.
+
+ For packages located in the filesystem, which have already been imported,
+ this is the rough equivalent of
+
+ d = os.path.dirname(sys.modules[package].__file__)
+ data = open(os.path.join(d, resource), 'rb').read()
+
+ If the package cannot be located or loaded, or it uses a PEP 302 loader
+ which does not support get_data(), then None is returned.
+ """
+
+ loader = get_loader(package)
+ if loader is None or not hasattr(loader, 'get_data'):
+ return None
+ mod = sys.modules.get(package) or loader.load_module(package)
+ if mod is None or not hasattr(mod, '__file__'):
+ return None
+
+ # Modify the resource name to be compatible with the loader.get_data
+ # signature - an os.path format "filename" starting with the dirname of
+ # the package's __file__
+ parts = resource.split('/')
+ parts.insert(0, os.path.dirname(mod.__file__))
+ resource_name = os.path.join(*parts)
+ return loader.get_data(resource_name)
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/backwardcompat.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/backwardcompat.py
new file mode 100755
index 00000000..e7c11f1d
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/backwardcompat.py
@@ -0,0 +1,55 @@
+"""Stuff that isn't in some old versions of Python"""
+
+import sys
+import os
+import shutil
+
+__all__ = ['any', 'WindowsError', 'md5', 'copytree']
+
+try:
+ WindowsError = WindowsError
+except NameError:
+ WindowsError = None
+try:
+ from hashlib import md5
+except ImportError:
+ import md5 as md5_module
+ md5 = md5_module.new
+
+try:
+ from pkgutil import walk_packages
+except ImportError:
+ # let's fall back as long as we can
+ from _pkgutil import walk_packages
+
+try:
+ any = any
+except NameError:
+
+ def any(seq):
+ for item in seq:
+ if item:
+ return True
+ return False
+
+
+def copytree(src, dst):
+ if sys.version_info < (2, 5):
+ before_last_dir = os.path.dirname(dst)
+ if not os.path.exists(before_last_dir):
+ os.makedirs(before_last_dir)
+ shutil.copytree(src, dst)
+ shutil.copymode(src, dst)
+ else:
+ shutil.copytree(src, dst)
+
+
+def product(*args, **kwds):
+ # product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy
+ # product(range(2), repeat=3) --> 000 001 010 011 100 101 110 111
+ pools = map(tuple, args) * kwds.get('repeat', 1)
+ result = [[]]
+ for pool in pools:
+ result = [x+[y] for x in result for y in pool]
+ for prod in result:
+ yield tuple(prod)
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/basecommand.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/basecommand.py
new file mode 100755
index 00000000..f450e839
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/basecommand.py
@@ -0,0 +1,203 @@
+"""Base Command class, and related routines"""
+
+from cStringIO import StringIO
+import getpass
+import os
+import socket
+import sys
+import traceback
+import time
+import urllib
+import urllib2
+
+from pip import commands
+from pip.log import logger
+from pip.baseparser import parser, ConfigOptionParser, UpdatingDefaultsHelpFormatter
+from pip.download import urlopen
+from pip.exceptions import BadCommand, InstallationError, UninstallationError
+from pip.venv import restart_in_venv
+from pip.backwardcompat import walk_packages
+
+__all__ = ['command_dict', 'Command', 'load_all_commands',
+ 'load_command', 'command_names']
+
+command_dict = {}
+
+# for backwards compatibiliy
+get_proxy = urlopen.get_proxy
+
+
+class Command(object):
+ name = None
+ usage = None
+ hidden = False
+
+ def __init__(self):
+ assert self.name
+ self.parser = ConfigOptionParser(
+ usage=self.usage,
+ prog='%s %s' % (sys.argv[0], self.name),
+ version=parser.version,
+ formatter=UpdatingDefaultsHelpFormatter(),
+ name=self.name)
+ for option in parser.option_list:
+ if not option.dest or option.dest == 'help':
+ # -h, --version, etc
+ continue
+ self.parser.add_option(option)
+ command_dict[self.name] = self
+
+ def merge_options(self, initial_options, options):
+ # Make sure we have all global options carried over
+ for attr in ['log', 'venv', 'proxy', 'venv_base', 'require_venv',
+ 'respect_venv', 'log_explicit_levels', 'log_file',
+ 'timeout', 'default_vcs', 'skip_requirements_regex',
+ 'no_input']:
+ setattr(options, attr, getattr(initial_options, attr) or getattr(options, attr))
+ options.quiet += initial_options.quiet
+ options.verbose += initial_options.verbose
+
+ def setup_logging(self):
+ pass
+
+ def main(self, complete_args, args, initial_options):
+ options, args = self.parser.parse_args(args)
+ self.merge_options(initial_options, options)
+
+ level = 1 # Notify
+ level += options.verbose
+ level -= options.quiet
+ level = logger.level_for_integer(4-level)
+ complete_log = []
+ logger.consumers.extend(
+ [(level, sys.stdout),
+ (logger.DEBUG, complete_log.append)])
+ if options.log_explicit_levels:
+ logger.explicit_levels = True
+
+ self.setup_logging()
+
+ if options.require_venv and not options.venv:
+ # If a venv is required check if it can really be found
+ if not os.environ.get('VIRTUAL_ENV'):
+ logger.fatal('Could not find an activated virtualenv (required).')
+ sys.exit(3)
+ # Automatically install in currently activated venv if required
+ options.respect_venv = True
+
+ if args and args[-1] == '___VENV_RESTART___':
+ ## FIXME: We don't do anything this this value yet:
+ args = args[:-2]
+ options.venv = None
+ else:
+ # If given the option to respect the activated environment
+ # check if no venv is given as a command line parameter
+ if options.respect_venv and os.environ.get('VIRTUAL_ENV'):
+ if options.venv and os.path.exists(options.venv):
+ # Make sure command line venv and environmental are the same
+ if (os.path.realpath(os.path.expanduser(options.venv)) !=
+ os.path.realpath(os.environ.get('VIRTUAL_ENV'))):
+ logger.fatal("Given virtualenv (%s) doesn't match "
+ "currently activated virtualenv (%s)."
+ % (options.venv, os.environ.get('VIRTUAL_ENV')))
+ sys.exit(3)
+ else:
+ options.venv = os.environ.get('VIRTUAL_ENV')
+ logger.info('Using already activated environment %s' % options.venv)
+ if options.venv:
+ logger.info('Running in environment %s' % options.venv)
+ site_packages=False
+ if options.site_packages:
+ site_packages=True
+ restart_in_venv(options.venv, options.venv_base, site_packages,
+ complete_args)
+ # restart_in_venv should actually never return, but for clarity...
+ return
+
+ ## FIXME: not sure if this sure come before or after venv restart
+ if options.log:
+ log_fp = open_logfile(options.log, 'a')
+ logger.consumers.append((logger.DEBUG, log_fp))
+ else:
+ log_fp = None
+
+ socket.setdefaulttimeout(options.timeout or None)
+
+ urlopen.setup(proxystr=options.proxy, prompting=not options.no_input)
+
+ exit = 0
+ try:
+ self.run(options, args)
+ except (InstallationError, UninstallationError), e:
+ logger.fatal(str(e))
+ logger.info('Exception information:\n%s' % format_exc())
+ exit = 1
+ except BadCommand, e:
+ logger.fatal(str(e))
+ logger.info('Exception information:\n%s' % format_exc())
+ exit = 1
+ except:
+ logger.fatal('Exception:\n%s' % format_exc())
+ exit = 2
+
+ if log_fp is not None:
+ log_fp.close()
+ if exit:
+ log_fn = options.log_file
+ text = '\n'.join(complete_log)
+ logger.fatal('Storing complete log in %s' % log_fn)
+ log_fp = open_logfile(log_fn, 'w')
+ log_fp.write(text)
+ log_fp.close()
+ return exit
+
+
+
+
+def format_exc(exc_info=None):
+ if exc_info is None:
+ exc_info = sys.exc_info()
+ out = StringIO()
+ traceback.print_exception(*exc_info, **dict(file=out))
+ return out.getvalue()
+
+
+def open_logfile(filename, mode='a'):
+ """Open the named log file in append mode.
+
+ If the file already exists, a separator will also be printed to
+ the file to separate past activity from current activity.
+ """
+ filename = os.path.expanduser(filename)
+ filename = os.path.abspath(filename)
+ dirname = os.path.dirname(filename)
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
+ exists = os.path.exists(filename)
+
+ log_fp = open(filename, mode)
+ if exists:
+ print >> log_fp, '-'*60
+ print >> log_fp, '%s run on %s' % (sys.argv[0], time.strftime('%c'))
+ return log_fp
+
+
+def load_command(name):
+ full_name = 'pip.commands.%s' % name
+ if full_name in sys.modules:
+ return
+ try:
+ __import__(full_name)
+ except ImportError:
+ pass
+
+
+def load_all_commands():
+ for name in command_names():
+ load_command(name)
+
+
+def command_names():
+ names = set((pkg[1] for pkg in walk_packages(path=commands.__path__)))
+ return list(names)
+
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/baseparser.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/baseparser.py
new file mode 100755
index 00000000..a8bd6ce4
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/baseparser.py
@@ -0,0 +1,231 @@
+"""Base option parser setup"""
+
+import sys
+import optparse
+import pkg_resources
+import ConfigParser
+import os
+from distutils.util import strtobool
+from pip.locations import default_config_file, default_log_file
+
+
+class UpdatingDefaultsHelpFormatter(optparse.IndentedHelpFormatter):
+ """Custom help formatter for use in ConfigOptionParser that updates
+ the defaults before expanding them, allowing them to show up correctly
+ in the help listing"""
+
+ def expand_default(self, option):
+ if self.parser is not None:
+ self.parser.update_defaults(self.parser.defaults)
+ return optparse.IndentedHelpFormatter.expand_default(self, option)
+
+
+class ConfigOptionParser(optparse.OptionParser):
+ """Custom option parser which updates its defaults by by checking the
+ configuration files and environmental variables"""
+
+ def __init__(self, *args, **kwargs):
+ self.config = ConfigParser.RawConfigParser()
+ self.name = kwargs.pop('name')
+ self.files = self.get_config_files()
+ self.config.read(self.files)
+ assert self.name
+ optparse.OptionParser.__init__(self, *args, **kwargs)
+
+ def get_config_files(self):
+ config_file = os.environ.get('PIP_CONFIG_FILE', False)
+ if config_file and os.path.exists(config_file):
+ return [config_file]
+ return [default_config_file]
+
+ def update_defaults(self, defaults):
+ """Updates the given defaults with values from the config files and
+ the environ. Does a little special handling for certain types of
+ options (lists)."""
+ # Then go and look for the other sources of configuration:
+ config = {}
+ # 1. config files
+ for section in ('global', self.name):
+ config.update(dict(self.get_config_section(section)))
+ # 2. environmental variables
+ config.update(dict(self.get_environ_vars()))
+ # Then set the options with those values
+ for key, val in config.iteritems():
+ key = key.replace('_', '-')
+ if not key.startswith('--'):
+ key = '--%s' % key # only prefer long opts
+ option = self.get_option(key)
+ if option is not None:
+ # ignore empty values
+ if not val:
+ continue
+ # handle multiline configs
+ if option.action == 'append':
+ val = val.split()
+ else:
+ option.nargs = 1
+ if option.action in ('store_true', 'store_false', 'count'):
+ val = strtobool(val)
+ try:
+ val = option.convert_value(key, val)
+ except optparse.OptionValueError, e:
+ print ("An error occured during configuration: %s" % e)
+ sys.exit(3)
+ defaults[option.dest] = val
+ return defaults
+
+ def get_config_section(self, name):
+ """Get a section of a configuration"""
+ if self.config.has_section(name):
+ return self.config.items(name)
+ return []
+
+ def get_environ_vars(self, prefix='PIP_'):
+ """Returns a generator with all environmental vars with prefix PIP_"""
+ for key, val in os.environ.iteritems():
+ if key.startswith(prefix):
+ yield (key.replace(prefix, '').lower(), val)
+
+ def get_default_values(self):
+ """Overridding to make updating the defaults after instantiation of
+ the option parser possible, update_defaults() does the dirty work."""
+ if not self.process_default_values:
+ # Old, pre-Optik 1.5 behaviour.
+ return optparse.Values(self.defaults)
+
+ defaults = self.update_defaults(self.defaults.copy()) # ours
+ for option in self._get_all_options():
+ default = defaults.get(option.dest)
+ if isinstance(default, basestring):
+ opt_str = option.get_opt_string()
+ defaults[option.dest] = option.check_value(opt_str, default)
+ return optparse.Values(defaults)
+
+try:
+ pip_dist = pkg_resources.get_distribution('pip')
+ version = '%s from %s (python %s)' % (
+ pip_dist, pip_dist.location, sys.version[:3])
+except pkg_resources.DistributionNotFound:
+ # when running pip.py without installing
+ version=None
+
+parser = ConfigOptionParser(
+ usage='%prog COMMAND [OPTIONS]',
+ version=version,
+ add_help_option=False,
+ formatter=UpdatingDefaultsHelpFormatter(),
+ name='global')
+
+parser.add_option(
+ '-h', '--help',
+ dest='help',
+ action='store_true',
+ help='Show help')
+parser.add_option(
+ '-E', '--environment',
+ dest='venv',
+ metavar='DIR',
+ help='virtualenv environment to run pip in (either give the '
+ 'interpreter or the environment base directory)')
+parser.add_option(
+ '-s', '--enable-site-packages',
+ dest='site_packages',
+ action='store_true',
+ help='Include site-packages in virtualenv if one is to be '
+ 'created. Ignored if --environment is not used or '
+ 'the virtualenv already exists.')
+parser.add_option(
+ # Defines a default root directory for virtualenvs, relative
+ # virtualenvs names/paths are considered relative to it.
+ '--virtualenv-base',
+ dest='venv_base',
+ type='str',
+ default='',
+ help=optparse.SUPPRESS_HELP)
+parser.add_option(
+ # Run only if inside a virtualenv, bail if not.
+ '--require-virtualenv', '--require-venv',
+ dest='require_venv',
+ action='store_true',
+ default=False,
+ help=optparse.SUPPRESS_HELP)
+parser.add_option(
+ # Use automatically an activated virtualenv instead of installing
+ # globally. -E will be ignored if used.
+ '--respect-virtualenv', '--respect-venv',
+ dest='respect_venv',
+ action='store_true',
+ default=False,
+ help=optparse.SUPPRESS_HELP)
+
+parser.add_option(
+ '-v', '--verbose',
+ dest='verbose',
+ action='count',
+ default=0,
+ help='Give more output')
+parser.add_option(
+ '-q', '--quiet',
+ dest='quiet',
+ action='count',
+ default=0,
+ help='Give less output')
+parser.add_option(
+ '--log',
+ dest='log',
+ metavar='FILENAME',
+ help='Log file where a complete (maximum verbosity) record will be kept')
+parser.add_option(
+ # Writes the log levels explicitely to the log'
+ '--log-explicit-levels',
+ dest='log_explicit_levels',
+ action='store_true',
+ default=False,
+ help=optparse.SUPPRESS_HELP)
+parser.add_option(
+ # The default log file
+ '--local-log', '--log-file',
+ dest='log_file',
+ metavar='FILENAME',
+ default=default_log_file,
+ help=optparse.SUPPRESS_HELP)
+parser.add_option(
+ # Don't ask for input
+ '--no-input',
+ dest='no_input',
+ action='store_true',
+ default=False,
+ help=optparse.SUPPRESS_HELP)
+
+parser.add_option(
+ '--proxy',
+ dest='proxy',
+ type='str',
+ default='',
+ help="Specify a proxy in the form user:passwd@proxy.server:port. "
+ "Note that the user:password@ is optional and required only if you "
+ "are behind an authenticated proxy. If you provide "
+ "user@proxy.server:port then you will be prompted for a password.")
+parser.add_option(
+ '--timeout', '--default-timeout',
+ metavar='SECONDS',
+ dest='timeout',
+ type='float',
+ default=15,
+ help='Set the socket timeout (default %default seconds)')
+parser.add_option(
+ # The default version control system for editables, e.g. 'svn'
+ '--default-vcs',
+ dest='default_vcs',
+ type='str',
+ default='',
+ help=optparse.SUPPRESS_HELP)
+parser.add_option(
+ # A regex to be used to skip requirements
+ '--skip-requirements-regex',
+ dest='skip_requirements_regex',
+ type='str',
+ default='',
+ help=optparse.SUPPRESS_HELP)
+
+parser.disable_interspersed_args()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/__init__.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/__init__.py
new file mode 100755
index 00000000..792d6005
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/__init__.py
@@ -0,0 +1 @@
+#
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/bundle.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/bundle.py
new file mode 100755
index 00000000..fb0f7570
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/bundle.py
@@ -0,0 +1,33 @@
+from pip.locations import build_prefix, src_prefix
+from pip.util import display_path, backup_dir
+from pip.log import logger
+from pip.exceptions import InstallationError
+from pip.commands.install import InstallCommand
+
+
+class BundleCommand(InstallCommand):
+ name = 'bundle'
+ usage = '%prog [OPTIONS] BUNDLE_NAME.pybundle PACKAGE_NAMES...'
+ summary = 'Create pybundles (archives containing multiple packages)'
+ bundle = True
+
+ def __init__(self):
+ super(BundleCommand, self).__init__()
+
+ def run(self, options, args):
+ if not args:
+ raise InstallationError('You must give a bundle filename')
+ if not options.build_dir:
+ options.build_dir = backup_dir(build_prefix, '-bundle')
+ if not options.src_dir:
+ options.src_dir = backup_dir(src_prefix, '-bundle')
+ # We have to get everything when creating a bundle:
+ options.ignore_installed = True
+ logger.notify('Putting temporary build files in %s and source/develop files in %s'
+ % (display_path(options.build_dir), display_path(options.src_dir)))
+ self.bundle_filename = args.pop(0)
+ requirement_set = super(BundleCommand, self).run(options, args)
+ return requirement_set
+
+
+BundleCommand()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/completion.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/completion.py
new file mode 100755
index 00000000..d003b9ae
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/completion.py
@@ -0,0 +1,60 @@
+import sys
+from pip.basecommand import Command
+
+BASE_COMPLETION = """
+# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
+"""
+
+COMPLETION_SCRIPTS = {
+ 'bash': """
+_pip_completion()
+{
+ COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
+ COMP_CWORD=$COMP_CWORD \\
+ PIP_AUTO_COMPLETE=1 $1 ) )
+}
+complete -o default -F _pip_completion pip
+""", 'zsh': """
+function _pip_completion {
+ local words cword
+ read -Ac words
+ read -cn cword
+ reply=( $( COMP_WORDS="$words[*]" \\
+ COMP_CWORD=$(( cword-1 )) \\
+ PIP_AUTO_COMPLETE=1 $words[1] ) )
+}
+compctl -K _pip_completion pip
+"""}
+
+
+class CompletionCommand(Command):
+ name = 'completion'
+ summary = 'A helper command to be used for command completion'
+ hidden = True
+
+ def __init__(self):
+ super(CompletionCommand, self).__init__()
+ self.parser.add_option(
+ '--bash', '-b',
+ action='store_const',
+ const='bash',
+ dest='shell',
+ help='Emit completion code for bash')
+ self.parser.add_option(
+ '--zsh', '-z',
+ action='store_const',
+ const='zsh',
+ dest='shell',
+ help='Emit completion code for zsh')
+
+ def run(self, options, args):
+ """Prints the completion code of the given shell"""
+ shells = COMPLETION_SCRIPTS.keys()
+ shell_options = ['--'+shell for shell in sorted(shells)]
+ if options.shell in shells:
+ script = COMPLETION_SCRIPTS.get(options.shell, '')
+ print BASE_COMPLETION % {'script': script, 'shell': options.shell}
+ else:
+ sys.stderr.write('ERROR: You must pass %s\n' % ' or '.join(shell_options))
+
+CompletionCommand()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/freeze.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/freeze.py
new file mode 100755
index 00000000..01b5df93
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/freeze.py
@@ -0,0 +1,109 @@
+import re
+import sys
+import pkg_resources
+import pip
+from pip.req import InstallRequirement
+from pip.log import logger
+from pip.basecommand import Command
+from pip.util import get_installed_distributions
+
+
+class FreezeCommand(Command):
+ name = 'freeze'
+ usage = '%prog [OPTIONS]'
+ summary = 'Output all currently installed packages (exact versions) to stdout'
+
+ def __init__(self):
+ super(FreezeCommand, self).__init__()
+ self.parser.add_option(
+ '-r', '--requirement',
+ dest='requirement',
+ action='store',
+ default=None,
+ metavar='FILENAME',
+ help='Use the given requirements file as a hint about how to generate the new frozen requirements')
+ self.parser.add_option(
+ '-f', '--find-links',
+ dest='find_links',
+ action='append',
+ default=[],
+ metavar='URL',
+ help='URL for finding packages, which will be added to the frozen requirements file')
+ self.parser.add_option(
+ '-l', '--local',
+ dest='local',
+ action='store_true',
+ default=False,
+ help='If in a virtualenv, do not report globally-installed packages')
+
+ def setup_logging(self):
+ logger.move_stdout_to_stderr()
+
+ def run(self, options, args):
+ requirement = options.requirement
+ find_links = options.find_links or []
+ local_only = options.local
+ ## FIXME: Obviously this should be settable:
+ find_tags = False
+ skip_match = None
+
+ skip_regex = options.skip_requirements_regex
+ if skip_regex:
+ skip_match = re.compile(skip_regex)
+
+ dependency_links = []
+
+ f = sys.stdout
+
+ for dist in pkg_resources.working_set:
+ if dist.has_metadata('dependency_links.txt'):
+ dependency_links.extend(dist.get_metadata_lines('dependency_links.txt'))
+ for link in find_links:
+ if '#egg=' in link:
+ dependency_links.append(link)
+ for link in find_links:
+ f.write('-f %s\n' % link)
+ installations = {}
+ for dist in get_installed_distributions(local_only=local_only):
+ req = pip.FrozenRequirement.from_dist(dist, dependency_links, find_tags=find_tags)
+ installations[req.name] = req
+ if requirement:
+ req_f = open(requirement)
+ for line in req_f:
+ if not line.strip() or line.strip().startswith('#'):
+ f.write(line)
+ continue
+ if skip_match and skip_match.search(line):
+ f.write(line)
+ continue
+ elif line.startswith('-e') or line.startswith('--editable'):
+ if line.startswith('-e'):
+ line = line[2:].strip()
+ else:
+ line = line[len('--editable'):].strip().lstrip('=')
+ line_req = InstallRequirement.from_editable(line, default_vcs=options.default_vcs)
+ elif (line.startswith('-r') or line.startswith('--requirement')
+ or line.startswith('-Z') or line.startswith('--always-unzip')
+ or line.startswith('-f') or line.startswith('-i')
+ or line.startswith('--extra-index-url')):
+ f.write(line)
+ continue
+ else:
+ line_req = InstallRequirement.from_line(line)
+ if not line_req.name:
+ logger.notify("Skipping line because it's not clear what it would install: %s"
+ % line.strip())
+ logger.notify(" (add #egg=PackageName to the URL to avoid this warning)")
+ continue
+ if line_req.name not in installations:
+ logger.warn("Requirement file contains %s, but that package is not installed"
+ % line.strip())
+ continue
+ f.write(str(installations[line_req.name]))
+ del installations[line_req.name]
+ f.write('## The following requirements were added by pip --freeze:\n')
+ for installation in sorted(installations.values(), key=lambda x: x.name):
+ f.write(str(installation))
+
+
+FreezeCommand()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/help.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/help.py
new file mode 100755
index 00000000..b0b36611
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/help.py
@@ -0,0 +1,32 @@
+from pip.basecommand import Command, command_dict, load_all_commands
+from pip.exceptions import InstallationError
+from pip.baseparser import parser
+
+
+class HelpCommand(Command):
+ name = 'help'
+ usage = '%prog'
+ summary = 'Show available commands'
+
+ def run(self, options, args):
+ load_all_commands()
+ if args:
+ ## FIXME: handle errors better here
+ command = args[0]
+ if command not in command_dict:
+ raise InstallationError('No command with the name: %s' % command)
+ command = command_dict[command]
+ command.parser.print_help()
+ return
+ parser.print_help()
+ print
+ print 'Commands available:'
+ commands = list(set(command_dict.values()))
+ commands.sort(key=lambda x: x.name)
+ for command in commands:
+ if command.hidden:
+ continue
+ print ' %s: %s' % (command.name, command.summary)
+
+
+HelpCommand()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/install.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/install.py
new file mode 100755
index 00000000..861c332b
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/install.py
@@ -0,0 +1,247 @@
+import os, sys
+from pip.req import InstallRequirement, RequirementSet
+from pip.req import parse_requirements
+from pip.log import logger
+from pip.locations import build_prefix, src_prefix
+from pip.basecommand import Command
+from pip.index import PackageFinder
+from pip.exceptions import InstallationError
+
+
+class InstallCommand(Command):
+ name = 'install'
+ usage = '%prog [OPTIONS] PACKAGE_NAMES...'
+ summary = 'Install packages'
+ bundle = False
+
+ def __init__(self):
+ super(InstallCommand, self).__init__()
+ self.parser.add_option(
+ '-e', '--editable',
+ dest='editables',
+ action='append',
+ default=[],
+ metavar='VCS+REPOS_URL[@REV]#egg=PACKAGE',
+ help='Install a package directly from a checkout. Source will be checked '
+ 'out into src/PACKAGE (lower-case) and installed in-place (using '
+ 'setup.py develop). You can run this on an existing directory/checkout (like '
+ 'pip install -e src/mycheckout). This option may be provided multiple times. '
+ 'Possible values for VCS are: svn, git, hg and bzr.')
+ self.parser.add_option(
+ '-r', '--requirement',
+ dest='requirements',
+ action='append',
+ default=[],
+ metavar='FILENAME',
+ help='Install all the packages listed in the given requirements file. '
+ 'This option can be used multiple times.')
+ self.parser.add_option(
+ '-f', '--find-links',
+ dest='find_links',
+ action='append',
+ default=[],
+ metavar='URL',
+ help='URL to look for packages at')
+ self.parser.add_option(
+ '-i', '--index-url', '--pypi-url',
+ dest='index_url',
+ metavar='URL',
+ default='http://pypi.python.org/simple/',
+ help='Base URL of Python Package Index (default %default)')
+ self.parser.add_option(
+ '--extra-index-url',
+ dest='extra_index_urls',
+ metavar='URL',
+ action='append',
+ default=[],
+ help='Extra URLs of package indexes to use in addition to --index-url')
+ self.parser.add_option(
+ '--no-index',
+ dest='no_index',
+ action='store_true',
+ default=False,
+ help='Ignore package index (only looking at --find-links URLs instead)')
+ self.parser.add_option(
+ '-M', '--use-mirrors',
+ dest='use_mirrors',
+ action='store_true',
+ default=False,
+ help='Use the PyPI mirrors as a fallback in case the main index is down.')
+ self.parser.add_option(
+ '--mirrors',
+ dest='mirrors',
+ metavar='URL',
+ action='append',
+ default=[],
+ help='Specific mirror URLs to query when --use-mirrors is used')
+
+ self.parser.add_option(
+ '-b', '--build', '--build-dir', '--build-directory',
+ dest='build_dir',
+ metavar='DIR',
+ default=None,
+ help='Unpack packages into DIR (default %s) and build from there' % build_prefix)
+ self.parser.add_option(
+ '-d', '--download', '--download-dir', '--download-directory',
+ dest='download_dir',
+ metavar='DIR',
+ default=None,
+ help='Download packages into DIR instead of installing them')
+ self.parser.add_option(
+ '--download-cache',
+ dest='download_cache',
+ metavar='DIR',
+ default=None,
+ help='Cache downloaded packages in DIR')
+ self.parser.add_option(
+ '--src', '--source', '--source-dir', '--source-directory',
+ dest='src_dir',
+ metavar='DIR',
+ default=None,
+ help='Check out --editable packages into DIR (default %s)' % src_prefix)
+
+ self.parser.add_option(
+ '-U', '--upgrade',
+ dest='upgrade',
+ action='store_true',
+ help='Upgrade all packages to the newest available version')
+ self.parser.add_option(
+ '-I', '--ignore-installed',
+ dest='ignore_installed',
+ action='store_true',
+ help='Ignore the installed packages (reinstalling instead)')
+ self.parser.add_option(
+ '--no-deps', '--no-dependencies',
+ dest='ignore_dependencies',
+ action='store_true',
+ default=False,
+ help='Ignore package dependencies')
+ self.parser.add_option(
+ '--no-install',
+ dest='no_install',
+ action='store_true',
+ help="Download and unpack all packages, but don't actually install them")
+ self.parser.add_option(
+ '--no-download',
+ dest='no_download',
+ action="store_true",
+ help="Don't download any packages, just install the ones already downloaded "
+ "(completes an install run with --no-install)")
+
+ self.parser.add_option(
+ '--install-option',
+ dest='install_options',
+ action='append',
+ help="Extra arguments to be supplied to the setup.py install "
+ "command (use like --install-option=\"--install-scripts=/usr/local/bin\"). "
+ "Use multiple --install-option options to pass multiple options to setup.py install. "
+ "If you are using an option with a directory path, be sure to use absolute path.")
+
+ self.parser.add_option(
+ '--global-option',
+ dest='global_options',
+ action='append',
+ help="Extra global options to be supplied to the setup.py"
+ "call before the install command")
+
+ self.parser.add_option(
+ '--user',
+ dest='use_user_site',
+ action='store_true',
+ help='Install to user-site')
+
+ def _build_package_finder(self, options, index_urls):
+ """
+ Create a package finder appropriate to this install command.
+ This method is meant to be overridden by subclasses, not
+ called directly.
+ """
+ return PackageFinder(find_links=options.find_links,
+ index_urls=index_urls,
+ use_mirrors=options.use_mirrors,
+ mirrors=options.mirrors)
+
+ def run(self, options, args):
+ if not options.build_dir:
+ options.build_dir = build_prefix
+ if not options.src_dir:
+ options.src_dir = src_prefix
+ if options.download_dir:
+ options.no_install = True
+ options.ignore_installed = True
+ options.build_dir = os.path.abspath(options.build_dir)
+ options.src_dir = os.path.abspath(options.src_dir)
+ install_options = options.install_options or []
+ if options.use_user_site:
+ install_options.append('--user')
+ global_options = options.global_options or []
+ index_urls = [options.index_url] + options.extra_index_urls
+ if options.no_index:
+ logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
+ index_urls = []
+
+ finder = self._build_package_finder(options, index_urls)
+
+ requirement_set = RequirementSet(
+ build_dir=options.build_dir,
+ src_dir=options.src_dir,
+ download_dir=options.download_dir,
+ download_cache=options.download_cache,
+ upgrade=options.upgrade,
+ ignore_installed=options.ignore_installed,
+ ignore_dependencies=options.ignore_dependencies)
+ for name in args:
+ requirement_set.add_requirement(
+ InstallRequirement.from_line(name, None))
+ for name in options.editables:
+ requirement_set.add_requirement(
+ InstallRequirement.from_editable(name, default_vcs=options.default_vcs))
+ for filename in options.requirements:
+ for req in parse_requirements(filename, finder=finder, options=options):
+ requirement_set.add_requirement(req)
+
+ if not requirement_set.has_requirements:
+ if options.find_links:
+ raise InstallationError('You must give at least one '
+ 'requirement to %s (maybe you meant "pip install %s"?)'
+ % (self.name, " ".join(options.find_links)))
+ raise InstallationError('You must give at least one requirement '
+ 'to %(name)s (see "pip help %(name)s")' % dict(name=self.name))
+
+ if (options.use_user_site and
+ sys.version_info < (2, 6)):
+ raise InstallationError('--user is only supported in Python version 2.6 and newer')
+
+ import setuptools
+ if (options.use_user_site and
+ requirement_set.has_editables and
+ not getattr(setuptools, '_distribute', False)):
+
+ raise InstallationError('--user --editable not supported with setuptools, use distribute')
+
+ if not options.no_download:
+ requirement_set.prepare_files(finder, force_root_egg_info=self.bundle, bundle=self.bundle)
+ else:
+ requirement_set.locate_files()
+
+ if not options.no_install and not self.bundle:
+ requirement_set.install(install_options, global_options)
+ installed = ' '.join([req.name for req in
+ requirement_set.successfully_installed])
+ if installed:
+ logger.notify('Successfully installed %s' % installed)
+ elif not self.bundle:
+ downloaded = ' '.join([req.name for req in
+ requirement_set.successfully_downloaded])
+ if downloaded:
+ logger.notify('Successfully downloaded %s' % downloaded)
+ elif self.bundle:
+ requirement_set.create_bundle(self.bundle_filename)
+ logger.notify('Created bundle in %s' % self.bundle_filename)
+ # Clean up
+ if not options.no_install:
+ requirement_set.cleanup_files(bundle=self.bundle)
+ return requirement_set
+
+
+InstallCommand()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/search.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/search.py
new file mode 100755
index 00000000..73da58ac
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/search.py
@@ -0,0 +1,116 @@
+import sys
+import xmlrpclib
+import textwrap
+import pkg_resources
+import pip.download
+from pip.basecommand import Command
+from pip.util import get_terminal_size
+from pip.log import logger
+from distutils.version import StrictVersion, LooseVersion
+
+
+class SearchCommand(Command):
+ name = 'search'
+ usage = '%prog QUERY'
+ summary = 'Search PyPI'
+
+ def __init__(self):
+ super(SearchCommand, self).__init__()
+ self.parser.add_option(
+ '--index',
+ dest='index',
+ metavar='URL',
+ default='http://pypi.python.org/pypi',
+ help='Base URL of Python Package Index (default %default)')
+
+ def run(self, options, args):
+ if not args:
+ logger.warn('ERROR: Missing required argument (search query).')
+ return
+ query = ' '.join(args)
+ index_url = options.index
+
+ pypi_hits = self.search(query, index_url)
+ hits = transform_hits(pypi_hits)
+
+ terminal_width = None
+ if sys.stdout.isatty():
+ terminal_width = get_terminal_size()[0]
+
+ print_results(hits, terminal_width=terminal_width)
+
+ def search(self, query, index_url):
+ pypi = xmlrpclib.ServerProxy(index_url, pip.download.xmlrpclib_transport)
+ hits = pypi.search({'name': query, 'summary': query}, 'or')
+ return hits
+
+
+def transform_hits(hits):
+ """
+ The list from pypi is really a list of versions. We want a list of
+ packages with the list of versions stored inline. This converts the
+ list from pypi into one we can use.
+ """
+ packages = {}
+ for hit in hits:
+ name = hit['name']
+ summary = hit['summary']
+ version = hit['version']
+ score = hit['_pypi_ordering']
+
+ if name not in packages.keys():
+ packages[name] = {'name': name, 'summary': summary, 'versions': [version], 'score': score}
+ else:
+ packages[name]['versions'].append(version)
+
+ # if this is the highest version, replace summary and score
+ if version == highest_version(packages[name]['versions']):
+ packages[name]['summary'] = summary
+ packages[name]['score'] = score
+
+ # each record has a unique name now, so we will convert the dict into a list sorted by score
+ package_list = sorted(packages.values(), lambda x, y: cmp(y['score'], x['score']))
+ return package_list
+
+
+def print_results(hits, name_column_width=25, terminal_width=None):
+ installed_packages = [p.project_name for p in pkg_resources.working_set]
+ for hit in hits:
+ name = hit['name']
+ summary = hit['summary'] or ''
+ if terminal_width is not None:
+ # wrap and indent summary to fit terminal
+ summary = textwrap.wrap(summary, terminal_width - name_column_width - 5)
+ summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
+ line = '%s - %s' % (name.ljust(name_column_width), summary)
+ try:
+ logger.notify(line)
+ if name in installed_packages:
+ dist = pkg_resources.get_distribution(name)
+ logger.indent += 2
+ try:
+ latest = highest_version(hit['versions'])
+ if dist.version == latest:
+ logger.notify('INSTALLED: %s (latest)' % dist.version)
+ else:
+ logger.notify('INSTALLED: %s' % dist.version)
+ logger.notify('LATEST: %s' % latest)
+ finally:
+ logger.indent -= 2
+ except UnicodeEncodeError:
+ pass
+
+
+def compare_versions(version1, version2):
+ try:
+ return cmp(StrictVersion(version1), StrictVersion(version2))
+ # in case of abnormal version number, fall back to LooseVersion
+ except ValueError:
+ return cmp(LooseVersion(version1), LooseVersion(version2))
+
+
+def highest_version(versions):
+ return reduce((lambda v1, v2: compare_versions(v1, v2) == 1 and v1 or v2), versions)
+
+
+SearchCommand()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/uninstall.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/uninstall.py
new file mode 100755
index 00000000..7effd844
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/uninstall.py
@@ -0,0 +1,42 @@
+from pip.req import InstallRequirement, RequirementSet, parse_requirements
+from pip.basecommand import Command
+from pip.exceptions import InstallationError
+
+class UninstallCommand(Command):
+ name = 'uninstall'
+ usage = '%prog [OPTIONS] PACKAGE_NAMES ...'
+ summary = 'Uninstall packages'
+
+ def __init__(self):
+ super(UninstallCommand, self).__init__()
+ self.parser.add_option(
+ '-r', '--requirement',
+ dest='requirements',
+ action='append',
+ default=[],
+ metavar='FILENAME',
+ help='Uninstall all the packages listed in the given requirements file. '
+ 'This option can be used multiple times.')
+ self.parser.add_option(
+ '-y', '--yes',
+ dest='yes',
+ action='store_true',
+ help="Don't ask for confirmation of uninstall deletions.")
+
+ def run(self, options, args):
+ requirement_set = RequirementSet(
+ build_dir=None,
+ src_dir=None,
+ download_dir=None)
+ for name in args:
+ requirement_set.add_requirement(
+ InstallRequirement.from_line(name))
+ for filename in options.requirements:
+ for req in parse_requirements(filename, options=options):
+ requirement_set.add_requirement(req)
+ if not requirement_set.has_requirements:
+ raise InstallationError('You must give at least one requirement '
+ 'to %(name)s (see "pip help %(name)s")' % dict(name=self.name))
+ requirement_set.uninstall(auto_confirm=options.yes)
+
+UninstallCommand()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/unzip.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/unzip.py
new file mode 100755
index 00000000..f83e1820
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/unzip.py
@@ -0,0 +1,9 @@
+from pip.commands.zip import ZipCommand
+
+
+class UnzipCommand(ZipCommand):
+ name = 'unzip'
+ summary = 'Unzip individual packages'
+
+
+UnzipCommand()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/zip.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/zip.py
new file mode 100755
index 00000000..346fc051
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/commands/zip.py
@@ -0,0 +1,346 @@
+import sys
+import re
+import fnmatch
+import os
+import shutil
+import zipfile
+from pip.util import display_path, backup_dir
+from pip.log import logger
+from pip.exceptions import InstallationError
+from pip.basecommand import Command
+
+
+class ZipCommand(Command):
+ name = 'zip'
+ usage = '%prog [OPTIONS] PACKAGE_NAMES...'
+ summary = 'Zip individual packages'
+
+ def __init__(self):
+ super(ZipCommand, self).__init__()
+ if self.name == 'zip':
+ self.parser.add_option(
+ '--unzip',
+ action='store_true',
+ dest='unzip',
+ help='Unzip (rather than zip) a package')
+ else:
+ self.parser.add_option(
+ '--zip',
+ action='store_false',
+ dest='unzip',
+ default=True,
+ help='Zip (rather than unzip) a package')
+ self.parser.add_option(
+ '--no-pyc',
+ action='store_true',
+ dest='no_pyc',
+ help='Do not include .pyc files in zip files (useful on Google App Engine)')
+ self.parser.add_option(
+ '-l', '--list',
+ action='store_true',
+ dest='list',
+ help='List the packages available, and their zip status')
+ self.parser.add_option(
+ '--sort-files',
+ action='store_true',
+ dest='sort_files',
+ help='With --list, sort packages according to how many files they contain')
+ self.parser.add_option(
+ '--path',
+ action='append',
+ dest='paths',
+ help='Restrict operations to the given paths (may include wildcards)')
+ self.parser.add_option(
+ '-n', '--simulate',
+ action='store_true',
+ help='Do not actually perform the zip/unzip operation')
+
+ def paths(self):
+ """All the entries of sys.path, possibly restricted by --path"""
+ if not self.select_paths:
+ return sys.path
+ result = []
+ match_any = set()
+ for path in sys.path:
+ path = os.path.normcase(os.path.abspath(path))
+ for match in self.select_paths:
+ match = os.path.normcase(os.path.abspath(match))
+ if '*' in match:
+ if re.search(fnmatch.translate(match+'*'), path):
+ result.append(path)
+ match_any.add(match)
+ break
+ else:
+ if path.startswith(match):
+ result.append(path)
+ match_any.add(match)
+ break
+ else:
+ logger.debug("Skipping path %s because it doesn't match %s"
+ % (path, ', '.join(self.select_paths)))
+ for match in self.select_paths:
+ if match not in match_any and '*' not in match:
+ result.append(match)
+ logger.debug("Adding path %s because it doesn't match anything already on sys.path"
+ % match)
+ return result
+
+ def run(self, options, args):
+ self.select_paths = options.paths
+ self.simulate = options.simulate
+ if options.list:
+ return self.list(options, args)
+ if not args:
+ raise InstallationError(
+ 'You must give at least one package to zip or unzip')
+ packages = []
+ for arg in args:
+ module_name, filename = self.find_package(arg)
+ if options.unzip and os.path.isdir(filename):
+ raise InstallationError(
+ 'The module %s (in %s) is not a zip file; cannot be unzipped'
+ % (module_name, filename))
+ elif not options.unzip and not os.path.isdir(filename):
+ raise InstallationError(
+ 'The module %s (in %s) is not a directory; cannot be zipped'
+ % (module_name, filename))
+ packages.append((module_name, filename))
+ last_status = None
+ for module_name, filename in packages:
+ if options.unzip:
+ last_status = self.unzip_package(module_name, filename)
+ else:
+ last_status = self.zip_package(module_name, filename, options.no_pyc)
+ return last_status
+
+ def unzip_package(self, module_name, filename):
+ zip_filename = os.path.dirname(filename)
+ if not os.path.isfile(zip_filename) and zipfile.is_zipfile(zip_filename):
+ raise InstallationError(
+ 'Module %s (in %s) isn\'t located in a zip file in %s'
+ % (module_name, filename, zip_filename))
+ package_path = os.path.dirname(zip_filename)
+ if not package_path in self.paths():
+ logger.warn(
+ 'Unpacking %s into %s, but %s is not on sys.path'
+ % (display_path(zip_filename), display_path(package_path),
+ display_path(package_path)))
+ logger.notify('Unzipping %s (in %s)' % (module_name, display_path(zip_filename)))
+ if self.simulate:
+ logger.notify('Skipping remaining operations because of --simulate')
+ return
+ logger.indent += 2
+ try:
+ ## FIXME: this should be undoable:
+ zip = zipfile.ZipFile(zip_filename)
+ to_save = []
+ for name in zip.namelist():
+ if name.startswith(module_name + os.path.sep):
+ content = zip.read(name)
+ dest = os.path.join(package_path, name)
+ if not os.path.exists(os.path.dirname(dest)):
+ os.makedirs(os.path.dirname(dest))
+ if not content and dest.endswith(os.path.sep):
+ if not os.path.exists(dest):
+ os.makedirs(dest)
+ else:
+ f = open(dest, 'wb')
+ f.write(content)
+ f.close()
+ else:
+ to_save.append((name, zip.read(name)))
+ zip.close()
+ if not to_save:
+ logger.info('Removing now-empty zip file %s' % display_path(zip_filename))
+ os.unlink(zip_filename)
+ self.remove_filename_from_pth(zip_filename)
+ else:
+ logger.info('Removing entries in %s/ from zip file %s' % (module_name, display_path(zip_filename)))
+ zip = zipfile.ZipFile(zip_filename, 'w')
+ for name, content in to_save:
+ zip.writestr(name, content)
+ zip.close()
+ finally:
+ logger.indent -= 2
+
+ def zip_package(self, module_name, filename, no_pyc):
+ orig_filename = filename
+ logger.notify('Zip %s (in %s)' % (module_name, display_path(filename)))
+ logger.indent += 2
+ if filename.endswith('.egg'):
+ dest_filename = filename
+ else:
+ dest_filename = filename + '.zip'
+ try:
+ ## FIXME: I think this needs to be undoable:
+ if filename == dest_filename:
+ filename = backup_dir(orig_filename)
+ logger.notify('Moving %s aside to %s' % (orig_filename, filename))
+ if not self.simulate:
+ shutil.move(orig_filename, filename)
+ try:
+ logger.info('Creating zip file in %s' % display_path(dest_filename))
+ if not self.simulate:
+ zip = zipfile.ZipFile(dest_filename, 'w')
+ zip.writestr(module_name + '/', '')
+ for dirpath, dirnames, filenames in os.walk(filename):
+ if no_pyc:
+ filenames = [f for f in filenames
+ if not f.lower().endswith('.pyc')]
+ for fns, is_dir in [(dirnames, True), (filenames, False)]:
+ for fn in fns:
+ full = os.path.join(dirpath, fn)
+ dest = os.path.join(module_name, dirpath[len(filename):].lstrip(os.path.sep), fn)
+ if is_dir:
+ zip.writestr(dest+'/', '')
+ else:
+ zip.write(full, dest)
+ zip.close()
+ logger.info('Removing old directory %s' % display_path(filename))
+ if not self.simulate:
+ shutil.rmtree(filename)
+ except:
+ ## FIXME: need to do an undo here
+ raise
+ ## FIXME: should also be undone:
+ self.add_filename_to_pth(dest_filename)
+ finally:
+ logger.indent -= 2
+
+ def remove_filename_from_pth(self, filename):
+ for pth in self.pth_files():
+ f = open(pth, 'r')
+ lines = f.readlines()
+ f.close()
+ new_lines = [
+ l for l in lines if l.strip() != filename]
+ if lines != new_lines:
+ logger.info('Removing reference to %s from .pth file %s'
+ % (display_path(filename), display_path(pth)))
+ if not filter(None, new_lines):
+ logger.info('%s file would be empty: deleting' % display_path(pth))
+ if not self.simulate:
+ os.unlink(pth)
+ else:
+ if not self.simulate:
+ f = open(pth, 'wb')
+ f.writelines(new_lines)
+ f.close()
+ return
+ logger.warn('Cannot find a reference to %s in any .pth file' % display_path(filename))
+
+ def add_filename_to_pth(self, filename):
+ path = os.path.dirname(filename)
+ dest = os.path.join(path, filename + '.pth')
+ if path not in self.paths():
+ logger.warn('Adding .pth file %s, but it is not on sys.path' % display_path(dest))
+ if not self.simulate:
+ if os.path.exists(dest):
+ f = open(dest)
+ lines = f.readlines()
+ f.close()
+ if lines and not lines[-1].endswith('\n'):
+ lines[-1] += '\n'
+ lines.append(filename+'\n')
+ else:
+ lines = [filename + '\n']
+ f = open(dest, 'wb')
+ f.writelines(lines)
+ f.close()
+
+ def pth_files(self):
+ for path in self.paths():
+ if not os.path.exists(path) or not os.path.isdir(path):
+ continue
+ for filename in os.listdir(path):
+ if filename.endswith('.pth'):
+ yield os.path.join(path, filename)
+
+ def find_package(self, package):
+ for path in self.paths():
+ full = os.path.join(path, package)
+ if os.path.exists(full):
+ return package, full
+ if not os.path.isdir(path) and zipfile.is_zipfile(path):
+ zip = zipfile.ZipFile(path, 'r')
+ try:
+ zip.read(os.path.join(package, '__init__.py'))
+ except KeyError:
+ pass
+ else:
+ zip.close()
+ return package, full
+ zip.close()
+ ## FIXME: need special error for package.py case:
+ raise InstallationError(
+ 'No package with the name %s found' % package)
+
+ def list(self, options, args):
+ if args:
+ raise InstallationError(
+ 'You cannot give an argument with --list')
+ for path in sorted(self.paths()):
+ if not os.path.exists(path):
+ continue
+ basename = os.path.basename(path.rstrip(os.path.sep))
+ if os.path.isfile(path) and zipfile.is_zipfile(path):
+ if os.path.dirname(path) not in self.paths():
+ logger.notify('Zipped egg: %s' % display_path(path))
+ continue
+ if (basename != 'site-packages' and basename != 'dist-packages'
+ and not path.replace('\\', '/').endswith('lib/python')):
+ continue
+ logger.notify('In %s:' % display_path(path))
+ logger.indent += 2
+ zipped = []
+ unzipped = []
+ try:
+ for filename in sorted(os.listdir(path)):
+ ext = os.path.splitext(filename)[1].lower()
+ if ext in ('.pth', '.egg-info', '.egg-link'):
+ continue
+ if ext == '.py':
+ logger.info('Not displaying %s: not a package' % display_path(filename))
+ continue
+ full = os.path.join(path, filename)
+ if os.path.isdir(full):
+ unzipped.append((filename, self.count_package(full)))
+ elif zipfile.is_zipfile(full):
+ zipped.append(filename)
+ else:
+ logger.info('Unknown file: %s' % display_path(filename))
+ if zipped:
+ logger.notify('Zipped packages:')
+ logger.indent += 2
+ try:
+ for filename in zipped:
+ logger.notify(filename)
+ finally:
+ logger.indent -= 2
+ else:
+ logger.notify('No zipped packages.')
+ if unzipped:
+ if options.sort_files:
+ unzipped.sort(key=lambda x: -x[1])
+ logger.notify('Unzipped packages:')
+ logger.indent += 2
+ try:
+ for filename, count in unzipped:
+ logger.notify('%s (%i files)' % (filename, count))
+ finally:
+ logger.indent -= 2
+ else:
+ logger.notify('No unzipped packages.')
+ finally:
+ logger.indent -= 2
+
+ def count_package(self, path):
+ total = 0
+ for dirpath, dirnames, filenames in os.walk(path):
+ filenames = [f for f in filenames
+ if not f.lower().endswith('.pyc')]
+ total += len(filenames)
+ return total
+
+
+ZipCommand()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/download.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/download.py
new file mode 100755
index 00000000..f1b63936
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/download.py
@@ -0,0 +1,470 @@
+import xmlrpclib
+import re
+import getpass
+import urllib
+import urllib2
+import urlparse
+import os
+import mimetypes
+import shutil
+import tempfile
+from pip.backwardcompat import md5, copytree
+from pip.exceptions import InstallationError
+from pip.util import (splitext,
+ format_size, display_path, backup_dir, ask,
+ unpack_file, create_download_cache_folder, cache_download)
+from pip.vcs import vcs
+from pip.log import logger
+
+
+__all__ = ['xmlrpclib_transport', 'get_file_content', 'urlopen',
+ 'is_url', 'url_to_path', 'path_to_url', 'path_to_url2',
+ 'geturl', 'is_archive_file', 'unpack_vcs_link',
+ 'unpack_file_url', 'is_vcs_url', 'is_file_url', 'unpack_http_url']
+
+
+xmlrpclib_transport = xmlrpclib.Transport()
+
+
+def get_file_content(url, comes_from=None):
+ """Gets the content of a file; it may be a filename, file: URL, or
+ http: URL. Returns (location, content)"""
+ match = _scheme_re.search(url)
+ if match:
+ scheme = match.group(1).lower()
+ if (scheme == 'file' and comes_from
+ and comes_from.startswith('http')):
+ raise InstallationError(
+ 'Requirements file %s references URL %s, which is local'
+ % (comes_from, url))
+ if scheme == 'file':
+ path = url.split(':', 1)[1]
+ path = path.replace('\\', '/')
+ match = _url_slash_drive_re.match(path)
+ if match:
+ path = match.group(1) + ':' + path.split('|', 1)[1]
+ path = urllib.unquote(path)
+ if path.startswith('/'):
+ path = '/' + path.lstrip('/')
+ url = path
+ else:
+ ## FIXME: catch some errors
+ resp = urlopen(url)
+ return geturl(resp), resp.read()
+ try:
+ f = open(url)
+ content = f.read()
+ except IOError, e:
+ raise InstallationError('Could not open requirements file: %s' % str(e))
+ else:
+ f.close()
+ return url, content
+
+
+_scheme_re = re.compile(r'^(http|https|file):', re.I)
+_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
+
+class URLOpener(object):
+ """
+ pip's own URL helper that adds HTTP auth and proxy support
+ """
+ def __init__(self):
+ self.passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
+
+ def __call__(self, url):
+ """
+ If the given url contains auth info or if a normal request gets a 401
+ response, an attempt is made to fetch the resource using basic HTTP
+ auth.
+
+ """
+ url, username, password = self.extract_credentials(url)
+ if username is None:
+ try:
+ response = urllib2.urlopen(self.get_request(url))
+ except urllib2.HTTPError, e:
+ if e.code != 401:
+ raise
+ response = self.get_response(url)
+ else:
+ response = self.get_response(url, username, password)
+ return response
+
+ def get_request(self, url):
+ """
+ Wraps the URL to retrieve to protects against "creative"
+ interpretation of the RFC: http://bugs.python.org/issue8732
+ """
+ if isinstance(url, basestring):
+ url = urllib2.Request(url, headers={'Accept-encoding': 'identity'})
+ return url
+
+ def get_response(self, url, username=None, password=None):
+ """
+ does the dirty work of actually getting the rsponse object using urllib2
+ and its HTTP auth builtins.
+ """
+ scheme, netloc, path, query, frag = urlparse.urlsplit(url)
+ pass_url = urlparse.urlunsplit(('_none_', netloc, path, query, frag)).replace('_none_://', '', 1)
+ req = self.get_request(url)
+
+ stored_username, stored_password = self.passman.find_user_password(None, netloc)
+ # see if we have a password stored
+ if stored_username is None:
+ if username is None and self.prompting:
+ username = urllib.quote(raw_input('User for %s: ' % netloc))
+ password = urllib.quote(getpass.getpass('Password: '))
+ if username and password:
+ self.passman.add_password(None, netloc, username, password)
+ stored_username, stored_password = self.passman.find_user_password(None, netloc)
+ authhandler = urllib2.HTTPBasicAuthHandler(self.passman)
+ opener = urllib2.build_opener(authhandler)
+ # FIXME: should catch a 401 and offer to let the user reenter credentials
+ return opener.open(req)
+
+ def setup(self, proxystr='', prompting=True):
+ """
+ Sets the proxy handler given the option passed on the command
+ line. If an empty string is passed it looks at the HTTP_PROXY
+ environment variable.
+ """
+ self.prompting = prompting
+ proxy = self.get_proxy(proxystr)
+ if proxy:
+ proxy_support = urllib2.ProxyHandler({"http": proxy, "ftp": proxy})
+ opener = urllib2.build_opener(proxy_support, urllib2.CacheFTPHandler)
+ urllib2.install_opener(opener)
+
+ def parse_credentials(self, netloc):
+ if "@" in netloc:
+ userinfo = netloc.rsplit("@", 1)[0]
+ if ":" in userinfo:
+ return userinfo.split(":", 1)
+ return userinfo, None
+ return None, None
+
+ def extract_credentials(self, url):
+ """
+ Extracts user/password from a url.
+
+ Returns a tuple:
+ (url-without-auth, username, password)
+ """
+ if isinstance(url, urllib2.Request):
+ result = urlparse.urlsplit(url.get_full_url())
+ else:
+ result = urlparse.urlsplit(url)
+ scheme, netloc, path, query, frag = result
+
+ username, password = self.parse_credentials(netloc)
+ if username is None:
+ return url, None, None
+ elif password is None and self.prompting:
+ # remove the auth credentials from the url part
+ netloc = netloc.replace('%s@' % username, '', 1)
+ # prompt for the password
+ prompt = 'Password for %s@%s: ' % (username, netloc)
+ password = urllib.quote(getpass.getpass(prompt))
+ else:
+ # remove the auth credentials from the url part
+ netloc = netloc.replace('%s:%s@' % (username, password), '', 1)
+
+ target_url = urlparse.urlunsplit((scheme, netloc, path, query, frag))
+ return target_url, username, password
+
+    def get_proxy(self, proxystr=''):
+        """
+        Get the proxy given the option passed on the command line.
+        If an empty string is passed it looks at the HTTP_PROXY
+        environment variable.
+
+        Returns a "user:password@host:port" string (password prompted
+        for and percent-quoted if only a user was given), the proxy
+        string unchanged when it carries no '@', or None when no proxy
+        is configured at all.
+        """
+        if not proxystr:
+            proxystr = os.environ.get('HTTP_PROXY', '')
+        if proxystr:
+            if '@' in proxystr:
+                user_password, server_port = proxystr.split('@', 1)
+                if ':' in user_password:
+                    user, password = user_password.split(':', 1)
+                else:
+                    user = user_password
+                    prompt = 'Password for %s@%s: ' % (user, server_port)
+                    password = urllib.quote(getpass.getpass(prompt))
+                return '%s:%s@%s' % (user, password, server_port)
+            else:
+                return proxystr
+        else:
+            return None
+
+# Module-level singleton: all downloads in this module share one opener
+# (and therefore one set of cached credentials).
+urlopen = URLOpener()
+
+
+def is_url(name):
+    """Returns true if the name looks like a URL"""
+    if ':' not in name:
+        return False
+    scheme = name.split(':', 1)[0].lower()
+    return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
+
+
+def url_to_path(url):
+    """
+    Convert a file: URL to a path.
+    """
+    assert url.startswith('file:'), (
+        "You can only turn file: urls into filenames (not %r)" % url)
+    path = url[len('file:'):].lstrip('/')
+    path = urllib.unquote(path)
+    if _url_drive_re.match(path):
+        # Windows drive letter, written either "C:" or the legacy "C|".
+        path = path[0] + ':' + path[2:]
+    else:
+        path = '/' + path
+    return path
+
+
+# "C:" at the start of a local path / "C:" or "C|" at the start of the
+# path portion of a file: URL (drive-letter detection for Windows).
+_drive_re = re.compile('^([a-z]):', re.I)
+_url_drive_re = re.compile('^([a-z])[:|]', re.I)
+
+
+def path_to_url(path):
+    """
+    Convert a path to a file: URL. The path will be made absolute.
+    """
+    path = os.path.normcase(os.path.abspath(path))
+    if _drive_re.match(path):
+        # Encode a Windows drive letter in the legacy "C|" form.
+        path = path[0] + '|' + path[2:]
+    url = urllib.quote(path)
+    url = url.replace(os.path.sep, '/')
+    url = url.lstrip('/')
+    return 'file:///' + url
+
+
+def path_to_url2(path):
+    """
+    Convert a path to a file: URL. The path will be made absolute and have
+    quoted path parts.
+    """
+    path = os.path.normpath(os.path.abspath(path))
+    drive, path = os.path.splitdrive(path)
+    filepath = path.split(os.path.sep)
+    # Quote each path segment individually so separators survive.
+    url = '/'.join([urllib.quote(part) for part in filepath])
+    if not drive:
+        url = url.lstrip('/')
+    return 'file:///' + drive + url
+
+
+def geturl(urllib2_resp):
+    """
+    Use instead of urllib.addinfourl.geturl(), which appears to have
+    some issues with dropping the double slash for certain schemes
+    (e.g. file://). This implementation is probably over-eager, as it
+    always restores '://' if it is missing, and it appears some url
+    schemata aren't always followed by '//' after the colon, but as
+    far as I know pip doesn't need any of those.
+    The URI RFC can be found at: http://tools.ietf.org/html/rfc1630
+
+    This function assumes that
+        scheme:/foo/bar
+    is the same as
+        scheme:///foo/bar
+    """
+    url = urllib2_resp.geturl()
+    scheme, rest = url.split(':', 1)
+    if rest.startswith('//'):
+        return url
+    else:
+        # FIXME: write a good test to cover it
+        return '%s://%s' % (scheme, rest)
+
+
+def is_archive_file(name):
+    """Return True if `name` is a considered as an archive file."""
+    # Extension match is case-insensitive; splitext() handles the
+    # double extensions (.tar.gz/.tar.bz2) itself.
+    archives = ('.zip', '.tar.gz', '.tar.bz2', '.tgz', '.tar', '.pybundle')
+    ext = splitext(name)[1].lower()
+    if ext in archives:
+        return True
+    return False
+
+
+def unpack_vcs_link(link, location, only_download=False):
+    # Check out (or export, when only downloading) a VCS URL into
+    # `location` using whichever registered backend claims the scheme.
+    vcs_backend = _get_used_vcs_backend(link)
+    if only_download:
+        vcs_backend.export(location)
+    else:
+        vcs_backend.unpack(location)
+
+
+def unpack_file_url(link, location):
+    # Unpack a file: URL: directories are copied wholesale, regular
+    # files are dispatched to unpack_file() by guessed content type.
+    source = url_to_path(link.url)
+    content_type = mimetypes.guess_type(source)[0]
+    if os.path.isdir(source):
+        # delete the location since shutil will create it again :(
+        if os.path.isdir(location):
+            shutil.rmtree(location)
+        copytree(source, location)
+    else:
+        unpack_file(source, location, content_type, link)
+
+
+def _get_used_vcs_backend(link):
+    # Returns the backend instance for link's scheme, or None (implicit)
+    # when no registered backend matches.
+    for backend in vcs.backends:
+        if link.scheme in backend.schemes:
+            vcs_backend = backend(link.url)
+            return vcs_backend
+
+
+def is_vcs_url(link):
+    return bool(_get_used_vcs_backend(link))
+
+
+def is_file_url(link):
+    return link.url.lower().startswith('file:')
+
+
+def _check_md5(download_hash, link):
+    # Compare the computed digest against the md5= fragment of the link;
+    # raises InstallationError on mismatch.
+    download_hash = download_hash.hexdigest()
+    if download_hash != link.md5_hash:
+        logger.fatal("MD5 hash of the package %s (%s) doesn't match the expected hash %s!"
+                     % (link, download_hash, link.md5_hash))
+        raise InstallationError('Bad MD5 hash for package %s' % link)
+
+
+def _get_md5_from_file(target_file, link):
+    # Hash an already-downloaded file in 4 KiB chunks so large archives
+    # are never read into memory at once.
+    download_hash = md5()
+    fp = open(target_file, 'rb')
+    while 1:
+        chunk = fp.read(4096)
+        if not chunk:
+            break
+        download_hash.update(chunk)
+    fp.close()
+    return download_hash
+
+
+def _download_url(resp, link, temp_location):
+    # Stream `resp` to temp_location, optionally hashing as we go
+    # (only when the link advertises an md5= fragment).  Returns the
+    # md5 object or None.
+    fp = open(temp_location, 'wb')
+    download_hash = None
+    if link.md5_hash:
+        download_hash = md5()
+    try:
+        total_length = int(resp.info()['content-length'])
+    except (ValueError, KeyError):
+        # Missing or malformed Content-Length: fall back to 0, which
+        # also forces the progress display below.
+        total_length = 0
+    downloaded = 0
+    show_progress = total_length > 40*1000 or not total_length
+    show_url = link.show_url
+    try:
+        if show_progress:
+            ## FIXME: the URL can get really long in this message:
+            if total_length:
+                logger.start_progress('Downloading %s (%s): ' % (show_url, format_size(total_length)))
+            else:
+                logger.start_progress('Downloading %s (unknown size): ' % show_url)
+        else:
+            logger.notify('Downloading %s' % show_url)
+        logger.debug('Downloading from URL %s' % link)
+
+        while 1:
+            chunk = resp.read(4096)
+            if not chunk:
+                break
+            downloaded += len(chunk)
+            if show_progress:
+                if not total_length:
+                    logger.show_progress('%s' % format_size(downloaded))
+                else:
+                    logger.show_progress('%3i%%  %s' % (100*downloaded/total_length, format_size(downloaded)))
+            if link.md5_hash:
+                download_hash.update(chunk)
+            fp.write(chunk)
+        fp.close()
+    finally:
+        if show_progress:
+            logger.end_progress('%s downloaded' % format_size(downloaded))
+    return download_hash
+
+
+def _copy_file(filename, location, content_type, link):
+    # Copy a downloaded file into `location`, asking the user what to do
+    # when the destination already exists (ignore/wipe/backup).
+    copy = True
+    download_location = os.path.join(location, link.filename)
+    if os.path.exists(download_location):
+        response = ask('The file %s exists. (i)gnore, (w)ipe, (b)ackup '
+                       % display_path(download_location), ('i', 'w', 'b'))
+        if response == 'i':
+            copy = False
+        elif response == 'w':
+            logger.warn('Deleting %s' % display_path(download_location))
+            os.remove(download_location)
+        elif response == 'b':
+            dest_file = backup_dir(download_location)
+            logger.warn('Backing up %s to %s'
+                        % (display_path(download_location), display_path(dest_file)))
+            shutil.move(download_location, dest_file)
+    if copy:
+        shutil.copy(filename, download_location)
+        # NOTE(review): decrements indent without a matching increment in
+        # this function — presumably balanced by the caller; confirm.
+        logger.indent -= 2
+        logger.notify('Saved %s' % display_path(download_location))
+
+
+def unpack_http_url(link, location, download_cache, only_download):
+    # Fetch an http(s)/ftp link (or reuse the download cache) and either
+    # unpack it into `location` or, with only_download, just copy it there.
+    temp_dir = tempfile.mkdtemp('-unpack', 'pip-')
+    # Strip the #fragment (e.g. #md5=...) before fetching/caching.
+    target_url = link.url.split('#', 1)[0]
+    target_file = None
+    download_hash = None
+    if download_cache:
+        # Cache key: fully-quoted URL used as a flat filename.
+        target_file = os.path.join(download_cache,
+                                   urllib.quote(target_url, ''))
+        if not os.path.isdir(download_cache):
+            create_download_cache_folder(download_cache)
+    if (target_file
+        and os.path.exists(target_file)
+        and os.path.exists(target_file+'.content-type')):
+        # Cache hit: the sidecar .content-type file must exist too.
+        fp = open(target_file+'.content-type')
+        content_type = fp.read().strip()
+        fp.close()
+        if link.md5_hash:
+            download_hash = _get_md5_from_file(target_file, link)
+        temp_location = target_file
+        logger.notify('Using download cache from %s' % target_file)
+    else:
+        resp = _get_response_from_url(target_url, link)
+        content_type = resp.info()['content-type']
+        filename = link.filename
+        # Repair a missing extension from the content type or, failing
+        # that, from the redirected (final) URL.
+        ext = splitext(filename)[1]
+        if not ext:
+            ext = mimetypes.guess_extension(content_type)
+            if ext:
+                filename += ext
+        if not ext and link.url != geturl(resp):
+            ext = os.path.splitext(geturl(resp))[1]
+            if ext:
+                filename += ext
+        temp_location = os.path.join(temp_dir, filename)
+        download_hash = _download_url(resp, link, temp_location)
+    if link.md5_hash:
+        _check_md5(download_hash, link)
+    if only_download:
+        _copy_file(temp_location, location, content_type, link)
+    else:
+        unpack_file(temp_location, location, content_type, link)
+    if target_file and target_file != temp_location:
+        # Freshly downloaded (not served from cache): populate the cache.
+        cache_download(target_file, temp_location, content_type)
+    if target_file is None:
+        os.unlink(temp_location)
+    os.rmdir(temp_dir)
+
+
+def _get_response_from_url(target_url, link):
+    # Open target_url, logging a fatal message (but re-raising) on
+    # HTTP or IO errors so the failing link is identifiable.
+    try:
+        resp = urlopen(target_url)
+    except urllib2.HTTPError, e:
+        logger.fatal("HTTP error %s while getting %s" % (e.code, link))
+        raise
+    except IOError, e:
+        # Typically an FTP error
+        logger.fatal("Error %s while getting %s" % (e, link))
+        raise
+    return resp
+
+
+class Urllib2HeadRequest(urllib2.Request):
+    # urllib2.Request always issues GET/POST; overriding get_method is
+    # the stock way to force a HEAD request.
+    def get_method(self):
+        return "HEAD"
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/exceptions.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/exceptions.py
new file mode 100755
index 00000000..1ad1a616
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/exceptions.py
@@ -0,0 +1,17 @@
+"""Exceptions used throughout package"""
+
+
+class InstallationError(Exception):
+    """General exception during installation"""
+
+
+class UninstallationError(Exception):
+    """General exception during uninstallation"""
+
+
+# Subclasses InstallationError so generic install-error handlers
+# also catch the not-found case.
+class DistributionNotFound(InstallationError):
+    """Raised when a distribution cannot be found to satisfy a requirement"""
+
+
+class BadCommand(Exception):
+    """Raised when virtualenv or a command is not found"""
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/index.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/index.py
new file mode 100755
index 00000000..e42d8c86
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/index.py
@@ -0,0 +1,686 @@
+"""Routines related to PyPI, indexes"""
+
+import sys
+import os
+import re
+import mimetypes
+import threading
+import posixpath
+import pkg_resources
+import urllib
+import urllib2
+import urlparse
+import httplib
+import random
+import socket
+import string
+from Queue import Queue
+from Queue import Empty as QueueEmpty
+from pip.log import logger
+from pip.util import Inf
+from pip.util import normalize_name, splitext
+from pip.exceptions import DistributionNotFound
+from pip.backwardcompat import WindowsError, product
+from pip.download import urlopen, path_to_url2, url_to_path, geturl, Urllib2HeadRequest
+
+__all__ = ['PackageFinder']
+
+
+DEFAULT_MIRROR_URL = "last.pypi.python.org"
+
+
+class PackageFinder(object):
+ """This finds packages.
+
+ This is meant to match easy_install's technique for looking for
+ packages, by reading pages and looking for appropriate links
+ """
+
+    def __init__(self, find_links, index_urls,
+                 use_mirrors=False, mirrors=None, main_mirror_url=None):
+        # find_links: flat pages/paths searched for archives.
+        # index_urls: PyPI-style simple-index base URLs.
+        # use_mirrors/mirrors/main_mirror_url: optional mirror discovery.
+        self.find_links = find_links
+        self.index_urls = index_urls
+        self.dependency_links = []
+        self.cache = PageCache()
+        # These are boring links that have already been logged somehow:
+        self.logged_links = set()
+        if use_mirrors:
+            self.mirror_urls = self._get_mirror_urls(mirrors, main_mirror_url)
+            logger.info('Using PyPI mirrors: %s' % ', '.join(self.mirror_urls))
+        else:
+            self.mirror_urls = []
+
+    def add_dependency_links(self, links):
+        ## FIXME: this shouldn't be global list this, it should only
+        ## apply to requirements of the package that specifies the
+        ## dependency_links value
+        ## FIXME: also, we should track comes_from (i.e., use Link)
+        self.dependency_links.extend(links)
+
+    @staticmethod
+    def _sort_locations(locations):
+        """
+        Sort locations into "files" (archives) and "urls", and return
+        a pair of lists (files,urls)
+        """
+        files = []
+        urls = []
+
+        # puts the url for the given file path into the appropriate
+        # list
+        def sort_path(path):
+            url = path_to_url2(path)
+            # Anything whose guessed MIME type is text/html is treated
+            # as an index page; everything else as a candidate archive.
+            if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
+                urls.append(url)
+            else:
+                files.append(url)
+
+        for url in locations:
+            if url.startswith('file:'):
+                path = url_to_path(url)
+                if os.path.isdir(path):
+                    # Expand a local directory into its direct entries.
+                    path = os.path.realpath(path)
+                    for item in os.listdir(path):
+                        sort_path(os.path.join(path, item))
+                elif os.path.isfile(path):
+                    sort_path(path)
+            else:
+                urls.append(url)
+        return files, urls
+
+    def find_requirement(self, req, upgrade):
+        # Locate the best Link satisfying `req`.  Returns a Link, or None
+        # when the already-installed version is the best match; raises
+        # DistributionNotFound when nothing (or no matching version) is
+        # found.  `upgrade` controls whether an installed version short-
+        # circuits the search.
+        url_name = req.url_name
+        # Only check main index if index URL is given:
+        main_index_url = None
+        if self.index_urls:
+            # Check that we have the url_name correctly spelled:
+            main_index_url = Link(posixpath.join(self.index_urls[0], url_name))
+            # This will also cache the page, so it's okay that we get it again later:
+            page = self._get_page(main_index_url, req)
+            if page is None:
+                url_name = self._find_url_name(Link(self.index_urls[0]), url_name, req) or req.url_name
+
+        # Combine index URLs with mirror URLs here to allow
+        # adding more index URLs from requirements files
+        all_index_urls = self.index_urls + self.mirror_urls
+
+        def mkurl_pypi_url(url):
+            loc = posixpath.join(url, url_name)
+            # For maximum compatibility with easy_install, ensure the path
+            # ends in a trailing slash.  Although this isn't in the spec
+            # (and PyPI can handle it without the slash) some other index
+            # implementations might break if they relied on easy_install's behavior.
+            if not loc.endswith('/'):
+                loc = loc + '/'
+            return loc
+        if url_name is not None:
+            locations = [
+                mkurl_pypi_url(url)
+                for url in all_index_urls] + self.find_links
+        else:
+            locations = list(self.find_links)
+        locations.extend(self.dependency_links)
+        for version in req.absolute_versions:
+            if url_name is not None and main_index_url is not None:
+                locations = [
+                    posixpath.join(main_index_url.url, version)] + locations
+
+        file_locations, url_locations = self._sort_locations(locations)
+
+        locations = [Link(url) for url in url_locations]
+        logger.debug('URLs to search for versions for %s:' % req)
+        for location in locations:
+            logger.debug('* %s' % location)
+        found_versions = []
+        found_versions.extend(
+            self._package_versions(
+                [Link(url, '-f') for url in self.find_links], req.name.lower()))
+        page_versions = []
+        for page in self._get_pages(locations, req):
+            logger.debug('Analyzing links from page %s' % page.url)
+            logger.indent += 2
+            try:
+                page_versions.extend(self._package_versions(page.links, req.name.lower()))
+            finally:
+                logger.indent -= 2
+        dependency_versions = list(self._package_versions(
+            [Link(url) for url in self.dependency_links], req.name.lower()))
+        if dependency_versions:
+            logger.info('dependency_links found: %s' % ', '.join([link.url for parsed, link, version in dependency_versions]))
+        file_versions = list(self._package_versions(
+            [Link(url) for url in file_locations], req.name.lower()))
+        if not found_versions and not page_versions and not dependency_versions and not file_versions:
+            logger.fatal('Could not find any downloads that satisfy the requirement %s' % req)
+            raise DistributionNotFound('No distributions at all found for %s' % req)
+        if req.satisfied_by is not None:
+            # Represent the installed version with Inf in the link slot so
+            # it sorts/compares against remote candidates below.
+            found_versions.append((req.satisfied_by.parsed_version, Inf, req.satisfied_by.version))
+        if file_versions:
+            file_versions.sort(reverse=True)
+            logger.info('Local files found: %s' % ', '.join([url_to_path(link.url) for parsed, link, version in file_versions]))
+            found_versions = file_versions + found_versions
+        all_versions = found_versions + page_versions + dependency_versions
+        applicable_versions = []
+        for (parsed_version, link, version) in all_versions:
+            if version not in req.req:
+                logger.info("Ignoring link %s, version %s doesn't match %s"
+                            % (link, version, ','.join([''.join(s) for s in req.req.specs])))
+                continue
+            applicable_versions.append((link, version))
+        applicable_versions = sorted(applicable_versions, key=lambda v: pkg_resources.parse_version(v[1]), reverse=True)
+        existing_applicable = bool([link for link, version in applicable_versions if link is Inf])
+        if not upgrade and existing_applicable:
+            if applicable_versions[0][1] is Inf:
+                logger.info('Existing installed version (%s) is most up-to-date and satisfies requirement'
+                            % req.satisfied_by.version)
+            else:
+                logger.info('Existing installed version (%s) satisfies requirement (most up-to-date version is %s)'
+                            % (req.satisfied_by.version, applicable_versions[0][1]))
+            return None
+        if not applicable_versions:
+            logger.fatal('Could not find a version that satisfies the requirement %s (from versions: %s)'
+                         % (req, ', '.join([version for parsed_version, link, version in found_versions])))
+            raise DistributionNotFound('No distributions matching the version for %s' % req)
+        if applicable_versions[0][0] is Inf:
+            # We have an existing version, and its the best version
+            logger.info('Installed version (%s) is most up-to-date (past versions: %s)'
+                        % (req.satisfied_by.version, ', '.join([version for link, version in applicable_versions[1:]]) or 'none'))
+            return None
+        if len(applicable_versions) > 1:
+            logger.info('Using version %s (newest of versions: %s)' %
+                        (applicable_versions[0][1], ', '.join([version for link, version in applicable_versions])))
+        return applicable_versions[0][0]
+
+    def _find_url_name(self, index_url, url_name, req):
+        """Finds the true URL name of a package, when the given name isn't quite correct.
+        This is usually used to implement case-insensitivity."""
+        if not index_url.url.endswith('/'):
+            # Vaguely part of the PyPI API... weird but true.
+            ## FIXME: bad to modify this?
+            index_url.url += '/'
+        page = self._get_page(index_url, req)
+        if page is None:
+            logger.fatal('Cannot fetch index base URL %s' % index_url)
+            return
+        norm_name = normalize_name(req.url_name)
+        # Scan the index page for a link whose normalized basename
+        # matches the normalized requirement name.
+        for link in page.links:
+            base = posixpath.basename(link.path.rstrip('/'))
+            if norm_name == normalize_name(base):
+                logger.notify('Real name of requirement %s is %s' % (url_name, base))
+                return base
+        return None
+
+    def _get_pages(self, locations, req):
+        """Yields (page, page_url) from the given locations, skipping
+        locations that have errors, and adding download/homepage links"""
+        pending_queue = Queue()
+        for location in locations:
+            pending_queue.put(location)
+        done = []
+        seen = set()
+        threads = []
+        # At most 10 fetcher threads; each drains the queue and may add
+        # rel-links it discovers back onto it (see _get_queued_page).
+        for i in range(min(10, len(locations))):
+            t = threading.Thread(target=self._get_queued_page, args=(req, pending_queue, done, seen))
+            t.setDaemon(True)
+            threads.append(t)
+            t.start()
+        for t in threads:
+            t.join()
+        return done
+
+    _log_lock = threading.Lock()
+
+    def _get_queued_page(self, req, pending_queue, done, seen):
+        # Worker body for _get_pages: pop locations until the queue is
+        # empty, fetch each page once (dedup via `seen`), collect results
+        # in `done`, and enqueue any rel links found on the page.
+        while 1:
+            try:
+                location = pending_queue.get(False)
+            except QueueEmpty:
+                return
+            if location in seen:
+                continue
+            seen.add(location)
+            page = self._get_page(location, req)
+            if page is None:
+                continue
+            done.append(page)
+            for link in page.rel_links():
+                pending_queue.put(link)
+
+    _egg_fragment_re = re.compile(r'#egg=([^&]*)')
+    # "name-version" splitter and "-pyX.Y" suffix detector used by
+    # _link_package_versions / _egg_info_matches.
+    _egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.-]+)', re.I)
+    _py_version_re = re.compile(r'-py([123]\.[0-9])$')
+
+    def _sort_links(self, links):
+        "Returns elements of links in order, non-egg links first, egg links second, while eliminating duplicates"
+        eggs, no_eggs = [], []
+        seen = set()
+        for link in links:
+            if link not in seen:
+                seen.add(link)
+                if link.egg_fragment:
+                    eggs.append(link)
+                else:
+                    no_eggs.append(link)
+        return no_eggs + eggs
+
+    def _package_versions(self, links, search_name):
+        # Flatten the per-link triples from _link_package_versions over
+        # the de-duplicated, non-egg-first ordering of _sort_links.
+        for link in self._sort_links(links):
+            for v in self._link_package_versions(link, search_name):
+                yield v
+
+    def _link_package_versions(self, link, search_name):
+        """
+        Return an iterable of triples (pkg_resources_version_key,
+        link, python_version) that can be extracted from the given
+        link.
+
+        Meant to be overridden by subclasses, not called by clients.
+        """
+        if link.egg_fragment:
+            egg_info = link.egg_fragment
+        else:
+            egg_info, ext = link.splitext()
+            if not ext:
+                # Each "boring" link is logged only once per finder run.
+                if link not in self.logged_links:
+                    logger.debug('Skipping link %s; not a file' % link)
+                    self.logged_links.add(link)
+                return []
+            if egg_info.endswith('.tar'):
+                # Special double-extension case:
+                egg_info = egg_info[:-4]
+                ext = '.tar' + ext
+            if ext not in ('.tar.gz', '.tar.bz2', '.tar', '.tgz', '.zip'):
+                if link not in self.logged_links:
+                    logger.debug('Skipping link %s; unknown archive format: %s' % (link, ext))
+                    self.logged_links.add(link)
+                return []
+        version = self._egg_info_matches(egg_info, search_name, link)
+        if version is None:
+            logger.debug('Skipping link %s; wrong project name (not %s)' % (link, search_name))
+            return []
+        match = self._py_version_re.search(version)
+        if match:
+            # Strip a trailing -pyX.Y tag and reject it if it names a
+            # different interpreter version than the running one.
+            version = version[:match.start()]
+            py_version = match.group(1)
+            if py_version != sys.version[:3]:
+                logger.debug('Skipping %s because Python version is incorrect' % link)
+                return []
+        logger.debug('Found link %s, version: %s' % (link, version))
+        return [(pkg_resources.parse_version(version),
+               link,
+               version)]
+
+    def _egg_info_matches(self, egg_info, search_name, link):
+        # Extract the version part of a "name-version" filename stem;
+        # returns None when the stem doesn't parse or names a different
+        # project.
+        match = self._egg_info_re.search(egg_info)
+        if not match:
+            logger.debug('Could not parse version from link: %s' % link)
+            return None
+        name = match.group(0).lower()
+        # To match the "safe" name that pkg_resources creates:
+        name = name.replace('_', '-')
+        if name.startswith(search_name.lower()):
+            return match.group(0)[len(search_name):].lstrip('-')
+        else:
+            return None
+
+    def _get_page(self, link, req):
+        # Delegates to HTMLPage.get_page with this finder's shared cache.
+        return HTMLPage.get_page(link, req, cache=self.cache)
+
+    def _get_mirror_urls(self, mirrors=None, main_mirror_url=None):
+        """Retrieves a list of URLs from the main mirror DNS entry
+        unless a list of mirror URLs are passed.
+
+        Each mirror is normalized to have an http:// scheme (when none
+        is present) and a /simple/ suffix; the de-duplicated list is
+        returned.
+        """
+        if not mirrors:
+            mirrors = get_mirrors(main_mirror_url)
+            # Should this be made "less random"? E.g. netselect like?
+            random.shuffle(mirrors)
+
+        mirror_urls = set()
+        for mirror_url in mirrors:
+            # Make sure we have a valid URL.  Bug fix: the original test
+            # `("http://" or "https://" or "file://") in mirror_url`
+            # short-circuits to `"http://" in mirror_url` (an `or` chain
+            # of non-empty strings yields its first operand), so https://
+            # and file:// mirrors were wrongly re-prefixed with http://.
+            if not mirror_url.startswith(("http://", "https://", "file://")):
+                mirror_url = "http://%s" % mirror_url
+            if not mirror_url.endswith("/simple"):
+                mirror_url = "%s/simple/" % mirror_url
+            mirror_urls.add(mirror_url)
+
+        return list(mirror_urls)
+
+
+class PageCache(object):
+    """Cache of HTML pages"""
+
+    # After this many (weighted) failures for a URL, stop retrying it.
+    failure_limit = 3
+
+    def __init__(self):
+        self._failures = {}   # url -> accumulated failure weight
+        self._pages = {}      # url -> HTMLPage
+        self._archives = {}   # url -> True if known to be an archive
+
+    def too_many_failures(self, url):
+        return self._failures.get(url, 0) >= self.failure_limit
+
+    def get_page(self, url):
+        return self._pages.get(url)
+
+    def is_archive(self, url):
+        return self._archives.get(url, False)
+
+    def set_is_archive(self, url, value=True):
+        self._archives[url] = value
+
+    def add_page_failure(self, url, level):
+        # `level` weights the failure (e.g. timeouts count less than
+        # hard URL errors — see HTMLPage.get_page).
+        self._failures[url] = self._failures.get(url, 0)+level
+
+    def add_page(self, urls, page):
+        # A page may be stored under several URLs (requested + final
+        # redirected URL).
+        for url in urls:
+            self._pages[url] = page
+
+
+class HTMLPage(object):
+    """Represents one page, along with its URL"""
+
+    ## FIXME: these regexes are horrible hacks:
+    _homepage_re = re.compile(r'<th>\s*home\s*page', re.I)
+    _download_re = re.compile(r'<th>\s*download\s+url', re.I)
+    ## These aren't so awful:
+    _rel_re = re.compile("""<[^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*>""", re.I)
+    _href_re = re.compile('href=(?:"([^"]*)"|\'([^\']*)\'|([^>\\s\\n]*))', re.I|re.S)
+    _base_re = re.compile(r"""<base\s+href\s*=\s*['"]?([^'">]+)""", re.I)
+
+    def __init__(self, content, url, headers=None):
+        self.content = content
+        self.url = url
+        self.headers = headers
+
+    def __str__(self):
+        return self.url
+
+    @classmethod
+    def get_page(cls, link, req, cache=None, skip_archives=True):
+        """Fetch `link` and return an HTMLPage, or None when the URL is
+        uninteresting (VCS scheme, archive content type, repeated
+        failures) or could not be retrieved.
+
+        `cache` is consulted/updated when given.  Bug fix: the original
+        called cache.too_many_failures() before the `cache is not None`
+        checks, raising AttributeError whenever the declared default
+        cache=None was actually used.
+        """
+        url = link.url
+        url = url.split('#', 1)[0]
+        if cache is not None and cache.too_many_failures(url):
+            return None
+
+        # Check for VCS schemes that do not support lookup as web pages.
+        from pip.vcs import VcsSupport
+        for scheme in VcsSupport.schemes:
+            # NOTE(review): url[len(scheme)] would raise IndexError if a
+            # URL were exactly equal to a scheme name — confirm unreachable.
+            if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
+                logger.debug('Cannot look at %(scheme)s URL %(link)s' % locals())
+                return None
+
+        if cache is not None:
+            inst = cache.get_page(url)
+            if inst is not None:
+                return inst
+        try:
+            if skip_archives:
+                if cache is not None:
+                    if cache.is_archive(url):
+                        return None
+                filename = link.filename
+                for bad_ext in ['.tar', '.tar.gz', '.tar.bz2', '.tgz', '.zip']:
+                    if filename.endswith(bad_ext):
+                        content_type = cls._get_content_type(url)
+                        if content_type.lower().startswith('text/html'):
+                            break
+                        else:
+                            logger.debug('Skipping page %s because of Content-Type: %s' % (link, content_type))
+                            if cache is not None:
+                                cache.set_is_archive(url)
+                            return None
+            logger.debug('Getting page %s' % url)
+
+            # Tack index.html onto file:// URLs that point to directories
+            (scheme, netloc, path, params, query, fragment) = urlparse.urlparse(url)
+            if scheme == 'file' and os.path.isdir(urllib.url2pathname(path)):
+                # add trailing slash if not present so urljoin doesn't trim final segment
+                if not url.endswith('/'):
+                    url += '/'
+                url = urlparse.urljoin(url, 'index.html')
+                logger.debug(' file: URL is directory, getting %s' % url)
+
+            resp = urlopen(url)
+
+            real_url = geturl(resp)
+            headers = resp.info()
+            inst = cls(resp.read(), real_url, headers)
+        except (urllib2.HTTPError, urllib2.URLError, socket.timeout, socket.error, OSError, WindowsError), e:
+            # Classify the failure: timeouts are "light" (level 1), other
+            # URL errors and 404s are "heavy" (level 2) for the cache's
+            # failure counter.
+            desc = str(e)
+            if isinstance(e, socket.timeout):
+                log_meth = logger.info
+                level = 1
+                desc = 'timed out'
+            elif isinstance(e, urllib2.URLError):
+                log_meth = logger.info
+                if hasattr(e, 'reason') and isinstance(e.reason, socket.timeout):
+                    desc = 'timed out'
+                    level = 1
+                else:
+                    level = 2
+            elif isinstance(e, urllib2.HTTPError) and e.code == 404:
+                ## FIXME: notify?
+                log_meth = logger.info
+                level = 2
+            else:
+                log_meth = logger.info
+                level = 1
+            log_meth('Could not fetch URL %s: %s' % (link, desc))
+            log_meth('Will skip URL %s when looking for download links for %s' % (link.url, req))
+            if cache is not None:
+                cache.add_page_failure(url, level)
+            return None
+        if cache is not None:
+            cache.add_page([url, real_url], inst)
+        return inst
+
+    @staticmethod
+    def _get_content_type(url):
+        """Get the Content-Type of the given url, using a HEAD request"""
+        scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
+        if not scheme in ('http', 'https', 'ftp', 'ftps'):
+            ## FIXME: some warning or something?
+            ## assertion error?
+            return ''
+        req = Urllib2HeadRequest(url, headers={'Host': netloc})
+        resp = urlopen(req)
+        try:
+            if hasattr(resp, 'code') and resp.code != 200 and scheme not in ('ftp', 'ftps'):
+                ## FIXME: doesn't handle redirects
+                return ''
+            return resp.info().get('content-type', '')
+        finally:
+            # Always release the connection, even on early return.
+            resp.close()
+
+    @property
+    def base_url(self):
+        # Base URL for resolving relative links: the page's <base href>
+        # if present, otherwise the page's own URL.  Computed lazily and
+        # memoized on first access.
+        if not hasattr(self, "_base_url"):
+            match = self._base_re.search(self.content)
+            if match:
+                self._base_url = match.group(1)
+            else:
+                self._base_url = self.url
+        return self._base_url
+
+    @property
+    def links(self):
+        """Yields all links in the page"""
+        for match in self._href_re.finditer(self.content):
+            # _href_re has three alternative groups: double-quoted,
+            # single-quoted, and unquoted href values.
+            url = match.group(1) or match.group(2) or match.group(3)
+            url = self.clean_link(urlparse.urljoin(self.base_url, url))
+            yield Link(url, self)
+
+    def rel_links(self):
+        # All homepage/download links: explicit rel="..." attributes
+        # first, then links scraped from PyPI's HTML tables.
+        for url in self.explicit_rel_links():
+            yield url
+        for url in self.scraped_rel_links():
+            yield url
+
+    def explicit_rel_links(self, rels=('homepage', 'download')):
+        """Yields all links with the given relations"""
+        for match in self._rel_re.finditer(self.content):
+            found_rels = match.group(1).lower().split()
+            # for/else: skip the tag unless at least one requested rel
+            # value appears in its rel attribute.
+            for rel in rels:
+                if rel in found_rels:
+                    break
+            else:
+                continue
+            match = self._href_re.search(match.group(0))
+            if not match:
+                continue
+            url = match.group(1) or match.group(2) or match.group(3)
+            url = self.clean_link(urlparse.urljoin(self.base_url, url))
+            yield Link(url, self)
+
+    def scraped_rel_links(self):
+        """Yields homepage/download links scraped from PyPI's "<th>Home
+        Page" / "<th>Download URL" table rows.
+
+        Bug fix: the URL must be read from ``href_match`` (the href
+        regex match).  The original read groups from ``match`` — the
+        <th>-label regex, which defines no groups — so any page that
+        reached this point raised IndexError.
+        """
+        for regex in (self._homepage_re, self._download_re):
+            match = regex.search(self.content)
+            if not match:
+                continue
+            # Look for the first href after the matched table header.
+            href_match = self._href_re.search(self.content, pos=match.end())
+            if not href_match:
+                continue
+            url = href_match.group(1) or href_match.group(2) or href_match.group(3)
+            if not url:
+                continue
+            url = self.clean_link(urlparse.urljoin(self.base_url, url))
+            yield Link(url, self)
+
+    _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
+
+    def clean_link(self, url):
+        """Makes sure a link is fully encoded. That is, if a ' ' shows up in
+        the link, it will be rewritten to %20 (while not over-quoting
+        % or other characters)."""
+        # Bug fix: use %02x (zero-padded) instead of %2x — space padding
+        # produced malformed escapes such as '% a' for code points below
+        # 0x10.
+        return self._clean_re.sub(
+            lambda match: '%%%02x' % ord(match.group(0)), url)
+
+
+class Link(object):
+    # A single candidate URL, optionally annotated with where it was
+    # found (`comes_from`: an HTMLPage or a marker string like '-f').
+
+    def __init__(self, url, comes_from=None):
+        self.url = url
+        self.comes_from = comes_from
+
+    def __str__(self):
+        if self.comes_from:
+            return '%s (from %s)' % (self.url, self.comes_from)
+        else:
+            return self.url
+
+    def __repr__(self):
+        return '<Link %s>' % self
+
+    # Equality/hash are by URL only, so Links dedupe in sets regardless
+    # of comes_from.
+    # NOTE(review): no matching __ne__ is defined; under Python 2, !=
+    # falls back to identity comparison — confirm callers only use ==.
+    def __eq__(self, other):
+        return self.url == other.url
+
+    def __hash__(self):
+        return hash(self.url)
+
+    @property
+    def filename(self):
+        # Basename of the URL with fragment, query string and trailing
+        # slash stripped.
+        url = self.url
+        url = url.split('#', 1)[0]
+        url = url.split('?', 1)[0]
+        url = url.rstrip('/')
+        name = posixpath.basename(url)
+        assert name, (
+            'URL %r produced no filename' % url)
+        return name
+
+    @property
+    def scheme(self):
+        return urlparse.urlsplit(self.url)[0]
+
+    @property
+    def path(self):
+        return urlparse.urlsplit(self.url)[2]
+
+    def splitext(self):
+        return splitext(posixpath.basename(self.path.rstrip('/')))
+
+    _egg_fragment_re = re.compile(r'#egg=([^&]*)')
+
+    @property
+    def egg_fragment(self):
+        # Project name requested via a #egg=... fragment, or None.
+        match = self._egg_fragment_re.search(self.url)
+        if not match:
+            return None
+        return match.group(1)
+
+    _md5_re = re.compile(r'md5=([a-f0-9]+)')
+
+    @property
+    def md5_hash(self):
+        # Expected md5 digest advertised in the URL (md5=...), or None.
+        match = self._md5_re.search(self.url)
+        if match:
+            return match.group(1)
+        return None
+
+    @property
+    def show_url(self):
+        # Short display form: basename without fragment or query string.
+        return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])
+
+
+def get_requirement_from_url(url):
+    """Get a requirement from the URL, if possible. This looks for #egg
+    in the URL"""
+    link = Link(url)
+    egg_info = link.egg_fragment
+    if not egg_info:
+        # No #egg= fragment: fall back to the filename stem.
+        egg_info = splitext(link.filename)[0]
+    return package_to_requirement(egg_info)
+
+
+def package_to_requirement(package_name):
+    """Translate a name like Foo-1.2 to Foo==1.2"""
+    # Split on the first "-dev" or "-<digit>..." suffix; anything before
+    # is the project name, the rest is the version.
+    match = re.search(r'^(.*?)(-dev|-\d.*)', package_name)
+    if match:
+        name = match.group(1)
+        version = match.group(2)
+    else:
+        name = package_name
+        version = ''
+    if version:
+        return '%s==%s' % (name, version)
+    else:
+        return name
+
+
+def get_mirrors(hostname=None):
+    """Return the list of mirrors from the last record found on the DNS
+    entry::
+
+        >>> from pip.index import get_mirrors
+        >>> get_mirrors()
+        ['a.pypi.python.org', 'b.pypi.python.org', 'c.pypi.python.org',
+        'd.pypi.python.org']
+
+    Originally written for the distutils2 project by Alexis Metaireau.
+    """
+    if hostname is None:
+        hostname = DEFAULT_MIRROR_URL
+
+    # return the last mirror registered on PyPI.
+    try:
+        hostname = socket.gethostbyname_ex(hostname)[0]
+    except socket.gaierror:
+        return []
+    # NOTE(review): assumes the canonical hostname contains a dot;
+    # a dotless name would make end_letter[1] below raise IndexError —
+    # confirm DNS always returns a qualified name here.
+    end_letter = hostname.split(".", 1)
+
+    # determine the list from the last one.
+    return ["%s.%s" % (s, end_letter[1]) for s in string_range(end_letter[0])]
+
+
+def string_range(last):
+    """Compute the range of string between "a" and last.
+
+    This works for simple "a to z" lists, but also for "a to zz" lists.
+    """
+    # Enumerate a..z, then aa..zz, etc., stopping (inclusively) once
+    # `last` itself is yielded.
+    for k in range(len(last)):
+        for x in product(string.ascii_lowercase, repeat=k+1):
+            result = ''.join(x)
+            yield result
+            if result == last:
+                return
+
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/locations.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/locations.py
new file mode 100755
index 00000000..4254ef2f
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/locations.py
@@ -0,0 +1,45 @@
+"""Locations where we look for configs, install stuff, etc"""
+
+import sys
+import os
+from distutils import sysconfig
+
+
+def running_under_virtualenv():
+    """
+    Return True if we're running inside a virtualenv, False otherwise.
+
+    virtualenv sets sys.real_prefix; a stock interpreter does not.
+    """
+    return hasattr(sys, 'real_prefix')
+
+
+if running_under_virtualenv():
+    ## FIXME: is build/ a good name?
+    build_prefix = os.path.join(sys.prefix, 'build')
+    src_prefix = os.path.join(sys.prefix, 'src')
+else:
+    ## FIXME: this isn't a very good default
+    build_prefix = os.path.join(os.getcwd(), 'build')
+    src_prefix = os.path.join(os.getcwd(), 'src')
+
+# FIXME: doesn't account for venv linked to global site-packages
+
+site_packages = sysconfig.get_python_lib()
+user_dir = os.path.expanduser('~')
+if sys.platform == 'win32':
+    bin_py = os.path.join(sys.prefix, 'Scripts')
+    # buildout uses 'bin' on Windows too?
+    if not os.path.exists(bin_py):
+        bin_py = os.path.join(sys.prefix, 'bin')
+    user_dir = os.environ.get('APPDATA', user_dir)  # Use %APPDATA% for roaming
+    default_storage_dir = os.path.join(user_dir, 'pip')
+    default_config_file = os.path.join(default_storage_dir, 'pip.ini')
+    default_log_file = os.path.join(default_storage_dir, 'pip.log')
+else:
+    # POSIX: hidden ~/.pip directory and pip.conf instead of pip.ini.
+    bin_py = os.path.join(sys.prefix, 'bin')
+    default_storage_dir = os.path.join(user_dir, '.pip')
+    default_config_file = os.path.join(default_storage_dir, 'pip.conf')
+    default_log_file = os.path.join(default_storage_dir, 'pip.log')
+    # Forcing to use /usr/local/bin for standard Mac OS X framework installs
+    if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
+        bin_py = '/usr/local/bin'
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/log.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/log.py
new file mode 100755
index 00000000..0218ab1a
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/log.py
@@ -0,0 +1,181 @@
+"""Logging
+"""
+
+import sys
+import logging
+
+
+class Logger(object):
+
+ """
+ Logging object for use in command-line script. Allows ranges of
+ levels, to avoid some redundancy of displayed information.
+ """
+
+ VERBOSE_DEBUG = logging.DEBUG-1
+ DEBUG = logging.DEBUG
+ INFO = logging.INFO
+ NOTIFY = (logging.INFO+logging.WARN)/2
+ WARN = WARNING = logging.WARN
+ ERROR = logging.ERROR
+ FATAL = logging.FATAL
+
+ LEVELS = [VERBOSE_DEBUG, DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL]
+
+ def __init__(self):
+ self.consumers = []
+ self.indent = 0
+ self.explicit_levels = False
+ self.in_progress = None
+ self.in_progress_hanging = False
+
+ def debug(self, msg, *args, **kw):
+ self.log(self.DEBUG, msg, *args, **kw)
+
+ def info(self, msg, *args, **kw):
+ self.log(self.INFO, msg, *args, **kw)
+
+ def notify(self, msg, *args, **kw):
+ self.log(self.NOTIFY, msg, *args, **kw)
+
+ def warn(self, msg, *args, **kw):
+ self.log(self.WARN, msg, *args, **kw)
+
+    def error(self, msg, *args, **kw):
+        self.log(self.ERROR, msg, *args, **kw)  # was self.WARN: errors were logged at warning level
+
+ def fatal(self, msg, *args, **kw):
+ self.log(self.FATAL, msg, *args, **kw)
+
+ def log(self, level, msg, *args, **kw):
+ if args:
+ if kw:
+ raise TypeError(
+ "You may give positional or keyword arguments, not both")
+ args = args or kw
+ rendered = None
+ for consumer_level, consumer in self.consumers:
+ if self.level_matches(level, consumer_level):
+ if (self.in_progress_hanging
+ and consumer in (sys.stdout, sys.stderr)):
+ self.in_progress_hanging = False
+ sys.stdout.write('\n')
+ sys.stdout.flush()
+ if rendered is None:
+ if args:
+ rendered = msg % args
+ else:
+ rendered = msg
+ rendered = ' '*self.indent + rendered
+ if self.explicit_levels:
+ ## FIXME: should this be a name, not a level number?
+ rendered = '%02i %s' % (level, rendered)
+ if hasattr(consumer, 'write'):
+ consumer.write(rendered+'\n')
+ else:
+ consumer(rendered)
+
+ def start_progress(self, msg):
+ assert not self.in_progress, (
+ "Tried to start_progress(%r) while in_progress %r"
+ % (msg, self.in_progress))
+ if self.level_matches(self.NOTIFY, self._stdout_level()):
+ sys.stdout.write(' '*self.indent + msg)
+ sys.stdout.flush()
+ self.in_progress_hanging = True
+ else:
+ self.in_progress_hanging = False
+ self.in_progress = msg
+ self.last_message = None
+
+ def end_progress(self, msg='done.'):
+ assert self.in_progress, (
+ "Tried to end_progress without start_progress")
+ if self.stdout_level_matches(self.NOTIFY):
+ if not self.in_progress_hanging:
+ # Some message has been printed out since start_progress
+ sys.stdout.write('...' + self.in_progress + msg + '\n')
+ sys.stdout.flush()
+ else:
+ # These erase any messages shown with show_progress (besides .'s)
+ logger.show_progress('')
+ logger.show_progress('')
+ sys.stdout.write(msg + '\n')
+ sys.stdout.flush()
+ self.in_progress = None
+ self.in_progress_hanging = False
+
+ def show_progress(self, message=None):
+ """If we are in a progress scope, and no log messages have been
+ shown, write out another '.'"""
+ if self.in_progress_hanging:
+ if message is None:
+ sys.stdout.write('.')
+ sys.stdout.flush()
+ else:
+ if self.last_message:
+ padding = ' ' * max(0, len(self.last_message)-len(message))
+ else:
+ padding = ''
+ sys.stdout.write('\r%s%s%s%s' % (' '*self.indent, self.in_progress, message, padding))
+ sys.stdout.flush()
+ self.last_message = message
+
+ def stdout_level_matches(self, level):
+ """Returns true if a message at this level will go to stdout"""
+ return self.level_matches(level, self._stdout_level())
+
+ def _stdout_level(self):
+ """Returns the level that stdout runs at"""
+ for level, consumer in self.consumers:
+ if consumer is sys.stdout:
+ return level
+ return self.FATAL
+
+ def level_matches(self, level, consumer_level):
+ """
+ >>> l = Logger()
+ >>> l.level_matches(3, 4)
+ False
+ >>> l.level_matches(3, 2)
+ True
+ >>> l.level_matches(slice(None, 3), 3)
+ False
+ >>> l.level_matches(slice(None, 3), 2)
+ True
+ >>> l.level_matches(slice(1, 3), 1)
+ True
+ >>> l.level_matches(slice(2, 3), 1)
+ False
+ """
+ if isinstance(level, slice):
+ start, stop = level.start, level.stop
+ if start is not None and start > consumer_level:
+ return False
+ if stop is not None or stop <= consumer_level:
+ return False
+ return True
+ else:
+ return level >= consumer_level
+
+ @classmethod
+ def level_for_integer(cls, level):
+ levels = cls.LEVELS
+ if level < 0:
+ return levels[0]
+ if level >= len(levels):
+ return levels[-1]
+ return levels[level]
+
+ def move_stdout_to_stderr(self):
+ to_remove = []
+ to_add = []
+ for consumer_level, consumer in self.consumers:
+ if consumer == sys.stdout:
+ to_remove.append((consumer_level, consumer))
+ to_add.append((consumer_level, sys.stderr))
+ for item in to_remove:
+ self.consumers.remove(item)
+ self.consumers.extend(to_add)
+
+logger = Logger()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/req.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/req.py
new file mode 100755
index 00000000..444e7252
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/req.py
@@ -0,0 +1,1432 @@
+import sys
+import os
+import shutil
+import re
+import zipfile
+import pkg_resources
+import tempfile
+import urlparse
+import urllib2
+import urllib
+import ConfigParser
+from distutils.sysconfig import get_python_version
+from email.FeedParser import FeedParser
+from pip.locations import bin_py, running_under_virtualenv
+from pip.exceptions import InstallationError, UninstallationError
+from pip.vcs import vcs
+from pip.log import logger
+from pip.util import display_path, rmtree
+from pip.util import ask, backup_dir
+from pip.util import is_installable_dir, is_local, dist_is_local
+from pip.util import renames, normalize_path, egg_link_path
+from pip.util import make_path_relative
+from pip import call_subprocess
+from pip.backwardcompat import any, copytree
+from pip.index import Link
+from pip.locations import build_prefix
+from pip.download import (get_file_content, is_url, url_to_path,
+ path_to_url, is_archive_file,
+ unpack_vcs_link, is_vcs_url, is_file_url,
+ unpack_file_url, unpack_http_url)
+
+
+PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
+
+
+class InstallRequirement(object):
+
+ def __init__(self, req, comes_from, source_dir=None, editable=False,
+ url=None, update=True):
+ if isinstance(req, basestring):
+ req = pkg_resources.Requirement.parse(req)
+ self.req = req
+ self.comes_from = comes_from
+ self.source_dir = source_dir
+ self.editable = editable
+ self.url = url
+ self._egg_info_path = None
+ # This holds the pkg_resources.Distribution object if this requirement
+ # is already available:
+ self.satisfied_by = None
+ # This hold the pkg_resources.Distribution object if this requirement
+ # conflicts with another installed distribution:
+ self.conflicts_with = None
+ self._temp_build_dir = None
+ self._is_bundle = None
+ # True if the editable should be updated:
+ self.update = update
+ # Set to True after successful installation
+ self.install_succeeded = None
+ # UninstallPathSet of uninstalled distribution (for possible rollback)
+ self.uninstalled = None
+
+ @classmethod
+ def from_editable(cls, editable_req, comes_from=None, default_vcs=None):
+ name, url = parse_editable(editable_req, default_vcs)
+ if url.startswith('file:'):
+ source_dir = url_to_path(url)
+ else:
+ source_dir = None
+ return cls(name, comes_from, source_dir=source_dir, editable=True, url=url)
+
+ @classmethod
+ def from_line(cls, name, comes_from=None):
+ """Creates an InstallRequirement from a name, which might be a
+ requirement, directory containing 'setup.py', filename, or URL.
+ """
+ url = None
+ name = name.strip()
+ req = name
+ path = os.path.normpath(os.path.abspath(name))
+
+ if is_url(name):
+ url = name
+ ## FIXME: I think getting the requirement here is a bad idea:
+ #req = get_requirement_from_url(url)
+ req = None
+ elif os.path.isdir(path) and (os.path.sep in name or name.startswith('.')):
+ if not is_installable_dir(path):
+ raise InstallationError("Directory %r is not installable. File 'setup.py' not found."
+ % name)
+ url = path_to_url(name)
+ #req = get_requirement_from_url(url)
+ req = None
+ elif is_archive_file(path):
+ if not os.path.isfile(path):
+ logger.warn('Requirement %r looks like a filename, but the file does not exist'
+ % name)
+ url = path_to_url(name)
+ #req = get_requirement_from_url(url)
+ req = None
+ return cls(req, comes_from, url=url)
+
+ def __str__(self):
+ if self.req:
+ s = str(self.req)
+ if self.url:
+ s += ' from %s' % self.url
+ else:
+ s = self.url
+ if self.satisfied_by is not None:
+ s += ' in %s' % display_path(self.satisfied_by.location)
+ if self.comes_from:
+ if isinstance(self.comes_from, basestring):
+ comes_from = self.comes_from
+ else:
+ comes_from = self.comes_from.from_path()
+ if comes_from:
+ s += ' (from %s)' % comes_from
+ return s
+
+ def from_path(self):
+ if self.req is None:
+ return None
+ s = str(self.req)
+ if self.comes_from:
+ if isinstance(self.comes_from, basestring):
+ comes_from = self.comes_from
+ else:
+ comes_from = self.comes_from.from_path()
+ if comes_from:
+ s += '->' + comes_from
+ return s
+
+ def build_location(self, build_dir, unpack=True):
+ if self._temp_build_dir is not None:
+ return self._temp_build_dir
+ if self.req is None:
+ self._temp_build_dir = tempfile.mkdtemp('-build', 'pip-')
+ self._ideal_build_dir = build_dir
+ return self._temp_build_dir
+ if self.editable:
+ name = self.name.lower()
+ else:
+ name = self.name
+ # FIXME: Is there a better place to create the build_dir? (hg and bzr need this)
+ if not os.path.exists(build_dir):
+ _make_build_dir(build_dir)
+ return os.path.join(build_dir, name)
+
+ def correct_build_location(self):
+ """If the build location was a temporary directory, this will move it
+ to a new more permanent location"""
+ if self.source_dir is not None:
+ return
+ assert self.req is not None
+ assert self._temp_build_dir
+ old_location = self._temp_build_dir
+ new_build_dir = self._ideal_build_dir
+ del self._ideal_build_dir
+ if self.editable:
+ name = self.name.lower()
+ else:
+ name = self.name
+ new_location = os.path.join(new_build_dir, name)
+ if not os.path.exists(new_build_dir):
+ logger.debug('Creating directory %s' % new_build_dir)
+ _make_build_dir(new_build_dir)
+ if os.path.exists(new_location):
+ raise InstallationError(
+ 'A package already exists in %s; please remove it to continue'
+ % display_path(new_location))
+ logger.debug('Moving package %s from %s to new location %s'
+ % (self, display_path(old_location), display_path(new_location)))
+ shutil.move(old_location, new_location)
+ self._temp_build_dir = new_location
+ self.source_dir = new_location
+ self._egg_info_path = None
+
+ @property
+ def name(self):
+ if self.req is None:
+ return None
+ return self.req.project_name
+
+ @property
+ def url_name(self):
+ if self.req is None:
+ return None
+ return urllib.quote(self.req.unsafe_name)
+
+ @property
+ def setup_py(self):
+ return os.path.join(self.source_dir, 'setup.py')
+
+ def run_egg_info(self, force_root_egg_info=False):
+ assert self.source_dir
+ if self.name:
+ logger.notify('Running setup.py egg_info for package %s' % self.name)
+ else:
+ logger.notify('Running setup.py egg_info for package from %s' % self.url)
+ logger.indent += 2
+ try:
+ script = self._run_setup_py
+ script = script.replace('__SETUP_PY__', repr(self.setup_py))
+ script = script.replace('__PKG_NAME__', repr(self.name))
+ # We can't put the .egg-info files at the root, because then the source code will be mistaken
+ # for an installed egg, causing problems
+ if self.editable or force_root_egg_info:
+ egg_base_option = []
+ else:
+ egg_info_dir = os.path.join(self.source_dir, 'pip-egg-info')
+ if not os.path.exists(egg_info_dir):
+ os.makedirs(egg_info_dir)
+ egg_base_option = ['--egg-base', 'pip-egg-info']
+ call_subprocess(
+ [sys.executable, '-c', script, 'egg_info'] + egg_base_option,
+ cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False,
+ command_level=logger.VERBOSE_DEBUG,
+ command_desc='python setup.py egg_info')
+ finally:
+ logger.indent -= 2
+ if not self.req:
+ self.req = pkg_resources.Requirement.parse(self.pkg_info()['Name'])
+ self.correct_build_location()
+
+ ## FIXME: This is a lame hack, entirely for PasteScript which has
+ ## a self-provided entry point that causes this awkwardness
+ _run_setup_py = """
+__file__ = __SETUP_PY__
+from setuptools.command import egg_info
+def replacement_run(self):
+ self.mkpath(self.egg_info)
+ installer = self.distribution.fetch_build_egg
+ for ep in egg_info.iter_entry_points('egg_info.writers'):
+ # require=False is the change we're making:
+ writer = ep.load(require=False)
+ if writer:
+ writer(self, ep.name, egg_info.os.path.join(self.egg_info,ep.name))
+ self.find_sources()
+egg_info.egg_info.run = replacement_run
+execfile(__file__)
+"""
+
+ def egg_info_data(self, filename):
+ if self.satisfied_by is not None:
+ if not self.satisfied_by.has_metadata(filename):
+ return None
+ return self.satisfied_by.get_metadata(filename)
+ assert self.source_dir
+ filename = self.egg_info_path(filename)
+ if not os.path.exists(filename):
+ return None
+ fp = open(filename, 'r')
+ data = fp.read()
+ fp.close()
+ return data
+
+ def egg_info_path(self, filename):
+ if self._egg_info_path is None:
+ if self.editable:
+ base = self.source_dir
+ else:
+ base = os.path.join(self.source_dir, 'pip-egg-info')
+ filenames = os.listdir(base)
+ if self.editable:
+ filenames = []
+ for root, dirs, files in os.walk(base):
+ for dir in vcs.dirnames:
+ if dir in dirs:
+ dirs.remove(dir)
+ for dir in dirs:
+ # Don't search in anything that looks like a virtualenv environment
+ if (os.path.exists(os.path.join(root, dir, 'bin', 'python'))
+ or os.path.exists(os.path.join(root, dir, 'Scripts', 'Python.exe'))):
+ dirs.remove(dir)
+ # Also don't search through tests
+ if dir == 'test' or dir == 'tests':
+ dirs.remove(dir)
+ filenames.extend([os.path.join(root, dir)
+ for dir in dirs])
+ filenames = [f for f in filenames if f.endswith('.egg-info')]
+
+ if not filenames:
+ raise InstallationError('No files/directores in %s (from %s)' % (base, filename))
+ assert filenames, "No files/directories in %s (from %s)" % (base, filename)
+
+ # if we have more than one match, we pick the toplevel one. This can
+ # easily be the case if there is a dist folder which contains an
+ # extracted tarball for testing purposes.
+ if len(filenames) > 1:
+ filenames.sort(key=lambda x: x.count(os.path.sep) +
+ (os.path.altsep and
+ x.count(os.path.altsep) or 0))
+ self._egg_info_path = os.path.join(base, filenames[0])
+ return os.path.join(self._egg_info_path, filename)
+
+ def egg_info_lines(self, filename):
+ data = self.egg_info_data(filename)
+ if not data:
+ return []
+ result = []
+ for line in data.splitlines():
+ line = line.strip()
+ if not line or line.startswith('#'):
+ continue
+ result.append(line)
+ return result
+
+ def pkg_info(self):
+ p = FeedParser()
+ data = self.egg_info_data('PKG-INFO')
+ if not data:
+ logger.warn('No PKG-INFO file found in %s' % display_path(self.egg_info_path('PKG-INFO')))
+ p.feed(data or '')
+ return p.close()
+
+ @property
+ def dependency_links(self):
+ return self.egg_info_lines('dependency_links.txt')
+
+ _requirements_section_re = re.compile(r'\[(.*?)\]')
+
+ def requirements(self, extras=()):
+ in_extra = None
+ for line in self.egg_info_lines('requires.txt'):
+ match = self._requirements_section_re.match(line)
+ if match:
+ in_extra = match.group(1)
+ continue
+ if in_extra and in_extra not in extras:
+ # Skip requirement for an extra we aren't requiring
+ continue
+ yield line
+
+ @property
+ def absolute_versions(self):
+ for qualifier, version in self.req.specs:
+ if qualifier == '==':
+ yield version
+
+ @property
+ def installed_version(self):
+ return self.pkg_info()['version']
+
+ def assert_source_matches_version(self):
+ assert self.source_dir
+ if self.comes_from is None:
+ # We don't check the versions of things explicitly installed.
+ # This makes, e.g., "pip Package==dev" possible
+ return
+ version = self.installed_version
+ if version not in self.req:
+ logger.fatal(
+ 'Source in %s has the version %s, which does not match the requirement %s'
+ % (display_path(self.source_dir), version, self))
+ raise InstallationError(
+ 'Source in %s has version %s that conflicts with %s'
+ % (display_path(self.source_dir), version, self))
+ else:
+ logger.debug('Source in %s has version %s, which satisfies requirement %s'
+ % (display_path(self.source_dir), version, self))
+
+ def update_editable(self, obtain=True):
+ if not self.url:
+ logger.info("Cannot update repository at %s; repository location is unknown" % self.source_dir)
+ return
+ assert self.editable
+ assert self.source_dir
+ if self.url.startswith('file:'):
+ # Static paths don't get updated
+ return
+ assert '+' in self.url, "bad url: %r" % self.url
+ if not self.update:
+ return
+ vc_type, url = self.url.split('+', 1)
+ backend = vcs.get_backend(vc_type)
+ if backend:
+ vcs_backend = backend(self.url)
+ if obtain:
+ vcs_backend.obtain(self.source_dir)
+ else:
+ vcs_backend.export(self.source_dir)
+ else:
+ assert 0, (
+ 'Unexpected version control type (in %s): %s'
+ % (self.url, vc_type))
+
+ def uninstall(self, auto_confirm=False):
+ """
+ Uninstall the distribution currently satisfying this requirement.
+
+ Prompts before removing or modifying files unless
+ ``auto_confirm`` is True.
+
+ Refuses to delete or modify files outside of ``sys.prefix`` -
+ thus uninstallation within a virtual environment can only
+ modify that virtual environment, even if the virtualenv is
+ linked to global site-packages.
+
+ """
+ if not self.check_if_exists():
+ raise UninstallationError("Cannot uninstall requirement %s, not installed" % (self.name,))
+ dist = self.satisfied_by or self.conflicts_with
+
+ paths_to_remove = UninstallPathSet(dist)
+
+ pip_egg_info_path = os.path.join(dist.location,
+ dist.egg_name()) + '.egg-info'
+ easy_install_egg = dist.egg_name() + '.egg'
+ develop_egg_link = egg_link_path(dist)
+ if os.path.exists(pip_egg_info_path):
+ # package installed by pip
+ paths_to_remove.add(pip_egg_info_path)
+ if dist.has_metadata('installed-files.txt'):
+ for installed_file in dist.get_metadata('installed-files.txt').splitlines():
+ path = os.path.normpath(os.path.join(pip_egg_info_path, installed_file))
+ paths_to_remove.add(path)
+ if dist.has_metadata('top_level.txt'):
+ if dist.has_metadata('namespace_packages.txt'):
+ namespaces = dist.get_metadata('namespace_packages.txt')
+ else:
+ namespaces = []
+ for top_level_pkg in [p for p
+ in dist.get_metadata('top_level.txt').splitlines()
+ if p and p not in namespaces]:
+ path = os.path.join(dist.location, top_level_pkg)
+ paths_to_remove.add(path)
+ paths_to_remove.add(path + '.py')
+ paths_to_remove.add(path + '.pyc')
+
+ elif dist.location.endswith(easy_install_egg):
+ # package installed by easy_install
+ paths_to_remove.add(dist.location)
+ easy_install_pth = os.path.join(os.path.dirname(dist.location),
+ 'easy-install.pth')
+ paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)
+
+ elif os.path.isfile(develop_egg_link):
+ # develop egg
+ fh = open(develop_egg_link, 'r')
+ link_pointer = os.path.normcase(fh.readline().strip())
+ fh.close()
+ assert (link_pointer == dist.location), 'Egg-link %s does not match installed location of %s (at %s)' % (link_pointer, self.name, dist.location)
+ paths_to_remove.add(develop_egg_link)
+ easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
+ 'easy-install.pth')
+ paths_to_remove.add_pth(easy_install_pth, dist.location)
+
+ # find distutils scripts= scripts
+ if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
+ for script in dist.metadata_listdir('scripts'):
+ paths_to_remove.add(os.path.join(bin_py, script))
+ if sys.platform == 'win32':
+ paths_to_remove.add(os.path.join(bin_py, script) + '.bat')
+
+ # find console_scripts
+ if dist.has_metadata('entry_points.txt'):
+ config = ConfigParser.SafeConfigParser()
+ config.readfp(FakeFile(dist.get_metadata_lines('entry_points.txt')))
+ if config.has_section('console_scripts'):
+ for name, value in config.items('console_scripts'):
+ paths_to_remove.add(os.path.join(bin_py, name))
+ if sys.platform == 'win32':
+ paths_to_remove.add(os.path.join(bin_py, name) + '.exe')
+ paths_to_remove.add(os.path.join(bin_py, name) + '.exe.manifest')
+ paths_to_remove.add(os.path.join(bin_py, name) + '-script.py')
+
+ paths_to_remove.remove(auto_confirm)
+ self.uninstalled = paths_to_remove
+
+ def rollback_uninstall(self):
+ if self.uninstalled:
+ self.uninstalled.rollback()
+ else:
+ logger.error("Can't rollback %s, nothing uninstalled."
+ % (self.project_name,))
+
+ def commit_uninstall(self):
+ if self.uninstalled:
+ self.uninstalled.commit()
+ else:
+ logger.error("Can't commit %s, nothing uninstalled."
+ % (self.project_name,))
+
+ def archive(self, build_dir):
+ assert self.source_dir
+ create_archive = True
+ archive_name = '%s-%s.zip' % (self.name, self.installed_version)
+ archive_path = os.path.join(build_dir, archive_name)
+ if os.path.exists(archive_path):
+ response = ask('The file %s exists. (i)gnore, (w)ipe, (b)ackup '
+ % display_path(archive_path), ('i', 'w', 'b'))
+ if response == 'i':
+ create_archive = False
+ elif response == 'w':
+ logger.warn('Deleting %s' % display_path(archive_path))
+ os.remove(archive_path)
+ elif response == 'b':
+ dest_file = backup_dir(archive_path)
+ logger.warn('Backing up %s to %s'
+ % (display_path(archive_path), display_path(dest_file)))
+ shutil.move(archive_path, dest_file)
+ if create_archive:
+ zip = zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED)
+ dir = os.path.normcase(os.path.abspath(self.source_dir))
+ for dirpath, dirnames, filenames in os.walk(dir):
+ if 'pip-egg-info' in dirnames:
+ dirnames.remove('pip-egg-info')
+ for dirname in dirnames:
+ dirname = os.path.join(dirpath, dirname)
+ name = self._clean_zip_name(dirname, dir)
+ zipdir = zipfile.ZipInfo(self.name + '/' + name + '/')
+ zipdir.external_attr = 0755 << 16L
+ zip.writestr(zipdir, '')
+ for filename in filenames:
+ if filename == PIP_DELETE_MARKER_FILENAME:
+ continue
+ filename = os.path.join(dirpath, filename)
+ name = self._clean_zip_name(filename, dir)
+ zip.write(filename, self.name + '/' + name)
+ zip.close()
+ logger.indent -= 2
+ logger.notify('Saved %s' % display_path(archive_path))
+
+ def _clean_zip_name(self, name, prefix):
+ assert name.startswith(prefix+os.path.sep), (
+ "name %r doesn't start with prefix %r" % (name, prefix))
+ name = name[len(prefix)+1:]
+ name = name.replace(os.path.sep, '/')
+ return name
+
+ def install(self, install_options, global_options=()):
+ if self.editable:
+ self.install_editable(install_options, global_options)
+ return
+ temp_location = tempfile.mkdtemp('-record', 'pip-')
+ record_filename = os.path.join(temp_location, 'install-record.txt')
+ try:
+
+ install_args = [
+ sys.executable, '-c',
+ "import setuptools;__file__=%r;"\
+ "execfile(__file__)" % self.setup_py] +\
+ list(global_options) + [
+ 'install',
+ '--single-version-externally-managed',
+ '--record', record_filename]
+
+ if running_under_virtualenv():
+ ## FIXME: I'm not sure if this is a reasonable location; probably not
+ ## but we can't put it in the default location, as that is a virtualenv symlink that isn't writable
+ install_args += ['--install-headers',
+ os.path.join(sys.prefix, 'include', 'site',
+ 'python' + get_python_version())]
+ logger.notify('Running setup.py install for %s' % self.name)
+ logger.indent += 2
+ try:
+ call_subprocess(install_args + install_options,
+ cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False)
+ finally:
+ logger.indent -= 2
+ if not os.path.exists(record_filename):
+ logger.notify('Record file %s not found' % record_filename)
+ return
+ self.install_succeeded = True
+ f = open(record_filename)
+ for line in f:
+ line = line.strip()
+ if line.endswith('.egg-info'):
+ egg_info_dir = line
+ break
+ else:
+ logger.warn('Could not find .egg-info directory in install record for %s' % self)
+ ## FIXME: put the record somewhere
+ ## FIXME: should this be an error?
+ return
+ f.close()
+ new_lines = []
+ f = open(record_filename)
+ for line in f:
+ filename = line.strip()
+ if os.path.isdir(filename):
+ filename += os.path.sep
+ new_lines.append(make_path_relative(filename, egg_info_dir))
+ f.close()
+ f = open(os.path.join(egg_info_dir, 'installed-files.txt'), 'w')
+ f.write('\n'.join(new_lines)+'\n')
+ f.close()
+ finally:
+ if os.path.exists(record_filename):
+ os.remove(record_filename)
+ os.rmdir(temp_location)
+
+ def remove_temporary_source(self):
+ """Remove the source files from this requirement, if they are marked
+ for deletion"""
+ if self.is_bundle or os.path.exists(self.delete_marker_filename):
+ logger.info('Removing source in %s' % self.source_dir)
+ if self.source_dir:
+ rmtree(self.source_dir)
+ self.source_dir = None
+ if self._temp_build_dir and os.path.exists(self._temp_build_dir):
+ rmtree(self._temp_build_dir)
+ self._temp_build_dir = None
+
+ def install_editable(self, install_options, global_options=()):
+ logger.notify('Running setup.py develop for %s' % self.name)
+ logger.indent += 2
+ try:
+ ## FIXME: should we do --install-headers here too?
+ call_subprocess(
+ [sys.executable, '-c',
+ "import setuptools; __file__=%r; execfile(%r)" % (self.setup_py, self.setup_py)]
+ + list(global_options) + ['develop', '--no-deps'] + list(install_options),
+
+ cwd=self.source_dir, filter_stdout=self._filter_install,
+ show_stdout=False)
+ finally:
+ logger.indent -= 2
+ self.install_succeeded = True
+
+ def _filter_install(self, line):
+ level = logger.NOTIFY
+ for regex in [r'^running .*', r'^writing .*', '^creating .*', '^[Cc]opying .*',
+ r'^reading .*', r"^removing .*\.egg-info' \(and everything under it\)$",
+ r'^byte-compiling ',
+ # Not sure what this warning is, but it seems harmless:
+ r"^warning: manifest_maker: standard file '-c' not found$"]:
+ if re.search(regex, line.strip()):
+ level = logger.INFO
+ break
+ return (level, line)
+
+ def check_if_exists(self):
+ """Find an installed distribution that satisfies or conflicts
+ with this requirement, and set self.satisfied_by or
+ self.conflicts_with appropriately."""
+ if self.req is None:
+ return False
+ try:
+ self.satisfied_by = pkg_resources.get_distribution(self.req)
+ except pkg_resources.DistributionNotFound:
+ return False
+ except pkg_resources.VersionConflict:
+ self.conflicts_with = pkg_resources.get_distribution(self.req.project_name)
+ return True
+
+ @property
+ def is_bundle(self):
+ if self._is_bundle is not None:
+ return self._is_bundle
+ base = self._temp_build_dir
+ if not base:
+ ## FIXME: this doesn't seem right:
+ return False
+ self._is_bundle = (os.path.exists(os.path.join(base, 'pip-manifest.txt'))
+ or os.path.exists(os.path.join(base, 'pyinstall-manifest.txt')))
+ return self._is_bundle
+
+ def bundle_requirements(self):
+ for dest_dir in self._bundle_editable_dirs:
+ package = os.path.basename(dest_dir)
+ ## FIXME: svnism:
+ for vcs_backend in vcs.backends:
+ url = rev = None
+ vcs_bundle_file = os.path.join(
+ dest_dir, vcs_backend.bundle_file)
+ if os.path.exists(vcs_bundle_file):
+ vc_type = vcs_backend.name
+ fp = open(vcs_bundle_file)
+ content = fp.read()
+ fp.close()
+ url, rev = vcs_backend().parse_vcs_bundle_file(content)
+ break
+ if url:
+ url = '%s+%s@%s' % (vc_type, url, rev)
+ else:
+ url = None
+ yield InstallRequirement(
+ package, self, editable=True, url=url,
+ update=False, source_dir=dest_dir)
+ for dest_dir in self._bundle_build_dirs:
+ package = os.path.basename(dest_dir)
+ yield InstallRequirement(
+ package, self,
+ source_dir=dest_dir)
+
+ def move_bundle_files(self, dest_build_dir, dest_src_dir):
+ base = self._temp_build_dir
+ assert base
+ src_dir = os.path.join(base, 'src')
+ build_dir = os.path.join(base, 'build')
+ bundle_build_dirs = []
+ bundle_editable_dirs = []
+ for source_dir, dest_dir, dir_collection in [
+ (src_dir, dest_src_dir, bundle_editable_dirs),
+ (build_dir, dest_build_dir, bundle_build_dirs)]:
+ if os.path.exists(source_dir):
+ for dirname in os.listdir(source_dir):
+ dest = os.path.join(dest_dir, dirname)
+ dir_collection.append(dest)
+ if os.path.exists(dest):
+ logger.warn('The directory %s (containing package %s) already exists; cannot move source from bundle %s'
+ % (dest, dirname, self))
+ continue
+ if not os.path.exists(dest_dir):
+ logger.info('Creating directory %s' % dest_dir)
+ os.makedirs(dest_dir)
+ shutil.move(os.path.join(source_dir, dirname), dest)
+ if not os.listdir(source_dir):
+ os.rmdir(source_dir)
+ self._temp_build_dir = None
+ self._bundle_build_dirs = bundle_build_dirs
+ self._bundle_editable_dirs = bundle_editable_dirs
+
+ @property
+ def delete_marker_filename(self):
+ assert self.source_dir
+ return os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)
+
+
+DELETE_MARKER_MESSAGE = '''\
+This file is placed here by pip to indicate the source was put
+here by pip.
+
+Once this package is successfully installed this source code will be
+deleted (unless you remove this file).
+'''
+
+
+class RequirementSet(object):
+
+ def __init__(self, build_dir, src_dir, download_dir, download_cache=None,
+ upgrade=False, ignore_installed=False,
+ ignore_dependencies=False):
+ self.build_dir = build_dir
+ self.src_dir = src_dir
+ self.download_dir = download_dir
+ self.download_cache = download_cache
+ self.upgrade = upgrade
+ self.ignore_installed = ignore_installed
+ self.requirements = {}
+ # Mapping of alias: real_name
+ self.requirement_aliases = {}
+ self.unnamed_requirements = []
+ self.ignore_dependencies = ignore_dependencies
+ self.successfully_downloaded = []
+ self.successfully_installed = []
+ self.reqs_to_cleanup = []
+
+ def __str__(self):
+ reqs = [req for req in self.requirements.values()
+ if not req.comes_from]
+ reqs.sort(key=lambda req: req.name.lower())
+ return ' '.join([str(req.req) for req in reqs])
+
+ def add_requirement(self, install_req):
+ name = install_req.name
+ if not name:
+ self.unnamed_requirements.append(install_req)
+ else:
+ if self.has_requirement(name):
+ raise InstallationError(
+ 'Double requirement given: %s (aready in %s, name=%r)'
+ % (install_req, self.get_requirement(name), name))
+ self.requirements[name] = install_req
+ ## FIXME: what about other normalizations? E.g., _ vs. -?
+ if name.lower() != name:
+ self.requirement_aliases[name.lower()] = name
+
+ def has_requirement(self, project_name):
+ for name in project_name, project_name.lower():
+ if name in self.requirements or name in self.requirement_aliases:
+ return True
+ return False
+
+ @property
+ def has_requirements(self):
+ return self.requirements.values() or self.unnamed_requirements
+
+ @property
+ def has_editables(self):
+ if any(req.editable for req in self.requirements.values()):
+ return True
+ if any(req.editable for req in self.unnamed_requirements):
+ return True
+ return False
+
+ @property
+ def is_download(self):
+ if self.download_dir:
+ self.download_dir = os.path.expanduser(self.download_dir)
+ if os.path.exists(self.download_dir):
+ return True
+ else:
+ logger.fatal('Could not find download directory')
+ raise InstallationError(
+ "Could not find or access download directory '%s'"
+ % display_path(self.download_dir))
+ return False
+
+ def get_requirement(self, project_name):
+ for name in project_name, project_name.lower():
+ if name in self.requirements:
+ return self.requirements[name]
+ if name in self.requirement_aliases:
+ return self.requirements[self.requirement_aliases[name]]
+ raise KeyError("No project with the name %r" % project_name)
+
+ def uninstall(self, auto_confirm=False):
+ for req in self.requirements.values():
+ req.uninstall(auto_confirm=auto_confirm)
+ req.commit_uninstall()
+
+ def locate_files(self):
+ ## FIXME: duplicates code from install_files; relevant code should
+ ## probably be factored out into a separate method
+ unnamed = list(self.unnamed_requirements)
+ reqs = self.requirements.values()
+ while reqs or unnamed:
+ if unnamed:
+ req_to_install = unnamed.pop(0)
+ else:
+ req_to_install = reqs.pop(0)
+ install_needed = True
+ if not self.ignore_installed and not req_to_install.editable:
+ req_to_install.check_if_exists()
+ if req_to_install.satisfied_by:
+ if self.upgrade:
+ req_to_install.conflicts_with = req_to_install.satisfied_by
+ req_to_install.satisfied_by = None
+ else:
+ install_needed = False
+ if req_to_install.satisfied_by:
+ logger.notify('Requirement already satisfied '
+ '(use --upgrade to upgrade): %s'
+ % req_to_install)
+
+ if req_to_install.editable:
+ if req_to_install.source_dir is None:
+ req_to_install.source_dir = req_to_install.build_location(self.src_dir)
+ elif install_needed:
+ req_to_install.source_dir = req_to_install.build_location(self.build_dir, not self.is_download)
+
+ if req_to_install.source_dir is not None and not os.path.isdir(req_to_install.source_dir):
+ raise InstallationError('Could not install requirement %s '
+ 'because source folder %s does not exist '
+ '(perhaps --no-download was used without first running '
+ 'an equivalent install with --no-install?)'
+ % (req_to_install, req_to_install.source_dir))
+
+ def prepare_files(self, finder, force_root_egg_info=False, bundle=False):
+ """Prepare process. Create temp directories, download and/or unpack files."""
+ unnamed = list(self.unnamed_requirements)
+ reqs = self.requirements.values()
+ while reqs or unnamed:
+ if unnamed:
+ req_to_install = unnamed.pop(0)
+ else:
+ req_to_install = reqs.pop(0)
+ install = True
+ if not self.ignore_installed and not req_to_install.editable:
+ req_to_install.check_if_exists()
+ if req_to_install.satisfied_by:
+ if self.upgrade:
+ req_to_install.conflicts_with = req_to_install.satisfied_by
+ req_to_install.satisfied_by = None
+ else:
+ install = False
+ if req_to_install.satisfied_by:
+ logger.notify('Requirement already satisfied '
+ '(use --upgrade to upgrade): %s'
+ % req_to_install)
+ if req_to_install.editable:
+ logger.notify('Obtaining %s' % req_to_install)
+ elif install:
+ if req_to_install.url and req_to_install.url.lower().startswith('file:'):
+ logger.notify('Unpacking %s' % display_path(url_to_path(req_to_install.url)))
+ else:
+ logger.notify('Downloading/unpacking %s' % req_to_install)
+ logger.indent += 2
+ try:
+ is_bundle = False
+ if req_to_install.editable:
+ if req_to_install.source_dir is None:
+ location = req_to_install.build_location(self.src_dir)
+ req_to_install.source_dir = location
+ else:
+ location = req_to_install.source_dir
+ if not os.path.exists(self.build_dir):
+ _make_build_dir(self.build_dir)
+ req_to_install.update_editable(not self.is_download)
+ if self.is_download:
+ req_to_install.run_egg_info()
+ req_to_install.archive(self.download_dir)
+ else:
+ req_to_install.run_egg_info()
+ elif install:
+ ##@@ if filesystem packages are not marked
+ ##editable in a req, a non deterministic error
+ ##occurs when the script attempts to unpack the
+ ##build directory
+
+ location = req_to_install.build_location(self.build_dir, not self.is_download)
+ ## FIXME: is the existance of the checkout good enough to use it? I don't think so.
+ unpack = True
+ if not os.path.exists(os.path.join(location, 'setup.py')):
+ ## FIXME: this won't upgrade when there's an existing package unpacked in `location`
+ if req_to_install.url is None:
+ url = finder.find_requirement(req_to_install, upgrade=self.upgrade)
+ else:
+ ## FIXME: should req_to_install.url already be a link?
+ url = Link(req_to_install.url)
+ assert url
+ if url:
+ try:
+ self.unpack_url(url, location, self.is_download)
+ except urllib2.HTTPError, e:
+ logger.fatal('Could not install requirement %s because of error %s'
+ % (req_to_install, e))
+ raise InstallationError(
+ 'Could not install requirement %s because of HTTP error %s for URL %s'
+ % (req_to_install, e, url))
+ else:
+ unpack = False
+ if unpack:
+ is_bundle = req_to_install.is_bundle
+ url = None
+ if is_bundle:
+ req_to_install.move_bundle_files(self.build_dir, self.src_dir)
+ for subreq in req_to_install.bundle_requirements():
+ reqs.append(subreq)
+ self.add_requirement(subreq)
+ elif self.is_download:
+ req_to_install.source_dir = location
+ if url and url.scheme in vcs.all_schemes:
+ req_to_install.run_egg_info()
+ req_to_install.archive(self.download_dir)
+ else:
+ req_to_install.source_dir = location
+ req_to_install.run_egg_info()
+ if force_root_egg_info:
+ # We need to run this to make sure that the .egg-info/
+ # directory is created for packing in the bundle
+ req_to_install.run_egg_info(force_root_egg_info=True)
+ req_to_install.assert_source_matches_version()
+ #@@ sketchy way of identifying packages not grabbed from an index
+ if bundle and req_to_install.url:
+ self.copy_to_build_dir(req_to_install)
+ if not is_bundle and not self.is_download:
+ ## FIXME: shouldn't be globally added:
+ finder.add_dependency_links(req_to_install.dependency_links)
+ ## FIXME: add extras in here:
+ if not self.ignore_dependencies:
+ for req in req_to_install.requirements():
+ try:
+ name = pkg_resources.Requirement.parse(req).project_name
+ except ValueError, e:
+ ## FIXME: proper warning
+ logger.error('Invalid requirement: %r (%s) in requirement %s' % (req, e, req_to_install))
+ continue
+ if self.has_requirement(name):
+ ## FIXME: check for conflict
+ continue
+ subreq = InstallRequirement(req, req_to_install)
+ reqs.append(subreq)
+ self.add_requirement(subreq)
+ if req_to_install.name not in self.requirements:
+ self.requirements[req_to_install.name] = req_to_install
+ else:
+ self.reqs_to_cleanup.append(req_to_install)
+ if install:
+ self.successfully_downloaded.append(req_to_install)
+ if bundle and (req_to_install.url and req_to_install.url.startswith('file:///')):
+ self.copy_to_build_dir(req_to_install)
+ finally:
+ logger.indent -= 2
+
+ def cleanup_files(self, bundle=False):
+ """Clean up files, remove builds."""
+ logger.notify('Cleaning up...')
+ logger.indent += 2
+ for req in self.reqs_to_cleanup:
+ req.remove_temporary_source()
+
+ remove_dir = []
+ if self._pip_has_created_build_dir():
+ remove_dir.append(self.build_dir)
+
+ # The source dir of a bundle can always be removed.
+ if bundle:
+ remove_dir.append(self.src_dir)
+
+ for dir in remove_dir:
+ if os.path.exists(dir):
+ logger.info('Removing temporary dir %s...' % dir)
+ rmtree(dir)
+
+ logger.indent -= 2
+
+ def _pip_has_created_build_dir(self):
+ return (self.build_dir == build_prefix and
+ os.path.exists(os.path.join(self.build_dir, PIP_DELETE_MARKER_FILENAME)))
+
+ def copy_to_build_dir(self, req_to_install):
+ target_dir = req_to_install.editable and self.src_dir or self.build_dir
+ logger.info("Copying %s to %s" %(req_to_install.name, target_dir))
+ dest = os.path.join(target_dir, req_to_install.name)
+ copytree(req_to_install.source_dir, dest)
+ call_subprocess(["python", "%s/setup.py"%dest, "clean"])
+
+ def unpack_url(self, link, location, only_download=False):
+ if only_download:
+ location = self.download_dir
+ if is_vcs_url(link):
+ return unpack_vcs_link(link, location, only_download)
+ elif is_file_url(link):
+ return unpack_file_url(link, location)
+ else:
+ if self.download_cache:
+ self.download_cache = os.path.expanduser(self.download_cache)
+ return unpack_http_url(link, location, self.download_cache, only_download)
+
+ def install(self, install_options, global_options=()):
+ """Install everything in this set (after having downloaded and unpacked the packages)"""
+ to_install = sorted([r for r in self.requirements.values()
+ if self.upgrade or not r.satisfied_by],
+ key=lambda p: p.name.lower())
+ if to_install:
+ logger.notify('Installing collected packages: %s' % (', '.join([req.name for req in to_install])))
+ logger.indent += 2
+ try:
+ for requirement in to_install:
+ if requirement.conflicts_with:
+ logger.notify('Found existing installation: %s'
+ % requirement.conflicts_with)
+ logger.indent += 2
+ try:
+ requirement.uninstall(auto_confirm=True)
+ finally:
+ logger.indent -= 2
+ try:
+ requirement.install(install_options, global_options)
+ except:
+ # if install did not succeed, rollback previous uninstall
+ if requirement.conflicts_with and not requirement.install_succeeded:
+ requirement.rollback_uninstall()
+ raise
+ else:
+ if requirement.conflicts_with and requirement.install_succeeded:
+ requirement.commit_uninstall()
+ requirement.remove_temporary_source()
+ finally:
+ logger.indent -= 2
+ self.successfully_installed = to_install
+
+ def create_bundle(self, bundle_filename):
+ ## FIXME: can't decide which is better; zip is easier to read
+ ## random files from, but tar.bz2 is smaller and not as lame a
+ ## format.
+
+ ## FIXME: this file should really include a manifest of the
+ ## packages, maybe some other metadata files. It would make
+ ## it easier to detect as well.
+ zip = zipfile.ZipFile(bundle_filename, 'w', zipfile.ZIP_DEFLATED)
+ vcs_dirs = []
+ for dir, basename in (self.build_dir, 'build'), (self.src_dir, 'src'):
+ dir = os.path.normcase(os.path.abspath(dir))
+ for dirpath, dirnames, filenames in os.walk(dir):
+ for backend in vcs.backends:
+ vcs_backend = backend()
+ vcs_url = vcs_rev = None
+ if vcs_backend.dirname in dirnames:
+ for vcs_dir in vcs_dirs:
+ if dirpath.startswith(vcs_dir):
+ # vcs bundle file already in parent directory
+ break
+ else:
+ vcs_url, vcs_rev = vcs_backend.get_info(
+ os.path.join(dir, dirpath))
+ vcs_dirs.append(dirpath)
+ vcs_bundle_file = vcs_backend.bundle_file
+ vcs_guide = vcs_backend.guide % {'url': vcs_url,
+ 'rev': vcs_rev}
+ dirnames.remove(vcs_backend.dirname)
+ break
+ if 'pip-egg-info' in dirnames:
+ dirnames.remove('pip-egg-info')
+ for dirname in dirnames:
+ dirname = os.path.join(dirpath, dirname)
+ name = self._clean_zip_name(dirname, dir)
+ zip.writestr(basename + '/' + name + '/', '')
+ for filename in filenames:
+ if filename == PIP_DELETE_MARKER_FILENAME:
+ continue
+ filename = os.path.join(dirpath, filename)
+ name = self._clean_zip_name(filename, dir)
+ zip.write(filename, basename + '/' + name)
+ if vcs_url:
+ name = os.path.join(dirpath, vcs_bundle_file)
+ name = self._clean_zip_name(name, dir)
+ zip.writestr(basename + '/' + name, vcs_guide)
+
+ zip.writestr('pip-manifest.txt', self.bundle_requirements())
+ zip.close()
+
+ BUNDLE_HEADER = '''\
+# This is a pip bundle file, that contains many source packages
+# that can be installed as a group. You can install this like:
+# pip this_file.zip
+# The rest of the file contains a list of all the packages included:
+'''
+
+ def bundle_requirements(self):
+ parts = [self.BUNDLE_HEADER]
+ for req in sorted(
+ [req for req in self.requirements.values()
+ if not req.comes_from],
+ key=lambda x: x.name):
+ parts.append('%s==%s\n' % (req.name, req.installed_version))
+ parts.append('# These packages were installed to satisfy the above requirements:\n')
+ for req in sorted(
+ [req for req in self.requirements.values()
+ if req.comes_from],
+ key=lambda x: x.name):
+ parts.append('%s==%s\n' % (req.name, req.installed_version))
+ ## FIXME: should we do something with self.unnamed_requirements?
+ return ''.join(parts)
+
+ def _clean_zip_name(self, name, prefix):
+ assert name.startswith(prefix+os.path.sep), (
+ "name %r doesn't start with prefix %r" % (name, prefix))
+ name = name[len(prefix)+1:]
+ name = name.replace(os.path.sep, '/')
+ return name
+
+
+def _make_build_dir(build_dir):
+ os.makedirs(build_dir)
+ _write_delete_marker_message(os.path.join(build_dir, PIP_DELETE_MARKER_FILENAME))
+
+
+def _write_delete_marker_message(filepath):
+ marker_fp = open(filepath, 'w')
+ marker_fp.write(DELETE_MARKER_MESSAGE)
+ marker_fp.close()
+
+
+_scheme_re = re.compile(r'^(http|https|file):', re.I)
+
+
+def parse_requirements(filename, finder=None, comes_from=None, options=None):
+ skip_match = None
+ skip_regex = options.skip_requirements_regex
+ if skip_regex:
+ skip_match = re.compile(skip_regex)
+ filename, content = get_file_content(filename, comes_from=comes_from)
+ for line_number, line in enumerate(content.splitlines()):
+ line_number += 1
+ line = line.strip()
+ if not line or line.startswith('#'):
+ continue
+ if skip_match and skip_match.search(line):
+ continue
+ if line.startswith('-r') or line.startswith('--requirement'):
+ if line.startswith('-r'):
+ req_url = line[2:].strip()
+ else:
+ req_url = line[len('--requirement'):].strip().strip('=')
+ if _scheme_re.search(filename):
+ # Relative to a URL
+ req_url = urlparse.urljoin(req_url, filename)
+ elif not _scheme_re.search(req_url):
+ req_url = os.path.join(os.path.dirname(filename), req_url)
+ for item in parse_requirements(req_url, finder, comes_from=filename, options=options):
+ yield item
+ elif line.startswith('-Z') or line.startswith('--always-unzip'):
+ # No longer used, but previously these were used in
+ # requirement files, so we'll ignore.
+ pass
+ elif line.startswith('-f') or line.startswith('--find-links'):
+ if line.startswith('-f'):
+ line = line[2:].strip()
+ else:
+ line = line[len('--find-links'):].strip().lstrip('=')
+ ## FIXME: it would be nice to keep track of the source of
+ ## the find_links:
+ if finder:
+ finder.find_links.append(line)
+ elif line.startswith('-i') or line.startswith('--index-url'):
+ if line.startswith('-i'):
+ line = line[2:].strip()
+ else:
+ line = line[len('--index-url'):].strip().lstrip('=')
+ if finder:
+ finder.index_urls = [line]
+ elif line.startswith('--extra-index-url'):
+ line = line[len('--extra-index-url'):].strip().lstrip('=')
+ if finder:
+ finder.index_urls.append(line)
+ else:
+ comes_from = '-r %s (line %s)' % (filename, line_number)
+ if line.startswith('-e') or line.startswith('--editable'):
+ if line.startswith('-e'):
+ line = line[2:].strip()
+ else:
+ line = line[len('--editable'):].strip()
+ req = InstallRequirement.from_editable(
+ line, comes_from=comes_from, default_vcs=options.default_vcs)
+ else:
+ req = InstallRequirement.from_line(line, comes_from)
+ yield req
+
+
+def parse_editable(editable_req, default_vcs=None):
+ """Parses svn+http://blahblah@rev#egg=Foobar into a requirement
+ (Foobar) and a URL"""
+ url = editable_req
+ if os.path.isdir(url) and os.path.exists(os.path.join(url, 'setup.py')):
+ # Treating it as code that has already been checked out
+ url = path_to_url(url)
+ if url.lower().startswith('file:'):
+ return None, url
+ for version_control in vcs:
+ if url.lower().startswith('%s:' % version_control):
+ url = '%s+%s' % (version_control, url)
+ if '+' not in url:
+ if default_vcs:
+ url = default_vcs + '+' + url
+ else:
+ raise InstallationError(
+ '--editable=%s should be formatted with svn+URL, git+URL, hg+URL or bzr+URL' % editable_req)
+ vc_type = url.split('+', 1)[0].lower()
+ if not vcs.get_backend(vc_type):
+ raise InstallationError(
+ 'For --editable=%s only svn (svn+URL), Git (git+URL), Mercurial (hg+URL) and Bazaar (bzr+URL) is currently supported' % editable_req)
+ match = re.search(r'(?:#|#.*?&)egg=([^&]*)', editable_req)
+ if (not match or not match.group(1)) and vcs.get_backend(vc_type):
+ parts = [p for p in editable_req.split('#', 1)[0].split('/') if p]
+ if parts[-2] in ('tags', 'branches', 'tag', 'branch'):
+ req = parts[-3]
+ elif parts[-1] == 'trunk':
+ req = parts[-2]
+ else:
+ raise InstallationError(
+ '--editable=%s is not the right format; it must have #egg=Package'
+ % editable_req)
+ else:
+ req = match.group(1)
+ ## FIXME: use package_to_requirement?
+ match = re.search(r'^(.*?)(?:-dev|-\d.*)', req)
+ if match:
+ # Strip off -dev, -0.2, etc.
+ req = match.group(1)
+ return req, url
+
+
+class UninstallPathSet(object):
+ """A set of file paths to be removed in the uninstallation of a
+ requirement."""
+ def __init__(self, dist):
+ self.paths = set()
+ self._refuse = set()
+ self.pth = {}
+ self.dist = dist
+ self.save_dir = None
+ self._moved_paths = []
+
+ def _permitted(self, path):
+ """
+ Return True if the given path is one we are permitted to
+ remove/modify, False otherwise.
+
+ """
+ return is_local(path)
+
+ def _can_uninstall(self):
+ if not dist_is_local(self.dist):
+ logger.notify("Not uninstalling %s at %s, outside environment %s"
+ % (self.dist.project_name, normalize_path(self.dist.location), sys.prefix))
+ return False
+ return True
+
+ def add(self, path):
+ path = normalize_path(path)
+ if not os.path.exists(path):
+ return
+ if self._permitted(path):
+ self.paths.add(path)
+ else:
+ self._refuse.add(path)
+
+ def add_pth(self, pth_file, entry):
+ pth_file = normalize_path(pth_file)
+ if self._permitted(pth_file):
+ if pth_file not in self.pth:
+ self.pth[pth_file] = UninstallPthEntries(pth_file)
+ self.pth[pth_file].add(entry)
+ else:
+ self._refuse.add(pth_file)
+
+ def compact(self, paths):
+ """Compact a path set to contain the minimal number of paths
+ necessary to contain all paths in the set. If /a/path/ and
+ /a/path/to/a/file.txt are both in the set, leave only the
+ shorter path."""
+ short_paths = set()
+ for path in sorted(paths, key=len):
+ if not any([(path.startswith(shortpath) and
+ path[len(shortpath.rstrip(os.path.sep))] == os.path.sep)
+ for shortpath in short_paths]):
+ short_paths.add(path)
+ return short_paths
+
+ def _stash(self, path):
+ return os.path.join(
+ self.save_dir, os.path.splitdrive(path)[1].lstrip(os.path.sep))
+
+ def remove(self, auto_confirm=False):
+ """Remove paths in ``self.paths`` with confirmation (unless
+ ``auto_confirm`` is True)."""
+ if not self._can_uninstall():
+ return
+ logger.notify('Uninstalling %s:' % self.dist.project_name)
+ logger.indent += 2
+ paths = sorted(self.compact(self.paths))
+ try:
+ if auto_confirm:
+ response = 'y'
+ else:
+ for path in paths:
+ logger.notify(path)
+ response = ask('Proceed (y/n)? ', ('y', 'n'))
+ if self._refuse:
+ logger.notify('Not removing or modifying (outside of prefix):')
+ for path in self.compact(self._refuse):
+ logger.notify(path)
+ if response == 'y':
+ self.save_dir = tempfile.mkdtemp(suffix='-uninstall',
+ prefix='pip-')
+ for path in paths:
+ new_path = self._stash(path)
+ logger.info('Removing file or directory %s' % path)
+ self._moved_paths.append(path)
+ renames(path, new_path)
+ for pth in self.pth.values():
+ pth.remove()
+ logger.notify('Successfully uninstalled %s' % self.dist.project_name)
+
+ finally:
+ logger.indent -= 2
+
+ def rollback(self):
+ """Rollback the changes previously made by remove()."""
+ if self.save_dir is None:
+ logger.error("Can't roll back %s; was not uninstalled" % self.dist.project_name)
+ return False
+ logger.notify('Rolling back uninstall of %s' % self.dist.project_name)
+ for path in self._moved_paths:
+ tmp_path = self._stash(path)
+ logger.info('Replacing %s' % path)
+ renames(tmp_path, path)
+ for pth in self.pth:
+ pth.rollback()
+
+ def commit(self):
+ """Remove temporary save dir: rollback will no longer be possible."""
+ if self.save_dir is not None:
+ shutil.rmtree(self.save_dir)
+ self.save_dir = None
+ self._moved_paths = []
+
+
+class UninstallPthEntries(object):
+ def __init__(self, pth_file):
+ if not os.path.isfile(pth_file):
+ raise UninstallationError("Cannot remove entries from nonexistent file %s" % pth_file)
+ self.file = pth_file
+ self.entries = set()
+ self._saved_lines = None
+
+ def add(self, entry):
+ entry = os.path.normcase(entry)
+ # On Windows, os.path.normcase converts the entry to use
+ # backslashes. This is correct for entries that describe absolute
+ # paths outside of site-packages, but all the others use forward
+ # slashes.
+ if sys.platform == 'win32' and not os.path.splitdrive(entry)[0]:
+ entry = entry.replace('\\', '/')
+ self.entries.add(entry)
+
+ def remove(self):
+ logger.info('Removing pth entries from %s:' % self.file)
+ fh = open(self.file, 'r')
+ lines = fh.readlines()
+ self._saved_lines = lines
+ fh.close()
+ try:
+ for entry in self.entries:
+ logger.info('Removing entry: %s' % entry)
+ try:
+ lines.remove(entry + '\n')
+ except ValueError:
+ pass
+ finally:
+ pass
+ fh = open(self.file, 'wb')
+ fh.writelines(lines)
+ fh.close()
+
+ def rollback(self):
+ if self._saved_lines is None:
+ logger.error('Cannot roll back changes to %s, none were made' % self.file)
+ return False
+ logger.info('Rolling %s back to previous state' % self.file)
+ fh = open(self.file, 'wb')
+ fh.writelines(self._saved_lines)
+ fh.close()
+ return True
+
+
+class FakeFile(object):
+ """Wrap a list of lines in an object with readline() to make
+ ConfigParser happy."""
+ def __init__(self, lines):
+ self._gen = (l for l in lines)
+
+ def readline(self):
+ try:
+ return self._gen.next()
+ except StopIteration:
+ return ''
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/runner.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/runner.py
new file mode 100755
index 00000000..be830ad9
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/runner.py
@@ -0,0 +1,18 @@
+import sys
+import os
+
+
+def run():
+ base = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+ ## FIXME: this is kind of crude; if we could create a fake pip
+ ## module, then exec into it and update pip.__path__ properly, we
+ ## wouldn't have to update sys.path:
+ sys.path.insert(0, base)
+ import pip
+ return pip.main()
+
+
+if __name__ == '__main__':
+ exit = run()
+ if exit:
+ sys.exit(exit)
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/util.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/util.py
new file mode 100755
index 00000000..1eab34c0
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/util.py
@@ -0,0 +1,479 @@
+import sys
+import shutil
+import os
+import stat
+import re
+import posixpath
+import pkg_resources
+import zipfile
+import tarfile
+from pip.exceptions import InstallationError
+from pip.backwardcompat import WindowsError
+from pip.locations import site_packages, running_under_virtualenv
+from pip.log import logger
+
+__all__ = ['rmtree', 'display_path', 'backup_dir',
+ 'find_command', 'ask', 'Inf',
+ 'normalize_name', 'splitext',
+ 'format_size', 'is_installable_dir',
+ 'is_svn_page', 'file_contents',
+ 'split_leading_dir', 'has_leading_dir',
+ 'make_path_relative', 'normalize_path',
+ 'renames', 'get_terminal_size',
+ 'unzip_file', 'untar_file', 'create_download_cache_folder',
+ 'cache_download', 'unpack_file']
+
+
+def rmtree(dir):
+ shutil.rmtree(dir, ignore_errors=True,
+ onerror=rmtree_errorhandler)
+
+
+def rmtree_errorhandler(func, path, exc_info):
+ """On Windows, the files in .svn are read-only, so when rmtree() tries to
+ remove them, an exception is thrown. We catch that here, remove the
+ read-only attribute, and hopefully continue without problems."""
+ exctype, value = exc_info[:2]
+ # lookin for a windows error
+ if exctype is not WindowsError or 'Access is denied' not in str(value):
+ raise
+ # file type should currently be read only
+ if ((os.stat(path).st_mode & stat.S_IREAD) != stat.S_IREAD):
+ raise
+ # convert to read/write
+ os.chmod(path, stat.S_IWRITE)
+ # use the original function to repeat the operation
+ func(path)
+
+
+def display_path(path):
+ """Gives the display value for a given path, making it relative to cwd
+ if possible."""
+ path = os.path.normcase(os.path.abspath(path))
+ if path.startswith(os.getcwd() + os.path.sep):
+ path = '.' + path[len(os.getcwd()):]
+ return path
+
+
+def backup_dir(dir, ext='.bak'):
+ """Figure out the name of a directory to back up the given dir to
+ (adding .bak, .bak2, etc)"""
+ n = 1
+ extension = ext
+ while os.path.exists(dir + extension):
+ n += 1
+ extension = ext + str(n)
+ return dir + extension
+
+
+def find_command(cmd, paths=None, pathext=None):
+ """Searches the PATH for the given command and returns its path"""
+ if paths is None:
+ paths = os.environ.get('PATH', []).split(os.pathsep)
+ if isinstance(paths, basestring):
+ paths = [paths]
+ # check if there are funny path extensions for executables, e.g. Windows
+ if pathext is None:
+ pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
+ pathext = [ext for ext in pathext.lower().split(os.pathsep)]
+ # don't use extensions if the command ends with one of them
+ if os.path.splitext(cmd)[1].lower() in pathext:
+ pathext = ['']
+ # check if we find the command on PATH
+ for path in paths:
+ # try without extension first
+ cmd_path = os.path.join(path, cmd)
+ for ext in pathext:
+ # then including the extension
+ cmd_path_ext = cmd_path + ext
+ if os.path.exists(cmd_path_ext):
+ return cmd_path_ext
+ if os.path.exists(cmd_path):
+ return cmd_path
+ return None
+
+
+def ask(message, options):
+ """Ask the message interactively, with the given possible responses"""
+ while 1:
+ if os.environ.get('PIP_NO_INPUT'):
+ raise Exception('No input was expected ($PIP_NO_INPUT set); question: %s' % message)
+ response = raw_input(message)
+ response = response.strip().lower()
+ if response not in options:
+ print 'Your response (%r) was not one of the expected responses: %s' % (
+ response, ', '.join(options))
+ else:
+ return response
+
+
+class _Inf(object):
+ """I am bigger than everything!"""
+ def __cmp__(self, a):
+ if self is a:
+ return 0
+ return 1
+
+ def __repr__(self):
+ return 'Inf'
+
+Inf = _Inf()
+del _Inf
+
+
+_normalize_re = re.compile(r'[^a-z]', re.I)
+
+
+def normalize_name(name):
+ return _normalize_re.sub('-', name.lower())
+
+
+def format_size(bytes):
+ if bytes > 1000*1000:
+ return '%.1fMb' % (bytes/1000.0/1000)
+ elif bytes > 10*1000:
+ return '%iKb' % (bytes/1000)
+ elif bytes > 1000:
+ return '%.1fKb' % (bytes/1000.0)
+ else:
+ return '%ibytes' % bytes
+
+
+def is_installable_dir(path):
+ """Return True if `path` is a directory containing a setup.py file."""
+ if not os.path.isdir(path):
+ return False
+ setup_py = os.path.join(path, 'setup.py')
+ if os.path.isfile(setup_py):
+ return True
+ return False
+
+
+def is_svn_page(html):
+ """Returns true if the page appears to be the index page of an svn repository"""
+ return (re.search(r'<title>[^<]*Revision \d+:', html)
+ and re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))
+
+
+def file_contents(filename):
+ fp = open(filename, 'rb')
+ try:
+ return fp.read()
+ finally:
+ fp.close()
+
+
+def split_leading_dir(path):
+ path = str(path)
+ path = path.lstrip('/').lstrip('\\')
+ if '/' in path and (('\\' in path and path.find('/') < path.find('\\'))
+ or '\\' not in path):
+ return path.split('/', 1)
+ elif '\\' in path:
+ return path.split('\\', 1)
+ else:
+ return path, ''
+
+
+def has_leading_dir(paths):
+ """Returns true if all the paths have the same leading path name
+ (i.e., everything is in one subdirectory in an archive)"""
+ common_prefix = None
+ for path in paths:
+ prefix, rest = split_leading_dir(path)
+ if not prefix:
+ return False
+ elif common_prefix is None:
+ common_prefix = prefix
+ elif prefix != common_prefix:
+ return False
+ return True
+
+
+def make_path_relative(path, rel_to):
+ """
+ Make a filename relative, where the filename path, and it is
+ relative to rel_to
+
+ >>> make_relative_path('/usr/share/something/a-file.pth',
+ ... '/usr/share/another-place/src/Directory')
+ '../../../something/a-file.pth'
+ >>> make_relative_path('/usr/share/something/a-file.pth',
+ ... '/home/user/src/Directory')
+ '../../../usr/share/something/a-file.pth'
+ >>> make_relative_path('/usr/share/a-file.pth', '/usr/share/')
+ 'a-file.pth'
+ """
+ path_filename = os.path.basename(path)
+ path = os.path.dirname(path)
+ path = os.path.normpath(os.path.abspath(path))
+ rel_to = os.path.normpath(os.path.abspath(rel_to))
+ path_parts = path.strip(os.path.sep).split(os.path.sep)
+ rel_to_parts = rel_to.strip(os.path.sep).split(os.path.sep)
+ while path_parts and rel_to_parts and path_parts[0] == rel_to_parts[0]:
+ path_parts.pop(0)
+ rel_to_parts.pop(0)
+ full_parts = ['..']*len(rel_to_parts) + path_parts + [path_filename]
+ if full_parts == ['']:
+ return '.' + os.path.sep
+ return os.path.sep.join(full_parts)
+
+
+def normalize_path(path):
+ """
+ Convert a path to its canonical, case-normalized, absolute version.
+
+ """
+ return os.path.normcase(os.path.realpath(path))
+
+
+def splitext(path):
+ """Like os.path.splitext, but take off .tar too"""
+ base, ext = posixpath.splitext(path)
+ if base.lower().endswith('.tar'):
+ ext = base[-4:] + ext
+ base = base[:-4]
+ return base, ext
+
+
+def renames(old, new):
+ """Like os.renames(), but handles renaming across devices."""
+ # Implementation borrowed from os.renames().
+ head, tail = os.path.split(new)
+ if head and tail and not os.path.exists(head):
+ os.makedirs(head)
+
+ shutil.move(old, new)
+
+ head, tail = os.path.split(old)
+ if head and tail:
+ try:
+ os.removedirs(head)
+ except OSError:
+ pass
+
+
+def is_local(path):
+ """
+ Return True if path is within sys.prefix, if we're running in a virtualenv.
+
+ If we're not in a virtualenv, all paths are considered "local."
+
+ """
+ if not running_under_virtualenv():
+ return True
+ return normalize_path(path).startswith(normalize_path(sys.prefix))
+
+
+def dist_is_local(dist):
+ """
+ Return True if given Distribution object is installed locally
+ (i.e. within current virtualenv).
+
+ Always True if we're not in a virtualenv.
+
+ """
+ return is_local(dist_location(dist))
+
+
+def get_installed_distributions(local_only=True, skip=('setuptools', 'pip', 'python')):
+ """
+ Return a list of installed Distribution objects.
+
+ If ``local_only`` is True (default), only return installations
+ local to the current virtualenv, if in a virtualenv.
+
+ ``skip`` argument is an iterable of lower-case project names to
+ ignore; defaults to ('setuptools', 'pip', 'python'). [FIXME also
+ skip virtualenv?]
+
+ """
+ if local_only:
+ local_test = dist_is_local
+ else:
+ local_test = lambda d: True
+ return [d for d in pkg_resources.working_set if local_test(d) and d.key not in skip]
+
+
+def egg_link_path(dist):
+ """
+ Return the path where we'd expect to find a .egg-link file for
+ this distribution. (There doesn't seem to be any metadata in the
+ Distribution object for a develop egg that points back to its
+ .egg-link and easy-install.pth files).
+
+ This won't find a globally-installed develop egg if we're in a
+ virtualenv.
+
+ """
+ return os.path.join(site_packages, dist.project_name) + '.egg-link'
+
+
+def dist_location(dist):
+ """
+ Get the site-packages location of this distribution. Generally
+ this is dist.location, except in the case of develop-installed
+ packages, where dist.location is the source code location, and we
+ want to know where the egg-link file is.
+
+ """
+ egg_link = egg_link_path(dist)
+ if os.path.exists(egg_link):
+ return egg_link
+ return dist.location
+
+
+def get_terminal_size():
+ """Returns a tuple (x, y) representing the width(x) and the height(x)
+ in characters of the terminal window."""
+ def ioctl_GWINSZ(fd):
+ try:
+ import fcntl
+ import termios
+ import struct
+ cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ,
+ '1234'))
+ except:
+ return None
+ if cr == (0, 0):
+ return None
+ if cr == (0, 0):
+ return None
+ return cr
+ cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
+ if not cr:
+ try:
+ fd = os.open(os.ctermid(), os.O_RDONLY)
+ cr = ioctl_GWINSZ(fd)
+ os.close(fd)
+ except:
+ pass
+ if not cr:
+ cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
+ return int(cr[1]), int(cr[0])
+
+
+def unzip_file(filename, location, flatten=True):
+ """Unzip the file (zip file located at filename) to the destination
+ location"""
+ if not os.path.exists(location):
+ os.makedirs(location)
+ zipfp = open(filename, 'rb')
+ try:
+ zip = zipfile.ZipFile(zipfp)
+ leading = has_leading_dir(zip.namelist()) and flatten
+ for name in zip.namelist():
+ data = zip.read(name)
+ fn = name
+ if leading:
+ fn = split_leading_dir(name)[1]
+ fn = os.path.join(location, fn)
+ dir = os.path.dirname(fn)
+ if not os.path.exists(dir):
+ os.makedirs(dir)
+ if fn.endswith('/') or fn.endswith('\\'):
+ # A directory
+ if not os.path.exists(fn):
+ os.makedirs(fn)
+ else:
+ fp = open(fn, 'wb')
+ try:
+ fp.write(data)
+ finally:
+ fp.close()
+ finally:
+ zipfp.close()
+
+
+def untar_file(filename, location):
+ """Untar the file (tar file located at filename) to the destination location"""
+ if not os.path.exists(location):
+ os.makedirs(location)
+ if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
+ mode = 'r:gz'
+ elif filename.lower().endswith('.bz2') or filename.lower().endswith('.tbz'):
+ mode = 'r:bz2'
+ elif filename.lower().endswith('.tar'):
+ mode = 'r'
+ else:
+ logger.warn('Cannot determine compression type for file %s' % filename)
+ mode = 'r:*'
+ tar = tarfile.open(filename, mode)
+ try:
+ # note: python<=2.5 doesnt seem to know about pax headers, filter them
+ leading = has_leading_dir([
+ member.name for member in tar.getmembers()
+ if member.name != 'pax_global_header'
+ ])
+ for member in tar.getmembers():
+ fn = member.name
+ if fn == 'pax_global_header':
+ continue
+ if leading:
+ fn = split_leading_dir(fn)[1]
+ path = os.path.join(location, fn)
+ if member.isdir():
+ if not os.path.exists(path):
+ os.makedirs(path)
+ else:
+ try:
+ fp = tar.extractfile(member)
+ except (KeyError, AttributeError), e:
+ # Some corrupt tar files seem to produce this
+ # (specifically bad symlinks)
+ logger.warn(
+ 'In the tar file %s the member %s is invalid: %s'
+ % (filename, member.name, e))
+ continue
+ if not os.path.exists(os.path.dirname(path)):
+ os.makedirs(os.path.dirname(path))
+ destfp = open(path, 'wb')
+ try:
+ shutil.copyfileobj(fp, destfp)
+ finally:
+ destfp.close()
+ fp.close()
+ finally:
+ tar.close()
+
+
+def create_download_cache_folder(folder):
+ logger.indent -= 2
+ logger.notify('Creating supposed download cache at %s' % folder)
+ logger.indent += 2
+ os.makedirs(folder)
+
+
+def cache_download(target_file, temp_location, content_type):
+ logger.notify('Storing download in cache at %s' % display_path(target_file))
+ shutil.copyfile(temp_location, target_file)
+ fp = open(target_file+'.content-type', 'w')
+ fp.write(content_type)
+ fp.close()
+ os.unlink(temp_location)
+
+
+def unpack_file(filename, location, content_type, link):
+ if (content_type == 'application/zip'
+ or filename.endswith('.zip')
+ or filename.endswith('.pybundle')
+ or zipfile.is_zipfile(filename)):
+ unzip_file(filename, location, flatten=not filename.endswith('.pybundle'))
+ elif (content_type == 'application/x-gzip'
+ or tarfile.is_tarfile(filename)
+ or splitext(filename)[1].lower() in ('.tar', '.tar.gz', '.tar.bz2', '.tgz', '.tbz')):
+ untar_file(filename, location)
+ elif (content_type and content_type.startswith('text/html')
+ and is_svn_page(file_contents(filename))):
+ # We don't really care about this
+ from pip.vcs.subversion import Subversion
+ Subversion('svn+' + link.url).unpack(location)
+ else:
+ ## FIXME: handle?
+ ## FIXME: magic signatures?
+ logger.fatal('Cannot unpack file %s (downloaded from %s, content-type: %s); cannot detect archive format'
+ % (filename, location, content_type))
+ raise InstallationError('Cannot determine archive format of %s' % location)
+
+
+
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/__init__.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/__init__.py
new file mode 100755
index 00000000..e110440c
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/__init__.py
@@ -0,0 +1,238 @@
+"""Handles all VCS (version control) support"""
+
+import os
+import shutil
+import urlparse
+import urllib
+
+from pip.exceptions import BadCommand
+from pip.log import logger
+from pip.util import display_path, backup_dir, find_command, ask
+
+
+__all__ = ['vcs', 'get_src_requirement', 'import_vcs_support']
+
+
+class VcsSupport(object):
+ _registry = {}
+ schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp']
+
+ def __init__(self):
+ # Register more schemes with urlparse for various version control systems
+ urlparse.uses_netloc.extend(self.schemes)
+ urlparse.uses_fragment.extend(self.schemes)
+ super(VcsSupport, self).__init__()
+
+ def __iter__(self):
+ return self._registry.__iter__()
+
+ @property
+ def backends(self):
+ return self._registry.values()
+
+ @property
+ def dirnames(self):
+ return [backend.dirname for backend in self.backends]
+
+ @property
+ def all_schemes(self):
+ schemes = []
+ for backend in self.backends:
+ schemes.extend(backend.schemes)
+ return schemes
+
+ def register(self, cls):
+ if not hasattr(cls, 'name'):
+ logger.warn('Cannot register VCS %s' % cls.__name__)
+ return
+ if cls.name not in self._registry:
+ self._registry[cls.name] = cls
+
+ def unregister(self, cls=None, name=None):
+ if name in self._registry:
+ del self._registry[name]
+ elif cls in self._registry.values():
+ del self._registry[cls.name]
+ else:
+ logger.warn('Cannot unregister because no class or name given')
+
+ def get_backend_name(self, location):
+ """
+ Return the name of the version control backend if found at given
+ location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
+ """
+ for vc_type in self._registry.values():
+ path = os.path.join(location, vc_type.dirname)
+ if os.path.exists(path):
+ return vc_type.name
+ return None
+
+ def get_backend(self, name):
+ name = name.lower()
+ if name in self._registry:
+ return self._registry[name]
+
+ def get_backend_from_location(self, location):
+ vc_type = self.get_backend_name(location)
+ if vc_type:
+ return self.get_backend(vc_type)
+ return None
+
+
+vcs = VcsSupport()
+
+
+class VersionControl(object):
+ name = ''
+ dirname = ''
+
+ def __init__(self, url=None, *args, **kwargs):
+ self.url = url
+ self._cmd = None
+ super(VersionControl, self).__init__(*args, **kwargs)
+
+ def _filter(self, line):
+ return (logger.INFO, line)
+
+ def _is_local_repository(self, repo):
+ """
+ posix absolute paths start with os.path.sep,
+ win32 ones ones start with drive (like c:\\folder)
+ """
+ drive, tail = os.path.splitdrive(repo)
+ return repo.startswith(os.path.sep) or drive
+
+ @property
+ def cmd(self):
+ if self._cmd is not None:
+ return self._cmd
+ command = find_command(self.name)
+ if command is None:
+ raise BadCommand('Cannot find command %r' % self.name)
+ logger.info('Found command %r at %r' % (self.name, command))
+ self._cmd = command
+ return command
+
+ def get_url_rev(self):
+ """
+ Returns the correct repository URL and revision by parsing the given
+ repository URL
+ """
+ url = self.url.split('+', 1)[1]
+ scheme, netloc, path, query, frag = urlparse.urlsplit(url)
+ rev = None
+ if '@' in path:
+ path, rev = path.rsplit('@', 1)
+ url = urlparse.urlunsplit((scheme, netloc, path, query, ''))
+ return url, rev
+
+ def get_info(self, location):
+ """
+ Returns (url, revision), where both are strings
+ """
+ assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location
+ return self.get_url(location), self.get_revision(location)
+
+ def normalize_url(self, url):
+ """
+ Normalize a URL for comparison by unquoting it and removing any trailing slash.
+ """
+ return urllib.unquote(url).rstrip('/')
+
+ def compare_urls(self, url1, url2):
+ """
+ Compare two repo URLs for identity, ignoring incidental differences.
+ """
+ return (self.normalize_url(url1) == self.normalize_url(url2))
+
+ def parse_vcs_bundle_file(self, content):
+ """
+ Takes the contents of the bundled text file that explains how to revert
+ the stripped off version control data of the given package and returns
+ the URL and revision of it.
+ """
+ raise NotImplementedError
+
+ def obtain(self, dest):
+ """
+ Called when installing or updating an editable package, takes the
+ source path of the checkout.
+ """
+ raise NotImplementedError
+
+ def switch(self, dest, url, rev_options):
+ """
+ Switch the repo at ``dest`` to point to ``URL``.
+ """
+ raise NotImplemented
+
+ def update(self, dest, rev_options):
+ """
+ Update an already-existing repo to the given ``rev_options``.
+ """
+ raise NotImplementedError
+
+ def check_destination(self, dest, url, rev_options, rev_display):
+ """
+ Prepare a location to receive a checkout/clone.
+
+ Return True if the location is ready for (and requires) a
+ checkout/clone, False otherwise.
+ """
+ checkout = True
+ prompt = False
+ if os.path.exists(dest):
+ checkout = False
+ if os.path.exists(os.path.join(dest, self.dirname)):
+ existing_url = self.get_url(dest)
+ if self.compare_urls(existing_url, url):
+ logger.info('%s in %s exists, and has correct URL (%s)'
+ % (self.repo_name.title(), display_path(dest), url))
+ logger.notify('Updating %s %s%s'
+ % (display_path(dest), self.repo_name, rev_display))
+ self.update(dest, rev_options)
+ else:
+ logger.warn('%s %s in %s exists with URL %s'
+ % (self.name, self.repo_name, display_path(dest), existing_url))
+ prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', ('s', 'i', 'w', 'b'))
+ else:
+ logger.warn('Directory %s already exists, and is not a %s %s.'
+ % (dest, self.name, self.repo_name))
+ prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
+ if prompt:
+ logger.warn('The plan is to install the %s repository %s'
+ % (self.name, url))
+ response = ask('What to do? %s' % prompt[0], prompt[1])
+
+ if response == 's':
+ logger.notify('Switching %s %s to %s%s'
+ % (self.repo_name, display_path(dest), url, rev_display))
+ self.switch(dest, url, rev_options)
+ elif response == 'i':
+ # do nothing
+ pass
+ elif response == 'w':
+ logger.warn('Deleting %s' % display_path(dest))
+ shutil.rmtree(dest)
+ checkout = True
+ elif response == 'b':
+ dest_dir = backup_dir(dest)
+ logger.warn('Backing up %s to %s'
+ % (display_path(dest), dest_dir))
+ shutil.move(dest, dest_dir)
+ checkout = True
+ return checkout
+
+ def unpack(self, location):
+ raise NotImplementedError
+
+ def get_src_requirement(self, dist, location, find_tags=False):
+ raise NotImplementedError
+
+
+def get_src_requirement(dist, location, find_tags):
+ version_control = vcs.get_backend_from_location(location)
+ if version_control:
+ return version_control().get_src_requirement(dist, location, find_tags)
+ logger.warn('cannot determine version of editable source in %s (is not SVN checkout, Git clone, Mercurial clone or Bazaar branch)' % location)
+ return dist.as_requirement()
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/bazaar.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/bazaar.py
new file mode 100755
index 00000000..3b6ea8f0
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/bazaar.py
@@ -0,0 +1,138 @@
+import os
+import shutil
+import tempfile
+import re
+from pip import call_subprocess
+from pip.log import logger
+from pip.util import rmtree, display_path
+from pip.vcs import vcs, VersionControl
+from pip.download import path_to_url2
+
+
+class Bazaar(VersionControl):
+ name = 'bzr'
+ dirname = '.bzr'
+ repo_name = 'branch'
+ bundle_file = 'bzr-branch.txt'
+ schemes = ('bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp')
+ guide = ('# This was a Bazaar branch; to make it a branch again run:\n'
+ 'bzr branch -r %(rev)s %(url)s .\n')
+
+ def parse_vcs_bundle_file(self, content):
+ url = rev = None
+ for line in content.splitlines():
+ if not line.strip() or line.strip().startswith('#'):
+ continue
+ match = re.search(r'^bzr\s*branch\s*-r\s*(\d*)', line)
+ if match:
+ rev = match.group(1).strip()
+ url = line[match.end():].strip().split(None, 1)[0]
+ if url and rev:
+ return url, rev
+ return None, None
+
+ def unpack(self, location):
+ """Get the bzr branch at the url to the destination location"""
+ url, rev = self.get_url_rev()
+ logger.notify('Checking out bzr repository %s to %s' % (url, location))
+ logger.indent += 2
+ try:
+ if os.path.exists(location):
+ os.rmdir(location)
+ call_subprocess(
+ [self.cmd, 'branch', url, location],
+ filter_stdout=self._filter, show_stdout=False)
+ finally:
+ logger.indent -= 2
+
+ def export(self, location):
+ """Export the Bazaar repository at the url to the destination location"""
+ temp_dir = tempfile.mkdtemp('-export', 'pip-')
+ self.unpack(temp_dir)
+ if os.path.exists(location):
+ # Remove the location to make sure Bazaar can export it correctly
+ rmtree(location)
+ try:
+ call_subprocess([self.cmd, 'export', location], cwd=temp_dir,
+ filter_stdout=self._filter, show_stdout=False)
+ finally:
+ shutil.rmtree(temp_dir)
+
+ def switch(self, dest, url, rev_options):
+ call_subprocess([self.cmd, 'switch', url], cwd=dest)
+
+ def update(self, dest, rev_options):
+ call_subprocess(
+ [self.cmd, 'pull', '-q'] + rev_options, cwd=dest)
+
+ def obtain(self, dest):
+ url, rev = self.get_url_rev()
+ if rev:
+ rev_options = ['-r', rev]
+ rev_display = ' (to revision %s)' % rev
+ else:
+ rev_options = []
+ rev_display = ''
+ if self.check_destination(dest, url, rev_options, rev_display):
+ logger.notify('Checking out %s%s to %s'
+ % (url, rev_display, display_path(dest)))
+ call_subprocess(
+ [self.cmd, 'branch', '-q'] + rev_options + [url, dest])
+
+ def get_url_rev(self):
+ # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it
+ url, rev = super(Bazaar, self).get_url_rev()
+ if url.startswith('ssh://'):
+ url = 'bzr+' + url
+ return url, rev
+
+ def get_url(self, location):
+ urls = call_subprocess(
+ [self.cmd, 'info'], show_stdout=False, cwd=location)
+ for line in urls.splitlines():
+ line = line.strip()
+ for x in ('checkout of branch: ',
+ 'parent branch: '):
+ if line.startswith(x):
+ repo = line.split(x)[1]
+ if self._is_local_repository(repo):
+ return path_to_url2(repo)
+ return repo
+ return None
+
+ def get_revision(self, location):
+ revision = call_subprocess(
+ [self.cmd, 'revno'], show_stdout=False, cwd=location)
+ return revision.splitlines()[-1]
+
+ def get_tag_revs(self, location):
+ tags = call_subprocess(
+ [self.cmd, 'tags'], show_stdout=False, cwd=location)
+ tag_revs = []
+ for line in tags.splitlines():
+ tags_match = re.search(r'([.\w-]+)\s*(.*)$', line)
+ if tags_match:
+ tag = tags_match.group(1)
+ rev = tags_match.group(2)
+ tag_revs.append((rev.strip(), tag.strip()))
+ return dict(tag_revs)
+
+ def get_src_requirement(self, dist, location, find_tags):
+ repo = self.get_url(location)
+ if not repo.lower().startswith('bzr:'):
+ repo = 'bzr+' + repo
+ egg_project_name = dist.egg_name().split('-', 1)[0]
+ if not repo:
+ return None
+ current_rev = self.get_revision(location)
+ tag_revs = self.get_tag_revs(location)
+
+ if current_rev in tag_revs:
+ # It's a tag
+ full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
+ else:
+ full_egg_name = '%s-dev_r%s' % (dist.egg_name(), current_rev)
+ return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name)
+
+
+vcs.register(Bazaar)
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/git.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/git.py
new file mode 100755
index 00000000..0701e49e
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/git.py
@@ -0,0 +1,204 @@
+import os
+import shutil
+import tempfile
+import re
+from pip import call_subprocess
+from pip.util import display_path
+from pip.vcs import vcs, VersionControl
+from pip.log import logger
+from urllib import url2pathname
+from urlparse import urlsplit, urlunsplit
+
+
+class Git(VersionControl):
+ name = 'git'
+ dirname = '.git'
+ repo_name = 'clone'
+ schemes = ('git', 'git+http', 'git+ssh', 'git+git', 'git+file')
+ bundle_file = 'git-clone.txt'
+ guide = ('# This was a Git repo; to make it a repo again run:\n'
+ 'git init\ngit remote add origin %(url)s -f\ngit checkout %(rev)s\n')
+
+ def __init__(self, url=None, *args, **kwargs):
+
+ # Works around an apparent Git bug
+ # (see http://article.gmane.org/gmane.comp.version-control.git/146500)
+ if url:
+ scheme, netloc, path, query, fragment = urlsplit(url)
+ if scheme.endswith('file'):
+ initial_slashes = path[:-len(path.lstrip('/'))]
+ newpath = initial_slashes + url2pathname(path).replace('\\', '/').lstrip('/')
+ url = urlunsplit((scheme, netloc, newpath, query, fragment))
+ after_plus = scheme.find('+')+1
+ url = scheme[:after_plus]+ urlunsplit((scheme[after_plus:], netloc, newpath, query, fragment))
+
+ super(Git, self).__init__(url, *args, **kwargs)
+
+ def parse_vcs_bundle_file(self, content):
+ url = rev = None
+ for line in content.splitlines():
+ if not line.strip() or line.strip().startswith('#'):
+ continue
+ url_match = re.search(r'git\s*remote\s*add\s*origin(.*)\s*-f', line)
+ if url_match:
+ url = url_match.group(1).strip()
+ rev_match = re.search(r'^git\s*checkout\s*-q\s*(.*)\s*', line)
+ if rev_match:
+ rev = rev_match.group(1).strip()
+ if url and rev:
+ return url, rev
+ return None, None
+
+ def unpack(self, location):
+ """Clone the Git repository at the url to the destination location"""
+ url, rev = self.get_url_rev()
+ logger.notify('Cloning Git repository %s to %s' % (url, location))
+ logger.indent += 2
+ try:
+ if os.path.exists(location):
+ os.rmdir(location)
+ call_subprocess(
+ [self.cmd, 'clone', url, location],
+ filter_stdout=self._filter, show_stdout=False)
+ finally:
+ logger.indent -= 2
+
+ def export(self, location):
+ """Export the Git repository at the url to the destination location"""
+ temp_dir = tempfile.mkdtemp('-export', 'pip-')
+ self.unpack(temp_dir)
+ try:
+ if not location.endswith('/'):
+ location = location + '/'
+ call_subprocess(
+ [self.cmd, 'checkout-index', '-a', '-f', '--prefix', location],
+ filter_stdout=self._filter, show_stdout=False, cwd=temp_dir)
+ finally:
+ shutil.rmtree(temp_dir)
+
+ def check_rev_options(self, rev, dest, rev_options):
+ """Check the revision options before checkout to compensate that tags
+ and branches may need origin/ as a prefix.
+ Returns the SHA1 of the branch or tag if found.
+ """
+ revisions = self.get_tag_revs(dest)
+ revisions.update(self.get_branch_revs(dest))
+ inverse_revisions = dict((v, k) for k, v in revisions.iteritems())
+ # Check if rev is a branch name
+ origin_rev = 'origin/%s' % rev
+ if origin_rev in inverse_revisions:
+ return [inverse_revisions[origin_rev]]
+ elif rev in inverse_revisions:
+ return [inverse_revisions[rev]]
+ else:
+ logger.warn("Could not find a tag or branch '%s', assuming commit." % rev)
+ return rev_options
+
+ def switch(self, dest, url, rev_options):
+ call_subprocess(
+ [self.cmd, 'config', 'remote.origin.url', url], cwd=dest)
+ call_subprocess(
+ [self.cmd, 'checkout', '-q'] + rev_options, cwd=dest)
+
+ def update(self, dest, rev_options):
+ call_subprocess([self.cmd, 'pull', '-q'], cwd=dest)
+ call_subprocess(
+ [self.cmd, 'checkout', '-q', '-f'] + rev_options, cwd=dest)
+
+ def obtain(self, dest):
+ url, rev = self.get_url_rev()
+ if rev:
+ rev_options = [rev]
+ rev_display = ' (to %s)' % rev
+ else:
+ rev_options = ['master']
+ rev_display = ''
+ if self.check_destination(dest, url, rev_options, rev_display):
+ logger.notify('Cloning %s%s to %s' % (url, rev_display, display_path(dest)))
+ call_subprocess([self.cmd, 'clone', '-q', url, dest])
+ if rev:
+ rev_options = self.check_rev_options(rev, dest, rev_options)
+ # Only do a checkout if rev_options differs from HEAD
+ if not self.get_revision(dest).startswith(rev_options[0]):
+ call_subprocess([self.cmd, 'checkout', '-q'] + rev_options, cwd=dest)
+
+ def get_url(self, location):
+ url = call_subprocess(
+ [self.cmd, 'config', 'remote.origin.url'],
+ show_stdout=False, cwd=location)
+ return url.strip()
+
+ def get_revision(self, location):
+ current_rev = call_subprocess(
+ [self.cmd, 'rev-parse', 'HEAD'], show_stdout=False, cwd=location)
+ return current_rev.strip()
+
+ def get_tag_revs(self, location):
+ tags = call_subprocess(
+ [self.cmd, 'tag', '-l'],
+ show_stdout=False, raise_on_returncode=False, cwd=location)
+ tag_revs = []
+ for line in tags.splitlines():
+ tag = line.strip()
+ rev = call_subprocess(
+ [self.cmd, 'rev-parse', tag], show_stdout=False, cwd=location)
+ tag_revs.append((rev.strip(), tag))
+ tag_revs = dict(tag_revs)
+ return tag_revs
+
+ def get_branch_revs(self, location):
+ branches = call_subprocess(
+ [self.cmd, 'branch', '-r'], show_stdout=False, cwd=location)
+ branch_revs = []
+ for line in branches.splitlines():
+ line = line.split('->')[0].strip()
+ branch = "".join([b for b in line.split() if b != '*'])
+ rev = call_subprocess(
+ [self.cmd, 'rev-parse', branch], show_stdout=False, cwd=location)
+ branch_revs.append((rev.strip(), branch))
+ branch_revs = dict(branch_revs)
+ return branch_revs
+
+ def get_src_requirement(self, dist, location, find_tags):
+ repo = self.get_url(location)
+ if not repo.lower().startswith('git:'):
+ repo = 'git+' + repo
+ egg_project_name = dist.egg_name().split('-', 1)[0]
+ if not repo:
+ return None
+ current_rev = self.get_revision(location)
+ tag_revs = self.get_tag_revs(location)
+ branch_revs = self.get_branch_revs(location)
+
+ if current_rev in tag_revs:
+ # It's a tag
+ full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
+ elif (current_rev in branch_revs and
+ branch_revs[current_rev] != 'origin/master'):
+ # It's the head of a branch
+ full_egg_name = '%s-%s' % (dist.egg_name(),
+ branch_revs[current_rev].replace('origin/', ''))
+ else:
+ full_egg_name = '%s-dev' % dist.egg_name()
+
+ return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name)
+
+ def get_url_rev(self):
+ """
+ Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
+ That's required because although they use SSH they sometimes doesn't
+ work with a ssh:// scheme (e.g. Github). But we need a scheme for
+ parsing. Hence we remove it again afterwards and return it as a stub.
+ """
+ if not '://' in self.url:
+ assert not 'file:' in self.url
+ self.url = self.url.replace('git+', 'git+ssh://')
+ url, rev = super(Git, self).get_url_rev()
+ url = url.replace('ssh://', '')
+ else:
+ url, rev = super(Git, self).get_url_rev()
+
+ return url, rev
+
+
+vcs.register(Git)
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/mercurial.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/mercurial.py
new file mode 100755
index 00000000..70c8c833
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/mercurial.py
@@ -0,0 +1,162 @@
+import os
+import shutil
+import tempfile
+import re
+import ConfigParser
+from pip import call_subprocess
+from pip.util import display_path
+from pip.log import logger
+from pip.vcs import vcs, VersionControl
+from pip.download import path_to_url2
+
+
+class Mercurial(VersionControl):
+ name = 'hg'
+ dirname = '.hg'
+ repo_name = 'clone'
+ schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http')
+ bundle_file = 'hg-clone.txt'
+ guide = ('# This was a Mercurial repo; to make it a repo again run:\n'
+ 'hg init\nhg pull %(url)s\nhg update -r %(rev)s\n')
+
+ def parse_vcs_bundle_file(self, content):
+ url = rev = None
+ for line in content.splitlines():
+ if not line.strip() or line.strip().startswith('#'):
+ continue
+ url_match = re.search(r'hg\s*pull\s*(.*)\s*', line)
+ if url_match:
+ url = url_match.group(1).strip()
+ rev_match = re.search(r'^hg\s*update\s*-r\s*(.*)\s*', line)
+ if rev_match:
+ rev = rev_match.group(1).strip()
+ if url and rev:
+ return url, rev
+ return None, None
+
+ def unpack(self, location):
+ """Clone the Hg repository at the url to the destination location"""
+ url, rev = self.get_url_rev()
+ logger.notify('Cloning Mercurial repository %s to %s' % (url, location))
+ logger.indent += 2
+ try:
+ if os.path.exists(location):
+ os.rmdir(location)
+ call_subprocess(
+ [self.cmd, 'clone', url, location],
+ filter_stdout=self._filter, show_stdout=False)
+ finally:
+ logger.indent -= 2
+
+ def export(self, location):
+ """Export the Hg repository at the url to the destination location"""
+ temp_dir = tempfile.mkdtemp('-export', 'pip-')
+ self.unpack(temp_dir)
+ try:
+ call_subprocess(
+ [self.cmd, 'archive', location],
+ filter_stdout=self._filter, show_stdout=False, cwd=temp_dir)
+ finally:
+ shutil.rmtree(temp_dir)
+
+ def switch(self, dest, url, rev_options):
+ repo_config = os.path.join(dest, self.dirname, 'hgrc')
+ config = ConfigParser.SafeConfigParser()
+ try:
+ config.read(repo_config)
+ config.set('paths', 'default', url)
+ config_file = open(repo_config, 'w')
+ config.write(config_file)
+ config_file.close()
+ except (OSError, ConfigParser.NoSectionError), e:
+ logger.warn(
+ 'Could not switch Mercurial repository to %s: %s'
+ % (url, e))
+ else:
+ call_subprocess([self.cmd, 'update', '-q'] + rev_options, cwd=dest)
+
+ def update(self, dest, rev_options):
+ call_subprocess([self.cmd, 'pull', '-q'], cwd=dest)
+ call_subprocess(
+ [self.cmd, 'update', '-q'] + rev_options, cwd=dest)
+
+ def obtain(self, dest):
+ url, rev = self.get_url_rev()
+ if rev:
+ rev_options = [rev]
+ rev_display = ' (to revision %s)' % rev
+ else:
+ rev_options = []
+ rev_display = ''
+ if self.check_destination(dest, url, rev_options, rev_display):
+ logger.notify('Cloning hg %s%s to %s'
+ % (url, rev_display, display_path(dest)))
+ call_subprocess([self.cmd, 'clone', '--noupdate', '-q', url, dest])
+ call_subprocess([self.cmd, 'update', '-q'] + rev_options, cwd=dest)
+
+ def get_url(self, location):
+ url = call_subprocess(
+ [self.cmd, 'showconfig', 'paths.default'],
+ show_stdout=False, cwd=location).strip()
+ if self._is_local_repository(url):
+ url = path_to_url2(url)
+ return url.strip()
+
+ def get_tag_revs(self, location):
+ tags = call_subprocess(
+ [self.cmd, 'tags'], show_stdout=False, cwd=location)
+ tag_revs = []
+ for line in tags.splitlines():
+ tags_match = re.search(r'([\w\d\.-]+)\s*([\d]+):.*$', line)
+ if tags_match:
+ tag = tags_match.group(1)
+ rev = tags_match.group(2)
+ tag_revs.append((rev.strip(), tag.strip()))
+ return dict(tag_revs)
+
+ def get_branch_revs(self, location):
+ branches = call_subprocess(
+ [self.cmd, 'branches'], show_stdout=False, cwd=location)
+ branch_revs = []
+ for line in branches.splitlines():
+ branches_match = re.search(r'([\w\d\.-]+)\s*([\d]+):.*$', line)
+ if branches_match:
+ branch = branches_match.group(1)
+ rev = branches_match.group(2)
+ branch_revs.append((rev.strip(), branch.strip()))
+ return dict(branch_revs)
+
+ def get_revision(self, location):
+ current_revision = call_subprocess(
+ [self.cmd, 'parents', '--template={rev}'],
+ show_stdout=False, cwd=location).strip()
+ return current_revision
+
+ def get_revision_hash(self, location):
+ current_rev_hash = call_subprocess(
+ [self.cmd, 'parents', '--template={node}'],
+ show_stdout=False, cwd=location).strip()
+ return current_rev_hash
+
+ def get_src_requirement(self, dist, location, find_tags):
+ repo = self.get_url(location)
+ if not repo.lower().startswith('hg:'):
+ repo = 'hg+' + repo
+ egg_project_name = dist.egg_name().split('-', 1)[0]
+ if not repo:
+ return None
+ current_rev = self.get_revision(location)
+ current_rev_hash = self.get_revision_hash(location)
+ tag_revs = self.get_tag_revs(location)
+ branch_revs = self.get_branch_revs(location)
+ if current_rev in tag_revs:
+ # It's a tag
+ full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
+ elif current_rev in branch_revs:
+ # It's the tip of a branch
+ full_egg_name = '%s-%s' % (dist.egg_name(), branch_revs[current_rev])
+ else:
+ full_egg_name = '%s-dev' % dist.egg_name()
+ return '%s@%s#egg=%s' % (repo, current_rev_hash, full_egg_name)
+
+vcs.register(Mercurial)
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/subversion.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/subversion.py
new file mode 100755
index 00000000..85715d97
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/vcs/subversion.py
@@ -0,0 +1,260 @@
+import os
+import re
+from pip import call_subprocess
+from pip.index import Link
+from pip.util import rmtree, display_path
+from pip.log import logger
+from pip.vcs import vcs, VersionControl
+
+_svn_xml_url_re = re.compile('url="([^"]+)"')
+_svn_rev_re = re.compile('committed-rev="(\d+)"')
+_svn_url_re = re.compile(r'URL: (.+)')
+_svn_revision_re = re.compile(r'Revision: (.+)')
+
+
+class Subversion(VersionControl):
+ name = 'svn'
+ dirname = '.svn'
+ repo_name = 'checkout'
+ schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https')
+ bundle_file = 'svn-checkout.txt'
+ guide = ('# This was an svn checkout; to make it a checkout again run:\n'
+ 'svn checkout --force -r %(rev)s %(url)s .\n')
+
+ def get_info(self, location):
+ """Returns (url, revision), where both are strings"""
+ assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location
+ output = call_subprocess(
+ [self.cmd, 'info', location], show_stdout=False, extra_environ={'LANG': 'C'})
+ match = _svn_url_re.search(output)
+ if not match:
+ logger.warn('Cannot determine URL of svn checkout %s' % display_path(location))
+ logger.info('Output that cannot be parsed: \n%s' % output)
+ return None, None
+ url = match.group(1).strip()
+ match = _svn_revision_re.search(output)
+ if not match:
+ logger.warn('Cannot determine revision of svn checkout %s' % display_path(location))
+ logger.info('Output that cannot be parsed: \n%s' % output)
+ return url, None
+ return url, match.group(1)
+
+ def parse_vcs_bundle_file(self, content):
+ for line in content.splitlines():
+ if not line.strip() or line.strip().startswith('#'):
+ continue
+ match = re.search(r'^-r\s*([^ ])?', line)
+ if not match:
+ return None, None
+ rev = match.group(1)
+ rest = line[match.end():].strip().split(None, 1)[0]
+ return rest, rev
+ return None, None
+
+ def unpack(self, location):
+ """Check out the svn repository at the url to the destination location"""
+ url, rev = self.get_url_rev()
+ logger.notify('Checking out svn repository %s to %s' % (url, location))
+ logger.indent += 2
+ try:
+ if os.path.exists(location):
+ # Subversion doesn't like to check out over an existing directory
+ # --force fixes this, but was only added in svn 1.5
+ rmtree(location)
+ call_subprocess(
+ [self.cmd, 'checkout', url, location],
+ filter_stdout=self._filter, show_stdout=False)
+ finally:
+ logger.indent -= 2
+
+ def export(self, location):
+ """Export the svn repository at the url to the destination location"""
+ url, rev = self.get_url_rev()
+ logger.notify('Exporting svn repository %s to %s' % (url, location))
+ logger.indent += 2
+ try:
+ if os.path.exists(location):
+ # Subversion doesn't like to check out over an existing directory
+ # --force fixes this, but was only added in svn 1.5
+ rmtree(location)
+ call_subprocess(
+ [self.cmd, 'export', url, location],
+ filter_stdout=self._filter, show_stdout=False)
+ finally:
+ logger.indent -= 2
+
+ def switch(self, dest, url, rev_options):
+ call_subprocess(
+ [self.cmd, 'switch'] + rev_options + [url, dest])
+
+ def update(self, dest, rev_options):
+ call_subprocess(
+ [self.cmd, 'update'] + rev_options + [dest])
+
+ def obtain(self, dest):
+ url, rev = self.get_url_rev()
+ if rev:
+ rev_options = ['-r', rev]
+ rev_display = ' (to revision %s)' % rev
+ else:
+ rev_options = []
+ rev_display = ''
+ if self.check_destination(dest, url, rev_options, rev_display):
+ logger.notify('Checking out %s%s to %s'
+ % (url, rev_display, display_path(dest)))
+ call_subprocess(
+ [self.cmd, 'checkout', '-q'] + rev_options + [url, dest])
+
+ def get_location(self, dist, dependency_links):
+ for url in dependency_links:
+ egg_fragment = Link(url).egg_fragment
+ if not egg_fragment:
+ continue
+ if '-' in egg_fragment:
+ ## FIXME: will this work when a package has - in the name?
+ key = '-'.join(egg_fragment.split('-')[:-1]).lower()
+ else:
+ key = egg_fragment
+ if key == dist.key:
+ return url.split('#', 1)[0]
+ return None
+
+ def get_revision(self, location):
+ """
+ Return the maximum revision for all files under a given location
+ """
+ # Note: taken from setuptools.command.egg_info
+ revision = 0
+
+ for base, dirs, files in os.walk(location):
+ if self.dirname not in dirs:
+ dirs[:] = []
+ continue # no sense walking uncontrolled subdirs
+ dirs.remove(self.dirname)
+ entries_fn = os.path.join(base, self.dirname, 'entries')
+ if not os.path.exists(entries_fn):
+ ## FIXME: should we warn?
+ continue
+ f = open(entries_fn)
+ data = f.read()
+ f.close()
+
+ if data.startswith('8') or data.startswith('9') or data.startswith('10'):
+ data = map(str.splitlines, data.split('\n\x0c\n'))
+ del data[0][0] # get rid of the '8'
+ dirurl = data[0][3]
+ revs = [int(d[9]) for d in data if len(d)>9 and d[9]]+[0]
+ if revs:
+ localrev = max(revs)
+ else:
+ localrev = 0
+ elif data.startswith('<?xml'):
+ dirurl = _svn_xml_url_re.search(data).group(1) # get repository URL
+ revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)]+[0]
+ if revs:
+ localrev = max(revs)
+ else:
+ localrev = 0
+ else:
+ logger.warn("Unrecognized .svn/entries format; skipping %s", base)
+ dirs[:] = []
+ continue
+ if base == location:
+ base_url = dirurl+'/' # save the root url
+ elif not dirurl.startswith(base_url):
+ dirs[:] = []
+ continue # not part of the same svn tree, skip it
+ revision = max(revision, localrev)
+ return revision
+
+ def get_url_rev(self):
+ # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it
+ url, rev = super(Subversion, self).get_url_rev()
+ if url.startswith('ssh://'):
+ url = 'svn+' + url
+ return url, rev
+
+ def get_url(self, location):
+ # In cases where the source is in a subdirectory, not alongside setup.py
+ # we have to look up in the location until we find a real setup.py
+ orig_location = location
+ while not os.path.exists(os.path.join(location, 'setup.py')):
+ last_location = location
+ location = os.path.dirname(location)
+ if location == last_location:
+ # We've traversed up to the root of the filesystem without finding setup.py
+ logger.warn("Could not find setup.py for directory %s (tried all parent directories)"
+ % orig_location)
+ return None
+ f = open(os.path.join(location, self.dirname, 'entries'))
+ data = f.read()
+ f.close()
+ if data.startswith('8') or data.startswith('9') or data.startswith('10'):
+ data = map(str.splitlines, data.split('\n\x0c\n'))
+ del data[0][0] # get rid of the '8'
+ return data[0][3]
+ elif data.startswith('<?xml'):
+ match = _svn_xml_url_re.search(data)
+ if not match:
+ raise ValueError('Badly formatted data: %r' % data)
+ return match.group(1) # get repository URL
+ else:
+ logger.warn("Unrecognized .svn/entries format in %s" % location)
+ # Or raise exception?
+ return None
+
+ def get_tag_revs(self, svn_tag_url):
+ stdout = call_subprocess(
+ [self.cmd, 'ls', '-v', svn_tag_url], show_stdout=False)
+ results = []
+ for line in stdout.splitlines():
+ parts = line.split()
+ rev = int(parts[0])
+ tag = parts[-1].strip('/')
+ results.append((tag, rev))
+ return results
+
+ def find_tag_match(self, rev, tag_revs):
+ best_match_rev = None
+ best_tag = None
+ for tag, tag_rev in tag_revs:
+ if (tag_rev > rev and
+ (best_match_rev is None or best_match_rev > tag_rev)):
+ # FIXME: Is best_match > tag_rev really possible?
+ # or is it a sign something is wacky?
+ best_match_rev = tag_rev
+ best_tag = tag
+ return best_tag
+
+ def get_src_requirement(self, dist, location, find_tags=False):
+ repo = self.get_url(location)
+ if repo is None:
+ return None
+ parts = repo.split('/')
+ ## FIXME: why not project name?
+ egg_project_name = dist.egg_name().split('-', 1)[0]
+ rev = self.get_revision(location)
+ if parts[-2] in ('tags', 'tag'):
+ # It's a tag, perfect!
+ full_egg_name = '%s-%s' % (egg_project_name, parts[-1])
+ elif parts[-2] in ('branches', 'branch'):
+ # It's a branch :(
+ full_egg_name = '%s-%s-r%s' % (dist.egg_name(), parts[-1], rev)
+ elif parts[-1] == 'trunk':
+ # Trunk :-/
+ full_egg_name = '%s-dev_r%s' % (dist.egg_name(), rev)
+ if find_tags:
+ tag_url = '/'.join(parts[:-1]) + '/tags'
+ tag_revs = self.get_tag_revs(tag_url)
+ match = self.find_tag_match(rev, tag_revs)
+ if match:
+ logger.notify('trunk checkout %s seems to be equivalent to tag %s' % match)
+ repo = '%s/%s' % (tag_url, match)
+ full_egg_name = '%s-%s' % (egg_project_name, match)
+ else:
+ # Don't know what it is
+ logger.warn('svn URL does not fit normal structure (tags/branches/trunk): %s' % repo)
+ full_egg_name = '%s-dev_r%s' % (egg_project_name, rev)
+ return 'svn+%s@%s#egg=%s' % (repo, rev, full_egg_name)
+
+vcs.register(Subversion)
diff --git a/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/venv.py b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/venv.py
new file mode 100755
index 00000000..708abb05
--- /dev/null
+++ b/lib/python2.7/site-packages/pip-0.8.1-py2.7.egg/pip/venv.py
@@ -0,0 +1,53 @@
+"""Tools for working with virtualenv environments"""
+
+import os
+import sys
+import subprocess
+from pip.exceptions import BadCommand
+from pip.log import logger
+
+
+def restart_in_venv(venv, base, site_packages, args):
+ """
+ Restart this script using the interpreter in the given virtual environment
+ """
+ if base and not os.path.isabs(venv) and not venv.startswith('~'):
+ base = os.path.expanduser(base)
+ # ensure we have an abs basepath at this point:
+ # a relative one makes no sense (or does it?)
+ if os.path.isabs(base):
+ venv = os.path.join(base, venv)
+
+ if venv.startswith('~'):
+ venv = os.path.expanduser(venv)
+
+ if not os.path.exists(venv):
+ try:
+ import virtualenv
+ except ImportError:
+ print 'The virtual environment does not exist: %s' % venv
+ print 'and virtualenv is not installed, so a new environment cannot be created'
+ sys.exit(3)
+ print 'Creating new virtualenv environment in %s' % venv
+ virtualenv.logger = logger
+ logger.indent += 2
+ virtualenv.create_environment(venv, site_packages=site_packages)
+ if sys.platform == 'win32':
+ python = os.path.join(venv, 'Scripts', 'python.exe')
+ # check for bin directory which is used in buildouts
+ if not os.path.exists(python):
+ python = os.path.join(venv, 'bin', 'python.exe')
+ else:
+ python = os.path.join(venv, 'bin', 'python')
+ if not os.path.exists(python):
+ python = venv
+ if not os.path.exists(python):
+ raise BadCommand('Cannot find virtual environment interpreter at %s' % python)
+ base = os.path.dirname(os.path.dirname(python))
+ file = os.path.join(os.path.dirname(__file__), 'runner.py')
+ if file.endswith('.pyc'):
+ file = file[:-1]
+ proc = subprocess.Popen(
+ [python, file] + args + [base, '___VENV_RESTART___'])
+ proc.wait()
+ sys.exit(proc.returncode)
diff --git a/lib/python2.7/site-packages/setuptools-0.6c11-py2.7.egg-info b/lib/python2.7/site-packages/setuptools-0.6c11-py2.7.egg-info
new file mode 100644
index 00000000..0e358148
--- /dev/null
+++ b/lib/python2.7/site-packages/setuptools-0.6c11-py2.7.egg-info
@@ -0,0 +1,9 @@
+Metadata-Version: 1.0
+Name: setuptools
+Version: 0.6c11
+Summary: xxxx
+Home-page: xxx
+Author: xxx
+Author-email: xxx
+License: xxx
+Description: xxx
diff --git a/lib/python2.7/site-packages/setuptools.pth b/lib/python2.7/site-packages/setuptools.pth
new file mode 100644
index 00000000..0f744252
--- /dev/null
+++ b/lib/python2.7/site-packages/setuptools.pth
@@ -0,0 +1 @@
+./distribute-0.6.14-py2.7.egg
diff --git a/lib/python2.7/site.py b/lib/python2.7/site.py
new file mode 100644
index 00000000..a49cfc34
--- /dev/null
+++ b/lib/python2.7/site.py
@@ -0,0 +1,713 @@
+"""Append module search paths for third-party packages to sys.path.
+
+****************************************************************
+* This module is automatically imported during initialization. *
+****************************************************************
+
+In earlier versions of Python (up to 1.5a3), scripts or modules that
+needed to use site-specific modules would place ``import site''
+somewhere near the top of their code. Because of the automatic
+import, this is no longer necessary (but code that does it still
+works).
+
+This will append site-specific paths to the module search path. On
+Unix, it starts with sys.prefix and sys.exec_prefix (if different) and
+appends lib/python<version>/site-packages as well as lib/site-python.
+It also supports the Debian convention of
+lib/python<version>/dist-packages. On other platforms (mainly Mac and
+Windows), it uses just sys.prefix (and sys.exec_prefix, if different,
+but this is unlikely). The resulting directories, if they exist, are
+appended to sys.path, and also inspected for path configuration files.
+
+FOR DEBIAN, this sys.path is augmented with directories in /usr/local.
+Local addons go into /usr/local/lib/python<version>/site-packages
+(resp. /usr/local/lib/site-python), Debian addons install into
+/usr/{lib,share}/python<version>/dist-packages.
+
+A path configuration file is a file whose name has the form
+<package>.pth; its contents are additional directories (one per line)
+to be added to sys.path. Non-existing directories (or
+non-directories) are never added to sys.path; no directory is added to
+sys.path more than once. Blank lines and lines beginning with
+'#' are skipped. Lines starting with 'import' are executed.
+
+For example, suppose sys.prefix and sys.exec_prefix are set to
+/usr/local and there is a directory /usr/local/lib/python2.X/site-packages
+with three subdirectories, foo, bar and spam, and two path
+configuration files, foo.pth and bar.pth. Assume foo.pth contains the
+following:
+
+ # foo package configuration
+ foo
+ bar
+ bletch
+
+and bar.pth contains:
+
+ # bar package configuration
+ bar
+
+Then the following directories are added to sys.path, in this order:
+
+ /usr/local/lib/python2.X/site-packages/bar
+ /usr/local/lib/python2.X/site-packages/foo
+
+Note that bletch is omitted because it doesn't exist; bar precedes foo
+because bar.pth comes alphabetically before foo.pth; and spam is
+omitted because it is not mentioned in either path configuration file.
+
+After these path manipulations, an attempt is made to import a module
+named sitecustomize, which can perform arbitrary additional
+site-specific customizations. If this import fails with an
+ImportError exception, it is silently ignored.
+
+"""
+
+import sys
+import os
+import __builtin__
+try:
+ set
+except NameError:
+ from sets import Set as set
+
+# Prefixes for site-packages; add additional prefixes like /usr/local here
+PREFIXES = [sys.prefix, sys.exec_prefix]
+# Enable per user site-packages directory
+# set it to False to disable the feature or True to force the feature
+ENABLE_USER_SITE = None
+# for distutils.commands.install
+USER_SITE = None
+USER_BASE = None
+
+_is_pypy = hasattr(sys, 'pypy_version_info')
+_is_jython = sys.platform[:4] == 'java'
+if _is_jython:
+ ModuleType = type(os)
+
+def makepath(*paths):
+ dir = os.path.join(*paths)
+ if _is_jython and (dir == '__classpath__' or
+ dir.startswith('__pyclasspath__')):
+ return dir, dir
+ dir = os.path.abspath(dir)
+ return dir, os.path.normcase(dir)
+
+def abs__file__():
+ """Set all module' __file__ attribute to an absolute path"""
+ for m in sys.modules.values():
+ if ((_is_jython and not isinstance(m, ModuleType)) or
+ hasattr(m, '__loader__')):
+ # only modules need the abspath in Jython. and don't mess
+ # with a PEP 302-supplied __file__
+ continue
+ f = getattr(m, '__file__', None)
+ if f is None:
+ continue
+ m.__file__ = os.path.abspath(f)
+
+def removeduppaths():
+ """ Remove duplicate entries from sys.path along with making them
+ absolute"""
+ # This ensures that the initial path provided by the interpreter contains
+ # only absolute pathnames, even if we're running from the build directory.
+ L = []
+ known_paths = set()
+ for dir in sys.path:
+ # Filter out duplicate paths (on case-insensitive file systems also
+ # if they only differ in case); turn relative paths into absolute
+ # paths.
+ dir, dircase = makepath(dir)
+ if not dircase in known_paths:
+ L.append(dir)
+ known_paths.add(dircase)
+ sys.path[:] = L
+ return known_paths
+
+# XXX This should not be part of site.py, since it is needed even when
+# using the -S option for Python. See http://www.python.org/sf/586680
+def addbuilddir():
+ """Append ./build/lib.<platform> in case we're running in the build dir
+ (especially for Guido :-)"""
+ from distutils.util import get_platform
+ s = "build/lib.%s-%.3s" % (get_platform(), sys.version)
+ if hasattr(sys, 'gettotalrefcount'):
+ s += '-pydebug'
+ s = os.path.join(os.path.dirname(sys.path[-1]), s)
+ sys.path.append(s)
+
+def _init_pathinfo():
+ """Return a set containing all existing directory entries from sys.path"""
+ d = set()
+ for dir in sys.path:
+ try:
+ if os.path.isdir(dir):
+ dir, dircase = makepath(dir)
+ d.add(dircase)
+ except TypeError:
+ continue
+ return d
+
+def addpackage(sitedir, name, known_paths):
+ """Add a new path to known_paths by combining sitedir and 'name' or execute
+ sitedir if it starts with 'import'"""
+ if known_paths is None:
+ _init_pathinfo()
+ reset = 1
+ else:
+ reset = 0
+ fullname = os.path.join(sitedir, name)
+ try:
+ f = open(fullname, "rU")
+ except IOError:
+ return
+ try:
+ for line in f:
+ if line.startswith("#"):
+ continue
+ if line.startswith("import"):
+ exec line
+ continue
+ line = line.rstrip()
+ dir, dircase = makepath(sitedir, line)
+ if not dircase in known_paths and os.path.exists(dir):
+ sys.path.append(dir)
+ known_paths.add(dircase)
+ finally:
+ f.close()
+ if reset:
+ known_paths = None
+ return known_paths
+
+def addsitedir(sitedir, known_paths=None):
+ """Add 'sitedir' argument to sys.path if missing and handle .pth files in
+ 'sitedir'"""
+ if known_paths is None:
+ known_paths = _init_pathinfo()
+ reset = 1
+ else:
+ reset = 0
+ sitedir, sitedircase = makepath(sitedir)
+ if not sitedircase in known_paths:
+ sys.path.append(sitedir) # Add path component
+ try:
+ names = os.listdir(sitedir)
+ except os.error:
+ return
+ names.sort()
+ for name in names:
+ if name.endswith(os.extsep + "pth"):
+ addpackage(sitedir, name, known_paths)
+ if reset:
+ known_paths = None
+ return known_paths
+
+def addsitepackages(known_paths, sys_prefix=sys.prefix, exec_prefix=sys.exec_prefix):
+ """Add site-packages (and possibly site-python) to sys.path"""
+ prefixes = [os.path.join(sys_prefix, "local"), sys_prefix]
+ if exec_prefix != sys_prefix:
+ prefixes.append(os.path.join(exec_prefix, "local"))
+
+ for prefix in prefixes:
+ if prefix:
+ if sys.platform in ('os2emx', 'riscos') or _is_jython:
+ sitedirs = [os.path.join(prefix, "Lib", "site-packages")]
+ elif _is_pypy:
+ sitedirs = [os.path.join(prefix, 'site-packages')]
+ elif sys.platform == 'darwin' and prefix == sys_prefix:
+
+ if prefix.startswith("/System/Library/Frameworks/"): # Apple's Python
+
+ sitedirs = [os.path.join("/Library/Python", sys.version[:3], "site-packages"),
+ os.path.join(prefix, "Extras", "lib", "python")]
+
+ else: # any other Python distros on OSX work this way
+ sitedirs = [os.path.join(prefix, "lib",
+ "python" + sys.version[:3], "site-packages")]
+
+ elif os.sep == '/':
+ sitedirs = [os.path.join(prefix,
+ "lib",
+ "python" + sys.version[:3],
+ "site-packages"),
+ os.path.join(prefix, "lib", "site-python"),
+ os.path.join(prefix, "python" + sys.version[:3], "lib-dynload")]
+ lib64_dir = os.path.join(prefix, "lib64", "python" + sys.version[:3], "site-packages")
+ if (os.path.exists(lib64_dir) and
+ os.path.realpath(lib64_dir) not in [os.path.realpath(p) for p in sitedirs]):
+ sitedirs.append(lib64_dir)
+ try:
+ # sys.getobjects only available in --with-pydebug build
+ sys.getobjects
+ sitedirs.insert(0, os.path.join(sitedirs[0], 'debug'))
+ except AttributeError:
+ pass
+ # Debian-specific dist-packages directories:
+ sitedirs.append(os.path.join(prefix, "lib",
+ "python" + sys.version[:3],
+ "dist-packages"))
+ sitedirs.append(os.path.join(prefix, "local/lib",
+ "python" + sys.version[:3],
+ "dist-packages"))
+ sitedirs.append(os.path.join(prefix, "lib", "dist-python"))
+ else:
+ sitedirs = [prefix, os.path.join(prefix, "lib", "site-packages")]
+ if sys.platform == 'darwin':
+ # for framework builds *only* we add the standard Apple
+ # locations. Currently only per-user, but /Library and
+ # /Network/Library could be added too
+ if 'Python.framework' in prefix:
+ home = os.environ.get('HOME')
+ if home:
+ sitedirs.append(
+ os.path.join(home,
+ 'Library',
+ 'Python',
+ sys.version[:3],
+ 'site-packages'))
+ for sitedir in sitedirs:
+ if os.path.isdir(sitedir):
+ addsitedir(sitedir, known_paths)
+ return None
+
+def check_enableusersite():
+ """Check if user site directory is safe for inclusion
+
+ The function tests for the command line flag (including environment var),
+ process uid/gid equal to effective uid/gid.
+
+ None: Disabled for security reasons
+ False: Disabled by user (command line option)
+ True: Safe and enabled
+ """
+ if hasattr(sys, 'flags') and getattr(sys.flags, 'no_user_site', False):
+ return False
+
+ if hasattr(os, "getuid") and hasattr(os, "geteuid"):
+ # check process uid == effective uid
+ if os.geteuid() != os.getuid():
+ return None
+ if hasattr(os, "getgid") and hasattr(os, "getegid"):
+ # check process gid == effective gid
+ if os.getegid() != os.getgid():
+ return None
+
+ return True
+
+def addusersitepackages(known_paths):
+ """Add a per user site-package to sys.path
+
+ Each user has its own python directory with site-packages in the
+ home directory.
+
+ USER_BASE is the root directory for all Python versions
+
+ USER_SITE is the user specific site-packages directory
+
+ USER_SITE/.. can be used for data.
+ """
+ global USER_BASE, USER_SITE, ENABLE_USER_SITE
+ env_base = os.environ.get("PYTHONUSERBASE", None)
+
+ def joinuser(*args):
+ return os.path.expanduser(os.path.join(*args))
+
+ #if sys.platform in ('os2emx', 'riscos'):
+ # # Don't know what to put here
+ # USER_BASE = ''
+ # USER_SITE = ''
+ if os.name == "nt":
+ base = os.environ.get("APPDATA") or "~"
+ if env_base:
+ USER_BASE = env_base
+ else:
+ USER_BASE = joinuser(base, "Python")
+ USER_SITE = os.path.join(USER_BASE,
+ "Python" + sys.version[0] + sys.version[2],
+ "site-packages")
+ else:
+ if env_base:
+ USER_BASE = env_base
+ else:
+ USER_BASE = joinuser("~", ".local")
+ USER_SITE = os.path.join(USER_BASE, "lib",
+ "python" + sys.version[:3],
+ "site-packages")
+
+ if ENABLE_USER_SITE and os.path.isdir(USER_SITE):
+ addsitedir(USER_SITE, known_paths)
+ if ENABLE_USER_SITE:
+ for dist_libdir in ("lib", "local/lib"):
+ user_site = os.path.join(USER_BASE, dist_libdir,
+ "python" + sys.version[:3],
+ "dist-packages")
+ if os.path.isdir(user_site):
+ addsitedir(user_site, known_paths)
+ return known_paths
+
+
+
+def setBEGINLIBPATH():
+ """The OS/2 EMX port has optional extension modules that do double duty
+ as DLLs (and must use the .DLL file extension) for other extensions.
+ The library search path needs to be amended so these will be found
+ during module import. Use BEGINLIBPATH so that these are at the start
+ of the library search path.
+
+ """
+ dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload")
+ libpath = os.environ['BEGINLIBPATH'].split(';')
+ if libpath[-1]:
+ libpath.append(dllpath)
+ else:
+ libpath[-1] = dllpath
+ os.environ['BEGINLIBPATH'] = ';'.join(libpath)
+
+
+def setquit():
+ """Define new built-ins 'quit' and 'exit'.
+ These are simply strings that display a hint on how to exit.
+
+ """
+ if os.sep == ':':
+ eof = 'Cmd-Q'
+ elif os.sep == '\\':
+ eof = 'Ctrl-Z plus Return'
+ else:
+ eof = 'Ctrl-D (i.e. EOF)'
+
+ class Quitter(object):
+ def __init__(self, name):
+ self.name = name
+ def __repr__(self):
+ return 'Use %s() or %s to exit' % (self.name, eof)
+ def __call__(self, code=None):
+ # Shells like IDLE catch the SystemExit, but listen when their
+ # stdin wrapper is closed.
+ try:
+ sys.stdin.close()
+ except:
+ pass
+ raise SystemExit(code)
+ __builtin__.quit = Quitter('quit')
+ __builtin__.exit = Quitter('exit')
+
+
+class _Printer(object):
+ """interactive prompt objects for printing the license text, a list of
+ contributors and the copyright notice."""
+
+ MAXLINES = 23
+
+ def __init__(self, name, data, files=(), dirs=()):
+ self.__name = name
+ self.__data = data
+ self.__files = files
+ self.__dirs = dirs
+ self.__lines = None
+
+ def __setup(self):
+ if self.__lines:
+ return
+ data = None
+ for dir in self.__dirs:
+ for filename in self.__files:
+ filename = os.path.join(dir, filename)
+ try:
+ fp = file(filename, "rU")
+ data = fp.read()
+ fp.close()
+ break
+ except IOError:
+ pass
+ if data:
+ break
+ if not data:
+ data = self.__data
+ self.__lines = data.split('\n')
+ self.__linecnt = len(self.__lines)
+
+ def __repr__(self):
+ self.__setup()
+ if len(self.__lines) <= self.MAXLINES:
+ return "\n".join(self.__lines)
+ else:
+ return "Type %s() to see the full %s text" % ((self.__name,)*2)
+
+ def __call__(self):
+ self.__setup()
+ prompt = 'Hit Return for more, or q (and Return) to quit: '
+ lineno = 0
+ while 1:
+ try:
+ for i in range(lineno, lineno + self.MAXLINES):
+ print self.__lines[i]
+ except IndexError:
+ break
+ else:
+ lineno += self.MAXLINES
+ key = None
+ while key is None:
+ key = raw_input(prompt)
+ if key not in ('', 'q'):
+ key = None
+ if key == 'q':
+ break
+
+def setcopyright():
+ """Set 'copyright' and 'credits' in __builtin__"""
+ __builtin__.copyright = _Printer("copyright", sys.copyright)
+ if _is_jython:
+ __builtin__.credits = _Printer(
+ "credits",
+ "Jython is maintained by the Jython developers (www.jython.org).")
+ elif _is_pypy:
+ __builtin__.credits = _Printer(
+ "credits",
+ "PyPy is maintained by the PyPy developers: http://codespeak.net/pypy")
+ else:
+ __builtin__.credits = _Printer("credits", """\
+ Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands
+ for supporting Python development. See www.python.org for more information.""")
+ here = os.path.dirname(os.__file__)
+ __builtin__.license = _Printer(
+ "license", "See http://www.python.org/%.3s/license.html" % sys.version,
+ ["LICENSE.txt", "LICENSE"],
+ [os.path.join(here, os.pardir), here, os.curdir])
+
+
+class _Helper(object):
+ """Define the built-in 'help'.
+ This is a wrapper around pydoc.help (with a twist).
+
+ """
+
+ def __repr__(self):
+ return "Type help() for interactive help, " \
+ "or help(object) for help about object."
+ def __call__(self, *args, **kwds):
+ import pydoc
+ return pydoc.help(*args, **kwds)
+
+def sethelper():
+ __builtin__.help = _Helper()
+
+def aliasmbcs():
+ """On Windows, some default encodings are not provided by Python,
+ while they are always available as "mbcs" in each locale. Make
+ them usable by aliasing to "mbcs" in such a case."""
+ if sys.platform == 'win32':
+ import locale, codecs
+ enc = locale.getdefaultlocale()[1]
+ if enc.startswith('cp'): # "cp***" ?
+ try:
+ codecs.lookup(enc)
+ except LookupError:
+ import encodings
+ encodings._cache[enc] = encodings._unknown
+ encodings.aliases.aliases[enc] = 'mbcs'
+
+def setencoding():
+ """Set the string encoding used by the Unicode implementation. The
+ default is 'ascii', but if you're willing to experiment, you can
+ change this."""
+ encoding = "ascii" # Default value set by _PyUnicode_Init()
+ if 0:
+ # Enable to support locale aware default string encodings.
+ import locale
+ loc = locale.getdefaultlocale()
+ if loc[1]:
+ encoding = loc[1]
+ if 0:
+ # Enable to switch off string to Unicode coercion and implicit
+ # Unicode to string conversion.
+ encoding = "undefined"
+ if encoding != "ascii":
+ # On Non-Unicode builds this will raise an AttributeError...
+ sys.setdefaultencoding(encoding) # Needs Python Unicode build !
+
+
+def execsitecustomize():
+ """Run custom site specific code, if available."""
+ try:
+ import sitecustomize
+ except ImportError:
+ pass
+
+def virtual_install_main_packages():
+ f = open(os.path.join(os.path.dirname(__file__), 'orig-prefix.txt'))
+ sys.real_prefix = f.read().strip()
+ f.close()
+ pos = 2
+ if sys.path[0] == '':
+ pos += 1
+ if sys.platform == 'win32':
+ paths = [os.path.join(sys.real_prefix, 'Lib'), os.path.join(sys.real_prefix, 'DLLs')]
+ elif _is_jython:
+ paths = [os.path.join(sys.real_prefix, 'Lib')]
+ elif _is_pypy:
+ cpyver = '%d.%d.%d' % sys.version_info[:3]
+ paths = [os.path.join(sys.real_prefix, 'lib_pypy'),
+ os.path.join(sys.real_prefix, 'lib-python', 'modified-%s' % cpyver),
+ os.path.join(sys.real_prefix, 'lib-python', cpyver)]
+ else:
+ paths = [os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3])]
+ lib64_path = os.path.join(sys.real_prefix, 'lib64', 'python'+sys.version[:3])
+ if os.path.exists(lib64_path):
+ paths.append(lib64_path)
+ # This is hardcoded in the Python executable, but relative to sys.prefix:
+ plat_path = os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3],
+ 'plat-%s' % sys.platform)
+ if os.path.exists(plat_path):
+ paths.append(plat_path)
+ # This is hardcoded in the Python executable, but
+ # relative to sys.prefix, so we have to fix up:
+ for path in list(paths):
+ tk_dir = os.path.join(path, 'lib-tk')
+ if os.path.exists(tk_dir):
+ paths.append(tk_dir)
+
+ # These are hardcoded in the Apple's Python executable,
+ # but relative to sys.prefix, so we have to fix them up:
+ if sys.platform == 'darwin':
+ hardcoded_paths = [os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3], module)
+ for module in ('plat-darwin', 'plat-mac', 'plat-mac/lib-scriptpackages')]
+
+ for path in hardcoded_paths:
+ if os.path.exists(path):
+ paths.append(path)
+
+ sys.path.extend(paths)
+
+def force_global_eggs_after_local_site_packages():
+ """
+ Force easy_installed eggs in the global environment to get placed
+ in sys.path after all packages inside the virtualenv. This
+ maintains the "least surprise" result that packages in the
+ virtualenv always mask global packages, never the other way
+ around.
+
+ """
+ egginsert = getattr(sys, '__egginsert', 0)
+ for i, path in enumerate(sys.path):
+ if i > egginsert and path.startswith(sys.prefix):
+ egginsert = i
+ sys.__egginsert = egginsert + 1
+
+def virtual_addsitepackages(known_paths):
+ force_global_eggs_after_local_site_packages()
+ return addsitepackages(known_paths, sys_prefix=sys.real_prefix)
+
+def fixclasspath():
+ """Adjust the special classpath sys.path entries for Jython. These
+ entries should follow the base virtualenv lib directories.
+ """
+ paths = []
+ classpaths = []
+ for path in sys.path:
+ if path == '__classpath__' or path.startswith('__pyclasspath__'):
+ classpaths.append(path)
+ else:
+ paths.append(path)
+ sys.path = paths
+ sys.path.extend(classpaths)
+
+def execusercustomize():
+ """Run custom user specific code, if available."""
+ try:
+ import usercustomize
+ except ImportError:
+ pass
+
+
+def main():
+ global ENABLE_USER_SITE
+ virtual_install_main_packages()
+ abs__file__()
+ paths_in_sys = removeduppaths()
+ if (os.name == "posix" and sys.path and
+ os.path.basename(sys.path[-1]) == "Modules"):
+ addbuilddir()
+ if _is_jython:
+ fixclasspath()
+ GLOBAL_SITE_PACKAGES = not os.path.exists(os.path.join(os.path.dirname(__file__), 'no-global-site-packages.txt'))
+ if not GLOBAL_SITE_PACKAGES:
+ ENABLE_USER_SITE = False
+ if ENABLE_USER_SITE is None:
+ ENABLE_USER_SITE = check_enableusersite()
+ paths_in_sys = addsitepackages(paths_in_sys)
+ paths_in_sys = addusersitepackages(paths_in_sys)
+ if GLOBAL_SITE_PACKAGES:
+ paths_in_sys = virtual_addsitepackages(paths_in_sys)
+ if sys.platform == 'os2emx':
+ setBEGINLIBPATH()
+ setquit()
+ setcopyright()
+ sethelper()
+ aliasmbcs()
+ setencoding()
+ execsitecustomize()
+ if ENABLE_USER_SITE:
+ execusercustomize()
+ # Remove sys.setdefaultencoding() so that users cannot change the
+ # encoding after initialization. The test for presence is needed when
+ # this module is run as a script, because this code is executed twice.
+ if hasattr(sys, "setdefaultencoding"):
+ del sys.setdefaultencoding
+
+main()
+
+def _script():
+ help = """\
+ %s [--user-base] [--user-site]
+
+ Without arguments print some useful information
+ With arguments print the value of USER_BASE and/or USER_SITE separated
+ by '%s'.
+
+ Exit codes with --user-base or --user-site:
+ 0 - user site directory is enabled
+ 1 - user site directory is disabled by user
+ 2 - uses site directory is disabled by super user
+ or for security reasons
+ >2 - unknown error
+ """
+ args = sys.argv[1:]
+ if not args:
+ print "sys.path = ["
+ for dir in sys.path:
+ print " %r," % (dir,)
+ print "]"
+ def exists(path):
+ if os.path.isdir(path):
+ return "exists"
+ else:
+ return "doesn't exist"
+ print "USER_BASE: %r (%s)" % (USER_BASE, exists(USER_BASE))
+ print "USER_SITE: %r (%s)" % (USER_SITE, exists(USER_BASE))
+ print "ENABLE_USER_SITE: %r" % ENABLE_USER_SITE
+ sys.exit(0)
+
+ buffer = []
+ if '--user-base' in args:
+ buffer.append(USER_BASE)
+ if '--user-site' in args:
+ buffer.append(USER_SITE)
+
+ if buffer:
+ print os.pathsep.join(buffer)
+ if ENABLE_USER_SITE:
+ sys.exit(0)
+ elif ENABLE_USER_SITE is False:
+ sys.exit(1)
+ elif ENABLE_USER_SITE is None:
+ sys.exit(2)
+ else:
+ sys.exit(3)
+ else:
+ import textwrap
+ print textwrap.dedent(help % (sys.argv[0], os.pathsep))
+ sys.exit(10)
+
+if __name__ == '__main__':
+ _script()
diff --git a/lib/python2.7/sre.py b/lib/python2.7/sre.py
new file mode 120000
index 00000000..27f81b93
--- /dev/null
+++ b/lib/python2.7/sre.py
@@ -0,0 +1 @@
+/usr/lib/python2.7/sre.py \ No newline at end of file
diff --git a/lib/python2.7/sre_compile.py b/lib/python2.7/sre_compile.py
new file mode 120000
index 00000000..dce5da4c
--- /dev/null
+++ b/lib/python2.7/sre_compile.py
@@ -0,0 +1 @@
+/usr/lib/python2.7/sre_compile.py \ No newline at end of file
diff --git a/lib/python2.7/sre_constants.py b/lib/python2.7/sre_constants.py
new file mode 120000
index 00000000..b9c9797e
--- /dev/null
+++ b/lib/python2.7/sre_constants.py
@@ -0,0 +1 @@
+/usr/lib/python2.7/sre_constants.py \ No newline at end of file
diff --git a/lib/python2.7/sre_parse.py b/lib/python2.7/sre_parse.py
new file mode 120000
index 00000000..f33a572f
--- /dev/null
+++ b/lib/python2.7/sre_parse.py
@@ -0,0 +1 @@
+/usr/lib/python2.7/sre_parse.py \ No newline at end of file
diff --git a/lib/python2.7/stat.py b/lib/python2.7/stat.py
new file mode 120000
index 00000000..c1d654c1
--- /dev/null
+++ b/lib/python2.7/stat.py
@@ -0,0 +1 @@
+/usr/lib/python2.7/stat.py \ No newline at end of file
diff --git a/lib/python2.7/types.py b/lib/python2.7/types.py
new file mode 120000
index 00000000..55464783
--- /dev/null
+++ b/lib/python2.7/types.py
@@ -0,0 +1 @@
+/usr/lib/python2.7/types.py \ No newline at end of file
diff --git a/lib/python2.7/warnings.py b/lib/python2.7/warnings.py
new file mode 120000
index 00000000..a9c47309
--- /dev/null
+++ b/lib/python2.7/warnings.py
@@ -0,0 +1 @@
+/usr/lib/python2.7/warnings.py \ No newline at end of file
diff --git a/media/admin_media b/media/admin_media
new file mode 120000
index 00000000..585cf837
--- /dev/null
+++ b/media/admin_media
@@ -0,0 +1 @@
+/usr/lib/python2.7/site-packages/django/contrib/admin/media \ No newline at end of file
diff --git a/media/airvm_button.png b/media/airvm_button.png
new file mode 100644
index 00000000..0acadc9c
--- /dev/null
+++ b/media/airvm_button.png
Binary files differ
diff --git a/media/archnavbar/archlogo.gif b/media/archnavbar/archlogo.gif
new file mode 100644
index 00000000..e1852a06
--- /dev/null
+++ b/media/archnavbar/archlogo.gif
Binary files differ
diff --git a/media/archnavbar/archlogo.png b/media/archnavbar/archlogo.png
new file mode 100644
index 00000000..e873e94b
--- /dev/null
+++ b/media/archnavbar/archlogo.png
Binary files differ
diff --git a/media/archnavbar/archnavbar.css b/media/archnavbar/archnavbar.css
new file mode 100644
index 00000000..6b82cb92
--- /dev/null
+++ b/media/archnavbar/archnavbar.css
@@ -0,0 +1,33 @@
+/*
+ * ARCH GLOBAL NAVBAR
+ *
+ * We're forcing all generic selectors with !important
+ * to help prevent other stylesheets from interfering.
+ *
+ */
+
+/* container for the entire bar */
+#archnavbar { height: 40px !important; padding: 10px 15px !important;
+background: #000 !important; border-bottom: 5px #787DAB solid !important; }
+
+#archnavbarlogo { float: left !important; margin: 0 !important; padding: 0
+!important; height: 50px !important; width: 397px !important; }
+/* and use a proper PNG for all other modern browsers */
+html > body #archnavbarlogo { background: url('parabolabw.png') no-repeat !important; }
+
+/* move the heading/paragraph text offscreen */
+#archnavbarlogo p { margin: 0 !important; padding: 0 !important; text-indent: -9999px !important; }
+#archnavbarlogo h1 { margin: 0 !important; padding: 0 !important; text-indent: -9999px !important; }
+
+/* make the link the same size as the logo */
+#archnavbarlogo a { display: block !important; height: 50px !important; width:
+397px !important; }
+
+/* display the list inline, float it to the right and style it */
+#archnavbar ul { display: inline !important; float: right !important; list-style: none !important; margin: 0 !important; padding: 0 !important; }
+#archnavbar ul li { float: left !important; font-size: 14px !important; font-family: sans-serif !important; line-height: 45px !important; padding-right: 15px !important; padding-left: 15px !important; }
+
+/* style the links */
+#archnavbar ul#archnavbarlist li a { color: #999; font-weight: bold !important; text-decoration: none !important; }
+#archnavbar ul li a:hover { color: white !important; text-decoration: underline !important; }
+
diff --git a/media/archnavbar/parabolabw.png b/media/archnavbar/parabolabw.png
new file mode 100644
index 00000000..0e16d33f
--- /dev/null
+++ b/media/archnavbar/parabolabw.png
Binary files differ
diff --git a/media/archweb-print.css b/media/archweb-print.css
new file mode 100644
index 00000000..2946de54
--- /dev/null
+++ b/media/archweb-print.css
@@ -0,0 +1,11 @@
+/*
+ * ARCH LINUX DJANGO (MAIN SITE)
+ *
+ * Stylesheet for printing
+ */
+
+/* general styling */
+body { font: normal 100% sans-serif; }
+#content { font-size: 0.812em; }
+#archnavbarmenu, #archdev-navbar, .admin-actions { display: none; }
+
diff --git a/media/archweb.css b/media/archweb.css
new file mode 100644
index 00000000..a99dab68
--- /dev/null
+++ b/media/archweb.css
@@ -0,0 +1,269 @@
+/*
+ * ARCH LINUX DJANGO (MAIN SITE)
+ *
+ * Font sizing based on 16px browser defaults (use em):
+ * 14px = 0.875em
+ * 13px = 0.812em
+ * 12px = 0.75em
+ * 11px = 0.6875em
+ *
+ */
+
+/* import the global navbar stylesheet */
+@import url('archnavbar/archnavbar.css');
+
+/* simple reset */
+* { margin: 0; padding: 0; line-height: 1.4; }
+
+/* general styling */
+body { min-width: 650px; background: #f6f7fc; color: #222; font: normal 100% sans-serif; text-align: center; }
+p { margin: .33em 0 1em; }
+ol, ul { margin-bottom: 1em; padding-left: 2em; }
+ul { list-style: square; }
+code { font: 1.2em monospace; background: #ffa; padding: 0.15em 0.25em; }
+pre { font: 1.2em monospace; border: 1px solid #bdb; background: #dfd; padding: 0.5em; margin: 0.25em 2em; }
+blockquote { margin: 1.5em 2em; }
+input { vertical-align: middle; }
+select[multiple] { padding-top: 1px; padding-bottom: 1px; }
+select[multiple] option { padding-left: 0.3em; padding-right: 0.5em; }
+input[type=submit] { padding-left: 0.6em; padding-right: 0.6em; }
+.clear { clear: both; }
+hr { border: none; border-top: 1px solid #888; }
+img { border: 0; }
+
+/* scale fonts down to a sane default (16 * .812 = 13px) */
+#content { font-size: 0.812em; }
+
+/* link style */
+a { text-decoration: none; }
+a:link, th a:visited { color: #07b; }
+a:visited { color: #666; }
+a:hover { text-decoration: underline; color: #666; }
+a:active { color: #e90; }
+
+/* headings */
+h2 { font-size: 1.5em; margin-bottom: 0.5em; border-bottom: 1px solid #888; }
+h3 { font-size: 1.25em; margin-top: 1em; }
+h4 { font-size: 1.15em; margin-top: 1em; }
+h5 { font-size: 1em; margin-top: 1em; }
+
+/* general layout */
+div#content { width: 95%; margin: 0 auto; text-align: left; }
+div#content-left-wrapper { float: left; width: 100%; } /* req to keep content above sidebar in source code */
+div#content-left { margin: 0 340px 0 0; }
+div#content-right { float: left; width: 300px; margin-left: -300px; }
+div.box { margin-bottom: 1.5em; padding: 0.65em; background: #ecedf5; border: 1px solid #bbbedd; }
+div#footer { clear: both; margin: 2em 0 1em; }
+div#footer p { margin: 0; text-align: center; font-size: 0.85em; }
+
+/* alignment */
+div.center, table.center, img.center { width: auto; margin-left: auto; margin-right: auto; }
+p.center, td.center, th.center { text-align: center; }
+
+/* table generics */
+table { width: 100%; border-collapse: collapse; }
+table .wrap { white-space: normal; }
+th, td { white-space: nowrap; text-align: left; }
+th { vertical-align: middle; font-weight: bold; }
+td { vertical-align: top; }
+
+/* table pretty styles */
+table.pretty1 { width: auto; margin-top: 0.25em; margin-bottom: 0.5em; border-collapse: collapse; border: 1px solid #bbbedd; }
+table.pretty1 th { padding: 0.35em; background: #e4e7ff; border: 1px solid #bbbedd; }
+table.pretty1 td { padding: 0.35em; border: 1px dotted #bbbedd; }
+table.pretty2 { width: auto; margin-top: 0.25em; margin-bottom: 0.5em; border-collapse: collapse; border: 1px solid #bbb; }
+table.pretty2 th { padding: 0.35em; background: #eee; border: 1px solid #bbb; }
+table.pretty2 td { padding: 0.35em; border: 1px dotted #bbb; }
+
+/* forms and input styling */
+form p { margin: 0.5em 0; }
+fieldset { border: 0; }
+label { width: 12em; vertical-align: top; display: inline-block; font-weight: bold; }
+input[type=text], input[type=password], textarea { padding: 0.10em; }
+form.general-form label, form.general-form .form-help { width: 10em; vertical-align: top; display: inline-block; }
+form.general-form input[type=text], form.general-form textarea { width: 45%; }
+
+/* archdev navbar */
+div#archdev-navbar { margin: 1.5em 0; }
+div#archdev-navbar ul { list-style: none; margin: -0.5em 0; padding: 0; }
+div#archdev-navbar li { display: inline; margin: 0; padding: 0; font-size: 0.9em; }
+div#archdev-navbar li a { padding: 0 0.5em; color: #07b; }
+
+/* error/info messages (x pkg is already flagged out-of-date, etc) */
+#sys-message { width: 35em; text-align: center; margin: 1em auto; padding: 0.5em; background: #fff; border: 1px solid #f00; }
+#sys-message p { margin: 0; }
+
+ul.errorlist { color: red; }
+
+/*
+ * PAGE SPECIFIC STYLES
+ */
+
+/* home: introduction */
+#intro p.readmore { margin: -0.5em 0 0 0; font-size: .9em; text-align: right; }
+
+/* home: news */
+#news { margin-top: 1.5em; }
+#news h3 { border-bottom: 1px solid #888; }
+#news div { margin-bottom: 1em; }
+#news div p { margin-bottom: 0.5em; }
+#news .more { font-weight: normal; }
+#news .rss-icon { float: right; margin: -1.6em 0.4em 0 0; }
+#news h4 { font-size: 1em; margin-top: 1.5em; border-bottom: 1px dotted #bbb; }
+#news .timestamp { float: right; font-size: 0.85em; margin: -1.8em 0.5em 0 0; }
+
+/* home: pkgsearch box */
+#pkgsearch { padding: 1em 0.75em; background: #787DAB; color: #fff; border: 1px solid #3c47ab; }
+#pkgsearch label { width: auto; padding: 0.1em 0; }
+#pkgsearch input { width: 10em; float: right; font-size: 1em; color: #000; background: #fff; border: 1px solid #3c47ab; }
+
+/* home: recent pkg updates */
+#pkg-updates h3 { margin: 0 0 0.3em; }
+#pkg-updates .more { font-weight: normal; }
+#pkg-updates .rss-icon { float: right; margin: -2em 0 0 0; }
+#pkg-updates table { margin: 0; }
+#pkg-updates td.pkg-name { white-space: normal; }
+#pkg-updates td.pkg-arch { text-align: right; }
+#pkg-updates span.testing, #pkg-updates span.community-testing, span.multilib-testing { font-style: italic; }
+
+/* home: sidebar navigation */
+div#nav-sidebar ul { list-style: none; margin: 0.5em 0 0.5em 1em; padding: 0; }
+
+/* home: sponsor banners */
+div#arch-sponsors img { padding: 0.3em 0; }
+
+/* home: sidebar components (navlist, sponsors, pkgsearch, etc) */
+div.widget { margin-bottom: 1.5em; }
+
+/* feeds page */
+#rss-feeds .rss { padding-right: 20px; background: url(rss.png) top right no-repeat; }
+
+/* artwork: logo images */
+#artwork img.inverted { background: #333; padding: 0; }
+#artwork div.imagelist img { display: inline; margin: 0.75em; }
+
+/* news: article list */
+.news-nav { float: right; margin-top: -2.2em; }
+.news-nav .prev, .news-nav .next { margin-left: 1em; margin-right: 1em; }
+
+/* news: article pages */
+div.news-article .article-info { margin: 0; color: #999; }
+
+/* news: add/edit article */
+form#newsform { width: 60em; }
+form#newsform input[type=text], form#newsform textarea { width: 75%; }
+
+/* donate: donor list */
+div#donor-list ul { width: 100%; }
+/* max 4 columns, but possibly fewer if screen size doesn't allow for more */
+div#donor-list li { float: left; width: 25%; min-width: 20em; }
+
+/* download page */
+#arch-downloads h3 { border-bottom: 1px dotted #aaa; }
+table#download-torrents .cpu-arch { text-align: center; }
+table#download-mirrors { width: auto; margin-bottom: 1em; }
+table#download-mirrors td.mirror-country { padding-top: 1em; }
+table#download-mirrors td.mirror-server { padding-right: 1em; }
+table#download-mirrors a { display: block; float: right; width: 4em; }
+
+/* pkglists/devlists */
+table.results { font-size: 0.846em; border-top: 1px dotted #999; border-bottom: 1px dotted #999; }
+table.results th { padding: 0.5em 1em 0.25em 0.25em; border-bottom: 1px solid #999; white-space: nowrap; background-color:#fff; }
+table.results td { padding: .3em 1em .3em 3px; }
+table.results tr.odd { background: #fff; }
+table.results tr.even { background: #e4e7ff; }
+/* additional styles for JS sorting */
+table.results th.header { padding-right: 20px; background-image: url(nosort.gif); background-repeat: no-repeat; background-position: center right; cursor: pointer; }
+table.results th.headerSortDown { background-color: #e4e7ff; background-image: url(desc.gif); }
+table.results th.headerSortUp { background-color: #e4e7ff; background-image: url(asc.gif); }
+table.results .flagged { color: red; }
+
+/* pkglist: layout */
+div#pkglist-about { margin-top: 1.5em; }
+
+/* pkglist: results navigation */
+#pkglist-stats-top, #pkglist-stats-bottom { font-size: 0.85em; }
+#pkglist-results .pkglist-nav { float: right; margin-top: -2.2em; }
+.pkglist-nav .prev { margin-right: 1em; }
+.pkglist-nav .next { margin-right: 1em; }
+
+/* search fields and other filter selections */
+.filter-criteria h3 { font-size: 1em; margin-top:0; }
+.filter-criteria div { float: left; margin-right: 1.65em; font-size: 0.85em; }
+.filter-criteria legend { display: none; }
+.filter-criteria label { width: auto; display: block; font-weight: normal; }
+
+/* pkgdetails: details links that float on the right */
+#pkgdetails #detailslinks { float: right; }
+#pkgdetails #detailslinks h4 { margin-top: 0; margin-bottom: 0.25em; }
+#pkgdetails #detailslinks ul { list-style: none; padding: 0; margin-bottom: 0; font-size: 0.846em; }
+#pkgdetails #detailslinks > div { padding: 0.5em; margin-bottom: 1em; background: #eee; border: 1px solid #bbb; }
+#pkgdetails #actionlist .flagged { color: red; font-size: 0.9em; font-style: italic; }
+
+/* pkgdetails: pkg info */
+#pkgdetails #pkginfo { width: auto; }
+#pkgdetails #pkginfo td { padding: 0.25em 0 0.25em 1.5em; }
+
+/* pkgdetails: flag package */
+form#flag-pkg-form label { width: 10em; }
+form#flag-pkg-form textarea, form#flag-pkg-form input[type=text] { width: 45%; }
+
+/* pkgdetails: deps, required by and file lists */
+#pkgdetails #metadata h3 { background: #555; color: #fff; font-size: 1em; margin-bottom: 0.5em; padding: 0.2em 0.35em; }
+#pkgdetails #metadata ul { list-style: none; margin: 0; padding: 0; }
+#pkgdetails #metadata li { padding-left: 0.5em; }
+#pkgdetails #metadata p { padding-left: 0.5em; }
+#pkgdetails #metadata .message { font-style: italic; }
+#pkgdetails #metadata br { clear: both; }
+#pkgdetails #pkgdeps { float: left; width: 48%; margin-right: 2%; }
+#pkgdetails #pkgreqs { float: left; width: 50%; }
+#pkgdetails #pkgfiles { clear: left; padding-top: 1em; }
+
+/* dev/TU biographies */
+div#arch-bio-toc { width: 75%; margin: 0 auto; text-align: center; }
+table.arch-bio-entry td.pic { vertical-align: top; padding-right: 15px; padding-top: 10px; }
+table.arch-bio-entry td.pic img { padding: 4px; border: 1px solid #ccc; }
+table.arch-bio-entry table.bio { margin-bottom: 2em; }
+table.arch-bio-entry table.bio th { text-align: left; padding-right: 0.5em; vertical-align: top; white-space: nowrap; }
+table.arch-bio-entry table.bio td { width: 100%; padding-bottom: 0.25em; }
+
+/* dev: login/out */
+p.login-error {}
+table#dev-login { width: auto; }
+
+/* dev dashboard: flagged packages */
+form#dash-pkg-notify { text-align: right; padding: 1em 0 0; margin-top: 1em; font-size: 0.85em; border-top: 1px dotted #aaa; }
+form#dash-pkg-notify label { width: auto; font-weight: normal; }
+form#dash-pkg-notify input { vertical-align: middle; margin: 0 0.25em; }
+form#dash-pkg-notify input[type=submit] { margin-top: -0.25em; }
+form#dash-pkg-notify p { margin: 0; }
+
+/* dev dashboard: collapse stat tables by default */
+table#stats-by-maintainer, table#stats-by-repo, table#stats-by-arch { display: none; }
+table.dash-stats .key { width: 50%; }
+span.dash-click { font-weight: normal; font-size: 0.8em; color: #888; }
+div.dash-stats h3 { color: #07b; }
+
+/* read only (public) todo lists */
+#public_todo_lists .todo_list {
+ margin-left: 2em;
+}
+
+/* dev dashboard: admin actions (add news items, todo list, etc) */
+ul.admin-actions { float: right; list-style: none; margin-top: -2.5em; }
+ul.admin-actions li { display: inline; padding-left: 1.5em; }
+
+/* dev: todo list */
+.todo-table .complete { color: green; }
+.todo-table .incomplete { color: red; }
+
+/* dev: signoff page */
+#dev-signoffs ul { list-style: none; margin: 0; padding: 0; }
+#dev-signoffs .signoff-yes { color: green; font-weight: bold; }
+#dev-signoffs .signoff-no { color: red; }
+#dev-signoffs .signed-username { color: #888; margin-left: 0.5em; }
+
+/* highlight current website in the navbar */
+#archnavbar.anb-home ul li#anb-home a { color: white !important; }
+#archnavbar.anb-packages ul li#anb-packages a { color: white !important; }
+#archnavbar.anb-download ul li#anb-download a { color: white !important; }
diff --git a/media/archweb.js b/media/archweb.js
new file mode 100644
index 00000000..c5025ded
--- /dev/null
+++ b/media/archweb.js
@@ -0,0 +1,136 @@
+/* tablesorter custom parsers for various pages:
+ * devel/index.html, mirrors/status.html, todolists/view.html */
+if(typeof $.tablesorter !== "undefined") {
+ $.tablesorter.addParser({
+ id: 'pkgcount',
+ is: function(s) { return false; },
+ format: function(s) {
+ var m = s.match(/\d+/);
+ return m ? parseInt(m[0]) : 0;
+ },
+ type: 'numeric'
+ });
+ $.tablesorter.addParser({
+ id: 'todostatus',
+ is: function(s) { return false; },
+ format: function(s) {
+ return s.match(/incomplete/) ? 1 : 0;
+ },
+ type: 'numeric'
+ });
+ $.tablesorter.addParser({
+ /* sorts numeric, but put '', 'unknown', and '∞' last. */
+ id: 'mostlydigit',
+ is: function(s,table) {
+ var special = ['', 'unknown', '∞'];
+ var c = table.config;
+ return ($.inArray(s, special) > -1) || $.tablesorter.isDigit(s,c);
+ },
+ format: function(s) {
+ var special = ['', 'unknown', '∞'];
+ if($.inArray(s, special) > -1) return Number.MAX_VALUE;
+ return $.tablesorter.formatFloat(s);
+ },
+ type: 'numeric'
+ });
+ $.tablesorter.addParser({
+ /* sorts duration; put '', 'unknown', and '∞' last. */
+ id: 'duration',
+ is: function(s,table) {
+ var special = ['', 'unknown', '∞'];
+ return ($.inArray(s, special) > -1) || /^[0-9]+:[0-5][0-9]$/.test(s);
+ },
+ format: function(s) {
+ var special = ['', 'unknown', '∞'];
+ if($.inArray(s, special) > -1) return Number.MAX_VALUE;
+ matches = /^([0-9]+):([0-5][0-9])$/.exec(s);
+ return matches[1] * 60 + matches[2];
+ },
+ type: 'numeric'
+ });
+}
+
+/* news/add.html */
+function enablePreview() {
+ $('#previewbtn').click(function(event) {
+ event.preventDefault();
+ $.post('/news/preview/',
+ { data: $('#id_content').val() },
+ function(data) {
+ $('#previewdata').html(data);
+ $('.news-article').show();
+ }
+ );
+ $('#previewtitle').html($('#id_title').val());
+ });
+}
+
+/* packages/details.html */
+function ajaxifyFiles() {
+ $('#filelink').click(function(event) {
+ event.preventDefault();
+ $.get(this.href, function(data) {
+ $('#pkgfilelist').html(data);
+ });
+ });
+}
+
+/* packages/differences.html */
+filter_packages = function() {
+ // start with all rows, and then remove ones we shouldn't show
+ var rows = $("#tbody_differences").children();
+ var all_rows = rows;
+ if(!$('#id_multilib').is(':checked')) {
+ rows = rows.not(".multilib").not(".multilib-testing");
+ }
+ var arch = $("#id_archonly").val();
+ if(arch !== "all") {
+ rows = rows.filter("." + arch);
+ }
+ if(!$('#id_minor').is(':checked')) {
+ // this check is done last because it is the most expensive
+ var pat = /(.*)-(.+)/;
+ rows = rows.filter(function(index) {
+ var cells = $(this).children('td');
+
+ // all this just to get the split version out of the table cell
+ var ver_a = cells.eq(2).find('span').text().match(pat);
+ if(!ver_a) return true;
+
+ var ver_b = cells.eq(3).find('span').text().match(pat);
+ if(!ver_b) return true;
+
+ // first check pkgver
+ if(ver_a[1] !== ver_b[1]) return true;
+ // pkgver matched, so see if rounded pkgrel matches
+ if(Math.floor(parseFloat(ver_a[2])) ==
+ Math.floor(parseFloat(ver_b[2]))) return false;
+ // pkgrel didn't match, so keep the row
+ return true;
+ });
+ }
+ // hide all rows, then show the set we care about
+ all_rows.hide();
+ rows.show();
+ // make sure we update the odd/even styling from sorting
+ $('.results').trigger("applyWidgets");
+};
+filter_reset = function() {
+ $('#id_archonly').val("both");
+ $('#id_multilib').removeAttr("checked");
+ $('#id_minor').removeAttr("checked");
+ filter_packages();
+};
+
+/* todolists/view.html */
+todolist_flag = function() {
+ var link = this;
+ $.getJSON(link.href, function(data) {
+ if (data.complete) {
+ $(link).text('Complete').addClass('complete').removeClass('incomplete');
+ } else {
+ $(link).text('Incomplete').addClass('incomplete').removeClass('complete');
+ }
+ });
+ return false;
+};
diff --git a/media/asc.gif b/media/asc.gif
new file mode 100644
index 00000000..74157867
--- /dev/null
+++ b/media/asc.gif
Binary files differ
diff --git a/media/desc.gif b/media/desc.gif
new file mode 100644
index 00000000..3b30b3c5
--- /dev/null
+++ b/media/desc.gif
Binary files differ
diff --git a/media/devs/aurelien.png b/media/devs/aurelien.png
new file mode 100755
index 00000000..6b126f77
--- /dev/null
+++ b/media/devs/aurelien.png
Binary files differ
diff --git a/media/devs/gtklocker.png b/media/devs/gtklocker.png
new file mode 100755
index 00000000..58283900
--- /dev/null
+++ b/media/devs/gtklocker.png
Binary files differ
diff --git a/media/devs/me001-crop.png b/media/devs/me001-crop.png
new file mode 100755
index 00000000..8595008a
--- /dev/null
+++ b/media/devs/me001-crop.png
Binary files differ
diff --git a/media/django-jsi18n.js b/media/django-jsi18n.js
new file mode 100644
index 00000000..83562c1a
--- /dev/null
+++ b/media/django-jsi18n.js
@@ -0,0 +1,35 @@
+
+/* gettext library */
+
+var catalog = new Array();
+
+function pluralidx(count) { return (count == 1) ? 0 : 1; }
+
+
+function gettext(msgid) {
+ var value = catalog[msgid];
+ if (typeof(value) == 'undefined') {
+ return msgid;
+ } else {
+ return (typeof(value) == 'string') ? value : value[0];
+ }
+}
+
+function ngettext(singular, plural, count) {
+ value = catalog[singular];
+ if (typeof(value) == 'undefined') {
+ return (count == 1) ? singular : plural;
+ } else {
+ return value[pluralidx(count)];
+ }
+}
+
+function gettext_noop(msgid) { return msgid; }
+
+function interpolate(fmt, obj, named) {
+ if (named) {
+ return fmt.replace(/%\(\w+\)s/g, function(match){return String(obj[match.slice(2,-2)])});
+ } else {
+ return fmt.replace(/%s/g, function(match){return String(obj.shift())});
+ }
+}
diff --git a/media/donate.gif b/media/donate.gif
new file mode 100644
index 00000000..d637428b
--- /dev/null
+++ b/media/donate.gif
Binary files differ
diff --git a/media/favicon.ico b/media/favicon.ico
new file mode 100644
index 00000000..3e43f9bd
--- /dev/null
+++ b/media/favicon.ico
Binary files differ
diff --git a/media/jquery-1.4.3.min.js b/media/jquery-1.4.3.min.js
new file mode 100644
index 00000000..c941a5f7
--- /dev/null
+++ b/media/jquery-1.4.3.min.js
@@ -0,0 +1,166 @@
+/*!
+ * jQuery JavaScript Library v1.4.3
+ * http://jquery.com/
+ *
+ * Copyright 2010, John Resig
+ * Dual licensed under the MIT or GPL Version 2 licenses.
+ * http://jquery.org/license
+ *
+ * Includes Sizzle.js
+ * http://sizzlejs.com/
+ * Copyright 2010, The Dojo Foundation
+ * Released under the MIT, BSD, and GPL Licenses.
+ *
+ * Date: Thu Oct 14 23:10:06 2010 -0400
+ */
+(function(E,A){function U(){return false}function ba(){return true}function ja(a,b,d){d[0].type=a;return c.event.handle.apply(b,d)}function Ga(a){var b,d,e=[],f=[],h,k,l,n,s,v,B,D;k=c.data(this,this.nodeType?"events":"__events__");if(typeof k==="function")k=k.events;if(!(a.liveFired===this||!k||!k.live||a.button&&a.type==="click")){if(a.namespace)D=RegExp("(^|\\.)"+a.namespace.split(".").join("\\.(?:.*\\.)?")+"(\\.|$)");a.liveFired=this;var H=k.live.slice(0);for(n=0;n<H.length;n++){k=H[n];k.origType.replace(X,
+"")===a.type?f.push(k.selector):H.splice(n--,1)}f=c(a.target).closest(f,a.currentTarget);s=0;for(v=f.length;s<v;s++){B=f[s];for(n=0;n<H.length;n++){k=H[n];if(B.selector===k.selector&&(!D||D.test(k.namespace))){l=B.elem;h=null;if(k.preType==="mouseenter"||k.preType==="mouseleave"){a.type=k.preType;h=c(a.relatedTarget).closest(k.selector)[0]}if(!h||h!==l)e.push({elem:l,handleObj:k,level:B.level})}}}s=0;for(v=e.length;s<v;s++){f=e[s];if(d&&f.level>d)break;a.currentTarget=f.elem;a.data=f.handleObj.data;
+a.handleObj=f.handleObj;D=f.handleObj.origHandler.apply(f.elem,arguments);if(D===false||a.isPropagationStopped()){d=f.level;if(D===false)b=false}}return b}}function Y(a,b){return(a&&a!=="*"?a+".":"")+b.replace(Ha,"`").replace(Ia,"&")}function ka(a,b,d){if(c.isFunction(b))return c.grep(a,function(f,h){return!!b.call(f,h,f)===d});else if(b.nodeType)return c.grep(a,function(f){return f===b===d});else if(typeof b==="string"){var e=c.grep(a,function(f){return f.nodeType===1});if(Ja.test(b))return c.filter(b,
+e,!d);else b=c.filter(b,e)}return c.grep(a,function(f){return c.inArray(f,b)>=0===d})}function la(a,b){var d=0;b.each(function(){if(this.nodeName===(a[d]&&a[d].nodeName)){var e=c.data(a[d++]),f=c.data(this,e);if(e=e&&e.events){delete f.handle;f.events={};for(var h in e)for(var k in e[h])c.event.add(this,h,e[h][k],e[h][k].data)}}})}function Ka(a,b){b.src?c.ajax({url:b.src,async:false,dataType:"script"}):c.globalEval(b.text||b.textContent||b.innerHTML||"");b.parentNode&&b.parentNode.removeChild(b)}
+function ma(a,b,d){var e=b==="width"?a.offsetWidth:a.offsetHeight;if(d==="border")return e;c.each(b==="width"?La:Ma,function(){d||(e-=parseFloat(c.css(a,"padding"+this))||0);if(d==="margin")e+=parseFloat(c.css(a,"margin"+this))||0;else e-=parseFloat(c.css(a,"border"+this+"Width"))||0});return e}function ca(a,b,d,e){if(c.isArray(b)&&b.length)c.each(b,function(f,h){d||Na.test(a)?e(a,h):ca(a+"["+(typeof h==="object"||c.isArray(h)?f:"")+"]",h,d,e)});else if(!d&&b!=null&&typeof b==="object")c.isEmptyObject(b)?
+e(a,""):c.each(b,function(f,h){ca(a+"["+f+"]",h,d,e)});else e(a,b)}function S(a,b){var d={};c.each(na.concat.apply([],na.slice(0,b)),function(){d[this]=a});return d}function oa(a){if(!da[a]){var b=c("<"+a+">").appendTo("body"),d=b.css("display");b.remove();if(d==="none"||d==="")d="block";da[a]=d}return da[a]}function ea(a){return c.isWindow(a)?a:a.nodeType===9?a.defaultView||a.parentWindow:false}var u=E.document,c=function(){function a(){if(!b.isReady){try{u.documentElement.doScroll("left")}catch(i){setTimeout(a,
+1);return}b.ready()}}var b=function(i,r){return new b.fn.init(i,r)},d=E.jQuery,e=E.$,f,h=/^(?:[^<]*(<[\w\W]+>)[^>]*$|#([\w\-]+)$)/,k=/\S/,l=/^\s+/,n=/\s+$/,s=/\W/,v=/\d/,B=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,D=/^[\],:{}\s]*$/,H=/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,w=/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,G=/(?:^|:|,)(?:\s*\[)+/g,M=/(webkit)[ \/]([\w.]+)/,g=/(opera)(?:.*version)?[ \/]([\w.]+)/,j=/(msie) ([\w.]+)/,o=/(mozilla)(?:.*? rv:([\w.]+))?/,m=navigator.userAgent,p=false,
+q=[],t,x=Object.prototype.toString,C=Object.prototype.hasOwnProperty,P=Array.prototype.push,N=Array.prototype.slice,R=String.prototype.trim,Q=Array.prototype.indexOf,L={};b.fn=b.prototype={init:function(i,r){var y,z,F;if(!i)return this;if(i.nodeType){this.context=this[0]=i;this.length=1;return this}if(i==="body"&&!r&&u.body){this.context=u;this[0]=u.body;this.selector="body";this.length=1;return this}if(typeof i==="string")if((y=h.exec(i))&&(y[1]||!r))if(y[1]){F=r?r.ownerDocument||r:u;if(z=B.exec(i))if(b.isPlainObject(r)){i=
+[u.createElement(z[1])];b.fn.attr.call(i,r,true)}else i=[F.createElement(z[1])];else{z=b.buildFragment([y[1]],[F]);i=(z.cacheable?z.fragment.cloneNode(true):z.fragment).childNodes}return b.merge(this,i)}else{if((z=u.getElementById(y[2]))&&z.parentNode){if(z.id!==y[2])return f.find(i);this.length=1;this[0]=z}this.context=u;this.selector=i;return this}else if(!r&&!s.test(i)){this.selector=i;this.context=u;i=u.getElementsByTagName(i);return b.merge(this,i)}else return!r||r.jquery?(r||f).find(i):b(r).find(i);
+else if(b.isFunction(i))return f.ready(i);if(i.selector!==A){this.selector=i.selector;this.context=i.context}return b.makeArray(i,this)},selector:"",jquery:"1.4.3",length:0,size:function(){return this.length},toArray:function(){return N.call(this,0)},get:function(i){return i==null?this.toArray():i<0?this.slice(i)[0]:this[i]},pushStack:function(i,r,y){var z=b();b.isArray(i)?P.apply(z,i):b.merge(z,i);z.prevObject=this;z.context=this.context;if(r==="find")z.selector=this.selector+(this.selector?" ":
+"")+y;else if(r)z.selector=this.selector+"."+r+"("+y+")";return z},each:function(i,r){return b.each(this,i,r)},ready:function(i){b.bindReady();if(b.isReady)i.call(u,b);else q&&q.push(i);return this},eq:function(i){return i===-1?this.slice(i):this.slice(i,+i+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(N.apply(this,arguments),"slice",N.call(arguments).join(","))},map:function(i){return this.pushStack(b.map(this,function(r,y){return i.call(r,
+y,r)}))},end:function(){return this.prevObject||b(null)},push:P,sort:[].sort,splice:[].splice};b.fn.init.prototype=b.fn;b.extend=b.fn.extend=function(){var i=arguments[0]||{},r=1,y=arguments.length,z=false,F,I,K,J,fa;if(typeof i==="boolean"){z=i;i=arguments[1]||{};r=2}if(typeof i!=="object"&&!b.isFunction(i))i={};if(y===r){i=this;--r}for(;r<y;r++)if((F=arguments[r])!=null)for(I in F){K=i[I];J=F[I];if(i!==J)if(z&&J&&(b.isPlainObject(J)||(fa=b.isArray(J)))){if(fa){fa=false;clone=K&&b.isArray(K)?K:[]}else clone=
+K&&b.isPlainObject(K)?K:{};i[I]=b.extend(z,clone,J)}else if(J!==A)i[I]=J}return i};b.extend({noConflict:function(i){E.$=e;if(i)E.jQuery=d;return b},isReady:false,readyWait:1,ready:function(i){i===true&&b.readyWait--;if(!b.readyWait||i!==true&&!b.isReady){if(!u.body)return setTimeout(b.ready,1);b.isReady=true;if(!(i!==true&&--b.readyWait>0)){if(q){for(var r=0;i=q[r++];)i.call(u,b);q=null}b.fn.triggerHandler&&b(u).triggerHandler("ready")}}},bindReady:function(){if(!p){p=true;if(u.readyState==="complete")return setTimeout(b.ready,
+1);if(u.addEventListener){u.addEventListener("DOMContentLoaded",t,false);E.addEventListener("load",b.ready,false)}else if(u.attachEvent){u.attachEvent("onreadystatechange",t);E.attachEvent("onload",b.ready);var i=false;try{i=E.frameElement==null}catch(r){}u.documentElement.doScroll&&i&&a()}}},isFunction:function(i){return b.type(i)==="function"},isArray:Array.isArray||function(i){return b.type(i)==="array"},isWindow:function(i){return i&&typeof i==="object"&&"setInterval"in i},isNaN:function(i){return i==
+null||!v.test(i)||isNaN(i)},type:function(i){return i==null?String(i):L[x.call(i)]||"object"},isPlainObject:function(i){if(!i||b.type(i)!=="object"||i.nodeType||b.isWindow(i))return false;if(i.constructor&&!C.call(i,"constructor")&&!C.call(i.constructor.prototype,"isPrototypeOf"))return false;for(var r in i);return r===A||C.call(i,r)},isEmptyObject:function(i){for(var r in i)return false;return true},error:function(i){throw i;},parseJSON:function(i){if(typeof i!=="string"||!i)return null;i=b.trim(i);
+if(D.test(i.replace(H,"@").replace(w,"]").replace(G,"")))return E.JSON&&E.JSON.parse?E.JSON.parse(i):(new Function("return "+i))();else b.error("Invalid JSON: "+i)},noop:function(){},globalEval:function(i){if(i&&k.test(i)){var r=u.getElementsByTagName("head")[0]||u.documentElement,y=u.createElement("script");y.type="text/javascript";if(b.support.scriptEval)y.appendChild(u.createTextNode(i));else y.text=i;r.insertBefore(y,r.firstChild);r.removeChild(y)}},nodeName:function(i,r){return i.nodeName&&i.nodeName.toUpperCase()===
+r.toUpperCase()},each:function(i,r,y){var z,F=0,I=i.length,K=I===A||b.isFunction(i);if(y)if(K)for(z in i){if(r.apply(i[z],y)===false)break}else for(;F<I;){if(r.apply(i[F++],y)===false)break}else if(K)for(z in i){if(r.call(i[z],z,i[z])===false)break}else for(y=i[0];F<I&&r.call(y,F,y)!==false;y=i[++F]);return i},trim:R?function(i){return i==null?"":R.call(i)}:function(i){return i==null?"":i.toString().replace(l,"").replace(n,"")},makeArray:function(i,r){var y=r||[];if(i!=null){var z=b.type(i);i.length==
+null||z==="string"||z==="function"||z==="regexp"||b.isWindow(i)?P.call(y,i):b.merge(y,i)}return y},inArray:function(i,r){if(r.indexOf)return r.indexOf(i);for(var y=0,z=r.length;y<z;y++)if(r[y]===i)return y;return-1},merge:function(i,r){var y=i.length,z=0;if(typeof r.length==="number")for(var F=r.length;z<F;z++)i[y++]=r[z];else for(;r[z]!==A;)i[y++]=r[z++];i.length=y;return i},grep:function(i,r,y){var z=[],F;y=!!y;for(var I=0,K=i.length;I<K;I++){F=!!r(i[I],I);y!==F&&z.push(i[I])}return z},map:function(i,
+r,y){for(var z=[],F,I=0,K=i.length;I<K;I++){F=r(i[I],I,y);if(F!=null)z[z.length]=F}return z.concat.apply([],z)},guid:1,proxy:function(i,r,y){if(arguments.length===2)if(typeof r==="string"){y=i;i=y[r];r=A}else if(r&&!b.isFunction(r)){y=r;r=A}if(!r&&i)r=function(){return i.apply(y||this,arguments)};if(i)r.guid=i.guid=i.guid||r.guid||b.guid++;return r},access:function(i,r,y,z,F,I){var K=i.length;if(typeof r==="object"){for(var J in r)b.access(i,J,r[J],z,F,y);return i}if(y!==A){z=!I&&z&&b.isFunction(y);
+for(J=0;J<K;J++)F(i[J],r,z?y.call(i[J],J,F(i[J],r)):y,I);return i}return K?F(i[0],r):A},now:function(){return(new Date).getTime()},uaMatch:function(i){i=i.toLowerCase();i=M.exec(i)||g.exec(i)||j.exec(i)||i.indexOf("compatible")<0&&o.exec(i)||[];return{browser:i[1]||"",version:i[2]||"0"}},browser:{}});b.each("Boolean Number String Function Array Date RegExp Object".split(" "),function(i,r){L["[object "+r+"]"]=r.toLowerCase()});m=b.uaMatch(m);if(m.browser){b.browser[m.browser]=true;b.browser.version=
+m.version}if(b.browser.webkit)b.browser.safari=true;if(Q)b.inArray=function(i,r){return Q.call(r,i)};if(!/\s/.test("\u00a0")){l=/^[\s\xA0]+/;n=/[\s\xA0]+$/}f=b(u);if(u.addEventListener)t=function(){u.removeEventListener("DOMContentLoaded",t,false);b.ready()};else if(u.attachEvent)t=function(){if(u.readyState==="complete"){u.detachEvent("onreadystatechange",t);b.ready()}};return E.jQuery=E.$=b}();(function(){c.support={};var a=u.documentElement,b=u.createElement("script"),d=u.createElement("div"),
+e="script"+c.now();d.style.display="none";d.innerHTML=" <link/><table></table><a href='/a' style='color:red;float:left;opacity:.55;'>a</a><input type='checkbox'/>";var f=d.getElementsByTagName("*"),h=d.getElementsByTagName("a")[0],k=u.createElement("select"),l=k.appendChild(u.createElement("option"));if(!(!f||!f.length||!h)){c.support={leadingWhitespace:d.firstChild.nodeType===3,tbody:!d.getElementsByTagName("tbody").length,htmlSerialize:!!d.getElementsByTagName("link").length,style:/red/.test(h.getAttribute("style")),
+hrefNormalized:h.getAttribute("href")==="/a",opacity:/^0.55$/.test(h.style.opacity),cssFloat:!!h.style.cssFloat,checkOn:d.getElementsByTagName("input")[0].value==="on",optSelected:l.selected,optDisabled:false,checkClone:false,scriptEval:false,noCloneEvent:true,boxModel:null,inlineBlockNeedsLayout:false,shrinkWrapBlocks:false,reliableHiddenOffsets:true};k.disabled=true;c.support.optDisabled=!l.disabled;b.type="text/javascript";try{b.appendChild(u.createTextNode("window."+e+"=1;"))}catch(n){}a.insertBefore(b,
+a.firstChild);if(E[e]){c.support.scriptEval=true;delete E[e]}a.removeChild(b);if(d.attachEvent&&d.fireEvent){d.attachEvent("onclick",function s(){c.support.noCloneEvent=false;d.detachEvent("onclick",s)});d.cloneNode(true).fireEvent("onclick")}d=u.createElement("div");d.innerHTML="<input type='radio' name='radiotest' checked='checked'/>";a=u.createDocumentFragment();a.appendChild(d.firstChild);c.support.checkClone=a.cloneNode(true).cloneNode(true).lastChild.checked;c(function(){var s=u.createElement("div");
+s.style.width=s.style.paddingLeft="1px";u.body.appendChild(s);c.boxModel=c.support.boxModel=s.offsetWidth===2;if("zoom"in s.style){s.style.display="inline";s.style.zoom=1;c.support.inlineBlockNeedsLayout=s.offsetWidth===2;s.style.display="";s.innerHTML="<div style='width:4px;'></div>";c.support.shrinkWrapBlocks=s.offsetWidth!==2}s.innerHTML="<table><tr><td style='padding:0;display:none'></td><td>t</td></tr></table>";var v=s.getElementsByTagName("td");c.support.reliableHiddenOffsets=v[0].offsetHeight===
+0;v[0].style.display="";v[1].style.display="none";c.support.reliableHiddenOffsets=c.support.reliableHiddenOffsets&&v[0].offsetHeight===0;s.innerHTML="";u.body.removeChild(s).style.display="none"});a=function(s){var v=u.createElement("div");s="on"+s;var B=s in v;if(!B){v.setAttribute(s,"return;");B=typeof v[s]==="function"}return B};c.support.submitBubbles=a("submit");c.support.changeBubbles=a("change");a=b=d=f=h=null}})();c.props={"for":"htmlFor","class":"className",readonly:"readOnly",maxlength:"maxLength",
+cellspacing:"cellSpacing",rowspan:"rowSpan",colspan:"colSpan",tabindex:"tabIndex",usemap:"useMap",frameborder:"frameBorder"};var pa={},Oa=/^(?:\{.*\}|\[.*\])$/;c.extend({cache:{},uuid:0,expando:"jQuery"+c.now(),noData:{embed:true,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:true},data:function(a,b,d){if(c.acceptData(a)){a=a==E?pa:a;var e=a.nodeType,f=e?a[c.expando]:null,h=c.cache;if(!(e&&!f&&typeof b==="string"&&d===A)){if(e)f||(a[c.expando]=f=++c.uuid);else h=a;if(typeof b==="object")if(e)h[f]=
+c.extend(h[f],b);else c.extend(h,b);else if(e&&!h[f])h[f]={};a=e?h[f]:h;if(d!==A)a[b]=d;return typeof b==="string"?a[b]:a}}},removeData:function(a,b){if(c.acceptData(a)){a=a==E?pa:a;var d=a.nodeType,e=d?a[c.expando]:a,f=c.cache,h=d?f[e]:e;if(b){if(h){delete h[b];d&&c.isEmptyObject(h)&&c.removeData(a)}}else if(d&&c.support.deleteExpando)delete a[c.expando];else if(a.removeAttribute)a.removeAttribute(c.expando);else if(d)delete f[e];else for(var k in a)delete a[k]}},acceptData:function(a){if(a.nodeName){var b=
+c.noData[a.nodeName.toLowerCase()];if(b)return!(b===true||a.getAttribute("classid")!==b)}return true}});c.fn.extend({data:function(a,b){if(typeof a==="undefined")return this.length?c.data(this[0]):null;else if(typeof a==="object")return this.each(function(){c.data(this,a)});var d=a.split(".");d[1]=d[1]?"."+d[1]:"";if(b===A){var e=this.triggerHandler("getData"+d[1]+"!",[d[0]]);if(e===A&&this.length){e=c.data(this[0],a);if(e===A&&this[0].nodeType===1){e=this[0].getAttribute("data-"+a);if(typeof e===
+"string")try{e=e==="true"?true:e==="false"?false:e==="null"?null:!c.isNaN(e)?parseFloat(e):Oa.test(e)?c.parseJSON(e):e}catch(f){}else e=A}}return e===A&&d[1]?this.data(d[0]):e}else return this.each(function(){var h=c(this),k=[d[0],b];h.triggerHandler("setData"+d[1]+"!",k);c.data(this,a,b);h.triggerHandler("changeData"+d[1]+"!",k)})},removeData:function(a){return this.each(function(){c.removeData(this,a)})}});c.extend({queue:function(a,b,d){if(a){b=(b||"fx")+"queue";var e=c.data(a,b);if(!d)return e||
+[];if(!e||c.isArray(d))e=c.data(a,b,c.makeArray(d));else e.push(d);return e}},dequeue:function(a,b){b=b||"fx";var d=c.queue(a,b),e=d.shift();if(e==="inprogress")e=d.shift();if(e){b==="fx"&&d.unshift("inprogress");e.call(a,function(){c.dequeue(a,b)})}}});c.fn.extend({queue:function(a,b){if(typeof a!=="string"){b=a;a="fx"}if(b===A)return c.queue(this[0],a);return this.each(function(){var d=c.queue(this,a,b);a==="fx"&&d[0]!=="inprogress"&&c.dequeue(this,a)})},dequeue:function(a){return this.each(function(){c.dequeue(this,
+a)})},delay:function(a,b){a=c.fx?c.fx.speeds[a]||a:a;b=b||"fx";return this.queue(b,function(){var d=this;setTimeout(function(){c.dequeue(d,b)},a)})},clearQueue:function(a){return this.queue(a||"fx",[])}});var qa=/[\n\t]/g,ga=/\s+/,Pa=/\r/g,Qa=/^(?:href|src|style)$/,Ra=/^(?:button|input)$/i,Sa=/^(?:button|input|object|select|textarea)$/i,Ta=/^a(?:rea)?$/i,ra=/^(?:radio|checkbox)$/i;c.fn.extend({attr:function(a,b){return c.access(this,a,b,true,c.attr)},removeAttr:function(a){return this.each(function(){c.attr(this,
+a,"");this.nodeType===1&&this.removeAttribute(a)})},addClass:function(a){if(c.isFunction(a))return this.each(function(s){var v=c(this);v.addClass(a.call(this,s,v.attr("class")))});if(a&&typeof a==="string")for(var b=(a||"").split(ga),d=0,e=this.length;d<e;d++){var f=this[d];if(f.nodeType===1)if(f.className){for(var h=" "+f.className+" ",k=f.className,l=0,n=b.length;l<n;l++)if(h.indexOf(" "+b[l]+" ")<0)k+=" "+b[l];f.className=c.trim(k)}else f.className=a}return this},removeClass:function(a){if(c.isFunction(a))return this.each(function(n){var s=
+c(this);s.removeClass(a.call(this,n,s.attr("class")))});if(a&&typeof a==="string"||a===A)for(var b=(a||"").split(ga),d=0,e=this.length;d<e;d++){var f=this[d];if(f.nodeType===1&&f.className)if(a){for(var h=(" "+f.className+" ").replace(qa," "),k=0,l=b.length;k<l;k++)h=h.replace(" "+b[k]+" "," ");f.className=c.trim(h)}else f.className=""}return this},toggleClass:function(a,b){var d=typeof a,e=typeof b==="boolean";if(c.isFunction(a))return this.each(function(f){var h=c(this);h.toggleClass(a.call(this,
+f,h.attr("class"),b),b)});return this.each(function(){if(d==="string")for(var f,h=0,k=c(this),l=b,n=a.split(ga);f=n[h++];){l=e?l:!k.hasClass(f);k[l?"addClass":"removeClass"](f)}else if(d==="undefined"||d==="boolean"){this.className&&c.data(this,"__className__",this.className);this.className=this.className||a===false?"":c.data(this,"__className__")||""}})},hasClass:function(a){a=" "+a+" ";for(var b=0,d=this.length;b<d;b++)if((" "+this[b].className+" ").replace(qa," ").indexOf(a)>-1)return true;return false},
+val:function(a){if(!arguments.length){var b=this[0];if(b){if(c.nodeName(b,"option")){var d=b.attributes.value;return!d||d.specified?b.value:b.text}if(c.nodeName(b,"select")){var e=b.selectedIndex;d=[];var f=b.options;b=b.type==="select-one";if(e<0)return null;var h=b?e:0;for(e=b?e+1:f.length;h<e;h++){var k=f[h];if(k.selected&&(c.support.optDisabled?!k.disabled:k.getAttribute("disabled")===null)&&(!k.parentNode.disabled||!c.nodeName(k.parentNode,"optgroup"))){a=c(k).val();if(b)return a;d.push(a)}}return d}if(ra.test(b.type)&&
+!c.support.checkOn)return b.getAttribute("value")===null?"on":b.value;return(b.value||"").replace(Pa,"")}return A}var l=c.isFunction(a);return this.each(function(n){var s=c(this),v=a;if(this.nodeType===1){if(l)v=a.call(this,n,s.val());if(v==null)v="";else if(typeof v==="number")v+="";else if(c.isArray(v))v=c.map(v,function(D){return D==null?"":D+""});if(c.isArray(v)&&ra.test(this.type))this.checked=c.inArray(s.val(),v)>=0;else if(c.nodeName(this,"select")){var B=c.makeArray(v);c("option",this).each(function(){this.selected=
+c.inArray(c(this).val(),B)>=0});if(!B.length)this.selectedIndex=-1}else this.value=v}})}});c.extend({attrFn:{val:true,css:true,html:true,text:true,data:true,width:true,height:true,offset:true},attr:function(a,b,d,e){if(!a||a.nodeType===3||a.nodeType===8)return A;if(e&&b in c.attrFn)return c(a)[b](d);e=a.nodeType!==1||!c.isXMLDoc(a);var f=d!==A;b=e&&c.props[b]||b;if(a.nodeType===1){var h=Qa.test(b);if((b in a||a[b]!==A)&&e&&!h){if(f){b==="type"&&Ra.test(a.nodeName)&&a.parentNode&&c.error("type property can't be changed");
+if(d===null)a.nodeType===1&&a.removeAttribute(b);else a[b]=d}if(c.nodeName(a,"form")&&a.getAttributeNode(b))return a.getAttributeNode(b).nodeValue;if(b==="tabIndex")return(b=a.getAttributeNode("tabIndex"))&&b.specified?b.value:Sa.test(a.nodeName)||Ta.test(a.nodeName)&&a.href?0:A;return a[b]}if(!c.support.style&&e&&b==="style"){if(f)a.style.cssText=""+d;return a.style.cssText}f&&a.setAttribute(b,""+d);if(!a.attributes[b]&&a.hasAttribute&&!a.hasAttribute(b))return A;a=!c.support.hrefNormalized&&e&&
+h?a.getAttribute(b,2):a.getAttribute(b);return a===null?A:a}}});var X=/\.(.*)$/,ha=/^(?:textarea|input|select)$/i,Ha=/\./g,Ia=/ /g,Ua=/[^\w\s.|`]/g,Va=function(a){return a.replace(Ua,"\\$&")},sa={focusin:0,focusout:0};c.event={add:function(a,b,d,e){if(!(a.nodeType===3||a.nodeType===8)){if(c.isWindow(a)&&a!==E&&!a.frameElement)a=E;if(d===false)d=U;var f,h;if(d.handler){f=d;d=f.handler}if(!d.guid)d.guid=c.guid++;if(h=c.data(a)){var k=a.nodeType?"events":"__events__",l=h[k],n=h.handle;if(typeof l===
+"function"){n=l.handle;l=l.events}else if(!l){a.nodeType||(h[k]=h=function(){});h.events=l={}}if(!n)h.handle=n=function(){return typeof c!=="undefined"&&!c.event.triggered?c.event.handle.apply(n.elem,arguments):A};n.elem=a;b=b.split(" ");for(var s=0,v;k=b[s++];){h=f?c.extend({},f):{handler:d,data:e};if(k.indexOf(".")>-1){v=k.split(".");k=v.shift();h.namespace=v.slice(0).sort().join(".")}else{v=[];h.namespace=""}h.type=k;if(!h.guid)h.guid=d.guid;var B=l[k],D=c.event.special[k]||{};if(!B){B=l[k]=[];
+if(!D.setup||D.setup.call(a,e,v,n)===false)if(a.addEventListener)a.addEventListener(k,n,false);else a.attachEvent&&a.attachEvent("on"+k,n)}if(D.add){D.add.call(a,h);if(!h.handler.guid)h.handler.guid=d.guid}B.push(h);c.event.global[k]=true}a=null}}},global:{},remove:function(a,b,d,e){if(!(a.nodeType===3||a.nodeType===8)){if(d===false)d=U;var f,h,k=0,l,n,s,v,B,D,H=a.nodeType?"events":"__events__",w=c.data(a),G=w&&w[H];if(w&&G){if(typeof G==="function"){w=G;G=G.events}if(b&&b.type){d=b.handler;b=b.type}if(!b||
+typeof b==="string"&&b.charAt(0)==="."){b=b||"";for(f in G)c.event.remove(a,f+b)}else{for(b=b.split(" ");f=b[k++];){v=f;l=f.indexOf(".")<0;n=[];if(!l){n=f.split(".");f=n.shift();s=RegExp("(^|\\.)"+c.map(n.slice(0).sort(),Va).join("\\.(?:.*\\.)?")+"(\\.|$)")}if(B=G[f])if(d){v=c.event.special[f]||{};for(h=e||0;h<B.length;h++){D=B[h];if(d.guid===D.guid){if(l||s.test(D.namespace)){e==null&&B.splice(h--,1);v.remove&&v.remove.call(a,D)}if(e!=null)break}}if(B.length===0||e!=null&&B.length===1){if(!v.teardown||
+v.teardown.call(a,n)===false)c.removeEvent(a,f,w.handle);delete G[f]}}else for(h=0;h<B.length;h++){D=B[h];if(l||s.test(D.namespace)){c.event.remove(a,v,D.handler,h);B.splice(h--,1)}}}if(c.isEmptyObject(G)){if(b=w.handle)b.elem=null;delete w.events;delete w.handle;if(typeof w==="function")c.removeData(a,H);else c.isEmptyObject(w)&&c.removeData(a)}}}}},trigger:function(a,b,d,e){var f=a.type||a;if(!e){a=typeof a==="object"?a[c.expando]?a:c.extend(c.Event(f),a):c.Event(f);if(f.indexOf("!")>=0){a.type=
+f=f.slice(0,-1);a.exclusive=true}if(!d){a.stopPropagation();c.event.global[f]&&c.each(c.cache,function(){this.events&&this.events[f]&&c.event.trigger(a,b,this.handle.elem)})}if(!d||d.nodeType===3||d.nodeType===8)return A;a.result=A;a.target=d;b=c.makeArray(b);b.unshift(a)}a.currentTarget=d;(e=d.nodeType?c.data(d,"handle"):(c.data(d,"__events__")||{}).handle)&&e.apply(d,b);e=d.parentNode||d.ownerDocument;try{if(!(d&&d.nodeName&&c.noData[d.nodeName.toLowerCase()]))if(d["on"+f]&&d["on"+f].apply(d,b)===
+false){a.result=false;a.preventDefault()}}catch(h){}if(!a.isPropagationStopped()&&e)c.event.trigger(a,b,e,true);else if(!a.isDefaultPrevented()){e=a.target;var k,l=f.replace(X,""),n=c.nodeName(e,"a")&&l==="click",s=c.event.special[l]||{};if((!s._default||s._default.call(d,a)===false)&&!n&&!(e&&e.nodeName&&c.noData[e.nodeName.toLowerCase()])){try{if(e[l]){if(k=e["on"+l])e["on"+l]=null;c.event.triggered=true;e[l]()}}catch(v){}if(k)e["on"+l]=k;c.event.triggered=false}}},handle:function(a){var b,d,e;
+d=[];var f,h=c.makeArray(arguments);a=h[0]=c.event.fix(a||E.event);a.currentTarget=this;b=a.type.indexOf(".")<0&&!a.exclusive;if(!b){e=a.type.split(".");a.type=e.shift();d=e.slice(0).sort();e=RegExp("(^|\\.)"+d.join("\\.(?:.*\\.)?")+"(\\.|$)")}a.namespace=a.namespace||d.join(".");f=c.data(this,this.nodeType?"events":"__events__");if(typeof f==="function")f=f.events;d=(f||{})[a.type];if(f&&d){d=d.slice(0);f=0;for(var k=d.length;f<k;f++){var l=d[f];if(b||e.test(l.namespace)){a.handler=l.handler;a.data=
+l.data;a.handleObj=l;l=l.handler.apply(this,h);if(l!==A){a.result=l;if(l===false){a.preventDefault();a.stopPropagation()}}if(a.isImmediatePropagationStopped())break}}}return a.result},props:"altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode layerX layerY metaKey newValue offsetX offsetY pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target toElement view wheelDelta which".split(" "),
+fix:function(a){if(a[c.expando])return a;var b=a;a=c.Event(b);for(var d=this.props.length,e;d;){e=this.props[--d];a[e]=b[e]}if(!a.target)a.target=a.srcElement||u;if(a.target.nodeType===3)a.target=a.target.parentNode;if(!a.relatedTarget&&a.fromElement)a.relatedTarget=a.fromElement===a.target?a.toElement:a.fromElement;if(a.pageX==null&&a.clientX!=null){b=u.documentElement;d=u.body;a.pageX=a.clientX+(b&&b.scrollLeft||d&&d.scrollLeft||0)-(b&&b.clientLeft||d&&d.clientLeft||0);a.pageY=a.clientY+(b&&b.scrollTop||
+d&&d.scrollTop||0)-(b&&b.clientTop||d&&d.clientTop||0)}if(a.which==null&&(a.charCode!=null||a.keyCode!=null))a.which=a.charCode!=null?a.charCode:a.keyCode;if(!a.metaKey&&a.ctrlKey)a.metaKey=a.ctrlKey;if(!a.which&&a.button!==A)a.which=a.button&1?1:a.button&2?3:a.button&4?2:0;return a},guid:1E8,proxy:c.proxy,special:{ready:{setup:c.bindReady,teardown:c.noop},live:{add:function(a){c.event.add(this,Y(a.origType,a.selector),c.extend({},a,{handler:Ga,guid:a.handler.guid}))},remove:function(a){c.event.remove(this,
+Y(a.origType,a.selector),a)}},beforeunload:{setup:function(a,b,d){if(c.isWindow(this))this.onbeforeunload=d},teardown:function(a,b){if(this.onbeforeunload===b)this.onbeforeunload=null}}}};c.removeEvent=u.removeEventListener?function(a,b,d){a.removeEventListener&&a.removeEventListener(b,d,false)}:function(a,b,d){a.detachEvent&&a.detachEvent("on"+b,d)};c.Event=function(a){if(!this.preventDefault)return new c.Event(a);if(a&&a.type){this.originalEvent=a;this.type=a.type}else this.type=a;this.timeStamp=
+c.now();this[c.expando]=true};c.Event.prototype={preventDefault:function(){this.isDefaultPrevented=ba;var a=this.originalEvent;if(a)if(a.preventDefault)a.preventDefault();else a.returnValue=false},stopPropagation:function(){this.isPropagationStopped=ba;var a=this.originalEvent;if(a){a.stopPropagation&&a.stopPropagation();a.cancelBubble=true}},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=ba;this.stopPropagation()},isDefaultPrevented:U,isPropagationStopped:U,isImmediatePropagationStopped:U};
+var ta=function(a){var b=a.relatedTarget;try{for(;b&&b!==this;)b=b.parentNode;if(b!==this){a.type=a.data;c.event.handle.apply(this,arguments)}}catch(d){}},ua=function(a){a.type=a.data;c.event.handle.apply(this,arguments)};c.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(a,b){c.event.special[a]={setup:function(d){c.event.add(this,b,d&&d.selector?ua:ta,a)},teardown:function(d){c.event.remove(this,b,d&&d.selector?ua:ta)}}});if(!c.support.submitBubbles)c.event.special.submit={setup:function(){if(this.nodeName.toLowerCase()!==
+"form"){c.event.add(this,"click.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="submit"||d==="image")&&c(b).closest("form").length){a.liveFired=A;return ja("submit",this,arguments)}});c.event.add(this,"keypress.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="text"||d==="password")&&c(b).closest("form").length&&a.keyCode===13){a.liveFired=A;return ja("submit",this,arguments)}})}else return false},teardown:function(){c.event.remove(this,".specialSubmit")}};if(!c.support.changeBubbles){var V,
+va=function(a){var b=a.type,d=a.value;if(b==="radio"||b==="checkbox")d=a.checked;else if(b==="select-multiple")d=a.selectedIndex>-1?c.map(a.options,function(e){return e.selected}).join("-"):"";else if(a.nodeName.toLowerCase()==="select")d=a.selectedIndex;return d},Z=function(a,b){var d=a.target,e,f;if(!(!ha.test(d.nodeName)||d.readOnly)){e=c.data(d,"_change_data");f=va(d);if(a.type!=="focusout"||d.type!=="radio")c.data(d,"_change_data",f);if(!(e===A||f===e))if(e!=null||f){a.type="change";a.liveFired=
+A;return c.event.trigger(a,b,d)}}};c.event.special.change={filters:{focusout:Z,beforedeactivate:Z,click:function(a){var b=a.target,d=b.type;if(d==="radio"||d==="checkbox"||b.nodeName.toLowerCase()==="select")return Z.call(this,a)},keydown:function(a){var b=a.target,d=b.type;if(a.keyCode===13&&b.nodeName.toLowerCase()!=="textarea"||a.keyCode===32&&(d==="checkbox"||d==="radio")||d==="select-multiple")return Z.call(this,a)},beforeactivate:function(a){a=a.target;c.data(a,"_change_data",va(a))}},setup:function(){if(this.type===
+"file")return false;for(var a in V)c.event.add(this,a+".specialChange",V[a]);return ha.test(this.nodeName)},teardown:function(){c.event.remove(this,".specialChange");return ha.test(this.nodeName)}};V=c.event.special.change.filters;V.focus=V.beforeactivate}u.addEventListener&&c.each({focus:"focusin",blur:"focusout"},function(a,b){function d(e){e=c.event.fix(e);e.type=b;return c.event.trigger(e,null,e.target)}c.event.special[b]={setup:function(){sa[b]++===0&&u.addEventListener(a,d,true)},teardown:function(){--sa[b]===
+0&&u.removeEventListener(a,d,true)}}});c.each(["bind","one"],function(a,b){c.fn[b]=function(d,e,f){if(typeof d==="object"){for(var h in d)this[b](h,e,d[h],f);return this}if(c.isFunction(e)||e===false){f=e;e=A}var k=b==="one"?c.proxy(f,function(n){c(this).unbind(n,k);return f.apply(this,arguments)}):f;if(d==="unload"&&b!=="one")this.one(d,e,f);else{h=0;for(var l=this.length;h<l;h++)c.event.add(this[h],d,k,e)}return this}});c.fn.extend({unbind:function(a,b){if(typeof a==="object"&&!a.preventDefault)for(var d in a)this.unbind(d,
+a[d]);else{d=0;for(var e=this.length;d<e;d++)c.event.remove(this[d],a,b)}return this},delegate:function(a,b,d,e){return this.live(b,d,e,a)},undelegate:function(a,b,d){return arguments.length===0?this.unbind("live"):this.die(b,null,d,a)},trigger:function(a,b){return this.each(function(){c.event.trigger(a,b,this)})},triggerHandler:function(a,b){if(this[0]){var d=c.Event(a);d.preventDefault();d.stopPropagation();c.event.trigger(d,b,this[0]);return d.result}},toggle:function(a){for(var b=arguments,d=
+1;d<b.length;)c.proxy(a,b[d++]);return this.click(c.proxy(a,function(e){var f=(c.data(this,"lastToggle"+a.guid)||0)%d;c.data(this,"lastToggle"+a.guid,f+1);e.preventDefault();return b[f].apply(this,arguments)||false}))},hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)}});var wa={focus:"focusin",blur:"focusout",mouseenter:"mouseover",mouseleave:"mouseout"};c.each(["live","die"],function(a,b){c.fn[b]=function(d,e,f,h){var k,l=0,n,s,v=h||this.selector;h=h?this:c(this.context);if(typeof d===
+"object"&&!d.preventDefault){for(k in d)h[b](k,e,d[k],v);return this}if(c.isFunction(e)){f=e;e=A}for(d=(d||"").split(" ");(k=d[l++])!=null;){n=X.exec(k);s="";if(n){s=n[0];k=k.replace(X,"")}if(k==="hover")d.push("mouseenter"+s,"mouseleave"+s);else{n=k;if(k==="focus"||k==="blur"){d.push(wa[k]+s);k+=s}else k=(wa[k]||k)+s;if(b==="live"){s=0;for(var B=h.length;s<B;s++)c.event.add(h[s],"live."+Y(k,v),{data:e,selector:v,handler:f,origType:k,origHandler:f,preType:n})}else h.unbind("live."+Y(k,v),f)}}return this}});
+c.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error".split(" "),function(a,b){c.fn[b]=function(d,e){if(e==null){e=d;d=null}return arguments.length>0?this.bind(b,d,e):this.trigger(b)};if(c.attrFn)c.attrFn[b]=true});E.attachEvent&&!E.addEventListener&&c(E).bind("unload",function(){for(var a in c.cache)if(c.cache[a].handle)try{c.event.remove(c.cache[a].handle.elem)}catch(b){}});
+(function(){function a(g,j,o,m,p,q){p=0;for(var t=m.length;p<t;p++){var x=m[p];if(x){x=x[g];for(var C=false;x;){if(x.sizcache===o){C=m[x.sizset];break}if(x.nodeType===1&&!q){x.sizcache=o;x.sizset=p}if(x.nodeName.toLowerCase()===j){C=x;break}x=x[g]}m[p]=C}}}function b(g,j,o,m,p,q){p=0;for(var t=m.length;p<t;p++){var x=m[p];if(x){x=x[g];for(var C=false;x;){if(x.sizcache===o){C=m[x.sizset];break}if(x.nodeType===1){if(!q){x.sizcache=o;x.sizset=p}if(typeof j!=="string"){if(x===j){C=true;break}}else if(l.filter(j,
+[x]).length>0){C=x;break}}x=x[g]}m[p]=C}}}var d=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^\[\]]*\]|['"][^'"]*['"]|[^\[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,e=0,f=Object.prototype.toString,h=false,k=true;[0,0].sort(function(){k=false;return 0});var l=function(g,j,o,m){o=o||[];var p=j=j||u;if(j.nodeType!==1&&j.nodeType!==9)return[];if(!g||typeof g!=="string")return o;var q=[],t,x,C,P,N=true,R=l.isXML(j),Q=g,L;do{d.exec("");if(t=d.exec(Q)){Q=t[3];q.push(t[1]);if(t[2]){P=t[3];
+break}}}while(t);if(q.length>1&&s.exec(g))if(q.length===2&&n.relative[q[0]])x=M(q[0]+q[1],j);else for(x=n.relative[q[0]]?[j]:l(q.shift(),j);q.length;){g=q.shift();if(n.relative[g])g+=q.shift();x=M(g,x)}else{if(!m&&q.length>1&&j.nodeType===9&&!R&&n.match.ID.test(q[0])&&!n.match.ID.test(q[q.length-1])){t=l.find(q.shift(),j,R);j=t.expr?l.filter(t.expr,t.set)[0]:t.set[0]}if(j){t=m?{expr:q.pop(),set:D(m)}:l.find(q.pop(),q.length===1&&(q[0]==="~"||q[0]==="+")&&j.parentNode?j.parentNode:j,R);x=t.expr?l.filter(t.expr,
+t.set):t.set;if(q.length>0)C=D(x);else N=false;for(;q.length;){t=L=q.pop();if(n.relative[L])t=q.pop();else L="";if(t==null)t=j;n.relative[L](C,t,R)}}else C=[]}C||(C=x);C||l.error(L||g);if(f.call(C)==="[object Array]")if(N)if(j&&j.nodeType===1)for(g=0;C[g]!=null;g++){if(C[g]&&(C[g]===true||C[g].nodeType===1&&l.contains(j,C[g])))o.push(x[g])}else for(g=0;C[g]!=null;g++)C[g]&&C[g].nodeType===1&&o.push(x[g]);else o.push.apply(o,C);else D(C,o);if(P){l(P,p,o,m);l.uniqueSort(o)}return o};l.uniqueSort=function(g){if(w){h=
+k;g.sort(w);if(h)for(var j=1;j<g.length;j++)g[j]===g[j-1]&&g.splice(j--,1)}return g};l.matches=function(g,j){return l(g,null,null,j)};l.matchesSelector=function(g,j){return l(j,null,null,[g]).length>0};l.find=function(g,j,o){var m;if(!g)return[];for(var p=0,q=n.order.length;p<q;p++){var t=n.order[p],x;if(x=n.leftMatch[t].exec(g)){var C=x[1];x.splice(1,1);if(C.substr(C.length-1)!=="\\"){x[1]=(x[1]||"").replace(/\\/g,"");m=n.find[t](x,j,o);if(m!=null){g=g.replace(n.match[t],"");break}}}}m||(m=j.getElementsByTagName("*"));
+return{set:m,expr:g}};l.filter=function(g,j,o,m){for(var p=g,q=[],t=j,x,C,P=j&&j[0]&&l.isXML(j[0]);g&&j.length;){for(var N in n.filter)if((x=n.leftMatch[N].exec(g))!=null&&x[2]){var R=n.filter[N],Q,L;L=x[1];C=false;x.splice(1,1);if(L.substr(L.length-1)!=="\\"){if(t===q)q=[];if(n.preFilter[N])if(x=n.preFilter[N](x,t,o,q,m,P)){if(x===true)continue}else C=Q=true;if(x)for(var i=0;(L=t[i])!=null;i++)if(L){Q=R(L,x,i,t);var r=m^!!Q;if(o&&Q!=null)if(r)C=true;else t[i]=false;else if(r){q.push(L);C=true}}if(Q!==
+A){o||(t=q);g=g.replace(n.match[N],"");if(!C)return[];break}}}if(g===p)if(C==null)l.error(g);else break;p=g}return t};l.error=function(g){throw"Syntax error, unrecognized expression: "+g;};var n=l.selectors={order:["ID","NAME","TAG"],match:{ID:/#((?:[\w\u00c0-\uFFFF\-]|\\.)+)/,CLASS:/\.((?:[\w\u00c0-\uFFFF\-]|\\.)+)/,NAME:/\[name=['"]*((?:[\w\u00c0-\uFFFF\-]|\\.)+)['"]*\]/,ATTR:/\[\s*((?:[\w\u00c0-\uFFFF\-]|\\.)+)\s*(?:(\S?=)\s*(['"]*)(.*?)\3|)\s*\]/,TAG:/^((?:[\w\u00c0-\uFFFF\*\-]|\\.)+)/,CHILD:/:(only|nth|last|first)-child(?:\((even|odd|[\dn+\-]*)\))?/,
+POS:/:(nth|eq|gt|lt|first|last|even|odd)(?:\((\d*)\))?(?=[^\-]|$)/,PSEUDO:/:((?:[\w\u00c0-\uFFFF\-]|\\.)+)(?:\((['"]?)((?:\([^\)]+\)|[^\(\)]*)+)\2\))?/},leftMatch:{},attrMap:{"class":"className","for":"htmlFor"},attrHandle:{href:function(g){return g.getAttribute("href")}},relative:{"+":function(g,j){var o=typeof j==="string",m=o&&!/\W/.test(j);o=o&&!m;if(m)j=j.toLowerCase();m=0;for(var p=g.length,q;m<p;m++)if(q=g[m]){for(;(q=q.previousSibling)&&q.nodeType!==1;);g[m]=o||q&&q.nodeName.toLowerCase()===
+j?q||false:q===j}o&&l.filter(j,g,true)},">":function(g,j){var o=typeof j==="string",m,p=0,q=g.length;if(o&&!/\W/.test(j))for(j=j.toLowerCase();p<q;p++){if(m=g[p]){o=m.parentNode;g[p]=o.nodeName.toLowerCase()===j?o:false}}else{for(;p<q;p++)if(m=g[p])g[p]=o?m.parentNode:m.parentNode===j;o&&l.filter(j,g,true)}},"":function(g,j,o){var m=e++,p=b,q;if(typeof j==="string"&&!/\W/.test(j)){q=j=j.toLowerCase();p=a}p("parentNode",j,m,g,q,o)},"~":function(g,j,o){var m=e++,p=b,q;if(typeof j==="string"&&!/\W/.test(j)){q=
+j=j.toLowerCase();p=a}p("previousSibling",j,m,g,q,o)}},find:{ID:function(g,j,o){if(typeof j.getElementById!=="undefined"&&!o)return(g=j.getElementById(g[1]))&&g.parentNode?[g]:[]},NAME:function(g,j){if(typeof j.getElementsByName!=="undefined"){for(var o=[],m=j.getElementsByName(g[1]),p=0,q=m.length;p<q;p++)m[p].getAttribute("name")===g[1]&&o.push(m[p]);return o.length===0?null:o}},TAG:function(g,j){return j.getElementsByTagName(g[1])}},preFilter:{CLASS:function(g,j,o,m,p,q){g=" "+g[1].replace(/\\/g,
+"")+" ";if(q)return g;q=0;for(var t;(t=j[q])!=null;q++)if(t)if(p^(t.className&&(" "+t.className+" ").replace(/[\t\n]/g," ").indexOf(g)>=0))o||m.push(t);else if(o)j[q]=false;return false},ID:function(g){return g[1].replace(/\\/g,"")},TAG:function(g){return g[1].toLowerCase()},CHILD:function(g){if(g[1]==="nth"){var j=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(g[2]==="even"&&"2n"||g[2]==="odd"&&"2n+1"||!/\D/.test(g[2])&&"0n+"+g[2]||g[2]);g[2]=j[1]+(j[2]||1)-0;g[3]=j[3]-0}g[0]=e++;return g},ATTR:function(g,j,o,
+m,p,q){j=g[1].replace(/\\/g,"");if(!q&&n.attrMap[j])g[1]=n.attrMap[j];if(g[2]==="~=")g[4]=" "+g[4]+" ";return g},PSEUDO:function(g,j,o,m,p){if(g[1]==="not")if((d.exec(g[3])||"").length>1||/^\w/.test(g[3]))g[3]=l(g[3],null,null,j);else{g=l.filter(g[3],j,o,true^p);o||m.push.apply(m,g);return false}else if(n.match.POS.test(g[0])||n.match.CHILD.test(g[0]))return true;return g},POS:function(g){g.unshift(true);return g}},filters:{enabled:function(g){return g.disabled===false&&g.type!=="hidden"},disabled:function(g){return g.disabled===
+true},checked:function(g){return g.checked===true},selected:function(g){return g.selected===true},parent:function(g){return!!g.firstChild},empty:function(g){return!g.firstChild},has:function(g,j,o){return!!l(o[3],g).length},header:function(g){return/h\d/i.test(g.nodeName)},text:function(g){return"text"===g.type},radio:function(g){return"radio"===g.type},checkbox:function(g){return"checkbox"===g.type},file:function(g){return"file"===g.type},password:function(g){return"password"===g.type},submit:function(g){return"submit"===
+g.type},image:function(g){return"image"===g.type},reset:function(g){return"reset"===g.type},button:function(g){return"button"===g.type||g.nodeName.toLowerCase()==="button"},input:function(g){return/input|select|textarea|button/i.test(g.nodeName)}},setFilters:{first:function(g,j){return j===0},last:function(g,j,o,m){return j===m.length-1},even:function(g,j){return j%2===0},odd:function(g,j){return j%2===1},lt:function(g,j,o){return j<o[3]-0},gt:function(g,j,o){return j>o[3]-0},nth:function(g,j,o){return o[3]-
+0===j},eq:function(g,j,o){return o[3]-0===j}},filter:{PSEUDO:function(g,j,o,m){var p=j[1],q=n.filters[p];if(q)return q(g,o,j,m);else if(p==="contains")return(g.textContent||g.innerText||l.getText([g])||"").indexOf(j[3])>=0;else if(p==="not"){j=j[3];o=0;for(m=j.length;o<m;o++)if(j[o]===g)return false;return true}else l.error("Syntax error, unrecognized expression: "+p)},CHILD:function(g,j){var o=j[1],m=g;switch(o){case "only":case "first":for(;m=m.previousSibling;)if(m.nodeType===1)return false;if(o===
+"first")return true;m=g;case "last":for(;m=m.nextSibling;)if(m.nodeType===1)return false;return true;case "nth":o=j[2];var p=j[3];if(o===1&&p===0)return true;var q=j[0],t=g.parentNode;if(t&&(t.sizcache!==q||!g.nodeIndex)){var x=0;for(m=t.firstChild;m;m=m.nextSibling)if(m.nodeType===1)m.nodeIndex=++x;t.sizcache=q}m=g.nodeIndex-p;return o===0?m===0:m%o===0&&m/o>=0}},ID:function(g,j){return g.nodeType===1&&g.getAttribute("id")===j},TAG:function(g,j){return j==="*"&&g.nodeType===1||g.nodeName.toLowerCase()===
+j},CLASS:function(g,j){return(" "+(g.className||g.getAttribute("class"))+" ").indexOf(j)>-1},ATTR:function(g,j){var o=j[1];o=n.attrHandle[o]?n.attrHandle[o](g):g[o]!=null?g[o]:g.getAttribute(o);var m=o+"",p=j[2],q=j[4];return o==null?p==="!=":p==="="?m===q:p==="*="?m.indexOf(q)>=0:p==="~="?(" "+m+" ").indexOf(q)>=0:!q?m&&o!==false:p==="!="?m!==q:p==="^="?m.indexOf(q)===0:p==="$="?m.substr(m.length-q.length)===q:p==="|="?m===q||m.substr(0,q.length+1)===q+"-":false},POS:function(g,j,o,m){var p=n.setFilters[j[2]];
+if(p)return p(g,o,j,m)}}},s=n.match.POS,v=function(g,j){return"\\"+(j-0+1)},B;for(B in n.match){n.match[B]=RegExp(n.match[B].source+/(?![^\[]*\])(?![^\(]*\))/.source);n.leftMatch[B]=RegExp(/(^(?:.|\r|\n)*?)/.source+n.match[B].source.replace(/\\(\d+)/g,v))}var D=function(g,j){g=Array.prototype.slice.call(g,0);if(j){j.push.apply(j,g);return j}return g};try{Array.prototype.slice.call(u.documentElement.childNodes,0)}catch(H){D=function(g,j){var o=j||[],m=0;if(f.call(g)==="[object Array]")Array.prototype.push.apply(o,
+g);else if(typeof g.length==="number")for(var p=g.length;m<p;m++)o.push(g[m]);else for(;g[m];m++)o.push(g[m]);return o}}var w,G;if(u.documentElement.compareDocumentPosition)w=function(g,j){if(g===j){h=true;return 0}if(!g.compareDocumentPosition||!j.compareDocumentPosition)return g.compareDocumentPosition?-1:1;return g.compareDocumentPosition(j)&4?-1:1};else{w=function(g,j){var o=[],m=[],p=g.parentNode,q=j.parentNode,t=p;if(g===j){h=true;return 0}else if(p===q)return G(g,j);else if(p){if(!q)return 1}else return-1;
+for(;t;){o.unshift(t);t=t.parentNode}for(t=q;t;){m.unshift(t);t=t.parentNode}p=o.length;q=m.length;for(t=0;t<p&&t<q;t++)if(o[t]!==m[t])return G(o[t],m[t]);return t===p?G(g,m[t],-1):G(o[t],j,1)};G=function(g,j,o){if(g===j)return o;for(g=g.nextSibling;g;){if(g===j)return-1;g=g.nextSibling}return 1}}l.getText=function(g){for(var j="",o,m=0;g[m];m++){o=g[m];if(o.nodeType===3||o.nodeType===4)j+=o.nodeValue;else if(o.nodeType!==8)j+=l.getText(o.childNodes)}return j};(function(){var g=u.createElement("div"),
+j="script"+(new Date).getTime();g.innerHTML="<a name='"+j+"'/>";var o=u.documentElement;o.insertBefore(g,o.firstChild);if(u.getElementById(j)){n.find.ID=function(m,p,q){if(typeof p.getElementById!=="undefined"&&!q)return(p=p.getElementById(m[1]))?p.id===m[1]||typeof p.getAttributeNode!=="undefined"&&p.getAttributeNode("id").nodeValue===m[1]?[p]:A:[]};n.filter.ID=function(m,p){var q=typeof m.getAttributeNode!=="undefined"&&m.getAttributeNode("id");return m.nodeType===1&&q&&q.nodeValue===p}}o.removeChild(g);
+o=g=null})();(function(){var g=u.createElement("div");g.appendChild(u.createComment(""));if(g.getElementsByTagName("*").length>0)n.find.TAG=function(j,o){var m=o.getElementsByTagName(j[1]);if(j[1]==="*"){for(var p=[],q=0;m[q];q++)m[q].nodeType===1&&p.push(m[q]);m=p}return m};g.innerHTML="<a href='#'></a>";if(g.firstChild&&typeof g.firstChild.getAttribute!=="undefined"&&g.firstChild.getAttribute("href")!=="#")n.attrHandle.href=function(j){return j.getAttribute("href",2)};g=null})();u.querySelectorAll&&
+function(){var g=l,j=u.createElement("div");j.innerHTML="<p class='TEST'></p>";if(!(j.querySelectorAll&&j.querySelectorAll(".TEST").length===0)){l=function(m,p,q,t){p=p||u;if(!t&&!l.isXML(p))if(p.nodeType===9)try{return D(p.querySelectorAll(m),q)}catch(x){}else if(p.nodeType===1&&p.nodeName.toLowerCase()!=="object"){var C=p.id,P=p.id="__sizzle__";try{return D(p.querySelectorAll("#"+P+" "+m),q)}catch(N){}finally{if(C)p.id=C;else p.removeAttribute("id")}}return g(m,p,q,t)};for(var o in g)l[o]=g[o];
+j=null}}();(function(){var g=u.documentElement,j=g.matchesSelector||g.mozMatchesSelector||g.webkitMatchesSelector||g.msMatchesSelector,o=false;try{j.call(u.documentElement,":sizzle")}catch(m){o=true}if(j)l.matchesSelector=function(p,q){try{if(o||!n.match.PSEUDO.test(q))return j.call(p,q)}catch(t){}return l(q,null,null,[p]).length>0}})();(function(){var g=u.createElement("div");g.innerHTML="<div class='test e'></div><div class='test'></div>";if(!(!g.getElementsByClassName||g.getElementsByClassName("e").length===
+0)){g.lastChild.className="e";if(g.getElementsByClassName("e").length!==1){n.order.splice(1,0,"CLASS");n.find.CLASS=function(j,o,m){if(typeof o.getElementsByClassName!=="undefined"&&!m)return o.getElementsByClassName(j[1])};g=null}}})();l.contains=u.documentElement.contains?function(g,j){return g!==j&&(g.contains?g.contains(j):true)}:function(g,j){return!!(g.compareDocumentPosition(j)&16)};l.isXML=function(g){return(g=(g?g.ownerDocument||g:0).documentElement)?g.nodeName!=="HTML":false};var M=function(g,
+j){for(var o=[],m="",p,q=j.nodeType?[j]:j;p=n.match.PSEUDO.exec(g);){m+=p[0];g=g.replace(n.match.PSEUDO,"")}g=n.relative[g]?g+"*":g;p=0;for(var t=q.length;p<t;p++)l(g,q[p],o);return l.filter(m,o)};c.find=l;c.expr=l.selectors;c.expr[":"]=c.expr.filters;c.unique=l.uniqueSort;c.text=l.getText;c.isXMLDoc=l.isXML;c.contains=l.contains})();var Wa=/Until$/,Xa=/^(?:parents|prevUntil|prevAll)/,Ya=/,/,Ja=/^.[^:#\[\.,]*$/,Za=Array.prototype.slice,$a=c.expr.match.POS;c.fn.extend({find:function(a){for(var b=this.pushStack("",
+"find",a),d=0,e=0,f=this.length;e<f;e++){d=b.length;c.find(a,this[e],b);if(e>0)for(var h=d;h<b.length;h++)for(var k=0;k<d;k++)if(b[k]===b[h]){b.splice(h--,1);break}}return b},has:function(a){var b=c(a);return this.filter(function(){for(var d=0,e=b.length;d<e;d++)if(c.contains(this,b[d]))return true})},not:function(a){return this.pushStack(ka(this,a,false),"not",a)},filter:function(a){return this.pushStack(ka(this,a,true),"filter",a)},is:function(a){return!!a&&c.filter(a,this).length>0},closest:function(a,
+b){var d=[],e,f,h=this[0];if(c.isArray(a)){var k={},l,n=1;if(h&&a.length){e=0;for(f=a.length;e<f;e++){l=a[e];k[l]||(k[l]=c.expr.match.POS.test(l)?c(l,b||this.context):l)}for(;h&&h.ownerDocument&&h!==b;){for(l in k){e=k[l];if(e.jquery?e.index(h)>-1:c(h).is(e))d.push({selector:l,elem:h,level:n})}h=h.parentNode;n++}}return d}k=$a.test(a)?c(a,b||this.context):null;e=0;for(f=this.length;e<f;e++)for(h=this[e];h;)if(k?k.index(h)>-1:c.find.matchesSelector(h,a)){d.push(h);break}else{h=h.parentNode;if(!h||
+!h.ownerDocument||h===b)break}d=d.length>1?c.unique(d):d;return this.pushStack(d,"closest",a)},index:function(a){if(!a||typeof a==="string")return c.inArray(this[0],a?c(a):this.parent().children());return c.inArray(a.jquery?a[0]:a,this)},add:function(a,b){var d=typeof a==="string"?c(a,b||this.context):c.makeArray(a),e=c.merge(this.get(),d);return this.pushStack(!d[0]||!d[0].parentNode||d[0].parentNode.nodeType===11||!e[0]||!e[0].parentNode||e[0].parentNode.nodeType===11?e:c.unique(e))},andSelf:function(){return this.add(this.prevObject)}});
+c.each({parent:function(a){return(a=a.parentNode)&&a.nodeType!==11?a:null},parents:function(a){return c.dir(a,"parentNode")},parentsUntil:function(a,b,d){return c.dir(a,"parentNode",d)},next:function(a){return c.nth(a,2,"nextSibling")},prev:function(a){return c.nth(a,2,"previousSibling")},nextAll:function(a){return c.dir(a,"nextSibling")},prevAll:function(a){return c.dir(a,"previousSibling")},nextUntil:function(a,b,d){return c.dir(a,"nextSibling",d)},prevUntil:function(a,b,d){return c.dir(a,"previousSibling",
+d)},siblings:function(a){return c.sibling(a.parentNode.firstChild,a)},children:function(a){return c.sibling(a.firstChild)},contents:function(a){return c.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:c.makeArray(a.childNodes)}},function(a,b){c.fn[a]=function(d,e){var f=c.map(this,b,d);Wa.test(a)||(e=d);if(e&&typeof e==="string")f=c.filter(e,f);f=this.length>1?c.unique(f):f;if((this.length>1||Ya.test(e))&&Xa.test(a))f=f.reverse();return this.pushStack(f,a,Za.call(arguments).join(","))}});
+c.extend({filter:function(a,b,d){if(d)a=":not("+a+")";return b.length===1?c.find.matchesSelector(b[0],a)?[b[0]]:[]:c.find.matches(a,b)},dir:function(a,b,d){var e=[];for(a=a[b];a&&a.nodeType!==9&&(d===A||a.nodeType!==1||!c(a).is(d));){a.nodeType===1&&e.push(a);a=a[b]}return e},nth:function(a,b,d){b=b||1;for(var e=0;a;a=a[d])if(a.nodeType===1&&++e===b)break;return a},sibling:function(a,b){for(var d=[];a;a=a.nextSibling)a.nodeType===1&&a!==b&&d.push(a);return d}});var xa=/ jQuery\d+="(?:\d+|null)"/g,
+$=/^\s+/,ya=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/ig,za=/<([\w:]+)/,ab=/<tbody/i,bb=/<|&#?\w+;/,Aa=/<(?:script|object|embed|option|style)/i,Ba=/checked\s*(?:[^=]|=\s*.checked.)/i,cb=/\=([^="'>\s]+\/)>/g,O={option:[1,"<select multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],
+area:[1,"<map>","</map>"],_default:[0,"",""]};O.optgroup=O.option;O.tbody=O.tfoot=O.colgroup=O.caption=O.thead;O.th=O.td;if(!c.support.htmlSerialize)O._default=[1,"div<div>","</div>"];c.fn.extend({text:function(a){if(c.isFunction(a))return this.each(function(b){var d=c(this);d.text(a.call(this,b,d.text()))});if(typeof a!=="object"&&a!==A)return this.empty().append((this[0]&&this[0].ownerDocument||u).createTextNode(a));return c.text(this)},wrapAll:function(a){if(c.isFunction(a))return this.each(function(d){c(this).wrapAll(a.call(this,
+d))});if(this[0]){var b=c(a,this[0].ownerDocument).eq(0).clone(true);this[0].parentNode&&b.insertBefore(this[0]);b.map(function(){for(var d=this;d.firstChild&&d.firstChild.nodeType===1;)d=d.firstChild;return d}).append(this)}return this},wrapInner:function(a){if(c.isFunction(a))return this.each(function(b){c(this).wrapInner(a.call(this,b))});return this.each(function(){var b=c(this),d=b.contents();d.length?d.wrapAll(a):b.append(a)})},wrap:function(a){return this.each(function(){c(this).wrapAll(a)})},
+unwrap:function(){return this.parent().each(function(){c.nodeName(this,"body")||c(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.appendChild(a)})},prepend:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this)});else if(arguments.length){var a=
+c(arguments[0]);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this.nextSibling)});else if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,c(arguments[0]).toArray());return a}},remove:function(a,b){for(var d=0,e;(e=this[d])!=null;d++)if(!a||c.filter(a,[e]).length){if(!b&&e.nodeType===1){c.cleanData(e.getElementsByTagName("*"));
+c.cleanData([e])}e.parentNode&&e.parentNode.removeChild(e)}return this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++)for(b.nodeType===1&&c.cleanData(b.getElementsByTagName("*"));b.firstChild;)b.removeChild(b.firstChild);return this},clone:function(a){var b=this.map(function(){if(!c.support.noCloneEvent&&!c.isXMLDoc(this)){var d=this.outerHTML,e=this.ownerDocument;if(!d){d=e.createElement("div");d.appendChild(this.cloneNode(true));d=d.innerHTML}return c.clean([d.replace(xa,"").replace(cb,'="$1">').replace($,
+"")],e)[0]}else return this.cloneNode(true)});if(a===true){la(this,b);la(this.find("*"),b.find("*"))}return b},html:function(a){if(a===A)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(xa,""):null;else if(typeof a==="string"&&!Aa.test(a)&&(c.support.leadingWhitespace||!$.test(a))&&!O[(za.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(ya,"<$1></$2>");try{for(var b=0,d=this.length;b<d;b++)if(this[b].nodeType===1){c.cleanData(this[b].getElementsByTagName("*"));this[b].innerHTML=a}}catch(e){this.empty().append(a)}}else c.isFunction(a)?
+this.each(function(f){var h=c(this);h.html(a.call(this,f,h.html()))}):this.empty().append(a);return this},replaceWith:function(a){if(this[0]&&this[0].parentNode){if(c.isFunction(a))return this.each(function(b){var d=c(this),e=d.html();d.replaceWith(a.call(this,b,e))});if(typeof a!=="string")a=c(a).detach();return this.each(function(){var b=this.nextSibling,d=this.parentNode;c(this).remove();b?c(b).before(a):c(d).append(a)})}else return this.pushStack(c(c.isFunction(a)?a():a),"replaceWith",a)},detach:function(a){return this.remove(a,
+true)},domManip:function(a,b,d){var e,f,h=a[0],k=[],l;if(!c.support.checkClone&&arguments.length===3&&typeof h==="string"&&Ba.test(h))return this.each(function(){c(this).domManip(a,b,d,true)});if(c.isFunction(h))return this.each(function(s){var v=c(this);a[0]=h.call(this,s,b?v.html():A);v.domManip(a,b,d)});if(this[0]){e=h&&h.parentNode;e=c.support.parentNode&&e&&e.nodeType===11&&e.childNodes.length===this.length?{fragment:e}:c.buildFragment(a,this,k);l=e.fragment;if(f=l.childNodes.length===1?l=l.firstChild:
+l.firstChild){b=b&&c.nodeName(f,"tr");f=0;for(var n=this.length;f<n;f++)d.call(b?c.nodeName(this[f],"table")?this[f].getElementsByTagName("tbody")[0]||this[f].appendChild(this[f].ownerDocument.createElement("tbody")):this[f]:this[f],f>0||e.cacheable||this.length>1?l.cloneNode(true):l)}k.length&&c.each(k,Ka)}return this}});c.buildFragment=function(a,b,d){var e,f,h;b=b&&b[0]?b[0].ownerDocument||b[0]:u;if(a.length===1&&typeof a[0]==="string"&&a[0].length<512&&b===u&&!Aa.test(a[0])&&(c.support.checkClone||
+!Ba.test(a[0]))){f=true;if(h=c.fragments[a[0]])if(h!==1)e=h}if(!e){e=b.createDocumentFragment();c.clean(a,b,e,d)}if(f)c.fragments[a[0]]=h?e:1;return{fragment:e,cacheable:f}};c.fragments={};c.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){c.fn[a]=function(d){var e=[];d=c(d);var f=this.length===1&&this[0].parentNode;if(f&&f.nodeType===11&&f.childNodes.length===1&&d.length===1){d[b](this[0]);return this}else{f=0;for(var h=
+d.length;f<h;f++){var k=(f>0?this.clone(true):this).get();c(d[f])[b](k);e=e.concat(k)}return this.pushStack(e,a,d.selector)}}});c.extend({clean:function(a,b,d,e){b=b||u;if(typeof b.createElement==="undefined")b=b.ownerDocument||b[0]&&b[0].ownerDocument||u;for(var f=[],h=0,k;(k=a[h])!=null;h++){if(typeof k==="number")k+="";if(k){if(typeof k==="string"&&!bb.test(k))k=b.createTextNode(k);else if(typeof k==="string"){k=k.replace(ya,"<$1></$2>");var l=(za.exec(k)||["",""])[1].toLowerCase(),n=O[l]||O._default,
+s=n[0],v=b.createElement("div");for(v.innerHTML=n[1]+k+n[2];s--;)v=v.lastChild;if(!c.support.tbody){s=ab.test(k);l=l==="table"&&!s?v.firstChild&&v.firstChild.childNodes:n[1]==="<table>"&&!s?v.childNodes:[];for(n=l.length-1;n>=0;--n)c.nodeName(l[n],"tbody")&&!l[n].childNodes.length&&l[n].parentNode.removeChild(l[n])}!c.support.leadingWhitespace&&$.test(k)&&v.insertBefore(b.createTextNode($.exec(k)[0]),v.firstChild);k=v.childNodes}if(k.nodeType)f.push(k);else f=c.merge(f,k)}}if(d)for(h=0;f[h];h++)if(e&&
+c.nodeName(f[h],"script")&&(!f[h].type||f[h].type.toLowerCase()==="text/javascript"))e.push(f[h].parentNode?f[h].parentNode.removeChild(f[h]):f[h]);else{f[h].nodeType===1&&f.splice.apply(f,[h+1,0].concat(c.makeArray(f[h].getElementsByTagName("script"))));d.appendChild(f[h])}return f},cleanData:function(a){for(var b,d,e=c.cache,f=c.event.special,h=c.support.deleteExpando,k=0,l;(l=a[k])!=null;k++)if(!(l.nodeName&&c.noData[l.nodeName.toLowerCase()]))if(d=l[c.expando]){if((b=e[d])&&b.events)for(var n in b.events)f[n]?
+c.event.remove(l,n):c.removeEvent(l,n,b.handle);if(h)delete l[c.expando];else l.removeAttribute&&l.removeAttribute(c.expando);delete e[d]}}});var Ca=/alpha\([^)]*\)/i,db=/opacity=([^)]*)/,eb=/-([a-z])/ig,fb=/([A-Z])/g,Da=/^-?\d+(?:px)?$/i,gb=/^-?\d/,hb={position:"absolute",visibility:"hidden",display:"block"},La=["Left","Right"],Ma=["Top","Bottom"],W,ib=u.defaultView&&u.defaultView.getComputedStyle,jb=function(a,b){return b.toUpperCase()};c.fn.css=function(a,b){if(arguments.length===2&&b===A)return this;
+return c.access(this,a,b,true,function(d,e,f){return f!==A?c.style(d,e,f):c.css(d,e)})};c.extend({cssHooks:{opacity:{get:function(a,b){if(b){var d=W(a,"opacity","opacity");return d===""?"1":d}else return a.style.opacity}}},cssNumber:{zIndex:true,fontWeight:true,opacity:true,zoom:true,lineHeight:true},cssProps:{"float":c.support.cssFloat?"cssFloat":"styleFloat"},style:function(a,b,d,e){if(!(!a||a.nodeType===3||a.nodeType===8||!a.style)){var f,h=c.camelCase(b),k=a.style,l=c.cssHooks[h];b=c.cssProps[h]||
+h;if(d!==A){if(!(typeof d==="number"&&isNaN(d)||d==null)){if(typeof d==="number"&&!c.cssNumber[h])d+="px";if(!l||!("set"in l)||(d=l.set(a,d))!==A)try{k[b]=d}catch(n){}}}else{if(l&&"get"in l&&(f=l.get(a,false,e))!==A)return f;return k[b]}}},css:function(a,b,d){var e,f=c.camelCase(b),h=c.cssHooks[f];b=c.cssProps[f]||f;if(h&&"get"in h&&(e=h.get(a,true,d))!==A)return e;else if(W)return W(a,b,f)},swap:function(a,b,d){var e={},f;for(f in b){e[f]=a.style[f];a.style[f]=b[f]}d.call(a);for(f in b)a.style[f]=
+e[f]},camelCase:function(a){return a.replace(eb,jb)}});c.curCSS=c.css;c.each(["height","width"],function(a,b){c.cssHooks[b]={get:function(d,e,f){var h;if(e){if(d.offsetWidth!==0)h=ma(d,b,f);else c.swap(d,hb,function(){h=ma(d,b,f)});return h+"px"}},set:function(d,e){if(Da.test(e)){e=parseFloat(e);if(e>=0)return e+"px"}else return e}}});if(!c.support.opacity)c.cssHooks.opacity={get:function(a,b){return db.test((b&&a.currentStyle?a.currentStyle.filter:a.style.filter)||"")?parseFloat(RegExp.$1)/100+"":
+b?"1":""},set:function(a,b){var d=a.style;d.zoom=1;var e=c.isNaN(b)?"":"alpha(opacity="+b*100+")",f=d.filter||"";d.filter=Ca.test(f)?f.replace(Ca,e):d.filter+" "+e}};if(ib)W=function(a,b,d){var e;d=d.replace(fb,"-$1").toLowerCase();if(!(b=a.ownerDocument.defaultView))return A;if(b=b.getComputedStyle(a,null)){e=b.getPropertyValue(d);if(e===""&&!c.contains(a.ownerDocument.documentElement,a))e=c.style(a,d)}return e};else if(u.documentElement.currentStyle)W=function(a,b){var d,e,f=a.currentStyle&&a.currentStyle[b],
+h=a.style;if(!Da.test(f)&&gb.test(f)){d=h.left;e=a.runtimeStyle.left;a.runtimeStyle.left=a.currentStyle.left;h.left=b==="fontSize"?"1em":f||0;f=h.pixelLeft+"px";h.left=d;a.runtimeStyle.left=e}return f};if(c.expr&&c.expr.filters){c.expr.filters.hidden=function(a){var b=a.offsetHeight;return a.offsetWidth===0&&b===0||!c.support.reliableHiddenOffsets&&(a.style.display||c.css(a,"display"))==="none"};c.expr.filters.visible=function(a){return!c.expr.filters.hidden(a)}}var kb=c.now(),lb=/<script\b[^<]*(?:(?!<\/script>)<[^<]*)*<\/script>/gi,
+mb=/^(?:select|textarea)/i,nb=/^(?:color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,ob=/^(?:GET|HEAD|DELETE)$/,Na=/\[\]$/,T=/\=\?(&|$)/,ia=/\?/,pb=/([?&])_=[^&]*/,qb=/^(\w+:)?\/\/([^\/?#]+)/,rb=/%20/g,sb=/#.*$/,Ea=c.fn.load;c.fn.extend({load:function(a,b,d){if(typeof a!=="string"&&Ea)return Ea.apply(this,arguments);else if(!this.length)return this;var e=a.indexOf(" ");if(e>=0){var f=a.slice(e,a.length);a=a.slice(0,e)}e="GET";if(b)if(c.isFunction(b)){d=
+b;b=null}else if(typeof b==="object"){b=c.param(b,c.ajaxSettings.traditional);e="POST"}var h=this;c.ajax({url:a,type:e,dataType:"html",data:b,complete:function(k,l){if(l==="success"||l==="notmodified")h.html(f?c("<div>").append(k.responseText.replace(lb,"")).find(f):k.responseText);d&&h.each(d,[k.responseText,l,k])}});return this},serialize:function(){return c.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?c.makeArray(this.elements):this}).filter(function(){return this.name&&
+!this.disabled&&(this.checked||mb.test(this.nodeName)||nb.test(this.type))}).map(function(a,b){var d=c(this).val();return d==null?null:c.isArray(d)?c.map(d,function(e){return{name:b.name,value:e}}):{name:b.name,value:d}}).get()}});c.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(a,b){c.fn[b]=function(d){return this.bind(b,d)}});c.extend({get:function(a,b,d,e){if(c.isFunction(b)){e=e||d;d=b;b=null}return c.ajax({type:"GET",url:a,data:b,success:d,dataType:e})},
+getScript:function(a,b){return c.get(a,null,b,"script")},getJSON:function(a,b,d){return c.get(a,b,d,"json")},post:function(a,b,d,e){if(c.isFunction(b)){e=e||d;d=b;b={}}return c.ajax({type:"POST",url:a,data:b,success:d,dataType:e})},ajaxSetup:function(a){c.extend(c.ajaxSettings,a)},ajaxSettings:{url:location.href,global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:function(){return new E.XMLHttpRequest},accepts:{xml:"application/xml, text/xml",html:"text/html",
+script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},ajax:function(a){var b=c.extend(true,{},c.ajaxSettings,a),d,e,f,h=b.type.toUpperCase(),k=ob.test(h);b.url=b.url.replace(sb,"");b.context=a&&a.context!=null?a.context:b;if(b.data&&b.processData&&typeof b.data!=="string")b.data=c.param(b.data,b.traditional);if(b.dataType==="jsonp"){if(h==="GET")T.test(b.url)||(b.url+=(ia.test(b.url)?"&":"?")+(b.jsonp||"callback")+"=?");else if(!b.data||
+!T.test(b.data))b.data=(b.data?b.data+"&":"")+(b.jsonp||"callback")+"=?";b.dataType="json"}if(b.dataType==="json"&&(b.data&&T.test(b.data)||T.test(b.url))){d=b.jsonpCallback||"jsonp"+kb++;if(b.data)b.data=(b.data+"").replace(T,"="+d+"$1");b.url=b.url.replace(T,"="+d+"$1");b.dataType="script";var l=E[d];E[d]=function(m){f=m;c.handleSuccess(b,w,e,f);c.handleComplete(b,w,e,f);if(c.isFunction(l))l(m);else{E[d]=A;try{delete E[d]}catch(p){}}v&&v.removeChild(B)}}if(b.dataType==="script"&&b.cache===null)b.cache=
+false;if(b.cache===false&&h==="GET"){var n=c.now(),s=b.url.replace(pb,"$1_="+n);b.url=s+(s===b.url?(ia.test(b.url)?"&":"?")+"_="+n:"")}if(b.data&&h==="GET")b.url+=(ia.test(b.url)?"&":"?")+b.data;b.global&&c.active++===0&&c.event.trigger("ajaxStart");n=(n=qb.exec(b.url))&&(n[1]&&n[1]!==location.protocol||n[2]!==location.host);if(b.dataType==="script"&&h==="GET"&&n){var v=u.getElementsByTagName("head")[0]||u.documentElement,B=u.createElement("script");if(b.scriptCharset)B.charset=b.scriptCharset;B.src=
+b.url;if(!d){var D=false;B.onload=B.onreadystatechange=function(){if(!D&&(!this.readyState||this.readyState==="loaded"||this.readyState==="complete")){D=true;c.handleSuccess(b,w,e,f);c.handleComplete(b,w,e,f);B.onload=B.onreadystatechange=null;v&&B.parentNode&&v.removeChild(B)}}}v.insertBefore(B,v.firstChild);return A}var H=false,w=b.xhr();if(w){b.username?w.open(h,b.url,b.async,b.username,b.password):w.open(h,b.url,b.async);try{if(b.data!=null&&!k||a&&a.contentType)w.setRequestHeader("Content-Type",
+b.contentType);if(b.ifModified){c.lastModified[b.url]&&w.setRequestHeader("If-Modified-Since",c.lastModified[b.url]);c.etag[b.url]&&w.setRequestHeader("If-None-Match",c.etag[b.url])}n||w.setRequestHeader("X-Requested-With","XMLHttpRequest");w.setRequestHeader("Accept",b.dataType&&b.accepts[b.dataType]?b.accepts[b.dataType]+", */*; q=0.01":b.accepts._default)}catch(G){}if(b.beforeSend&&b.beforeSend.call(b.context,w,b)===false){b.global&&c.active--===1&&c.event.trigger("ajaxStop");w.abort();return false}b.global&&
+c.triggerGlobal(b,"ajaxSend",[w,b]);var M=w.onreadystatechange=function(m){if(!w||w.readyState===0||m==="abort"){H||c.handleComplete(b,w,e,f);H=true;if(w)w.onreadystatechange=c.noop}else if(!H&&w&&(w.readyState===4||m==="timeout")){H=true;w.onreadystatechange=c.noop;e=m==="timeout"?"timeout":!c.httpSuccess(w)?"error":b.ifModified&&c.httpNotModified(w,b.url)?"notmodified":"success";var p;if(e==="success")try{f=c.httpData(w,b.dataType,b)}catch(q){e="parsererror";p=q}if(e==="success"||e==="notmodified")d||
+c.handleSuccess(b,w,e,f);else c.handleError(b,w,e,p);d||c.handleComplete(b,w,e,f);m==="timeout"&&w.abort();if(b.async)w=null}};try{var g=w.abort;w.abort=function(){w&&g.call&&g.call(w);M("abort")}}catch(j){}b.async&&b.timeout>0&&setTimeout(function(){w&&!H&&M("timeout")},b.timeout);try{w.send(k||b.data==null?null:b.data)}catch(o){c.handleError(b,w,null,o);c.handleComplete(b,w,e,f)}b.async||M();return w}},param:function(a,b){var d=[],e=function(h,k){k=c.isFunction(k)?k():k;d[d.length]=encodeURIComponent(h)+
+"="+encodeURIComponent(k)};if(b===A)b=c.ajaxSettings.traditional;if(c.isArray(a)||a.jquery)c.each(a,function(){e(this.name,this.value)});else for(var f in a)ca(f,a[f],b,e);return d.join("&").replace(rb,"+")}});c.extend({active:0,lastModified:{},etag:{},handleError:function(a,b,d,e){a.error&&a.error.call(a.context,b,d,e);a.global&&c.triggerGlobal(a,"ajaxError",[b,a,e])},handleSuccess:function(a,b,d,e){a.success&&a.success.call(a.context,e,d,b);a.global&&c.triggerGlobal(a,"ajaxSuccess",[b,a])},handleComplete:function(a,
+b,d){a.complete&&a.complete.call(a.context,b,d);a.global&&c.triggerGlobal(a,"ajaxComplete",[b,a]);a.global&&c.active--===1&&c.event.trigger("ajaxStop")},triggerGlobal:function(a,b,d){(a.context&&a.context.url==null?c(a.context):c.event).trigger(b,d)},httpSuccess:function(a){try{return!a.status&&location.protocol==="file:"||a.status>=200&&a.status<300||a.status===304||a.status===1223}catch(b){}return false},httpNotModified:function(a,b){var d=a.getResponseHeader("Last-Modified"),e=a.getResponseHeader("Etag");
+if(d)c.lastModified[b]=d;if(e)c.etag[b]=e;return a.status===304},httpData:function(a,b,d){var e=a.getResponseHeader("content-type")||"",f=b==="xml"||!b&&e.indexOf("xml")>=0;a=f?a.responseXML:a.responseText;f&&a.documentElement.nodeName==="parsererror"&&c.error("parsererror");if(d&&d.dataFilter)a=d.dataFilter(a,b);if(typeof a==="string")if(b==="json"||!b&&e.indexOf("json")>=0)a=c.parseJSON(a);else if(b==="script"||!b&&e.indexOf("javascript")>=0)c.globalEval(a);return a}});if(E.ActiveXObject)c.ajaxSettings.xhr=
+function(){if(E.location.protocol!=="file:")try{return new E.XMLHttpRequest}catch(a){}try{return new E.ActiveXObject("Microsoft.XMLHTTP")}catch(b){}};c.support.ajax=!!c.ajaxSettings.xhr();var da={},tb=/^(?:toggle|show|hide)$/,ub=/^([+\-]=)?([\d+.\-]+)(.*)$/,aa,na=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];c.fn.extend({show:function(a,b,d){if(a||a===0)return this.animate(S("show",3),a,b,d);else{a=
+0;for(b=this.length;a<b;a++){if(!c.data(this[a],"olddisplay")&&this[a].style.display==="none")this[a].style.display="";this[a].style.display===""&&c.css(this[a],"display")==="none"&&c.data(this[a],"olddisplay",oa(this[a].nodeName))}for(a=0;a<b;a++)this[a].style.display=c.data(this[a],"olddisplay")||"";return this}},hide:function(a,b,d){if(a||a===0)return this.animate(S("hide",3),a,b,d);else{a=0;for(b=this.length;a<b;a++){d=c.css(this[a],"display");d!=="none"&&c.data(this[a],"olddisplay",d)}for(a=
+0;a<b;a++)this[a].style.display="none";return this}},_toggle:c.fn.toggle,toggle:function(a,b,d){var e=typeof a==="boolean";if(c.isFunction(a)&&c.isFunction(b))this._toggle.apply(this,arguments);else a==null||e?this.each(function(){var f=e?a:c(this).is(":hidden");c(this)[f?"show":"hide"]()}):this.animate(S("toggle",3),a,b,d);return this},fadeTo:function(a,b,d,e){return this.filter(":hidden").css("opacity",0).show().end().animate({opacity:b},a,d,e)},animate:function(a,b,d,e){var f=c.speed(b,d,e);if(c.isEmptyObject(a))return this.each(f.complete);
+return this[f.queue===false?"each":"queue"](function(){var h=c.extend({},f),k,l=this.nodeType===1,n=l&&c(this).is(":hidden"),s=this;for(k in a){var v=c.camelCase(k);if(k!==v){a[v]=a[k];delete a[k];k=v}if(a[k]==="hide"&&n||a[k]==="show"&&!n)return h.complete.call(this);if(l&&(k==="height"||k==="width")){h.overflow=[this.style.overflow,this.style.overflowX,this.style.overflowY];if(c.css(this,"display")==="inline"&&c.css(this,"float")==="none")if(c.support.inlineBlockNeedsLayout)if(oa(this.nodeName)===
+"inline")this.style.display="inline-block";else{this.style.display="inline";this.style.zoom=1}else this.style.display="inline-block"}if(c.isArray(a[k])){(h.specialEasing=h.specialEasing||{})[k]=a[k][1];a[k]=a[k][0]}}if(h.overflow!=null)this.style.overflow="hidden";h.curAnim=c.extend({},a);c.each(a,function(B,D){var H=new c.fx(s,h,B);if(tb.test(D))H[D==="toggle"?n?"show":"hide":D](a);else{var w=ub.exec(D),G=H.cur(true)||0;if(w){var M=parseFloat(w[2]),g=w[3]||"px";if(g!=="px"){c.style(s,B,(M||1)+g);
+G=(M||1)/H.cur(true)*G;c.style(s,B,G+g)}if(w[1])M=(w[1]==="-="?-1:1)*M+G;H.custom(G,M,g)}else H.custom(G,D,"")}});return true})},stop:function(a,b){var d=c.timers;a&&this.queue([]);this.each(function(){for(var e=d.length-1;e>=0;e--)if(d[e].elem===this){b&&d[e](true);d.splice(e,1)}});b||this.dequeue();return this}});c.each({slideDown:S("show",1),slideUp:S("hide",1),slideToggle:S("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"}},function(a,b){c.fn[a]=function(d,e,f){return this.animate(b,
+d,e,f)}});c.extend({speed:function(a,b,d){var e=a&&typeof a==="object"?c.extend({},a):{complete:d||!d&&b||c.isFunction(a)&&a,duration:a,easing:d&&b||b&&!c.isFunction(b)&&b};e.duration=c.fx.off?0:typeof e.duration==="number"?e.duration:e.duration in c.fx.speeds?c.fx.speeds[e.duration]:c.fx.speeds._default;e.old=e.complete;e.complete=function(){e.queue!==false&&c(this).dequeue();c.isFunction(e.old)&&e.old.call(this)};return e},easing:{linear:function(a,b,d,e){return d+e*a},swing:function(a,b,d,e){return(-Math.cos(a*
+Math.PI)/2+0.5)*e+d}},timers:[],fx:function(a,b,d){this.options=b;this.elem=a;this.prop=d;if(!b.orig)b.orig={}}});c.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this);(c.fx.step[this.prop]||c.fx.step._default)(this)},cur:function(){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];var a=parseFloat(c.css(this.elem,this.prop));return a&&a>-1E4?a:0},custom:function(a,b,d){function e(h){return f.step(h)}
+this.startTime=c.now();this.start=a;this.end=b;this.unit=d||this.unit||"px";this.now=this.start;this.pos=this.state=0;var f=this;a=c.fx;e.elem=this.elem;if(e()&&c.timers.push(e)&&!aa)aa=setInterval(a.tick,a.interval)},show:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.show=true;this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur());c(this.elem).show()},hide:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.hide=true;
+this.custom(this.cur(),0)},step:function(a){var b=c.now(),d=true;if(a||b>=this.options.duration+this.startTime){this.now=this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;for(var e in this.options.curAnim)if(this.options.curAnim[e]!==true)d=false;if(d){if(this.options.overflow!=null&&!c.support.shrinkWrapBlocks){var f=this.elem,h=this.options;c.each(["","X","Y"],function(l,n){f.style["overflow"+n]=h.overflow[l]})}this.options.hide&&c(this.elem).hide();if(this.options.hide||
+this.options.show)for(var k in this.options.curAnim)c.style(this.elem,k,this.options.orig[k]);this.options.complete.call(this.elem)}return false}else{a=b-this.startTime;this.state=a/this.options.duration;b=this.options.easing||(c.easing.swing?"swing":"linear");this.pos=c.easing[this.options.specialEasing&&this.options.specialEasing[this.prop]||b](this.state,a,0,1,this.options.duration);this.now=this.start+(this.end-this.start)*this.pos;this.update()}return true}};c.extend(c.fx,{tick:function(){for(var a=
+c.timers,b=0;b<a.length;b++)a[b]()||a.splice(b--,1);a.length||c.fx.stop()},interval:13,stop:function(){clearInterval(aa);aa=null},speeds:{slow:600,fast:200,_default:400},step:{opacity:function(a){c.style(a.elem,"opacity",a.now)},_default:function(a){if(a.elem.style&&a.elem.style[a.prop]!=null)a.elem.style[a.prop]=(a.prop==="width"||a.prop==="height"?Math.max(0,a.now):a.now)+a.unit;else a.elem[a.prop]=a.now}}});if(c.expr&&c.expr.filters)c.expr.filters.animated=function(a){return c.grep(c.timers,function(b){return a===
+b.elem}).length};var vb=/^t(?:able|d|h)$/i,Fa=/^(?:body|html)$/i;c.fn.offset="getBoundingClientRect"in u.documentElement?function(a){var b=this[0],d;if(a)return this.each(function(k){c.offset.setOffset(this,a,k)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);try{d=b.getBoundingClientRect()}catch(e){}var f=b.ownerDocument,h=f.documentElement;if(!d||!c.contains(h,b))return d||{top:0,left:0};b=f.body;f=ea(f);return{top:d.top+(f.pageYOffset||c.support.boxModel&&
+h.scrollTop||b.scrollTop)-(h.clientTop||b.clientTop||0),left:d.left+(f.pageXOffset||c.support.boxModel&&h.scrollLeft||b.scrollLeft)-(h.clientLeft||b.clientLeft||0)}}:function(a){var b=this[0];if(a)return this.each(function(s){c.offset.setOffset(this,a,s)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);c.offset.initialize();var d=b.offsetParent,e=b.ownerDocument,f,h=e.documentElement,k=e.body;f=(e=e.defaultView)?e.getComputedStyle(b,null):b.currentStyle;
+for(var l=b.offsetTop,n=b.offsetLeft;(b=b.parentNode)&&b!==k&&b!==h;){if(c.offset.supportsFixedPosition&&f.position==="fixed")break;f=e?e.getComputedStyle(b,null):b.currentStyle;l-=b.scrollTop;n-=b.scrollLeft;if(b===d){l+=b.offsetTop;n+=b.offsetLeft;if(c.offset.doesNotAddBorder&&!(c.offset.doesAddBorderForTableAndCells&&vb.test(b.nodeName))){l+=parseFloat(f.borderTopWidth)||0;n+=parseFloat(f.borderLeftWidth)||0}d=b.offsetParent}if(c.offset.subtractsBorderForOverflowNotVisible&&f.overflow!=="visible"){l+=
+parseFloat(f.borderTopWidth)||0;n+=parseFloat(f.borderLeftWidth)||0}f=f}if(f.position==="relative"||f.position==="static"){l+=k.offsetTop;n+=k.offsetLeft}if(c.offset.supportsFixedPosition&&f.position==="fixed"){l+=Math.max(h.scrollTop,k.scrollTop);n+=Math.max(h.scrollLeft,k.scrollLeft)}return{top:l,left:n}};c.offset={initialize:function(){var a=u.body,b=u.createElement("div"),d,e,f,h=parseFloat(c.css(a,"marginTop"))||0;c.extend(b.style,{position:"absolute",top:0,left:0,margin:0,border:0,width:"1px",
+height:"1px",visibility:"hidden"});b.innerHTML="<div style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;'><div></div></div><table style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;' cellpadding='0' cellspacing='0'><tr><td></td></tr></table>";a.insertBefore(b,a.firstChild);d=b.firstChild;e=d.firstChild;f=d.nextSibling.firstChild.firstChild;this.doesNotAddBorder=e.offsetTop!==5;this.doesAddBorderForTableAndCells=
+f.offsetTop===5;e.style.position="fixed";e.style.top="20px";this.supportsFixedPosition=e.offsetTop===20||e.offsetTop===15;e.style.position=e.style.top="";d.style.overflow="hidden";d.style.position="relative";this.subtractsBorderForOverflowNotVisible=e.offsetTop===-5;this.doesNotIncludeMarginInBodyOffset=a.offsetTop!==h;a.removeChild(b);c.offset.initialize=c.noop},bodyOffset:function(a){var b=a.offsetTop,d=a.offsetLeft;c.offset.initialize();if(c.offset.doesNotIncludeMarginInBodyOffset){b+=parseFloat(c.css(a,
+"marginTop"))||0;d+=parseFloat(c.css(a,"marginLeft"))||0}return{top:b,left:d}},setOffset:function(a,b,d){var e=c.css(a,"position");if(e==="static")a.style.position="relative";var f=c(a),h=f.offset(),k=c.css(a,"top"),l=c.css(a,"left"),n=e==="absolute"&&c.inArray("auto",[k,l])>-1;e={};var s={};if(n)s=f.position();k=n?s.top:parseInt(k,10)||0;l=n?s.left:parseInt(l,10)||0;if(c.isFunction(b))b=b.call(a,d,h);if(b.top!=null)e.top=b.top-h.top+k;if(b.left!=null)e.left=b.left-h.left+l;"using"in b?b.using.call(a,
+e):f.css(e)}};c.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),d=this.offset(),e=Fa.test(b[0].nodeName)?{top:0,left:0}:b.offset();d.top-=parseFloat(c.css(a,"marginTop"))||0;d.left-=parseFloat(c.css(a,"marginLeft"))||0;e.top+=parseFloat(c.css(b[0],"borderTopWidth"))||0;e.left+=parseFloat(c.css(b[0],"borderLeftWidth"))||0;return{top:d.top-e.top,left:d.left-e.left}},offsetParent:function(){return this.map(function(){for(var a=this.offsetParent||u.body;a&&!Fa.test(a.nodeName)&&
+c.css(a,"position")==="static";)a=a.offsetParent;return a})}});c.each(["Left","Top"],function(a,b){var d="scroll"+b;c.fn[d]=function(e){var f=this[0],h;if(!f)return null;if(e!==A)return this.each(function(){if(h=ea(this))h.scrollTo(!a?e:c(h).scrollLeft(),a?e:c(h).scrollTop());else this[d]=e});else return(h=ea(f))?"pageXOffset"in h?h[a?"pageYOffset":"pageXOffset"]:c.support.boxModel&&h.document.documentElement[d]||h.document.body[d]:f[d]}});c.each(["Height","Width"],function(a,b){var d=b.toLowerCase();
+c.fn["inner"+b]=function(){return this[0]?parseFloat(c.css(this[0],d,"padding")):null};c.fn["outer"+b]=function(e){return this[0]?parseFloat(c.css(this[0],d,e?"margin":"border")):null};c.fn[d]=function(e){var f=this[0];if(!f)return e==null?null:this;if(c.isFunction(e))return this.each(function(h){var k=c(this);k[d](e.call(this,h,k[d]()))});return c.isWindow(f)?f.document.compatMode==="CSS1Compat"&&f.document.documentElement["client"+b]||f.document.body["client"+b]:f.nodeType===9?Math.max(f.documentElement["client"+
+b],f.body["scroll"+b],f.documentElement["scroll"+b],f.body["offset"+b],f.documentElement["offset"+b]):e===A?parseFloat(c.css(f,d)):this.css(d,typeof e==="string"?e:e+"px")}})})(window);
diff --git a/media/jquery.tablesorter.min.js b/media/jquery.tablesorter.min.js
new file mode 100644
index 00000000..64c70071
--- /dev/null
+++ b/media/jquery.tablesorter.min.js
@@ -0,0 +1,2 @@
+
+(function($){$.extend({tablesorter:new function(){var parsers=[],widgets=[];this.defaults={cssHeader:"header",cssAsc:"headerSortUp",cssDesc:"headerSortDown",sortInitialOrder:"asc",sortMultiSortKey:"shiftKey",sortForce:null,sortAppend:null,textExtraction:"simple",parsers:{},widgets:[],widgetZebra:{css:["even","odd"]},headers:{},widthFixed:false,cancelSelection:true,sortList:[],headerList:[],dateFormat:"us",decimal:'.',debug:false};function benchmark(s,d){log(s+","+(new Date().getTime()-d.getTime())+"ms");}this.benchmark=benchmark;function log(s){if(typeof console!="undefined"&&typeof console.debug!="undefined"){console.log(s);}else{alert(s);}}function buildParserCache(table,$headers){if(table.config.debug){var parsersDebug="";}var rows=table.tBodies[0].rows;if(table.tBodies[0].rows[0]){var list=[],cells=rows[0].cells,l=cells.length;for(var i=0;i<l;i++){var p=false;if($.metadata&&($($headers[i]).metadata()&&$($headers[i]).metadata().sorter)){p=getParserById($($headers[i]).metadata().sorter);}else if((table.config.headers[i]&&table.config.headers[i].sorter)){p=getParserById(table.config.headers[i].sorter);}if(!p){p=detectParserForColumn(table,cells[i]);}if(table.config.debug){parsersDebug+="column:"+i+" parser:"+p.id+"\n";}list.push(p);}}if(table.config.debug){log(parsersDebug);}return list;};function detectParserForColumn(table,node){var l=parsers.length;for(var i=1;i<l;i++){if(parsers[i].is($.trim(getElementText(table.config,node)),table,node)){return parsers[i];}}return parsers[0];}function getParserById(name){var l=parsers.length;for(var i=0;i<l;i++){if(parsers[i].id.toLowerCase()==name.toLowerCase()){return parsers[i];}}return false;}function buildCache(table){if(table.config.debug){var cacheTime=new Date();}var totalRows=(table.tBodies[0]&&table.tBodies[0].rows.length)||0,totalCells=(table.tBodies[0].rows[0]&&table.tBodies[0].rows[0].cells.length)||0,parsers=table.config.parsers,cache={row:[],normalized:[]};for(var i=0;i<totalRows;++i){var 
c=table.tBodies[0].rows[i],cols=[];cache.row.push($(c));for(var j=0;j<totalCells;++j){cols.push(parsers[j].format(getElementText(table.config,c.cells[j]),table,c.cells[j]));}cols.push(i);cache.normalized.push(cols);cols=null;};if(table.config.debug){benchmark("Building cache for "+totalRows+" rows:",cacheTime);}return cache;};function getElementText(config,node){if(!node)return"";var t="";if(config.textExtraction=="simple"){if(node.childNodes[0]&&node.childNodes[0].hasChildNodes()){t=node.childNodes[0].innerHTML;}else{t=node.innerHTML;}}else{if(typeof(config.textExtraction)=="function"){t=config.textExtraction(node);}else{t=$(node).text();}}return t;}function appendToTable(table,cache){if(table.config.debug){var appendTime=new Date()}var c=cache,r=c.row,n=c.normalized,totalRows=n.length,checkCell=(n[0].length-1),tableBody=$(table.tBodies[0]),rows=[];for(var i=0;i<totalRows;i++){rows.push(r[n[i][checkCell]]);if(!table.config.appender){var o=r[n[i][checkCell]];var l=o.length;for(var j=0;j<l;j++){tableBody[0].appendChild(o[j]);}}}if(table.config.appender){table.config.appender(table,rows);}rows=null;if(table.config.debug){benchmark("Rebuilt table:",appendTime);}applyWidget(table);setTimeout(function(){$(table).trigger("sortEnd");},0);};function buildHeaders(table){if(table.config.debug){var time=new Date();}var meta=($.metadata)?true:false,tableHeadersRows=[];for(var i=0;i<table.tHead.rows.length;i++){tableHeadersRows[i]=0;};$tableHeaders=$("thead th",table);$tableHeaders.each(function(index){this.count=0;this.column=index;this.order=formatSortingOrder(table.config.sortInitialOrder);if(checkHeaderMetadata(this)||checkHeaderOptions(table,index))this.sortDisabled=true;if(!this.sortDisabled){$(this).addClass(table.config.cssHeader);}table.config.headerList[index]=this;});if(table.config.debug){benchmark("Built headers:",time);log($tableHeaders);}return $tableHeaders;};function checkCellColSpan(table,rows,row){var arr=[],r=table.tHead.rows,c=r[row].cells;for(var 
i=0;i<c.length;i++){var cell=c[i];if(cell.colSpan>1){arr=arr.concat(checkCellColSpan(table,headerArr,row++));}else{if(table.tHead.length==1||(cell.rowSpan>1||!r[row+1])){arr.push(cell);}}}return arr;};function checkHeaderMetadata(cell){if(($.metadata)&&($(cell).metadata().sorter===false)){return true;};return false;}function checkHeaderOptions(table,i){if((table.config.headers[i])&&(table.config.headers[i].sorter===false)){return true;};return false;}function applyWidget(table){var c=table.config.widgets;var l=c.length;for(var i=0;i<l;i++){getWidgetById(c[i]).format(table);}}function getWidgetById(name){var l=widgets.length;for(var i=0;i<l;i++){if(widgets[i].id.toLowerCase()==name.toLowerCase()){return widgets[i];}}};function formatSortingOrder(v){if(typeof(v)!="Number"){i=(v.toLowerCase()=="desc")?1:0;}else{i=(v==(0||1))?v:0;}return i;}function isValueInArray(v,a){var l=a.length;for(var i=0;i<l;i++){if(a[i][0]==v){return true;}}return false;}function setHeadersCss(table,$headers,list,css){$headers.removeClass(css[0]).removeClass(css[1]);var h=[];$headers.each(function(offset){if(!this.sortDisabled){h[this.column]=$(this);}});var l=list.length;for(var i=0;i<l;i++){h[list[i][0]].addClass(css[list[i][1]]);}}function fixColumnWidth(table,$headers){var c=table.config;if(c.widthFixed){var colgroup=$('<colgroup>');$("tr:first td",table.tBodies[0]).each(function(){colgroup.append($('<col>').css('width',$(this).width()));});$(table).prepend(colgroup);};}function updateHeaderSortCount(table,sortList){var c=table.config,l=sortList.length;for(var i=0;i<l;i++){var s=sortList[i],o=c.headerList[s[0]];o.count=s[1];o.count++;}}function multisort(table,sortList,cache){if(table.config.debug){var sortTime=new Date();}var dynamicExp="var sortWrapper = function(a,b) {",l=sortList.length;for(var i=0;i<l;i++){var c=sortList[i][0];var order=sortList[i][1];var 
s=(getCachedSortType(table.config.parsers,c)=="text")?((order==0)?"sortText":"sortTextDesc"):((order==0)?"sortNumeric":"sortNumericDesc");var e="e"+i;dynamicExp+="var "+e+" = "+s+"(a["+c+"],b["+c+"]); ";dynamicExp+="if("+e+") { return "+e+"; } ";dynamicExp+="else { ";}var orgOrderCol=cache.normalized[0].length-1;dynamicExp+="return a["+orgOrderCol+"]-b["+orgOrderCol+"];";for(var i=0;i<l;i++){dynamicExp+="}; ";}dynamicExp+="return 0; ";dynamicExp+="}; ";eval(dynamicExp);cache.normalized.sort(sortWrapper);if(table.config.debug){benchmark("Sorting on "+sortList.toString()+" and dir "+order+" time:",sortTime);}return cache;};function sortText(a,b){return((a<b)?-1:((a>b)?1:0));};function sortTextDesc(a,b){return((b<a)?-1:((b>a)?1:0));};function sortNumeric(a,b){return a-b;};function sortNumericDesc(a,b){return b-a;};function getCachedSortType(parsers,i){return parsers[i].type;};this.construct=function(settings){return this.each(function(){if(!this.tHead||!this.tBodies)return;var $this,$document,$headers,cache,config,shiftDown=0,sortOrder;this.config={};config=$.extend(this.config,$.tablesorter.defaults,settings);$this=$(this);$headers=buildHeaders(this);this.config.parsers=buildParserCache(this,$headers);cache=buildCache(this);var sortCSS=[config.cssDesc,config.cssAsc];fixColumnWidth(this);$headers.click(function(e){$this.trigger("sortStart");var totalRows=($this[0].tBodies[0]&&$this[0].tBodies[0].rows.length)||0;if(!this.sortDisabled&&totalRows>0){var $cell=$(this);var i=this.column;this.order=this.count++%2;if(!e[config.sortMultiSortKey]){config.sortList=[];if(config.sortForce!=null){var a=config.sortForce;for(var j=0;j<a.length;j++){if(a[j][0]!=i){config.sortList.push(a[j]);}}}config.sortList.push([i,this.order]);}else{if(isValueInArray(i,config.sortList)){for(var j=0;j<config.sortList.length;j++){var 
s=config.sortList[j],o=config.headerList[s[0]];if(s[0]==i){o.count=s[1];o.count++;s[1]=o.count%2;}}}else{config.sortList.push([i,this.order]);}};setTimeout(function(){setHeadersCss($this[0],$headers,config.sortList,sortCSS);appendToTable($this[0],multisort($this[0],config.sortList,cache));},1);return false;}}).mousedown(function(){if(config.cancelSelection){this.onselectstart=function(){return false};return false;}});$this.bind("update",function(){this.config.parsers=buildParserCache(this,$headers);cache=buildCache(this);}).bind("sorton",function(e,list){$(this).trigger("sortStart");config.sortList=list;var sortList=config.sortList;updateHeaderSortCount(this,sortList);setHeadersCss(this,$headers,sortList,sortCSS);appendToTable(this,multisort(this,sortList,cache));}).bind("appendCache",function(){appendToTable(this,cache);}).bind("applyWidgetId",function(e,id){getWidgetById(id).format(this);}).bind("applyWidgets",function(){applyWidget(this);});if($.metadata&&($(this).metadata()&&$(this).metadata().sortlist)){config.sortList=$(this).metadata().sortlist;}if(config.sortList.length>0){$this.trigger("sorton",[config.sortList]);}applyWidget(this);});};this.addParser=function(parser){var l=parsers.length,a=true;for(var i=0;i<l;i++){if(parsers[i].id.toLowerCase()==parser.id.toLowerCase()){a=false;}}if(a){parsers.push(parser);};};this.addWidget=function(widget){widgets.push(widget);};this.formatFloat=function(s){var i=parseFloat(s);return(isNaN(i))?0:i;};this.formatInt=function(s){var i=parseInt(s);return(isNaN(i))?0:i;};this.isDigit=function(s,config){var DECIMAL='\\'+config.decimal;var exp='/(^[+]?0('+DECIMAL+'0+)?$)|(^([-+]?[1-9][0-9]*)$)|(^([-+]?((0?|[1-9][0-9]*)'+DECIMAL+'(0*[1-9][0-9]*)))$)|(^[-+]?[1-9]+[0-9]*'+DECIMAL+'0+$)/';return RegExp(exp).test($.trim(s));};this.clearTableBody=function(table){if($.browser.msie){function 
empty(){while(this.firstChild)this.removeChild(this.firstChild);}empty.apply(table.tBodies[0]);}else{table.tBodies[0].innerHTML="";}};}});$.fn.extend({tablesorter:$.tablesorter.construct});var ts=$.tablesorter;ts.addParser({id:"text",is:function(s){return true;},format:function(s){return $.trim(s.toLowerCase());},type:"text"});ts.addParser({id:"digit",is:function(s,table){var c=table.config;return $.tablesorter.isDigit(s,c);},format:function(s){return $.tablesorter.formatFloat(s);},type:"numeric"});ts.addParser({id:"currency",is:function(s){return/^[£$€?.]/.test(s);},format:function(s){return $.tablesorter.formatFloat(s.replace(new RegExp(/[^0-9.]/g),""));},type:"numeric"});ts.addParser({id:"ipAddress",is:function(s){return/^\d{2,3}[\.]\d{2,3}[\.]\d{2,3}[\.]\d{2,3}$/.test(s);},format:function(s){var a=s.split("."),r="",l=a.length;for(var i=0;i<l;i++){var item=a[i];if(item.length==2){r+="0"+item;}else{r+=item;}}return $.tablesorter.formatFloat(r);},type:"numeric"});ts.addParser({id:"url",is:function(s){return/^(https?|ftp|file):\/\/$/.test(s);},format:function(s){return jQuery.trim(s.replace(new RegExp(/(https?|ftp|file):\/\//),''));},type:"text"});ts.addParser({id:"isoDate",is:function(s){return/^\d{4}[\/-]\d{1,2}[\/-]\d{1,2}$/.test(s);},format:function(s){return $.tablesorter.formatFloat((s!="")?new Date(s.replace(new RegExp(/-/g),"/")).getTime():"0");},type:"numeric"});ts.addParser({id:"percent",is:function(s){return/\%$/.test($.trim(s));},format:function(s){return $.tablesorter.formatFloat(s.replace(new RegExp(/%/g),""));},type:"numeric"});ts.addParser({id:"usLongDate",is:function(s){return s.match(new RegExp(/^[A-Za-z]{3,10}\.? 
[0-9]{1,2}, ([0-9]{4}|'?[0-9]{2}) (([0-2]?[0-9]:[0-5][0-9])|([0-1]?[0-9]:[0-5][0-9]\s(AM|PM)))$/));},format:function(s){return $.tablesorter.formatFloat(new Date(s).getTime());},type:"numeric"});ts.addParser({id:"shortDate",is:function(s){return/\d{1,2}[\/\-]\d{1,2}[\/\-]\d{2,4}/.test(s);},format:function(s,table){var c=table.config;s=s.replace(/\-/g,"/");if(c.dateFormat=="us"){s=s.replace(/(\d{1,2})[\/\-](\d{1,2})[\/\-](\d{4})/,"$3/$1/$2");}else if(c.dateFormat=="uk"){s=s.replace(/(\d{1,2})[\/\-](\d{1,2})[\/\-](\d{4})/,"$3/$2/$1");}else if(c.dateFormat=="dd/mm/yy"||c.dateFormat=="dd-mm-yy"){s=s.replace(/(\d{1,2})[\/\-](\d{1,2})[\/\-](\d{2})/,"$1/$2/$3");}return $.tablesorter.formatFloat(new Date(s).getTime());},type:"numeric"});ts.addParser({id:"time",is:function(s){return/^(([0-2]?[0-9]:[0-5][0-9])|([0-1]?[0-9]:[0-5][0-9]\s(am|pm)))$/.test(s);},format:function(s){return $.tablesorter.formatFloat(new Date("2000/01/01 "+s).getTime());},type:"numeric"});ts.addParser({id:"metadata",is:function(s){return false;},format:function(s,table,cell){var c=table.config,p=(!c.parserMetadataName)?'sortValue':c.parserMetadataName;return $(cell).metadata()[p];},type:"numeric"});ts.addWidget({id:"zebra",format:function(table){if(table.config.debug){var time=new Date();}$("tr:visible",table.tBodies[0]).filter(':even').removeClass(table.config.widgetZebra.css[1]).addClass(table.config.widgetZebra.css[0]).end().filter(':odd').removeClass(table.config.widgetZebra.css[0]).addClass(table.config.widgetZebra.css[1]);if(table.config.debug){$.tablesorter.benchmark("Applying Zebra widget",time);}}});})(jQuery); \ No newline at end of file
diff --git a/media/logos/archlinux-logo-black-1200dpi.png b/media/logos/archlinux-logo-black-1200dpi.png
new file mode 100644
index 00000000..a3082c39
--- /dev/null
+++ b/media/logos/archlinux-logo-black-1200dpi.png
Binary files differ
diff --git a/media/logos/archlinux-logo-black-90dpi.png b/media/logos/archlinux-logo-black-90dpi.png
new file mode 100644
index 00000000..6948b795
--- /dev/null
+++ b/media/logos/archlinux-logo-black-90dpi.png
Binary files differ
diff --git a/media/logos/archlinux-logo-black-scalable.svg b/media/logos/archlinux-logo-black-scalable.svg
new file mode 100644
index 00000000..10d6c4af
--- /dev/null
+++ b/media/logos/archlinux-logo-black-scalable.svg
@@ -0,0 +1,153 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:xlink="http://www.w3.org/1999/xlink"
+ version="1.0"
+ width="600"
+ height="199.41692"
+ id="svg2424">
+ <defs
+ id="defs2426">
+ <linearGradient
+ x1="112.49854"
+ y1="6.1372099"
+ x2="112.49853"
+ y2="129.3468"
+ id="path1082_2_"
+ gradientUnits="userSpaceOnUse"
+ gradientTransform="translate(287,-83)">
+ <stop
+ id="stop193"
+ style="stop-color:#ffffff;stop-opacity:0"
+ offset="0" />
+ <stop
+ id="stop195"
+ style="stop-color:#ffffff;stop-opacity:0.27450982"
+ offset="1" />
+ <midPointStop
+ offset="0"
+ style="stop-color:#FFFFFF"
+ id="midPointStop197" />
+ <midPointStop
+ offset="0.5"
+ style="stop-color:#FFFFFF"
+ id="midPointStop199" />
+ <midPointStop
+ offset="1"
+ style="stop-color:#000000"
+ id="midPointStop201" />
+ </linearGradient>
+ <linearGradient
+ x1="541.33502"
+ y1="104.50665"
+ x2="606.91248"
+ y2="303.14029"
+ id="linearGradient2544"
+ xlink:href="#path1082_2_"
+ gradientUnits="userSpaceOnUse"
+ gradientTransform="matrix(-0.3937741,0,0,0.393752,357.51969,122.00151)" />
+ <linearGradient
+ id="linearGradient3388">
+ <stop
+ id="stop3390"
+ style="stop-color:#000000;stop-opacity:0"
+ offset="0" />
+ <stop
+ id="stop3392"
+ style="stop-color:#000000;stop-opacity:0.37113401"
+ offset="1" />
+ </linearGradient>
+ <linearGradient
+ x1="490.72305"
+ y1="237.72447"
+ x2="490.72305"
+ y2="183.9644"
+ id="linearGradient4416"
+ xlink:href="#linearGradient3388"
+ gradientUnits="userSpaceOnUse"
+ gradientTransform="matrix(0.749107,0,0,0.749107,-35.459862,91.44108)" />
+ </defs>
+ <g
+ transform="translate(-34.777313,-129.80241)"
+ id="layer1">
+ <g
+ transform="matrix(0.8746356,0,0,0.8746356,14.730518,23.408954)"
+ id="g2424"
+ style="fill:#000000">
+ <g
+ transform="matrix(0.6378586,0,0,0.6378586,36.486487,2.17139)"
+ id="g2809"
+ style="fill:#000000;fill-opacity:1">
+ <path
+ d="m 339.96875,309.09375 c -14.47141,-0.0239 -26.4812,2.94367 -31.125,4.5625 l -4.78125,25.8125 c -0.0116,0.0951 23.79543,-6.34855 34.28125,-5.96875 17.36158,0.62381 18.95948,6.63541 18.65625,14.75 0.29595,0.47462 -4.47933,-7.33192 -19.5,-7.59375 -18.94961,-0.32687 -45.69284,6.70947 -45.65625,35.3125 -0.51086,32.17412 24.03361,41.63882 40.75,41.8125 15.02821,-0.27364 22.0777,-5.69136 25.9375,-8.59375 5.07124,-5.30236 10.87308,-10.63447 16.40625,-17.03125 -5.23567,9.51278 -9.77472,16.0898 -14.5,21.125 l 0,4.25 22.84375,-3.84375 0.15625,-62.09375 c -0.23141,-8.78839 5.04123,-42.41827 -43.46875,-42.5 z m -3.28125,54.0625 c 9.46889,0.12995 20.32788,4.79708 20.34375,16.03125 0.049,10.21821 -12.80005,15.71183 -21.15625,15.625 -8.35976,-0.0868 -19.45093,-6.56982 -19.5,-16.53125 0.16016,-8.90444 10.45953,-15.35418 20.3125,-15.125 z"
+ id="path2284"
+ style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 398.50106,314.83145 -0.15505,102.82693 26.61213,-5.12724 0.0449,-58.30157 c 0.006,-8.68089 12.40554,-18.82451 27.9627,-18.66287 3.30202,-5.97408 9.5087,-21.24219 11.02088,-24.71514 -34.75649,-0.0833 -35.19897,9.98993 -41.24398,14.94517 -0.0631,-9.45285 -0.0213,-15.12741 -0.0213,-15.12741 l -24.2202,4.16213 z"
+ id="path2286"
+ style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 548.2688,328.33058 c -0.25696,-0.12068 -13.87938,-15.93419 -41.26638,-16.0589 -25.65249,-0.42638 -54.42578,9.51895 -54.88631,52.5328 0.22457,37.81852 27.6402,52.59809 55.0314,52.88627 29.31292,0.30451 40.97654,-18.32947 41.67615,-18.79124 -3.49762,-3.0321 -16.59792,-16.0131 -16.59792,-16.0131 0,0 -8.18236,11.65102 -24.05802,11.79913 -15.87942,0.1512 -29.68245,-12.27325 -29.87805,-29.60905 -0.20349,-17.33595 12.68881,-26.72821 29.99725,-27.48687 14.98466,-0.003 23.6297,9.67334 23.6297,9.67334 l 16.35218,-18.93238 z"
+ id="path2288"
+ style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 581.8125,278.84375 -25.125,5.90625 0.1875,133.9375 24.75,-4.46875 0.28125,-63.03125 c 0.0529,-6.60927 9.56127,-16.75916 25.4375,-16.4375 15.17973,0.15775 18.57236,10.11767 18.53125,11.375 l 0.4375,72.96875 24.40625,-4.3125 0.0937,-77.375 c 0.1607,-7.44539 -16.30833,-23.16954 -42.78125,-23.28125 -12.58087,0.0202 -19.54815,2.86825 -23.09375,4.96875 -6.06656,4.68565 -12.9998,9.17543 -19.8125,14.90625 6.29809,-8.09099 11.58551,-13.68516 16.75,-17.84375 l -0.0625,-37.3125 z"
+ id="path2290"
+ style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ </g>
+ <g
+ transform="matrix(0.9443373,0,0.01336345,0.9443373,78.345657,-412.48879)"
+ id="g5326"
+ style="fill:#000000;fill-opacity:1;stroke:none">
+ <path
+ d="m 400.67581,629.79609 7.68167,-1.91575 -0.92851,91.20792 -7.79574,1.32426 1.04258,-90.61643 z"
+ id="path2292"
+ style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 421.10266,657.01757 6.75064,-2.9867 -0.86808,65.39931 -6.49779,1.33915 0.61523,-63.75176 z m -1.26059,-23.58316 5.47167,-4.41533 4.42261,4.99952 -5.47558,4.53221 -4.4187,-5.1164 z"
+ id="path2294"
+ style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 440.44273,655.82614 7.67755,-1.56201 -0.1573,13.6722 c -0.007,0.58717 4.4194,-15.27364 24.68502,-14.92094 19.67986,0.10952 22.68401,15.34634 22.5291,18.76237 l -0.43759,48.0783 -6.73044,1.45631 0.63316,-47.489 c 0.0974,-1.38684 -2.88144,-13.11441 -16.78906,-13.15754 -13.90509,-0.0404 -23.68364,10.10048 -23.75821,16.57937 l -0.48127,41.83477 -7.80388,2.0313 0.63292,-65.28513 z"
+ id="path2296"
+ style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 561.53301,720.20203 -7.6776,1.56186 0.15737,-13.67198 c 0.007,-0.58742 -4.42201,15.27361 -24.68504,14.92086 -19.67983,-0.10944 -22.68399,-15.34626 -22.52908,-18.76229 l 0.43757,-48.07861 8.15674,-1.64226 -0.54644,47.48988 c -0.0149,1.29682 1.36845,13.29979 15.27604,13.3426 13.90511,0.0405 23.76622,-8.37359 24.01453,-21.04416 l 0.43105,-37.46902 7.5978,-1.93195 -0.63294,65.28507 z"
+ id="path2298"
+ style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 577.45461,655.28678 -5.42715,4.20017 20.19894,26.93328 -22.39092,31.11622 5.63499,4.226 21.04365,-28.8967 20.8779,29.58159 5.32727,-4.20103 -22.37578,-31.62866 18.56963,-25.5775 -5.53193,-4.73429 -16.92109,23.66778 -19.00551,-24.68686 z"
+ id="path2300"
+ style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ </g>
+ <path
+ d="m 105.8125,16.625 c -7.39687,18.135158 -11.858304,29.997682 -20.09375,47.59375 5.04936,5.35232 11.247211,11.585364 21.3125,18.625 C 96.210077,78.390904 88.828713,73.920352 83.3125,69.28125 72.7727,91.274163 56.259864,122.60209 22.75,182.8125 49.087628,167.60733 69.504089,158.23318 88.53125,154.65625 87.714216,151.1422 87.2497,147.34107 87.28125,143.375 l 0.03125,-0.84375 c 0.417917,-16.87382 9.195665,-29.84979 19.59375,-28.96875 10.39809,0.88104 18.48041,15.28242 18.0625,32.15625 -0.0786,3.17512 -0.43674,6.22955 -1.0625,9.0625 18.82058,3.68164 39.01873,13.03179 65,28.03125 -5.123,-9.4318 -9.69572,-17.93388 -14.0625,-26.03125 -6.87839,-5.33121 -14.05289,-12.2698 -28.6875,-19.78125 10.05899,2.61375 17.2611,5.62932 22.875,9 C 124.63297,63.338161 121.03766,52.354109 105.8125,16.625 z"
+ transform="matrix(1.1433333,0,0,1.1433333,22.920168,121.64318)"
+ id="path2518"
+ style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <g
+ id="text2634"
+ style="font-size:8.44138241px;font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;font-family:DejaVu Sans Mono">
+ <path
+ d="m 685.46692,263.83624 0,-5.32944 -1.99082,0 0,-0.71307 4.7895,0 0,0.71307 -1.99906,0 0,5.32944 -0.79962,0"
+ id="path4714" />
+ <path
+ d="m 689.0982,263.83624 0,-6.04251 1.20355,0 1.43026,4.2784 c 0.13189,0.39843 0.22806,0.69658 0.28852,0.89442 0.0687,-0.21983 0.17586,-0.5427 0.3215,-0.96862 l 1.44674,-4.2042 1.07578,0 0,6.04251 -0.77077,0 0,-5.05741 -1.75587,5.05741 -0.72131,0 -1.74763,-5.14396 0,5.14396 -0.77077,0"
+ id="path4716" />
+ </g>
+ <g
+ id="text2638"
+ style="font-size:8.25130367px;font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;font-family:DejaVu Sans Mono">
+ <path
+ d="m 239.84053,313.69965 0,-5.20945 -1.94598,0 0,-0.697 4.68164,0 0,0.697 -1.95404,0 0,5.20945 -0.78162,0"
+ id="path4719" />
+ <path
+ d="m 243.39004,313.69965 0,-5.90645 1.17646,0 1.39805,4.18205 c 0.12892,0.38947 0.22293,0.6809 0.28202,0.87429 0.0671,-0.21488 0.1719,-0.53048 0.31426,-0.94681 l 1.41417,-4.10953 1.05155,0 0,5.90645 -0.75341,0 0,-4.94353 -1.71634,4.94353 -0.70506,0 -1.70828,-5.02814 0,5.02814 -0.75342,0"
+ id="path4721" />
+ </g>
+ </g>
+ </g>
+</svg>
diff --git a/media/logos/archlinux-logo-dark-1200dpi.png b/media/logos/archlinux-logo-dark-1200dpi.png
new file mode 100644
index 00000000..24a5cefa
--- /dev/null
+++ b/media/logos/archlinux-logo-dark-1200dpi.png
Binary files differ
diff --git a/media/logos/archlinux-logo-dark-90dpi.png b/media/logos/archlinux-logo-dark-90dpi.png
new file mode 100644
index 00000000..f3757c61
--- /dev/null
+++ b/media/logos/archlinux-logo-dark-90dpi.png
Binary files differ
diff --git a/media/logos/archlinux-logo-dark-scalable.svg b/media/logos/archlinux-logo-dark-scalable.svg
new file mode 100644
index 00000000..5a80cc4d
--- /dev/null
+++ b/media/logos/archlinux-logo-dark-scalable.svg
@@ -0,0 +1,156 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:xlink="http://www.w3.org/1999/xlink"
+ version="1.0"
+ width="600"
+ height="199.41692"
+ id="svg2424">
+ <defs
+ id="defs2426">
+ <linearGradient
+ x1="112.49854"
+ y1="6.1372099"
+ x2="112.49853"
+ y2="129.3468"
+ id="path1082_2_"
+ gradientUnits="userSpaceOnUse"
+ gradientTransform="translate(287,-83)">
+ <stop
+ id="stop193"
+ style="stop-color:#ffffff;stop-opacity:0"
+ offset="0" />
+ <stop
+ id="stop195"
+ style="stop-color:#ffffff;stop-opacity:0.27450982"
+ offset="1" />
+ <midPointStop
+ offset="0"
+ style="stop-color:#FFFFFF"
+ id="midPointStop197" />
+ <midPointStop
+ offset="0.5"
+ style="stop-color:#FFFFFF"
+ id="midPointStop199" />
+ <midPointStop
+ offset="1"
+ style="stop-color:#000000"
+ id="midPointStop201" />
+ </linearGradient>
+ <linearGradient
+ x1="541.33502"
+ y1="104.50665"
+ x2="606.91248"
+ y2="303.14029"
+ id="linearGradient2544"
+ xlink:href="#path1082_2_"
+ gradientUnits="userSpaceOnUse"
+ gradientTransform="matrix(-0.3937741,0,0,0.393752,357.51969,122.00151)" />
+ <linearGradient
+ id="linearGradient3388">
+ <stop
+ id="stop3390"
+ style="stop-color:#000000;stop-opacity:0"
+ offset="0" />
+ <stop
+ id="stop3392"
+ style="stop-color:#000000;stop-opacity:0.37113401"
+ offset="1" />
+ </linearGradient>
+ <linearGradient
+ x1="490.72305"
+ y1="237.72447"
+ x2="490.72305"
+ y2="183.9644"
+ id="linearGradient4416"
+ xlink:href="#linearGradient3388"
+ gradientUnits="userSpaceOnUse"
+ gradientTransform="matrix(0.749107,0,0,0.749107,-35.459862,91.44108)" />
+ </defs>
+ <g
+ transform="translate(-34.777313,-129.80241)"
+ id="layer1">
+ <g
+ transform="matrix(0.8746356,0,0,0.8746356,14.730518,23.408954)"
+ id="g2424">
+ <g
+ transform="matrix(0.6378586,0,0,0.6378586,36.486487,2.17139)"
+ id="g2809"
+ style="fill:#4d4d4d;fill-opacity:1">
+ <path
+ d="m 339.96875,309.09375 c -14.47141,-0.0239 -26.4812,2.94367 -31.125,4.5625 l -4.78125,25.8125 c -0.0116,0.0951 23.79543,-6.34855 34.28125,-5.96875 17.36158,0.62381 18.95948,6.63541 18.65625,14.75 0.29595,0.47462 -4.47933,-7.33192 -19.5,-7.59375 -18.94961,-0.32687 -45.69284,6.70947 -45.65625,35.3125 -0.51086,32.17412 24.03361,41.63882 40.75,41.8125 15.02821,-0.27364 22.0777,-5.69136 25.9375,-8.59375 5.07124,-5.30236 10.87308,-10.63447 16.40625,-17.03125 -5.23567,9.51278 -9.77472,16.0898 -14.5,21.125 l 0,4.25 22.84375,-3.84375 0.15625,-62.09375 c -0.23141,-8.78839 5.04123,-42.41827 -43.46875,-42.5 z m -3.28125,54.0625 c 9.46889,0.12995 20.32788,4.79708 20.34375,16.03125 0.049,10.21821 -12.80005,15.71183 -21.15625,15.625 -8.35976,-0.0868 -19.45093,-6.56982 -19.5,-16.53125 0.16016,-8.90444 10.45953,-15.35418 20.3125,-15.125 z"
+ id="path2284"
+ style="fill:#4d4d4d;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 398.50106,314.83145 -0.15505,102.82693 26.61213,-5.12724 0.0449,-58.30157 c 0.006,-8.68089 12.40554,-18.82451 27.9627,-18.66287 3.30202,-5.97408 9.5087,-21.24219 11.02088,-24.71514 -34.75649,-0.0833 -35.19897,9.98993 -41.24398,14.94517 -0.0631,-9.45285 -0.0213,-15.12741 -0.0213,-15.12741 l -24.2202,4.16213 z"
+ id="path2286"
+ style="fill:#4d4d4d;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 548.2688,328.33058 c -0.25696,-0.12068 -13.87938,-15.93419 -41.26638,-16.0589 -25.65249,-0.42638 -54.42578,9.51895 -54.88631,52.5328 0.22457,37.81852 27.6402,52.59809 55.0314,52.88627 29.31292,0.30451 40.97654,-18.32947 41.67615,-18.79124 -3.49762,-3.0321 -16.59792,-16.0131 -16.59792,-16.0131 0,0 -8.18236,11.65102 -24.05802,11.79913 -15.87942,0.1512 -29.68245,-12.27325 -29.87805,-29.60905 -0.20349,-17.33595 12.68881,-26.72821 29.99725,-27.48687 14.98466,-0.003 23.6297,9.67334 23.6297,9.67334 l 16.35218,-18.93238 z"
+ id="path2288"
+ style="fill:#4d4d4d;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 581.8125,278.84375 -25.125,5.90625 0.1875,133.9375 24.75,-4.46875 0.28125,-63.03125 c 0.0529,-6.60927 9.56127,-16.75916 25.4375,-16.4375 15.17973,0.15775 18.57236,10.11767 18.53125,11.375 l 0.4375,72.96875 24.40625,-4.3125 0.0937,-77.375 c 0.1607,-7.44539 -16.30833,-23.16954 -42.78125,-23.28125 -12.58087,0.0202 -19.54815,2.86825 -23.09375,4.96875 -6.06656,4.68565 -12.9998,9.17543 -19.8125,14.90625 6.29809,-8.09099 11.58551,-13.68516 16.75,-17.84375 l -0.0625,-37.3125 z"
+ id="path2290"
+ style="fill:#4d4d4d;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ </g>
+ <g
+ transform="matrix(0.9443373,0,0.01336345,0.9443373,78.345657,-412.48879)"
+ id="g5326"
+ style="fill:#1793d1;fill-opacity:1;stroke:none">
+ <path
+ d="m 400.67581,629.79609 7.68167,-1.91575 -0.92851,91.20792 -7.79574,1.32426 1.04258,-90.61643 z"
+ id="path2292"
+ style="fill:#1793d1;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 421.10266,657.01757 6.75064,-2.9867 -0.86808,65.39931 -6.49779,1.33915 0.61523,-63.75176 z m -1.26059,-23.58316 5.47167,-4.41533 4.42261,4.99952 -5.47558,4.53221 -4.4187,-5.1164 z"
+ id="path2294"
+ style="fill:#1793d1;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 440.44273,655.82614 7.67755,-1.56201 -0.1573,13.6722 c -0.007,0.58717 4.4194,-15.27364 24.68502,-14.92094 19.67986,0.10952 22.68401,15.34634 22.5291,18.76237 l -0.43759,48.0783 -6.73044,1.45631 0.63316,-47.489 c 0.0974,-1.38684 -2.88144,-13.11441 -16.78906,-13.15754 -13.90509,-0.0404 -23.68364,10.10048 -23.75821,16.57937 l -0.48127,41.83477 -7.80388,2.0313 0.63292,-65.28513 z"
+ id="path2296"
+ style="fill:#1793d1;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 561.53301,720.20203 -7.6776,1.56186 0.15737,-13.67198 c 0.007,-0.58742 -4.42201,15.27361 -24.68504,14.92086 -19.67983,-0.10944 -22.68399,-15.34626 -22.52908,-18.76229 l 0.43757,-48.07861 8.15674,-1.64226 -0.54644,47.48988 c -0.0149,1.29682 1.36845,13.29979 15.27604,13.3426 13.90511,0.0405 23.76622,-8.37359 24.01453,-21.04416 l 0.43105,-37.46902 7.5978,-1.93195 -0.63294,65.28507 z"
+ id="path2298"
+ style="fill:#1793d1;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 577.45461,655.28678 -5.42715,4.20017 20.19894,26.93328 -22.39092,31.11622 5.63499,4.226 21.04365,-28.8967 20.8779,29.58159 5.32727,-4.20103 -22.37578,-31.62866 18.56963,-25.5775 -5.53193,-4.73429 -16.92109,23.66778 -19.00551,-24.68686 z"
+ id="path2300"
+ style="fill:#1793d1;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ </g>
+ <path
+ d="m 105.8125,16.625 c -7.39687,18.135158 -11.858304,29.997682 -20.09375,47.59375 5.04936,5.35232 11.247211,11.585364 21.3125,18.625 C 96.210077,78.390904 88.828713,73.920352 83.3125,69.28125 72.7727,91.274163 56.259864,122.60209 22.75,182.8125 49.087628,167.60733 69.504089,158.23318 88.53125,154.65625 87.714216,151.1422 87.2497,147.34107 87.28125,143.375 l 0.03125,-0.84375 c 0.417917,-16.87382 9.195665,-29.84979 19.59375,-28.96875 10.39809,0.88104 18.48041,15.28242 18.0625,32.15625 -0.0786,3.17512 -0.43674,6.22955 -1.0625,9.0625 18.82058,3.68164 39.01873,13.03179 65,28.03125 -5.123,-9.4318 -9.69572,-17.93388 -14.0625,-26.03125 -6.87839,-5.33121 -14.05289,-12.2698 -28.6875,-19.78125 10.05899,2.61375 17.2611,5.62932 22.875,9 C 124.63297,63.338161 121.03766,52.354109 105.8125,16.625 z"
+ transform="matrix(1.1433333,0,0,1.1433333,22.920168,121.64318)"
+ id="path2518"
+ style="fill:#1793d1;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <g
+ id="text2634"
+ style="font-size:8.44138241px;font-style:normal;font-weight:normal;fill:#1793d1;fill-opacity:1;stroke:none;font-family:DejaVu Sans Mono">
+ <path
+ d="m 685.46692,263.83624 0,-5.32944 -1.99082,0 0,-0.71307 4.7895,0 0,0.71307 -1.99906,0 0,5.32944 -0.79962,0"
+ id="path3945"
+ style="fill:#1793d1;fill-opacity:1" />
+ <path
+ d="m 689.0982,263.83624 0,-6.04251 1.20355,0 1.43026,4.2784 c 0.13189,0.39843 0.22806,0.69658 0.28852,0.89442 0.0687,-0.21983 0.17586,-0.5427 0.3215,-0.96862 l 1.44674,-4.2042 1.07578,0 0,6.04251 -0.77077,0 0,-5.05741 -1.75587,5.05741 -0.72131,0 -1.74763,-5.14396 0,5.14396 -0.77077,0"
+ id="path3947"
+ style="fill:#1793d1;fill-opacity:1" />
+ </g>
+ <g
+ id="text2638"
+ style="font-size:8.25130367px;font-style:normal;font-weight:normal;fill:#1793d1;fill-opacity:1;stroke:none;font-family:DejaVu Sans Mono">
+ <path
+ d="m 239.84053,313.69965 0,-5.20945 -1.94598,0 0,-0.697 4.68164,0 0,0.697 -1.95404,0 0,5.20945 -0.78162,0"
+ id="path3940"
+ style="fill:#1793d1;fill-opacity:1" />
+ <path
+ d="m 243.39004,313.69965 0,-5.90645 1.17646,0 1.39805,4.18205 c 0.12892,0.38947 0.22293,0.6809 0.28202,0.87429 0.0671,-0.21488 0.1719,-0.53048 0.31426,-0.94681 l 1.41417,-4.10953 1.05155,0 0,5.90645 -0.75341,0 0,-4.94353 -1.71634,4.94353 -0.70506,0 -1.70828,-5.02814 0,5.02814 -0.75342,0"
+ id="path3942"
+ style="fill:#1793d1;fill-opacity:1" />
+ </g>
+ </g>
+ </g>
+</svg>
diff --git a/media/logos/archlinux-logo-light-1200dpi.png b/media/logos/archlinux-logo-light-1200dpi.png
new file mode 100644
index 00000000..79e0a0f1
--- /dev/null
+++ b/media/logos/archlinux-logo-light-1200dpi.png
Binary files differ
diff --git a/media/logos/archlinux-logo-light-90dpi.png b/media/logos/archlinux-logo-light-90dpi.png
new file mode 100644
index 00000000..95803309
--- /dev/null
+++ b/media/logos/archlinux-logo-light-90dpi.png
Binary files differ
diff --git a/media/logos/archlinux-logo-light-scalable.svg b/media/logos/archlinux-logo-light-scalable.svg
new file mode 100644
index 00000000..5fd0716f
--- /dev/null
+++ b/media/logos/archlinux-logo-light-scalable.svg
@@ -0,0 +1,156 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:xlink="http://www.w3.org/1999/xlink"
+ version="1.0"
+ width="600"
+ height="199.41692"
+ id="svg2424">
+ <defs
+ id="defs2426">
+ <linearGradient
+ x1="112.49854"
+ y1="6.1372099"
+ x2="112.49853"
+ y2="129.3468"
+ id="path1082_2_"
+ gradientUnits="userSpaceOnUse"
+ gradientTransform="translate(287,-83)">
+ <stop
+ id="stop193"
+ style="stop-color:#ffffff;stop-opacity:0"
+ offset="0" />
+ <stop
+ id="stop195"
+ style="stop-color:#ffffff;stop-opacity:0.27450982"
+ offset="1" />
+ <midPointStop
+ offset="0"
+ style="stop-color:#FFFFFF"
+ id="midPointStop197" />
+ <midPointStop
+ offset="0.5"
+ style="stop-color:#FFFFFF"
+ id="midPointStop199" />
+ <midPointStop
+ offset="1"
+ style="stop-color:#000000"
+ id="midPointStop201" />
+ </linearGradient>
+ <linearGradient
+ x1="541.33502"
+ y1="104.50665"
+ x2="606.91248"
+ y2="303.14029"
+ id="linearGradient2544"
+ xlink:href="#path1082_2_"
+ gradientUnits="userSpaceOnUse"
+ gradientTransform="matrix(-0.3937741,0,0,0.393752,357.51969,122.00151)" />
+ <linearGradient
+ id="linearGradient3388">
+ <stop
+ id="stop3390"
+ style="stop-color:#000000;stop-opacity:0"
+ offset="0" />
+ <stop
+ id="stop3392"
+ style="stop-color:#000000;stop-opacity:0.37113401"
+ offset="1" />
+ </linearGradient>
+ <linearGradient
+ x1="490.72305"
+ y1="237.72447"
+ x2="490.72305"
+ y2="183.9644"
+ id="linearGradient4416"
+ xlink:href="#linearGradient3388"
+ gradientUnits="userSpaceOnUse"
+ gradientTransform="matrix(0.749107,0,0,0.749107,-35.459862,91.44108)" />
+ </defs>
+ <g
+ transform="translate(-34.777313,-129.80241)"
+ id="layer1">
+ <g
+ transform="matrix(0.8746356,0,0,0.8746356,14.730518,23.408954)"
+ id="g2424">
+ <g
+ transform="matrix(0.6378586,0,0,0.6378586,36.486487,2.17139)"
+ id="g2809"
+ style="fill:#ffffff;fill-opacity:1">
+ <path
+ d="m 339.96875,309.09375 c -14.47141,-0.0239 -26.4812,2.94367 -31.125,4.5625 l -4.78125,25.8125 c -0.0116,0.0951 23.79543,-6.34855 34.28125,-5.96875 17.36158,0.62381 18.95948,6.63541 18.65625,14.75 0.29595,0.47462 -4.47933,-7.33192 -19.5,-7.59375 -18.94961,-0.32687 -45.69284,6.70947 -45.65625,35.3125 -0.51086,32.17412 24.03361,41.63882 40.75,41.8125 15.02821,-0.27364 22.0777,-5.69136 25.9375,-8.59375 5.07124,-5.30236 10.87308,-10.63447 16.40625,-17.03125 -5.23567,9.51278 -9.77472,16.0898 -14.5,21.125 l 0,4.25 22.84375,-3.84375 0.15625,-62.09375 c -0.23141,-8.78839 5.04123,-42.41827 -43.46875,-42.5 z m -3.28125,54.0625 c 9.46889,0.12995 20.32788,4.79708 20.34375,16.03125 0.049,10.21821 -12.80005,15.71183 -21.15625,15.625 -8.35976,-0.0868 -19.45093,-6.56982 -19.5,-16.53125 0.16016,-8.90444 10.45953,-15.35418 20.3125,-15.125 z"
+ id="path2284"
+ style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 398.50106,314.83145 -0.15505,102.82693 26.61213,-5.12724 0.0449,-58.30157 c 0.006,-8.68089 12.40554,-18.82451 27.9627,-18.66287 3.30202,-5.97408 9.5087,-21.24219 11.02088,-24.71514 -34.75649,-0.0833 -35.19897,9.98993 -41.24398,14.94517 -0.0631,-9.45285 -0.0213,-15.12741 -0.0213,-15.12741 l -24.2202,4.16213 z"
+ id="path2286"
+ style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 548.2688,328.33058 c -0.25696,-0.12068 -13.87938,-15.93419 -41.26638,-16.0589 -25.65249,-0.42638 -54.42578,9.51895 -54.88631,52.5328 0.22457,37.81852 27.6402,52.59809 55.0314,52.88627 29.31292,0.30451 40.97654,-18.32947 41.67615,-18.79124 -3.49762,-3.0321 -16.59792,-16.0131 -16.59792,-16.0131 0,0 -8.18236,11.65102 -24.05802,11.79913 -15.87942,0.1512 -29.68245,-12.27325 -29.87805,-29.60905 -0.20349,-17.33595 12.68881,-26.72821 29.99725,-27.48687 14.98466,-0.003 23.6297,9.67334 23.6297,9.67334 l 16.35218,-18.93238 z"
+ id="path2288"
+ style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 581.8125,278.84375 -25.125,5.90625 0.1875,133.9375 24.75,-4.46875 0.28125,-63.03125 c 0.0529,-6.60927 9.56127,-16.75916 25.4375,-16.4375 15.17973,0.15775 18.57236,10.11767 18.53125,11.375 l 0.4375,72.96875 24.40625,-4.3125 0.0937,-77.375 c 0.1607,-7.44539 -16.30833,-23.16954 -42.78125,-23.28125 -12.58087,0.0202 -19.54815,2.86825 -23.09375,4.96875 -6.06656,4.68565 -12.9998,9.17543 -19.8125,14.90625 6.29809,-8.09099 11.58551,-13.68516 16.75,-17.84375 l -0.0625,-37.3125 z"
+ id="path2290"
+ style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ </g>
+ <g
+ transform="matrix(0.9443373,0,0.01336345,0.9443373,78.345657,-412.48879)"
+ id="g5326"
+ style="fill:#1793d1;fill-opacity:1;stroke:none">
+ <path
+ d="m 400.67581,629.79609 7.68167,-1.91575 -0.92851,91.20792 -7.79574,1.32426 1.04258,-90.61643 z"
+ id="path2292"
+ style="fill:#1793d1;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 421.10266,657.01757 6.75064,-2.9867 -0.86808,65.39931 -6.49779,1.33915 0.61523,-63.75176 z m -1.26059,-23.58316 5.47167,-4.41533 4.42261,4.99952 -5.47558,4.53221 -4.4187,-5.1164 z"
+ id="path2294"
+ style="fill:#1793d1;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 440.44273,655.82614 7.67755,-1.56201 -0.1573,13.6722 c -0.007,0.58717 4.4194,-15.27364 24.68502,-14.92094 19.67986,0.10952 22.68401,15.34634 22.5291,18.76237 l -0.43759,48.0783 -6.73044,1.45631 0.63316,-47.489 c 0.0974,-1.38684 -2.88144,-13.11441 -16.78906,-13.15754 -13.90509,-0.0404 -23.68364,10.10048 -23.75821,16.57937 l -0.48127,41.83477 -7.80388,2.0313 0.63292,-65.28513 z"
+ id="path2296"
+ style="fill:#1793d1;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 561.53301,720.20203 -7.6776,1.56186 0.15737,-13.67198 c 0.007,-0.58742 -4.42201,15.27361 -24.68504,14.92086 -19.67983,-0.10944 -22.68399,-15.34626 -22.52908,-18.76229 l 0.43757,-48.07861 8.15674,-1.64226 -0.54644,47.48988 c -0.0149,1.29682 1.36845,13.29979 15.27604,13.3426 13.90511,0.0405 23.76622,-8.37359 24.01453,-21.04416 l 0.43105,-37.46902 7.5978,-1.93195 -0.63294,65.28507 z"
+ id="path2298"
+ style="fill:#1793d1;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 577.45461,655.28678 -5.42715,4.20017 20.19894,26.93328 -22.39092,31.11622 5.63499,4.226 21.04365,-28.8967 20.8779,29.58159 5.32727,-4.20103 -22.37578,-31.62866 18.56963,-25.5775 -5.53193,-4.73429 -16.92109,23.66778 -19.00551,-24.68686 z"
+ id="path2300"
+ style="fill:#1793d1;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ </g>
+ <path
+ d="m 105.8125,16.625 c -7.39687,18.135158 -11.858304,29.997682 -20.09375,47.59375 5.04936,5.35232 11.247211,11.585364 21.3125,18.625 C 96.210077,78.390904 88.828713,73.920352 83.3125,69.28125 72.7727,91.274163 56.259864,122.60209 22.75,182.8125 49.087628,167.60733 69.504089,158.23318 88.53125,154.65625 87.714216,151.1422 87.2497,147.34107 87.28125,143.375 l 0.03125,-0.84375 c 0.417917,-16.87382 9.195665,-29.84979 19.59375,-28.96875 10.39809,0.88104 18.48041,15.28242 18.0625,32.15625 -0.0786,3.17512 -0.43674,6.22955 -1.0625,9.0625 18.82058,3.68164 39.01873,13.03179 65,28.03125 -5.123,-9.4318 -9.69572,-17.93388 -14.0625,-26.03125 -6.87839,-5.33121 -14.05289,-12.2698 -28.6875,-19.78125 10.05899,2.61375 17.2611,5.62932 22.875,9 C 124.63297,63.338161 121.03766,52.354109 105.8125,16.625 z"
+ transform="matrix(1.1433333,0,0,1.1433333,22.920168,121.64318)"
+ id="path2518"
+ style="fill:#1793d1;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <g
+ id="text2634"
+ style="font-size:8.44138241px;font-style:normal;font-weight:normal;fill:#ffffff;fill-opacity:1;stroke:none;font-family:DejaVu Sans Mono">
+ <path
+ d="m 685.46692,263.83624 0,-5.32944 -1.99082,0 0,-0.71307 4.7895,0 0,0.71307 -1.99906,0 0,5.32944 -0.79962,0"
+ id="path3660"
+ style="fill:#ffffff;fill-opacity:1" />
+ <path
+ d="m 689.0982,263.83624 0,-6.04251 1.20355,0 1.43026,4.2784 c 0.13189,0.39843 0.22806,0.69658 0.28852,0.89442 0.0687,-0.21983 0.17586,-0.5427 0.3215,-0.96862 l 1.44674,-4.2042 1.07578,0 0,6.04251 -0.77077,0 0,-5.05741 -1.75587,5.05741 -0.72131,0 -1.74763,-5.14396 0,5.14396 -0.77077,0"
+ id="path3662"
+ style="fill:#ffffff;fill-opacity:1" />
+ </g>
+ <g
+ id="text2638"
+ style="font-size:8.25130367px;font-style:normal;font-weight:normal;fill:#ffffff;fill-opacity:1;stroke:none;font-family:DejaVu Sans Mono">
+ <path
+ d="m 239.84053,313.69965 0,-5.20945 -1.94598,0 0,-0.697 4.68164,0 0,0.697 -1.95404,0 0,5.20945 -0.78162,0"
+ id="path2883"
+ style="fill:#ffffff;fill-opacity:1" />
+ <path
+ d="m 243.39004,313.69965 0,-5.90645 1.17646,0 1.39805,4.18205 c 0.12892,0.38947 0.22293,0.6809 0.28202,0.87429 0.0671,-0.21488 0.1719,-0.53048 0.31426,-0.94681 l 1.41417,-4.10953 1.05155,0 0,5.90645 -0.75341,0 0,-4.94353 -1.71634,4.94353 -0.70506,0 -1.70828,-5.02814 0,5.02814 -0.75342,0"
+ id="path2885"
+ style="fill:#ffffff;fill-opacity:1" />
+ </g>
+ </g>
+ </g>
+</svg>
diff --git a/media/logos/archlinux-logo-white-1200dpi.png b/media/logos/archlinux-logo-white-1200dpi.png
new file mode 100644
index 00000000..50e700cf
--- /dev/null
+++ b/media/logos/archlinux-logo-white-1200dpi.png
Binary files differ
diff --git a/media/logos/archlinux-logo-white-90dpi.png b/media/logos/archlinux-logo-white-90dpi.png
new file mode 100644
index 00000000..86679601
--- /dev/null
+++ b/media/logos/archlinux-logo-white-90dpi.png
Binary files differ
diff --git a/media/logos/archlinux-logo-white-scalable.svg b/media/logos/archlinux-logo-white-scalable.svg
new file mode 100644
index 00000000..70eb2dfe
--- /dev/null
+++ b/media/logos/archlinux-logo-white-scalable.svg
@@ -0,0 +1,157 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:xlink="http://www.w3.org/1999/xlink"
+ version="1.0"
+ width="600"
+ height="199.41692"
+ id="svg2424">
+ <defs
+ id="defs2426">
+ <linearGradient
+ x1="112.49854"
+ y1="6.1372099"
+ x2="112.49853"
+ y2="129.3468"
+ id="path1082_2_"
+ gradientUnits="userSpaceOnUse"
+ gradientTransform="translate(287,-83)">
+ <stop
+ id="stop193"
+ style="stop-color:#ffffff;stop-opacity:0"
+ offset="0" />
+ <stop
+ id="stop195"
+ style="stop-color:#ffffff;stop-opacity:0.27450982"
+ offset="1" />
+ <midPointStop
+ offset="0"
+ style="stop-color:#FFFFFF"
+ id="midPointStop197" />
+ <midPointStop
+ offset="0.5"
+ style="stop-color:#FFFFFF"
+ id="midPointStop199" />
+ <midPointStop
+ offset="1"
+ style="stop-color:#000000"
+ id="midPointStop201" />
+ </linearGradient>
+ <linearGradient
+ x1="541.33502"
+ y1="104.50665"
+ x2="606.91248"
+ y2="303.14029"
+ id="linearGradient2544"
+ xlink:href="#path1082_2_"
+ gradientUnits="userSpaceOnUse"
+ gradientTransform="matrix(-0.3937741,0,0,0.393752,357.51969,122.00151)" />
+ <linearGradient
+ id="linearGradient3388">
+ <stop
+ id="stop3390"
+ style="stop-color:#000000;stop-opacity:0"
+ offset="0" />
+ <stop
+ id="stop3392"
+ style="stop-color:#000000;stop-opacity:0.37113401"
+ offset="1" />
+ </linearGradient>
+ <linearGradient
+ x1="490.72305"
+ y1="237.72447"
+ x2="490.72305"
+ y2="183.9644"
+ id="linearGradient4416"
+ xlink:href="#linearGradient3388"
+ gradientUnits="userSpaceOnUse"
+ gradientTransform="matrix(0.749107,0,0,0.749107,-35.459862,91.44108)" />
+ </defs>
+ <g
+ transform="translate(-34.777313,-129.80241)"
+ id="layer1">
+ <g
+ transform="matrix(0.8746356,0,0,0.8746356,14.730518,23.408954)"
+ id="g2424"
+ style="fill:#1a1a1a">
+ <g
+ transform="matrix(0.6378586,0,0,0.6378586,36.486487,2.17139)"
+ id="g2809"
+ style="fill:#ffffff;fill-opacity:1">
+ <path
+ d="m 339.96875,309.09375 c -14.47141,-0.0239 -26.4812,2.94367 -31.125,4.5625 l -4.78125,25.8125 c -0.0116,0.0951 23.79543,-6.34855 34.28125,-5.96875 17.36158,0.62381 18.95948,6.63541 18.65625,14.75 0.29595,0.47462 -4.47933,-7.33192 -19.5,-7.59375 -18.94961,-0.32687 -45.69284,6.70947 -45.65625,35.3125 -0.51086,32.17412 24.03361,41.63882 40.75,41.8125 15.02821,-0.27364 22.0777,-5.69136 25.9375,-8.59375 5.07124,-5.30236 10.87308,-10.63447 16.40625,-17.03125 -5.23567,9.51278 -9.77472,16.0898 -14.5,21.125 l 0,4.25 22.84375,-3.84375 0.15625,-62.09375 c -0.23141,-8.78839 5.04123,-42.41827 -43.46875,-42.5 z m -3.28125,54.0625 c 9.46889,0.12995 20.32788,4.79708 20.34375,16.03125 0.049,10.21821 -12.80005,15.71183 -21.15625,15.625 -8.35976,-0.0868 -19.45093,-6.56982 -19.5,-16.53125 0.16016,-8.90444 10.45953,-15.35418 20.3125,-15.125 z"
+ id="path2284"
+ style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 398.50106,314.83145 -0.15505,102.82693 26.61213,-5.12724 0.0449,-58.30157 c 0.006,-8.68089 12.40554,-18.82451 27.9627,-18.66287 3.30202,-5.97408 9.5087,-21.24219 11.02088,-24.71514 -34.75649,-0.0833 -35.19897,9.98993 -41.24398,14.94517 -0.0631,-9.45285 -0.0213,-15.12741 -0.0213,-15.12741 l -24.2202,4.16213 z"
+ id="path2286"
+ style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 548.2688,328.33058 c -0.25696,-0.12068 -13.87938,-15.93419 -41.26638,-16.0589 -25.65249,-0.42638 -54.42578,9.51895 -54.88631,52.5328 0.22457,37.81852 27.6402,52.59809 55.0314,52.88627 29.31292,0.30451 40.97654,-18.32947 41.67615,-18.79124 -3.49762,-3.0321 -16.59792,-16.0131 -16.59792,-16.0131 0,0 -8.18236,11.65102 -24.05802,11.79913 -15.87942,0.1512 -29.68245,-12.27325 -29.87805,-29.60905 -0.20349,-17.33595 12.68881,-26.72821 29.99725,-27.48687 14.98466,-0.003 23.6297,9.67334 23.6297,9.67334 l 16.35218,-18.93238 z"
+ id="path2288"
+ style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 581.8125,278.84375 -25.125,5.90625 0.1875,133.9375 24.75,-4.46875 0.28125,-63.03125 c 0.0529,-6.60927 9.56127,-16.75916 25.4375,-16.4375 15.17973,0.15775 18.57236,10.11767 18.53125,11.375 l 0.4375,72.96875 24.40625,-4.3125 0.0937,-77.375 c 0.1607,-7.44539 -16.30833,-23.16954 -42.78125,-23.28125 -12.58087,0.0202 -19.54815,2.86825 -23.09375,4.96875 -6.06656,4.68565 -12.9998,9.17543 -19.8125,14.90625 6.29809,-8.09099 11.58551,-13.68516 16.75,-17.84375 l -0.0625,-37.3125 z"
+ id="path2290"
+ style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ </g>
+ <g
+ transform="matrix(0.9443373,0,0.01336345,0.9443373,78.345657,-412.48879)"
+ id="g5326"
+ style="fill:#ffffff;fill-opacity:1;stroke:none">
+ <path
+ d="m 400.67581,629.79609 7.68167,-1.91575 -0.92851,91.20792 -7.79574,1.32426 1.04258,-90.61643 z"
+ id="path2292"
+ style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 421.10266,657.01757 6.75064,-2.9867 -0.86808,65.39931 -6.49779,1.33915 0.61523,-63.75176 z m -1.26059,-23.58316 5.47167,-4.41533 4.42261,4.99952 -5.47558,4.53221 -4.4187,-5.1164 z"
+ id="path2294"
+ style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 440.44273,655.82614 7.67755,-1.56201 -0.1573,13.6722 c -0.007,0.58717 4.4194,-15.27364 24.68502,-14.92094 19.67986,0.10952 22.68401,15.34634 22.5291,18.76237 l -0.43759,48.0783 -6.73044,1.45631 0.63316,-47.489 c 0.0974,-1.38684 -2.88144,-13.11441 -16.78906,-13.15754 -13.90509,-0.0404 -23.68364,10.10048 -23.75821,16.57937 l -0.48127,41.83477 -7.80388,2.0313 0.63292,-65.28513 z"
+ id="path2296"
+ style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 561.53301,720.20203 -7.6776,1.56186 0.15737,-13.67198 c 0.007,-0.58742 -4.42201,15.27361 -24.68504,14.92086 -19.67983,-0.10944 -22.68399,-15.34626 -22.52908,-18.76229 l 0.43757,-48.07861 8.15674,-1.64226 -0.54644,47.48988 c -0.0149,1.29682 1.36845,13.29979 15.27604,13.3426 13.90511,0.0405 23.76622,-8.37359 24.01453,-21.04416 l 0.43105,-37.46902 7.5978,-1.93195 -0.63294,65.28507 z"
+ id="path2298"
+ style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <path
+ d="m 577.45461,655.28678 -5.42715,4.20017 20.19894,26.93328 -22.39092,31.11622 5.63499,4.226 21.04365,-28.8967 20.8779,29.58159 5.32727,-4.20103 -22.37578,-31.62866 18.56963,-25.5775 -5.53193,-4.73429 -16.92109,23.66778 -19.00551,-24.68686 z"
+ id="path2300"
+ style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ </g>
+ <path
+ d="m 105.8125,16.625 c -7.39687,18.135158 -11.858304,29.997682 -20.09375,47.59375 5.04936,5.35232 11.247211,11.585364 21.3125,18.625 C 96.210077,78.390904 88.828713,73.920352 83.3125,69.28125 72.7727,91.274163 56.259864,122.60209 22.75,182.8125 49.087628,167.60733 69.504089,158.23318 88.53125,154.65625 87.714216,151.1422 87.2497,147.34107 87.28125,143.375 l 0.03125,-0.84375 c 0.417917,-16.87382 9.195665,-29.84979 19.59375,-28.96875 10.39809,0.88104 18.48041,15.28242 18.0625,32.15625 -0.0786,3.17512 -0.43674,6.22955 -1.0625,9.0625 18.82058,3.68164 39.01873,13.03179 65,28.03125 -5.123,-9.4318 -9.69572,-17.93388 -14.0625,-26.03125 -6.87839,-5.33121 -14.05289,-12.2698 -28.6875,-19.78125 10.05899,2.61375 17.2611,5.62932 22.875,9 C 124.63297,63.338161 121.03766,52.354109 105.8125,16.625 z"
+ transform="matrix(1.1433333,0,0,1.1433333,22.920168,121.64318)"
+ id="path2518"
+ style="fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:none" />
+ <g
+ id="text2634"
+ style="font-size:8.44138241px;font-style:normal;font-weight:normal;fill:#ffffff;fill-opacity:1;stroke:none;font-family:DejaVu Sans Mono">
+ <path
+ d="m 685.46692,263.83624 0,-5.32944 -1.99082,0 0,-0.71307 4.7895,0 0,0.71307 -1.99906,0 0,5.32944 -0.79962,0"
+ id="path7858"
+ style="fill:#ffffff" />
+ <path
+ d="m 689.0982,263.83624 0,-6.04251 1.20355,0 1.43026,4.2784 c 0.13189,0.39843 0.22806,0.69658 0.28852,0.89442 0.0687,-0.21983 0.17586,-0.5427 0.3215,-0.96862 l 1.44674,-4.2042 1.07578,0 0,6.04251 -0.77077,0 0,-5.05741 -1.75587,5.05741 -0.72131,0 -1.74763,-5.14396 0,5.14396 -0.77077,0"
+ id="path7860"
+ style="fill:#ffffff" />
+ </g>
+ <g
+ id="text2638"
+ style="font-size:8.25130367px;font-style:normal;font-weight:normal;fill:#ffffff;fill-opacity:1;stroke:none;font-family:DejaVu Sans Mono">
+ <path
+ d="m 239.84053,313.69965 0,-5.20945 -1.94598,0 0,-0.697 4.68164,0 0,0.697 -1.95404,0 0,5.20945 -0.78162,0"
+ id="path7853"
+ style="fill:#ffffff" />
+ <path
+ d="m 243.39004,313.69965 0,-5.90645 1.17646,0 1.39805,4.18205 c 0.12892,0.38947 0.22293,0.6809 0.28202,0.87429 0.0671,-0.21488 0.1719,-0.53048 0.31426,-0.94681 l 1.41417,-4.10953 1.05155,0 0,5.90645 -0.75341,0 0,-4.94353 -1.71634,4.94353 -0.70506,0 -1.70828,-5.02814 0,5.02814 -0.75342,0"
+ id="path7855"
+ style="fill:#ffffff" />
+ </g>
+ </g>
+ </g>
+</svg>
diff --git a/media/new.png b/media/new.png
new file mode 100644
index 00000000..6a9bf037
--- /dev/null
+++ b/media/new.png
Binary files differ
diff --git a/media/nosort.gif b/media/nosort.gif
new file mode 100644
index 00000000..fac668fc
--- /dev/null
+++ b/media/nosort.gif
Binary files differ
diff --git a/media/rss.png b/media/rss.png
new file mode 100644
index 00000000..c9164592
--- /dev/null
+++ b/media/rss.png
Binary files differ
diff --git a/media/sevenl_button.png b/media/sevenl_button.png
new file mode 100644
index 00000000..93adcdf0
--- /dev/null
+++ b/media/sevenl_button.png
Binary files differ
diff --git a/media/silhouette.png b/media/silhouette.png
new file mode 100644
index 00000000..afa87cd1
--- /dev/null
+++ b/media/silhouette.png
Binary files differ
diff --git a/media/vnet_button.png b/media/vnet_button.png
new file mode 100644
index 00000000..22cfa9e4
--- /dev/null
+++ b/media/vnet_button.png
Binary files differ
diff --git a/repos/community.db b/repos/community.db
new file mode 100644
index 00000000..bbf8f6b8
--- /dev/null
+++ b/repos/community.db
@@ -0,0 +1,130 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" dir="ltr" lang="es-ES">
+
+ <head profile="http://gmpg.org/xfn/11">
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
+
+ <title>Parabola GNU/Linux</title>
+
+ <link rel="stylesheet" href="http://www.parabolagnulinux.org/wp-content/themes/parabola/style.css" type="text/css" media="screen" />
+ <link rel="pingback" href="http://www.parabolagnulinux.org/xmlrpc.php" />
+
+ <style type="text/css" media="screen">
+
+ </style>
+
+
+ <link rel="alternate" type="application/rss+xml" title="Parabola GNU/Linux &raquo; Feed" href="http://www.parabolagnulinux.org/?feed=rss2" />
+ <link rel="alternate" type="application/rss+xml" title="Parabola GNU/Linux &raquo; RSS de los comentarios" href="http://www.parabolagnulinux.org/?feed=comments-rss2" />
+ <link rel="EditURI" type="application/rsd+xml" title="RSD" href="http://www.parabolagnulinux.org/xmlrpc.php?rsd" />
+ <link rel="wlwmanifest" type="application/wlwmanifest+xml" href="http://www.parabolagnulinux.org/wp-includes/wlwmanifest.xml" />
+ <link rel='index' title='Parabola GNU/Linux' href='http://www.parabolagnulinux.org' />
+ </head>
+ <body class="home blog">
+ <div id="page">
+
+
+ <div id="header" role="banner">
+ <div id="headerimg">
+ <h1 id="parabolatitle">
+ <a href="http://www.parabolagnulinux.org/">Parabola GNU/Linux</a>
+ </h1>
+ </div>
+ <div id="main_nav">
+ <ul>
+ <li class="page_item">
+ <a href="http://www.parabolagnulinux.org/?page_id=3" title="About">About</a>
+ </li>
+ <li class="page_item">
+ <a href="http://wiki.parabolagnulinux.org/" title="Wiki">Wiki</a>
+ </li>
+ <li class="current_page_item page_item">
+ <a href="http://repo.parabolagnulinux.org/" title="Repository">Repository</a>
+ <li>
+
+ </li>
+ <li class="page_item">
+ <a href="http://www.parabolagnulinux.org/" title="Home">Home</a>
+ </li>
+ </ul>
+</div>
+
+<div id="submenu">
+ <ul>
+ <li class='page_item'><a href='http://repo.parabolagnulinux.org/core/os'>core</a></li><li class='page_item'><a href='http://repo.parabolagnulinux.org/extra/os'>extra</a></li><li class='page_item'><a href='http://repo.parabolagnulinux.org/community/os'>community</a></li> </ul>
+</div>
+
+<div class="clr">
+</div>
+ </div>
+ <hr/>
+ <div>
+ <!-- Sidebar -->
+ <div id="sidebar" role="complementary">
+ <ul>
+ <li>
+ <img align="left" style="margin-right: 10px;" src="http://www.parabolagnulinux.org/files/multilingual.jpg"></img>
+ <div>
+ <p>
+ <span lang="en">
+ <b>This web is available in: </b><a href="http://www.parabolagnulinux.org/lang/en">
+ English</a> and <a href="http://www.parabolagnulinux.org/lang/es">Spanish</a>.
+ </span>
+ <span lang="es">
+ <b>Esta pagina esta disponible en: </b><a href="http://www.parabolagnulinux.org/lang/en">
+ ingles</a> y <a href="http://www.parabolagnulinux.org/lang/es">español</a>.
+ </li>
+
+ <li id="text-3" class="widget widget_text"> <div class="textwidget"><p>Parabola GNU/Linux is a non-profit project that depends on supporters that see in this distribution an important contribution to free software.</p>
+ <center>
+ <p>
+<img src="http://parabolagnulinux.org/files/venenux.png" />
+ </p>
+ <p><a href="http://www.venenux.org/">Venenux GNU/Linux</a> host our main software repository and web site.</p>
+
+ <p>
+<img src="http://parabolagnulinux.org/files/gnuchile.jpg" />
+ </p>
+ <p><a href="http://www.gnuchile.cl/">GNUCHILE</a> provides our domain.</p>
+ </center></div>
+ </li> </ul>
+ </div>
+
+ <!-- Content -->
+ <div id="content" class="narrowcolumn" role="main">
+ <!-- "About" box -->
+ <div class="box">
+ <table style="width: 100%;" class="repo_menu">
+ <tr>
+ <td style='width:33.3%; text-align: center;'><a href='http://repo.parabolagnulinux.org/core/os'><div>core</div></a></td><td style='width:33.3%; text-align: center;'><a href='http://repo.parabolagnulinux.org/extra/os'><div>extra</div></a></td><td style='width:33.3%; text-align: center;'><a href='http://repo.parabolagnulinux.org/community/os'><div>community</div></a></td> </tr>
+ </table>
+ </div>
+
+ <div style="position: relative; bottom: 5px; text-align: center; width: 100%;">
+ <div id="footer" role="contentinfo">
+ <p>
+ Parabola GNU/Linux is proudly powered by
+ <a href="http://wordpress.org/">WordPress</a>
+ <br /><a href="http://www.parabolagnulinux.org/?feed=rss2">Entries (RSS)</a>
+ and <a href="http://www.parabolagnulinux.org/?feed=comments-rss2">Comments (RSS)</a>.
+ </p>
+ </div>
+
+ <div class="license_block">
+ <div class="wp_license">
+ <a rel="license" href="http://creativecommons.org/licenses/by-sa/3.0/"><img src="http://i.creativecommons.org/l/by-sa/3.0/88x31.png" alt="Creative Commons Attribution-ShareAlike 3.0 Unported"/></a>
+ <br/>
+ This work by <a xmlns:cc="http://creativecommons.org/ns#" href="http://www.parabolagnulinux.org" property="cc:attributionName" rel="cc:attributionURL">Parabola Team</a> is licensed under a <a rel="license" href="http://creativecommons.org/licenses/by-sa/3.0/">Creative Commons Attribution-ShareAlike 3.0 Unported</a>.
+ </div>
+ </div>
+ </div>
+
+ </div>
+ </div>
+ <!-- Footer -->
+ </div>
+
+ <hr />
+
+ </body>
+</html>
diff --git a/repos/community.db.1 b/repos/community.db.1
new file mode 100644
index 00000000..bbf8f6b8
--- /dev/null
+++ b/repos/community.db.1
@@ -0,0 +1,130 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" dir="ltr" lang="es-ES">
+
+ <head profile="http://gmpg.org/xfn/11">
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
+
+ <title>Parabola GNU/Linux</title>
+
+ <link rel="stylesheet" href="http://www.parabolagnulinux.org/wp-content/themes/parabola/style.css" type="text/css" media="screen" />
+ <link rel="pingback" href="http://www.parabolagnulinux.org/xmlrpc.php" />
+
+ <style type="text/css" media="screen">
+
+ </style>
+
+
+ <link rel="alternate" type="application/rss+xml" title="Parabola GNU/Linux &raquo; Feed" href="http://www.parabolagnulinux.org/?feed=rss2" />
+ <link rel="alternate" type="application/rss+xml" title="Parabola GNU/Linux &raquo; RSS de los comentarios" href="http://www.parabolagnulinux.org/?feed=comments-rss2" />
+ <link rel="EditURI" type="application/rsd+xml" title="RSD" href="http://www.parabolagnulinux.org/xmlrpc.php?rsd" />
+ <link rel="wlwmanifest" type="application/wlwmanifest+xml" href="http://www.parabolagnulinux.org/wp-includes/wlwmanifest.xml" />
+ <link rel='index' title='Parabola GNU/Linux' href='http://www.parabolagnulinux.org' />
+ </head>
+ <body class="home blog">
+ <div id="page">
+
+
+ <div id="header" role="banner">
+ <div id="headerimg">
+ <h1 id="parabolatitle">
+ <a href="http://www.parabolagnulinux.org/">Parabola GNU/Linux</a>
+ </h1>
+ </div>
+ <div id="main_nav">
+ <ul>
+ <li class="page_item">
+ <a href="http://www.parabolagnulinux.org/?page_id=3" title="About">About</a>
+ </li>
+ <li class="page_item">
+ <a href="http://wiki.parabolagnulinux.org/" title="Wiki">Wiki</a>
+ </li>
+ <li class="current_page_item page_item">
+ <a href="http://repo.parabolagnulinux.org/" title="Repository">Repository</a>
+ <li>
+
+ </li>
+ <li class="page_item">
+ <a href="http://www.parabolagnulinux.org/" title="Home">Home</a>
+ </li>
+ </ul>
+</div>
+
+<div id="submenu">
+ <ul>
+ <li class='page_item'><a href='http://repo.parabolagnulinux.org/core/os'>core</a></li><li class='page_item'><a href='http://repo.parabolagnulinux.org/extra/os'>extra</a></li><li class='page_item'><a href='http://repo.parabolagnulinux.org/community/os'>community</a></li> </ul>
+</div>
+
+<div class="clr">
+</div>
+ </div>
+ <hr/>
+ <div>
+ <!-- Sidebar -->
+ <div id="sidebar" role="complementary">
+ <ul>
+ <li>
+ <img align="left" style="margin-right: 10px;" src="http://www.parabolagnulinux.org/files/multilingual.jpg"></img>
+ <div>
+ <p>
+ <span lang="en">
+ <b>This web is available in: </b><a href="http://www.parabolagnulinux.org/lang/en">
+ English</a> and <a href="http://www.parabolagnulinux.org/lang/es">Spanish</a>.
+ </span>
+ <span lang="es">
+ <b>Esta pagina esta disponible en: </b><a href="http://www.parabolagnulinux.org/lang/en">
+ ingles</a> y <a href="http://www.parabolagnulinux.org/lang/es">español</a>.
+ </li>
+
+ <li id="text-3" class="widget widget_text"> <div class="textwidget"><p>Parabola GNU/Linux is a non-profit project that depends on supporters that see in this distribution an important contribution to free software.</p>
+ <center>
+ <p>
+<img src="http://parabolagnulinux.org/files/venenux.png" />
+ </p>
+ <p><a href="http://www.venenux.org/">Venenux GNU/Linux</a> host our main software repository and web site.</p>
+
+ <p>
+<img src="http://parabolagnulinux.org/files/gnuchile.jpg" />
+ </p>
+ <p><a href="http://www.gnuchile.cl/">GNUCHILE</a> provides our domain.</p>
+ </center></div>
+ </li> </ul>
+ </div>
+
+ <!-- Content -->
+ <div id="content" class="narrowcolumn" role="main">
+ <!-- "About" box -->
+ <div class="box">
+ <table style="width: 100%;" class="repo_menu">
+ <tr>
+ <td style='width:33.3%; text-align: center;'><a href='http://repo.parabolagnulinux.org/core/os'><div>core</div></a></td><td style='width:33.3%; text-align: center;'><a href='http://repo.parabolagnulinux.org/extra/os'><div>extra</div></a></td><td style='width:33.3%; text-align: center;'><a href='http://repo.parabolagnulinux.org/community/os'><div>community</div></a></td> </tr>
+ </table>
+ </div>
+
+ <div style="position: relative; bottom: 5px; text-align: center; width: 100%;">
+ <div id="footer" role="contentinfo">
+ <p>
+ Parabola GNU/Linux is proudly powered by
+ <a href="http://wordpress.org/">WordPress</a>
+ <br /><a href="http://www.parabolagnulinux.org/?feed=rss2">Entries (RSS)</a>
+ and <a href="http://www.parabolagnulinux.org/?feed=comments-rss2">Comments (RSS)</a>.
+ </p>
+ </div>
+
+ <div class="license_block">
+ <div class="wp_license">
+ <a rel="license" href="http://creativecommons.org/licenses/by-sa/3.0/"><img src="http://i.creativecommons.org/l/by-sa/3.0/88x31.png" alt="Creative Commons Attribution-ShareAlike 3.0 Unported"/></a>
+ <br/>
+ This work by <a xmlns:cc="http://creativecommons.org/ns#" href="http://www.parabolagnulinux.org" property="cc:attributionName" rel="cc:attributionURL">Parabola Team</a> is licensed under a <a rel="license" href="http://creativecommons.org/licenses/by-sa/3.0/">Creative Commons Attribution-ShareAlike 3.0 Unported</a>.
+ </div>
+ </div>
+ </div>
+
+ </div>
+ </div>
+ <!-- Footer -->
+ </div>
+
+ <hr />
+
+ </body>
+</html>
diff --git a/repos/community.db.2 b/repos/community.db.2
new file mode 100644
index 00000000..a9308daa
--- /dev/null
+++ b/repos/community.db.2
Binary files differ
diff --git a/repos/core.db b/repos/core.db
new file mode 100644
index 00000000..6aa35b96
--- /dev/null
+++ b/repos/core.db
Binary files differ
diff --git a/repos/core.db.1 b/repos/core.db.1
new file mode 100644
index 00000000..bbf8f6b8
--- /dev/null
+++ b/repos/core.db.1
@@ -0,0 +1,130 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" dir="ltr" lang="es-ES">
+
+ <head profile="http://gmpg.org/xfn/11">
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
+
+ <title>Parabola GNU/Linux</title>
+
+ <link rel="stylesheet" href="http://www.parabolagnulinux.org/wp-content/themes/parabola/style.css" type="text/css" media="screen" />
+ <link rel="pingback" href="http://www.parabolagnulinux.org/xmlrpc.php" />
+
+ <style type="text/css" media="screen">
+
+ </style>
+
+
+ <link rel="alternate" type="application/rss+xml" title="Parabola GNU/Linux &raquo; Feed" href="http://www.parabolagnulinux.org/?feed=rss2" />
+ <link rel="alternate" type="application/rss+xml" title="Parabola GNU/Linux &raquo; RSS de los comentarios" href="http://www.parabolagnulinux.org/?feed=comments-rss2" />
+ <link rel="EditURI" type="application/rsd+xml" title="RSD" href="http://www.parabolagnulinux.org/xmlrpc.php?rsd" />
+ <link rel="wlwmanifest" type="application/wlwmanifest+xml" href="http://www.parabolagnulinux.org/wp-includes/wlwmanifest.xml" />
+ <link rel='index' title='Parabola GNU/Linux' href='http://www.parabolagnulinux.org' />
+ </head>
+ <body class="home blog">
+ <div id="page">
+
+
+ <div id="header" role="banner">
+ <div id="headerimg">
+ <h1 id="parabolatitle">
+ <a href="http://www.parabolagnulinux.org/">Parabola GNU/Linux</a>
+ </h1>
+ </div>
+ <div id="main_nav">
+ <ul>
+ <li class="page_item">
+ <a href="http://www.parabolagnulinux.org/?page_id=3" title="About">About</a>
+ </li>
+ <li class="page_item">
+ <a href="http://wiki.parabolagnulinux.org/" title="Wiki">Wiki</a>
+ </li>
+ <li class="current_page_item page_item">
+ <a href="http://repo.parabolagnulinux.org/" title="Repository">Repository</a>
+ <li>
+
+ </li>
+ <li class="page_item">
+ <a href="http://www.parabolagnulinux.org/" title="Home">Home</a>
+ </li>
+ </ul>
+</div>
+
+<div id="submenu">
+ <ul>
+ <li class='page_item'><a href='http://repo.parabolagnulinux.org/core/os'>core</a></li><li class='page_item'><a href='http://repo.parabolagnulinux.org/extra/os'>extra</a></li><li class='page_item'><a href='http://repo.parabolagnulinux.org/community/os'>community</a></li> </ul>
+</div>
+
+<div class="clr">
+</div>
+ </div>
+ <hr/>
+ <div>
+ <!-- Sidebar -->
+ <div id="sidebar" role="complementary">
+ <ul>
+ <li>
+ <img align="left" style="margin-right: 10px;" src="http://www.parabolagnulinux.org/files/multilingual.jpg"></img>
+ <div>
+ <p>
+ <span lang="en">
+ <b>This web is available in: </b><a href="http://www.parabolagnulinux.org/lang/en">
+ English</a> and <a href="http://www.parabolagnulinux.org/lang/es">Spanish</a>.
+ </span>
+ <span lang="es">
+ <b>Esta pagina esta disponible en: </b><a href="http://www.parabolagnulinux.org/lang/en">
+ ingles</a> y <a href="http://www.parabolagnulinux.org/lang/es">español</a>.
+ </li>
+
+ <li id="text-3" class="widget widget_text"> <div class="textwidget"><p>Parabola GNU/Linux is a non-profit project that depends on supporters that see in this distribution an important contribution to free software.</p>
+ <center>
+ <p>
+<img src="http://parabolagnulinux.org/files/venenux.png" />
+ </p>
+ <p><a href="http://www.venenux.org/">Venenux GNU/Linux</a> host our main software repository and web site.</p>
+
+ <p>
+<img src="http://parabolagnulinux.org/files/gnuchile.jpg" />
+ </p>
+ <p><a href="http://www.gnuchile.cl/">GNUCHILE</a> provides our domain.</p>
+ </center></div>
+ </li> </ul>
+ </div>
+
+ <!-- Content -->
+ <div id="content" class="narrowcolumn" role="main">
+ <!-- "About" box -->
+ <div class="box">
+ <table style="width: 100%;" class="repo_menu">
+ <tr>
+ <td style='width:33.3%; text-align: center;'><a href='http://repo.parabolagnulinux.org/core/os'><div>core</div></a></td><td style='width:33.3%; text-align: center;'><a href='http://repo.parabolagnulinux.org/extra/os'><div>extra</div></a></td><td style='width:33.3%; text-align: center;'><a href='http://repo.parabolagnulinux.org/community/os'><div>community</div></a></td> </tr>
+ </table>
+ </div>
+
+ <div style="position: relative; bottom: 5px; text-align: center; width: 100%;">
+ <div id="footer" role="contentinfo">
+ <p>
+ Parabola GNU/Linux is proudly powered by
+ <a href="http://wordpress.org/">WordPress</a>
+ <br /><a href="http://www.parabolagnulinux.org/?feed=rss2">Entries (RSS)</a>
+ and <a href="http://www.parabolagnulinux.org/?feed=comments-rss2">Comments (RSS)</a>.
+ </p>
+ </div>
+
+ <div class="license_block">
+ <div class="wp_license">
+ <a rel="license" href="http://creativecommons.org/licenses/by-sa/3.0/"><img src="http://i.creativecommons.org/l/by-sa/3.0/88x31.png" alt="Creative Commons Attribution-ShareAlike 3.0 Unported"/></a>
+ <br/>
+ This work by <a xmlns:cc="http://creativecommons.org/ns#" href="http://www.parabolagnulinux.org" property="cc:attributionName" rel="cc:attributionURL">Parabola Team</a> is licensed under a <a rel="license" href="http://creativecommons.org/licenses/by-sa/3.0/">Creative Commons Attribution-ShareAlike 3.0 Unported</a>.
+ </div>
+ </div>
+ </div>
+
+ </div>
+ </div>
+ <!-- Footer -->
+ </div>
+
+ <hr />
+
+ </body>
+</html>
diff --git a/repos/core.db.2 b/repos/core.db.2
new file mode 100644
index 00000000..bbf8f6b8
--- /dev/null
+++ b/repos/core.db.2
@@ -0,0 +1,130 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" dir="ltr" lang="es-ES">
+
+ <head profile="http://gmpg.org/xfn/11">
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
+
+ <title>Parabola GNU/Linux</title>
+
+ <link rel="stylesheet" href="http://www.parabolagnulinux.org/wp-content/themes/parabola/style.css" type="text/css" media="screen" />
+ <link rel="pingback" href="http://www.parabolagnulinux.org/xmlrpc.php" />
+
+ <style type="text/css" media="screen">
+
+ </style>
+
+
+ <link rel="alternate" type="application/rss+xml" title="Parabola GNU/Linux &raquo; Feed" href="http://www.parabolagnulinux.org/?feed=rss2" />
+ <link rel="alternate" type="application/rss+xml" title="Parabola GNU/Linux &raquo; RSS de los comentarios" href="http://www.parabolagnulinux.org/?feed=comments-rss2" />
+ <link rel="EditURI" type="application/rsd+xml" title="RSD" href="http://www.parabolagnulinux.org/xmlrpc.php?rsd" />
+ <link rel="wlwmanifest" type="application/wlwmanifest+xml" href="http://www.parabolagnulinux.org/wp-includes/wlwmanifest.xml" />
+ <link rel='index' title='Parabola GNU/Linux' href='http://www.parabolagnulinux.org' />
+ </head>
+ <body class="home blog">
+ <div id="page">
+
+
+ <div id="header" role="banner">
+ <div id="headerimg">
+ <h1 id="parabolatitle">
+ <a href="http://www.parabolagnulinux.org/">Parabola GNU/Linux</a>
+ </h1>
+ </div>
+ <div id="main_nav">
+ <ul>
+ <li class="page_item">
+ <a href="http://www.parabolagnulinux.org/?page_id=3" title="About">About</a>
+ </li>
+ <li class="page_item">
+ <a href="http://wiki.parabolagnulinux.org/" title="Wiki">Wiki</a>
+ </li>
+ <li class="current_page_item page_item">
+ <a href="http://repo.parabolagnulinux.org/" title="Repository">Repository</a>
+ <li>
+
+ </li>
+ <li class="page_item">
+ <a href="http://www.parabolagnulinux.org/" title="Home">Home</a>
+ </li>
+ </ul>
+</div>
+
+<div id="submenu">
+ <ul>
+ <li class='page_item'><a href='http://repo.parabolagnulinux.org/core/os'>core</a></li><li class='page_item'><a href='http://repo.parabolagnulinux.org/extra/os'>extra</a></li><li class='page_item'><a href='http://repo.parabolagnulinux.org/community/os'>community</a></li> </ul>
+</div>
+
+<div class="clr">
+</div>
+ </div>
+ <hr/>
+ <div>
+ <!-- Sidebar -->
+ <div id="sidebar" role="complementary">
+ <ul>
+ <li>
+ <img align="left" style="margin-right: 10px;" src="http://www.parabolagnulinux.org/files/multilingual.jpg"></img>
+ <div>
+ <p>
+ <span lang="en">
+ <b>This web is available in: </b><a href="http://www.parabolagnulinux.org/lang/en">
+ English</a> and <a href="http://www.parabolagnulinux.org/lang/es">Spanish</a>.
+ </span>
+ <span lang="es">
+ <b>Esta pagina esta disponible en: </b><a href="http://www.parabolagnulinux.org/lang/en">
+ ingles</a> y <a href="http://www.parabolagnulinux.org/lang/es">español</a>.
+ </li>
+
+ <li id="text-3" class="widget widget_text"> <div class="textwidget"><p>Parabola GNU/Linux is a non-profit project that depends on supporters that see in this distribution an important contribution to free software.</p>
+ <center>
+ <p>
+<img src="http://parabolagnulinux.org/files/venenux.png" />
+ </p>
+ <p><a href="http://www.venenux.org/">Venenux GNU/Linux</a> host our main software repository and web site.</p>
+
+ <p>
+<img src="http://parabolagnulinux.org/files/gnuchile.jpg" />
+ </p>
+ <p><a href="http://www.gnuchile.cl/">GNUCHILE</a> provides our domain.</p>
+ </center></div>
+ </li> </ul>
+ </div>
+
+ <!-- Content -->
+ <div id="content" class="narrowcolumn" role="main">
+ <!-- "About" box -->
+ <div class="box">
+ <table style="width: 100%;" class="repo_menu">
+ <tr>
+ <td style='width:33.3%; text-align: center;'><a href='http://repo.parabolagnulinux.org/core/os'><div>core</div></a></td><td style='width:33.3%; text-align: center;'><a href='http://repo.parabolagnulinux.org/extra/os'><div>extra</div></a></td><td style='width:33.3%; text-align: center;'><a href='http://repo.parabolagnulinux.org/community/os'><div>community</div></a></td> </tr>
+ </table>
+ </div>
+
+ <div style="position: relative; bottom: 5px; text-align: center; width: 100%;">
+ <div id="footer" role="contentinfo">
+ <p>
+ Parabola GNU/Linux is proudly powered by
+ <a href="http://wordpress.org/">WordPress</a>
+ <br /><a href="http://www.parabolagnulinux.org/?feed=rss2">Entries (RSS)</a>
+ and <a href="http://www.parabolagnulinux.org/?feed=comments-rss2">Comments (RSS)</a>.
+ </p>
+ </div>
+
+ <div class="license_block">
+ <div class="wp_license">
+ <a rel="license" href="http://creativecommons.org/licenses/by-sa/3.0/"><img src="http://i.creativecommons.org/l/by-sa/3.0/88x31.png" alt="Creative Commons Attribution-ShareAlike 3.0 Unported"/></a>
+ <br/>
+ This work by <a xmlns:cc="http://creativecommons.org/ns#" href="http://www.parabolagnulinux.org" property="cc:attributionName" rel="cc:attributionURL">Parabola Team</a> is licensed under a <a rel="license" href="http://creativecommons.org/licenses/by-sa/3.0/">Creative Commons Attribution-ShareAlike 3.0 Unported</a>.
+ </div>
+ </div>
+ </div>
+
+ </div>
+ </div>
+ <!-- Footer -->
+ </div>
+
+ <hr />
+
+ </body>
+</html>
diff --git a/repos/extra.db b/repos/extra.db
new file mode 100644
index 00000000..bbf8f6b8
--- /dev/null
+++ b/repos/extra.db
@@ -0,0 +1,130 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" dir="ltr" lang="es-ES">
+
+ <head profile="http://gmpg.org/xfn/11">
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
+
+ <title>Parabola GNU/Linux</title>
+
+ <link rel="stylesheet" href="http://www.parabolagnulinux.org/wp-content/themes/parabola/style.css" type="text/css" media="screen" />
+ <link rel="pingback" href="http://www.parabolagnulinux.org/xmlrpc.php" />
+
+ <style type="text/css" media="screen">
+
+ </style>
+
+
+ <link rel="alternate" type="application/rss+xml" title="Parabola GNU/Linux &raquo; Feed" href="http://www.parabolagnulinux.org/?feed=rss2" />
+ <link rel="alternate" type="application/rss+xml" title="Parabola GNU/Linux &raquo; RSS de los comentarios" href="http://www.parabolagnulinux.org/?feed=comments-rss2" />
+ <link rel="EditURI" type="application/rsd+xml" title="RSD" href="http://www.parabolagnulinux.org/xmlrpc.php?rsd" />
+ <link rel="wlwmanifest" type="application/wlwmanifest+xml" href="http://www.parabolagnulinux.org/wp-includes/wlwmanifest.xml" />
+ <link rel='index' title='Parabola GNU/Linux' href='http://www.parabolagnulinux.org' />
+ </head>
+ <body class="home blog">
+ <div id="page">
+
+
+ <div id="header" role="banner">
+ <div id="headerimg">
+ <h1 id="parabolatitle">
+ <a href="http://www.parabolagnulinux.org/">Parabola GNU/Linux</a>
+ </h1>
+ </div>
+ <div id="main_nav">
+ <ul>
+ <li class="page_item">
+ <a href="http://www.parabolagnulinux.org/?page_id=3" title="About">About</a>
+ </li>
+ <li class="page_item">
+ <a href="http://wiki.parabolagnulinux.org/" title="Wiki">Wiki</a>
+ </li>
+ <li class="current_page_item page_item">
+ <a href="http://repo.parabolagnulinux.org/" title="Repository">Repository</a>
+ <li>
+
+ </li>
+ <li class="page_item">
+ <a href="http://www.parabolagnulinux.org/" title="Home">Home</a>
+ </li>
+ </ul>
+</div>
+
+<div id="submenu">
+ <ul>
+ <li class='page_item'><a href='http://repo.parabolagnulinux.org/core/os'>core</a></li><li class='page_item'><a href='http://repo.parabolagnulinux.org/extra/os'>extra</a></li><li class='page_item'><a href='http://repo.parabolagnulinux.org/community/os'>community</a></li> </ul>
+</div>
+
+<div class="clr">
+</div>
+ </div>
+ <hr/>
+ <div>
+ <!-- Sidebar -->
+ <div id="sidebar" role="complementary">
+ <ul>
+ <li>
+ <img align="left" style="margin-right: 10px;" src="http://www.parabolagnulinux.org/files/multilingual.jpg"></img>
+ <div>
+ <p>
+ <span lang="en">
+ <b>This web is available in: </b><a href="http://www.parabolagnulinux.org/lang/en">
+ English</a> and <a href="http://www.parabolagnulinux.org/lang/es">Spanish</a>.
+ </span>
+ <span lang="es">
+ <b>Esta pagina esta disponible en: </b><a href="http://www.parabolagnulinux.org/lang/en">
+ ingles</a> y <a href="http://www.parabolagnulinux.org/lang/es">español</a>.
+ </li>
+
+ <li id="text-3" class="widget widget_text"> <div class="textwidget"><p>Parabola GNU/Linux is a non-profit project that depends on supporters that see in this distribution an important contribution to free software.</p>
+ <center>
+ <p>
+<img src="http://parabolagnulinux.org/files/venenux.png" />
+ </p>
+ <p><a href="http://www.venenux.org/">Venenux GNU/Linux</a> host our main software repository and web site.</p>
+
+ <p>
+<img src="http://parabolagnulinux.org/files/gnuchile.jpg" />
+ </p>
+ <p><a href="http://www.gnuchile.cl/">GNUCHILE</a> provides our domain.</p>
+ </center></div>
+ </li> </ul>
+ </div>
+
+ <!-- Content -->
+ <div id="content" class="narrowcolumn" role="main">
+ <!-- "About" box -->
+ <div class="box">
+ <table style="width: 100%;" class="repo_menu">
+ <tr>
+ <td style='width:33.3%; text-align: center;'><a href='http://repo.parabolagnulinux.org/core/os'><div>core</div></a></td><td style='width:33.3%; text-align: center;'><a href='http://repo.parabolagnulinux.org/extra/os'><div>extra</div></a></td><td style='width:33.3%; text-align: center;'><a href='http://repo.parabolagnulinux.org/community/os'><div>community</div></a></td> </tr>
+ </table>
+ </div>
+
+ <div style="position: relative; bottom: 5px; text-align: center; width: 100%;">
+ <div id="footer" role="contentinfo">
+ <p>
+ Parabola GNU/Linux is proudly powered by
+ <a href="http://wordpress.org/">WordPress</a>
+ <br /><a href="http://www.parabolagnulinux.org/?feed=rss2">Entries (RSS)</a>
+ and <a href="http://www.parabolagnulinux.org/?feed=comments-rss2">Comments (RSS)</a>.
+ </p>
+ </div>
+
+ <div class="license_block">
+ <div class="wp_license">
+ <a rel="license" href="http://creativecommons.org/licenses/by-sa/3.0/"><img src="http://i.creativecommons.org/l/by-sa/3.0/88x31.png" alt="Creative Commons Attribution-ShareAlike 3.0 Unported"/></a>
+ <br/>
+ This work by <a xmlns:cc="http://creativecommons.org/ns#" href="http://www.parabolagnulinux.org" property="cc:attributionName" rel="cc:attributionURL">Parabola Team</a> is licensed under a <a rel="license" href="http://creativecommons.org/licenses/by-sa/3.0/">Creative Commons Attribution-ShareAlike 3.0 Unported</a>.
+ </div>
+ </div>
+ </div>
+
+ </div>
+ </div>
+ <!-- Footer -->
+ </div>
+
+ <hr />
+
+ </body>
+</html>
diff --git a/repos/extra.db.1 b/repos/extra.db.1
new file mode 100644
index 00000000..6fa6be72
--- /dev/null
+++ b/repos/extra.db.1
Binary files differ
diff --git a/repos/extra.db.2 b/repos/extra.db.2
new file mode 100644
index 00000000..bbf8f6b8
--- /dev/null
+++ b/repos/extra.db.2
@@ -0,0 +1,130 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" dir="ltr" lang="es-ES">
+
+ <head profile="http://gmpg.org/xfn/11">
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
+
+ <title>Parabola GNU/Linux</title>
+
+ <link rel="stylesheet" href="http://www.parabolagnulinux.org/wp-content/themes/parabola/style.css" type="text/css" media="screen" />
+ <link rel="pingback" href="http://www.parabolagnulinux.org/xmlrpc.php" />
+
+ <style type="text/css" media="screen">
+
+ </style>
+
+
+ <link rel="alternate" type="application/rss+xml" title="Parabola GNU/Linux &raquo; Feed" href="http://www.parabolagnulinux.org/?feed=rss2" />
+ <link rel="alternate" type="application/rss+xml" title="Parabola GNU/Linux &raquo; RSS de los comentarios" href="http://www.parabolagnulinux.org/?feed=comments-rss2" />
+ <link rel="EditURI" type="application/rsd+xml" title="RSD" href="http://www.parabolagnulinux.org/xmlrpc.php?rsd" />
+ <link rel="wlwmanifest" type="application/wlwmanifest+xml" href="http://www.parabolagnulinux.org/wp-includes/wlwmanifest.xml" />
+ <link rel='index' title='Parabola GNU/Linux' href='http://www.parabolagnulinux.org' />
+ </head>
+ <body class="home blog">
+ <div id="page">
+
+
+ <div id="header" role="banner">
+ <div id="headerimg">
+ <h1 id="parabolatitle">
+ <a href="http://www.parabolagnulinux.org/">Parabola GNU/Linux</a>
+ </h1>
+ </div>
+ <div id="main_nav">
+ <ul>
+ <li class="page_item">
+ <a href="http://www.parabolagnulinux.org/?page_id=3" title="About">About</a>
+ </li>
+ <li class="page_item">
+ <a href="http://wiki.parabolagnulinux.org/" title="Wiki">Wiki</a>
+ </li>
+ <li class="current_page_item page_item">
+ <a href="http://repo.parabolagnulinux.org/" title="Repository">Repository</a>
+ <li>
+
+ </li>
+ <li class="page_item">
+ <a href="http://www.parabolagnulinux.org/" title="Home">Home</a>
+ </li>
+ </ul>
+</div>
+
+<div id="submenu">
+ <ul>
+ <li class='page_item'><a href='http://repo.parabolagnulinux.org/core/os'>core</a></li><li class='page_item'><a href='http://repo.parabolagnulinux.org/extra/os'>extra</a></li><li class='page_item'><a href='http://repo.parabolagnulinux.org/community/os'>community</a></li> </ul>
+</div>
+
+<div class="clr">
+</div>
+ </div>
+ <hr/>
+ <div>
+ <!-- Sidebar -->
+ <div id="sidebar" role="complementary">
+ <ul>
+ <li>
+ <img align="left" style="margin-right: 10px;" src="http://www.parabolagnulinux.org/files/multilingual.jpg"></img>
+ <div>
+ <p>
+ <span lang="en">
+ <b>This web is available in: </b><a href="http://www.parabolagnulinux.org/lang/en">
+ English</a> and <a href="http://www.parabolagnulinux.org/lang/es">Spanish</a>.
+ </span>
+ <span lang="es">
+ <b>Esta pagina esta disponible en: </b><a href="http://www.parabolagnulinux.org/lang/en">
+ ingles</a> y <a href="http://www.parabolagnulinux.org/lang/es">español</a>.
+ </li>
+
+ <li id="text-3" class="widget widget_text"> <div class="textwidget"><p>Parabola GNU/Linux is a non-profit project that depends on supporters that see in this distribution an important contribution to free software.</p>
+ <center>
+ <p>
+<img src="http://parabolagnulinux.org/files/venenux.png" />
+ </p>
+ <p><a href="http://www.venenux.org/">Venenux GNU/Linux</a> host our main software repository and web site.</p>
+
+ <p>
+<img src="http://parabolagnulinux.org/files/gnuchile.jpg" />
+ </p>
+ <p><a href="http://www.gnuchile.cl/">GNUCHILE</a> provides our domain.</p>
+ </center></div>
+ </li> </ul>
+ </div>
+
+ <!-- Content -->
+ <div id="content" class="narrowcolumn" role="main">
+ <!-- "About" box -->
+ <div class="box">
+ <table style="width: 100%;" class="repo_menu">
+ <tr>
+ <td style='width:33.3%; text-align: center;'><a href='http://repo.parabolagnulinux.org/core/os'><div>core</div></a></td><td style='width:33.3%; text-align: center;'><a href='http://repo.parabolagnulinux.org/extra/os'><div>extra</div></a></td><td style='width:33.3%; text-align: center;'><a href='http://repo.parabolagnulinux.org/community/os'><div>community</div></a></td> </tr>
+ </table>
+ </div>
+
+ <div style="position: relative; bottom: 5px; text-align: center; width: 100%;">
+ <div id="footer" role="contentinfo">
+ <p>
+ Parabola GNU/Linux is proudly powered by
+ <a href="http://wordpress.org/">WordPress</a>
+ <br /><a href="http://www.parabolagnulinux.org/?feed=rss2">Entries (RSS)</a>
+ and <a href="http://www.parabolagnulinux.org/?feed=comments-rss2">Comments (RSS)</a>.
+ </p>
+ </div>
+
+ <div class="license_block">
+ <div class="wp_license">
+ <a rel="license" href="http://creativecommons.org/licenses/by-sa/3.0/"><img src="http://i.creativecommons.org/l/by-sa/3.0/88x31.png" alt="Creative Commons Attribution-ShareAlike 3.0 Unported"/></a>
+ <br/>
+ This work by <a xmlns:cc="http://creativecommons.org/ns#" href="http://www.parabolagnulinux.org" property="cc:attributionName" rel="cc:attributionURL">Parabola Team</a> is licensed under a <a rel="license" href="http://creativecommons.org/licenses/by-sa/3.0/">Creative Commons Attribution-ShareAlike 3.0 Unported</a>.
+ </div>
+ </div>
+ </div>
+
+ </div>
+ </div>
+ <!-- Footer -->
+ </div>
+
+ <hr />
+
+ </body>
+</html>
diff --git a/repos/i686/core.db.tar.gz b/repos/i686/core.db.tar.gz
new file mode 100644
index 00000000..eebd29d7
--- /dev/null
+++ b/repos/i686/core.db.tar.gz
Binary files differ
diff --git a/repos/x86_64/core.db.tar.gz b/repos/x86_64/core.db.tar.gz
new file mode 100644
index 00000000..cd1710d3
--- /dev/null
+++ b/repos/x86_64/core.db.tar.gz
Binary files differ