merge
author cavaliet
Fri, 08 Apr 2011 16:33:01 +0200
changeset 56 125fc6df230d
parent 55 2e2989c3072c (current diff)
parent 53 0556c56ef5eb (diff)
child 57 795f01224eef
merge
src/ldt/ldt/ldt_utils/templates/ldt/ldt_utils/partial/projectslist.html
src/ldt/ldt/settings.py
src/ldt/ldt/utils/context_processors.py
virtualenv/res/src/django-piston-0.2.2-modified.tar.gz
virtualenv/res/src/facebook-python-sdk-322930c.tar.gz
virtualenv/res/src/oauth-1.0.1.tar.gz
virtualenv/res/src/pylinkedin-0.3.tar.bz2
virtualenv/res/src/python-openid-2.2.5.tar.gz
virtualenv/res/src/uswaretech-Django-Socialauth-1e22872-modified.tar.gz
virtualenv/setup/res/src/distribute-0.6.14.tar.gz
virtualenv/setup/res/src/mercurial-1.7.5.tar.gz
virtualenv/web/res/lib/patch.py
virtualenv/web/res/patch/piston.diff
virtualenv/web/res/src/Django-1.2.4.tar.gz
virtualenv/web/res/src/Imaging-1.1.7.tar.gz
virtualenv/web/res/src/JCC-2.6-py2.6-win32.egg
virtualenv/web/res/src/MySQL-python-1.2.3.tar.gz
virtualenv/web/res/src/PIL-1.1.7.win32-py2.6.exe
virtualenv/web/res/src/distribute-0.6.14.tar.gz
virtualenv/web/res/src/django-extensions-0.6.tar.gz
virtualenv/web/res/src/django-oauth-plus.tar.gz
virtualenv/web/res/src/django-piston-0.2.2-modified.tar.gz
virtualenv/web/res/src/django-registration.tar.gz
virtualenv/web/res/src/django-tagging-0.3.1.tar.gz
virtualenv/web/res/src/facebook-python-sdk-322930c.tar.gz
virtualenv/web/res/src/httplib2-0.6.0.tar.gz
virtualenv/web/res/src/jogging-0.2.2.tar.gz
virtualenv/web/res/src/lucene-3.0.2-py2.6-win32.egg
virtualenv/web/res/src/lxml-2.2.8-py2.6-win32.egg
virtualenv/web/res/src/lxml-2.2.8.tar.gz
virtualenv/web/res/src/oauth-1.0.1.tar.gz
virtualenv/web/res/src/psycopg2-2.0.10.win32-py2.6-pg8.3.7-release.zip
virtualenv/web/res/src/psycopg2-2.3.2.tar.gz
virtualenv/web/res/src/pylinkedin-0.3.tar.bz2
virtualenv/web/res/src/pylucene-3.0.3-1-src.tar.gz
virtualenv/web/res/src/python-oauth2-1.2.1-modified.tar.gz
virtualenv/web/res/src/python-openid-2.2.5.tar.gz
virtualenv/web/res/src/setuptools_hg-0.2.tar.gz
virtualenv/web/res/src/uswaretech-Django-Socialauth-1e22872-modified.tar.gz
web/ldtplatform/settings.py
web/ldtplatform/socialauthsettings.py
web/ldtplatform/templates/registration/login.html
web/ldtplatform/urls.py
web/ldtplatform/utils/context_processors.py
web/static/socialauth/css/openid.css
web/static/socialauth/css/socialauth.css
web/static/socialauth/images/Thumbs.db
web/static/socialauth/images/aol.gif
web/static/socialauth/images/blogger.ico
web/static/socialauth/images/claimid.ico
web/static/socialauth/images/facebook.gif
web/static/socialauth/images/flickr.ico
web/static/socialauth/images/google.gif
web/static/socialauth/images/linkedin.jpg
web/static/socialauth/images/livejournal.ico
web/static/socialauth/images/myopenid.ico
web/static/socialauth/images/openid-inputicon.gif
web/static/socialauth/images/openid.gif
web/static/socialauth/images/technorati.ico
web/static/socialauth/images/twitter.png
web/static/socialauth/images/verisign.ico
web/static/socialauth/images/vidoop.ico
web/static/socialauth/images/wordpress.ico
web/static/socialauth/images/yahoo.gif
web/static/socialauth/js/jquery-1.2.6.min.js
web/static/socialauth/js/openid-jquery.js
--- a/.hgignore	Wed Mar 23 17:34:36 2011 +0100
+++ b/.hgignore	Fri Apr 08 16:33:01 2011 +0200
@@ -32,4 +32,10 @@
 syntax: regexp
 ^src/ldt/build$
 syntax: regexp
-^src/ldt/ldt\.egg-info$
\ No newline at end of file
+^src/ldt/ldt\.egg-info$
+syntax: regexp
+^src/ldt/distribute-0\.6\.14\.tar\.gz$
+syntax: regexp
+^src/ldt/distribute-0\.6\.14-py2\.6\.egg$
+syntax: regexp
+^src/ldt/MANIFEST\.in$
\ No newline at end of file
--- a/.pydevproject	Wed Mar 23 17:34:36 2011 +0100
+++ b/.pydevproject	Fri Apr 08 16:33:01 2011 +0200
@@ -1,11 +1,11 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<?eclipse-pydev version="1.0"?>
-
-<pydev_project>
-<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">python_platform</pydev_property>
-<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.6</pydev_property>
-<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
-<path>/platform/src/ldt</path>
-<path>/platform/web</path>
-</pydev_pathproperty>
-</pydev_project>
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<?eclipse-pydev version="1.0"?>
+
+<pydev_project>
+<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">python_live</pydev_property>
+<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.6</pydev_property>
+<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
+<path>/platform/src/ldt</path>
+<path>/platform/web</path>
+</pydev_pathproperty>
+</pydev_project>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/ldt/distribute_setup.py	Fri Apr 08 16:33:01 2011 +0200
@@ -0,0 +1,485 @@
+#!python
+"""Bootstrap distribute installation
+
+If you want to use setuptools in your package's setup.py, just include this
+file in the same directory with it, and add this to the top of your setup.py::
+
+    from distribute_setup import use_setuptools
+    use_setuptools()
+
+If you want to require a specific version of setuptools, set a download
+mirror, or use an alternate download directory, you can do so by supplying
+the appropriate options to ``use_setuptools()``.
+
+This file can also be run as a script to install or upgrade setuptools.
+"""
+import os
+import sys
+import time
+import fnmatch
+import tempfile
+import tarfile
+from distutils import log
+
+try:
+    from site import USER_SITE
+except ImportError:
+    USER_SITE = None
+
+try:
+    import subprocess
+
+    def _python_cmd(*args):
+        args = (sys.executable,) + args
+        return subprocess.call(args) == 0
+
+except ImportError:
+    # will be used for python 2.3
+    def _python_cmd(*args):
+        args = (sys.executable,) + args
+        # quoting arguments if windows
+        if sys.platform == 'win32':
+            def quote(arg):
+                if ' ' in arg:
+                    return '"%s"' % arg
+                return arg
+            args = [quote(arg) for arg in args]
+        return os.spawnl(os.P_WAIT, sys.executable, *args) == 0
+
+DEFAULT_VERSION = "0.6.14"
+DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/"
+SETUPTOOLS_FAKED_VERSION = "0.6c11"
+
+SETUPTOOLS_PKG_INFO = """\
+Metadata-Version: 1.0
+Name: setuptools
+Version: %s
+Summary: xxxx
+Home-page: xxx
+Author: xxx
+Author-email: xxx
+License: xxx
+Description: xxx
+""" % SETUPTOOLS_FAKED_VERSION
+
+
+def _install(tarball):
+    # extracting the tarball
+    tmpdir = tempfile.mkdtemp()
+    log.warn('Extracting in %s', tmpdir)
+    old_wd = os.getcwd()
+    try:
+        os.chdir(tmpdir)
+        tar = tarfile.open(tarball)
+        _extractall(tar)
+        tar.close()
+
+        # going in the directory
+        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
+        os.chdir(subdir)
+        log.warn('Now working in %s', subdir)
+
+        # installing
+        log.warn('Installing Distribute')
+        if not _python_cmd('setup.py', 'install'):
+            log.warn('Something went wrong during the installation.')
+            log.warn('See the error message above.')
+    finally:
+        os.chdir(old_wd)
+
+
+def _build_egg(egg, tarball, to_dir):
+    # extracting the tarball
+    tmpdir = tempfile.mkdtemp()
+    log.warn('Extracting in %s', tmpdir)
+    old_wd = os.getcwd()
+    try:
+        os.chdir(tmpdir)
+        tar = tarfile.open(tarball)
+        _extractall(tar)
+        tar.close()
+
+        # going in the directory
+        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
+        os.chdir(subdir)
+        log.warn('Now working in %s', subdir)
+
+        # building an egg
+        log.warn('Building a Distribute egg in %s', to_dir)
+        _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
+
+    finally:
+        os.chdir(old_wd)
+    # returning the result
+    log.warn(egg)
+    if not os.path.exists(egg):
+        raise IOError('Could not build the egg.')
+
+
+def _do_download(version, download_base, to_dir, download_delay):
+    egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg'
+                       % (version, sys.version_info[0], sys.version_info[1]))
+    if not os.path.exists(egg):
+        tarball = download_setuptools(version, download_base,
+                                      to_dir, download_delay)
+        _build_egg(egg, tarball, to_dir)
+    sys.path.insert(0, egg)
+    import setuptools
+    setuptools.bootstrap_install_from = egg
+
+
+def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
+                   to_dir=os.curdir, download_delay=15, no_fake=True):
+    # making sure we use the absolute path
+    to_dir = os.path.abspath(to_dir)
+    was_imported = 'pkg_resources' in sys.modules or \
+        'setuptools' in sys.modules
+    try:
+        try:
+            import pkg_resources
+            if not hasattr(pkg_resources, '_distribute'):
+                if not no_fake:
+                    _fake_setuptools()
+                raise ImportError
+        except ImportError:
+            return _do_download(version, download_base, to_dir, download_delay)
+        try:
+            pkg_resources.require("distribute>="+version)
+            return
+        except pkg_resources.VersionConflict:
+            e = sys.exc_info()[1]
+            if was_imported:
+                sys.stderr.write(
+                "The required version of distribute (>=%s) is not available,\n"
+                "and can't be installed while this script is running. Please\n"
+                "install a more recent version first, using\n"
+                "'easy_install -U distribute'."
+                "\n\n(Currently using %r)\n" % (version, e.args[0]))
+                sys.exit(2)
+            else:
+                del pkg_resources, sys.modules['pkg_resources']    # reload ok
+                return _do_download(version, download_base, to_dir,
+                                    download_delay)
+        except pkg_resources.DistributionNotFound:
+            return _do_download(version, download_base, to_dir,
+                                download_delay)
+    finally:
+        if not no_fake:
+            _create_fake_setuptools_pkg_info(to_dir)
+
+def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
+                        to_dir=os.curdir, delay=15):
+    """Download distribute from a specified location and return its filename
+
+    `version` should be a valid distribute version number that is available
+    as an egg for download under the `download_base` URL (which should end
+    with a '/'). `to_dir` is the directory where the egg will be downloaded.
+    `delay` is the number of seconds to pause before an actual download
+    attempt.
+    """
+    # making sure we use the absolute path
+    to_dir = os.path.abspath(to_dir)
+    try:
+        from urllib.request import urlopen
+    except ImportError:
+        from urllib2 import urlopen
+    tgz_name = "distribute-%s.tar.gz" % version
+    url = download_base + tgz_name
+    saveto = os.path.join(to_dir, tgz_name)
+    src = dst = None
+    if not os.path.exists(saveto):  # Avoid repeated downloads
+        try:
+            log.warn("Downloading %s", url)
+            src = urlopen(url)
+            # Read/write all in one block, so we don't create a corrupt file
+            # if the download is interrupted.
+            data = src.read()
+            dst = open(saveto, "wb")
+            dst.write(data)
+        finally:
+            if src:
+                src.close()
+            if dst:
+                dst.close()
+    return os.path.realpath(saveto)
+
+def _no_sandbox(function):
+    def __no_sandbox(*args, **kw):
+        try:
+            from setuptools.sandbox import DirectorySandbox
+            if not hasattr(DirectorySandbox, '_old'):
+                def violation(*args):
+                    pass
+                DirectorySandbox._old = DirectorySandbox._violation
+                DirectorySandbox._violation = violation
+                patched = True
+            else:
+                patched = False
+        except ImportError:
+            patched = False
+
+        try:
+            return function(*args, **kw)
+        finally:
+            if patched:
+                DirectorySandbox._violation = DirectorySandbox._old
+                del DirectorySandbox._old
+
+    return __no_sandbox
+
+def _patch_file(path, content):
+    """Will backup the file then patch it"""
+    existing_content = open(path).read()
+    if existing_content == content:
+        # already patched
+        log.warn('Already patched.')
+        return False
+    log.warn('Patching...')
+    _rename_path(path)
+    f = open(path, 'w')
+    try:
+        f.write(content)
+    finally:
+        f.close()
+    return True
+
+_patch_file = _no_sandbox(_patch_file)
+
+def _same_content(path, content):
+    return open(path).read() == content
+
+def _rename_path(path):
+    new_name = path + '.OLD.%s' % time.time()
+    log.warn('Renaming %s into %s', path, new_name)
+    os.rename(path, new_name)
+    return new_name
+
+def _remove_flat_installation(placeholder):
+    if not os.path.isdir(placeholder):
+        log.warn('Unknown installation at %s', placeholder)
+        return False
+    found = False
+    for file in os.listdir(placeholder):
+        if fnmatch.fnmatch(file, 'setuptools*.egg-info'):
+            found = True
+            break
+    if not found:
+        log.warn('Could not locate setuptools*.egg-info')
+        return
+
+    log.warn('Removing elements out of the way...')
+    pkg_info = os.path.join(placeholder, file)
+    if os.path.isdir(pkg_info):
+        patched = _patch_egg_dir(pkg_info)
+    else:
+        patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO)
+
+    if not patched:
+        log.warn('%s already patched.', pkg_info)
+        return False
+    # now let's move the files out of the way
+    for element in ('setuptools', 'pkg_resources.py', 'site.py'):
+        element = os.path.join(placeholder, element)
+        if os.path.exists(element):
+            _rename_path(element)
+        else:
+            log.warn('Could not find the %s element of the '
+                     'Setuptools distribution', element)
+    return True
+
+_remove_flat_installation = _no_sandbox(_remove_flat_installation)
+
+def _after_install(dist):
+    log.warn('After install bootstrap.')
+    placeholder = dist.get_command_obj('install').install_purelib
+    _create_fake_setuptools_pkg_info(placeholder)
+
+def _create_fake_setuptools_pkg_info(placeholder):
+    if not placeholder or not os.path.exists(placeholder):
+        log.warn('Could not find the install location')
+        return
+    pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1])
+    setuptools_file = 'setuptools-%s-py%s.egg-info' % \
+            (SETUPTOOLS_FAKED_VERSION, pyver)
+    pkg_info = os.path.join(placeholder, setuptools_file)
+    if os.path.exists(pkg_info):
+        log.warn('%s already exists', pkg_info)
+        return
+
+    log.warn('Creating %s', pkg_info)
+    f = open(pkg_info, 'w')
+    try:
+        f.write(SETUPTOOLS_PKG_INFO)
+    finally:
+        f.close()
+
+    pth_file = os.path.join(placeholder, 'setuptools.pth')
+    log.warn('Creating %s', pth_file)
+    f = open(pth_file, 'w')
+    try:
+        f.write(os.path.join(os.curdir, setuptools_file))
+    finally:
+        f.close()
+
+_create_fake_setuptools_pkg_info = _no_sandbox(_create_fake_setuptools_pkg_info)
+
+def _patch_egg_dir(path):
+    # let's check if it's already patched
+    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
+    if os.path.exists(pkg_info):
+        if _same_content(pkg_info, SETUPTOOLS_PKG_INFO):
+            log.warn('%s already patched.', pkg_info)
+            return False
+    _rename_path(path)
+    os.mkdir(path)
+    os.mkdir(os.path.join(path, 'EGG-INFO'))
+    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
+    f = open(pkg_info, 'w')
+    try:
+        f.write(SETUPTOOLS_PKG_INFO)
+    finally:
+        f.close()
+    return True
+
+_patch_egg_dir = _no_sandbox(_patch_egg_dir)
+
+def _before_install():
+    log.warn('Before install bootstrap.')
+    _fake_setuptools()
+
+
+def _under_prefix(location):
+    if 'install' not in sys.argv:
+        return True
+    args = sys.argv[sys.argv.index('install')+1:]
+    for index, arg in enumerate(args):
+        for option in ('--root', '--prefix'):
+            if arg.startswith('%s=' % option):
+                top_dir = arg.split('root=')[-1]
+                return location.startswith(top_dir)
+            elif arg == option:
+                if len(args) > index:
+                    top_dir = args[index+1]
+                    return location.startswith(top_dir)
+        if arg == '--user' and USER_SITE is not None:
+            return location.startswith(USER_SITE)
+    return True
+
+
+def _fake_setuptools():
+    log.warn('Scanning installed packages')
+    try:
+        import pkg_resources
+    except ImportError:
+        # we're cool
+        log.warn('Setuptools or Distribute does not seem to be installed.')
+        return
+    ws = pkg_resources.working_set
+    try:
+        setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools',
+                                  replacement=False))
+    except TypeError:
+        # old distribute API
+        setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools'))
+
+    if setuptools_dist is None:
+        log.warn('No setuptools distribution found')
+        return
+    # detecting if it was already faked
+    setuptools_location = setuptools_dist.location
+    log.warn('Setuptools installation detected at %s', setuptools_location)
+
+    # if --root or --prefix was provided, and if
+    # setuptools is not located in them, we don't patch it
+    if not _under_prefix(setuptools_location):
+        log.warn('Not patching, --root or --prefix is installing Distribute'
+                 ' in another location')
+        return
+
+    # let's see if it's an egg
+    if not setuptools_location.endswith('.egg'):
+        log.warn('Non-egg installation')
+        res = _remove_flat_installation(setuptools_location)
+        if not res:
+            return
+    else:
+        log.warn('Egg installation')
+        pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO')
+        if (os.path.exists(pkg_info) and
+            _same_content(pkg_info, SETUPTOOLS_PKG_INFO)):
+            log.warn('Already patched.')
+            return
+        log.warn('Patching...')
+        # let's create a fake egg replacing setuptools one
+        res = _patch_egg_dir(setuptools_location)
+        if not res:
+            return
+    log.warn('Patching done.')
+    _relaunch()
+
+
+def _relaunch():
+    log.warn('Relaunching...')
+    # we have to relaunch the process
+    # pip marker to avoid a relaunch bug
+    if sys.argv[:3] == ['-c', 'install', '--single-version-externally-managed']:
+        sys.argv[0] = 'setup.py'
+    args = [sys.executable] + sys.argv
+    sys.exit(subprocess.call(args))
+
+
+def _extractall(self, path=".", members=None):
+    """Extract all members from the archive to the current working
+       directory and set owner, modification time and permissions on
+       directories afterwards. `path' specifies a different directory
+       to extract to. `members' is optional and must be a subset of the
+       list returned by getmembers().
+    """
+    import copy
+    import operator
+    from tarfile import ExtractError
+    directories = []
+
+    if members is None:
+        members = self
+
+    for tarinfo in members:
+        if tarinfo.isdir():
+            # Extract directories with a safe mode.
+            directories.append(tarinfo)
+            tarinfo = copy.copy(tarinfo)
+            tarinfo.mode = 448 # decimal for oct 0700
+        self.extract(tarinfo, path)
+
+    # Reverse sort directories.
+    if sys.version_info < (2, 4):
+        def sorter(dir1, dir2):
+            return cmp(dir1.name, dir2.name)
+        directories.sort(sorter)
+        directories.reverse()
+    else:
+        directories.sort(key=operator.attrgetter('name'), reverse=True)
+
+    # Set correct owner, mtime and filemode on directories.
+    for tarinfo in directories:
+        dirpath = os.path.join(path, tarinfo.name)
+        try:
+            self.chown(tarinfo, dirpath)
+            self.utime(tarinfo, dirpath)
+            self.chmod(tarinfo, dirpath)
+        except ExtractError:
+            e = sys.exc_info()[1]
+            if self.errorlevel > 1:
+                raise
+            else:
+                self._dbg(1, "tarfile: %s" % e)
+
+
+def main(argv, version=DEFAULT_VERSION):
+    """Install or upgrade setuptools and EasyInstall"""
+    tarball = download_setuptools()
+    _install(tarball)
+
+
+if __name__ == '__main__':
+    main(sys.argv[1:])
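
The file added above is the stock distribute bootstrap script. As its docstring says, a package opts in by shipping distribute_setup.py next to its setup.py and calling use_setuptools() before importing setuptools, which is exactly what the src/ldt/setup.py hunk later in this changeset does. A minimal sketch of such a setup.py (the package name and metadata below are placeholders, not taken from this changeset):

    from distribute_setup import use_setuptools
    use_setuptools()  # downloads and builds distribute 0.6.14 if it is not already importable

    from setuptools import setup

    setup(
        name='example',          # placeholder
        version='0.1',           # placeholder
        packages=['example'],    # placeholder
    )
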
--- a/src/ldt/ldt/ldt_utils/projectserializer.py	Wed Mar 23 17:34:36 2011 +0100
+++ b/src/ldt/ldt/ldt_utils/projectserializer.py	Fri Apr 08 16:33:01 2011 +0200
@@ -147,6 +147,7 @@
                 element_duration = element_node.attrib[u"dur"]
                 element_media = content.iri_id
                 element_color = element_node.attrib[u"color"]
+                element_ldt_src = element_node.attrib.get(u"src", "")
                 
                 element_title = reduce_text_node(element_node, "title/text()")        
                 element_description = reduce_text_node(element_node, "abstract/text()")                
@@ -211,11 +212,15 @@
                     "end": int(element_begin) + int(element_duration),
                     "id": element_id,
                     "media": element_media,
+                    "color": element_color,
                     "content": {
                         "mimetype": "application/x-ldt-structured",
                         "title": element_title,
                         "description": element_description,
                         "color": element_color,
+                        "img": {
+                            "src": element_ldt_src, 
+                        },
                         "audio": {
                             "src" : element_audio_src,
                             "mimetype": "audio/mp3",
--- a/src/ldt/ldt/ldt_utils/templates/ldt/ldt_utils/partial/projectslist.html	Wed Mar 23 17:34:36 2011 +0100
+++ b/src/ldt/ldt/ldt_utils/templates/ldt/ldt_utils/partial/projectslist.html	Fri Apr 08 16:33:01 2011 +0200
@@ -18,9 +18,9 @@
         <td class="cellimg"><div class="cellimgdiv"><img src="{{LDT_MEDIA_PREFIX}}img/plugin.gif" href="{{WEB_URL}}{{json_url_id}}" id="player_project_{{project.ldt_id}}" class="ldt_link_embed" alt="{% trans 'link json by id' %}" title="{% trans 'link json by id' %}"/></div></td>
         <td class="cellimg">
         {% ifequal project.state 2 %}
-        <img src="{{BASE_URL}}static/admin/img/admin/icon-yes.gif" alt="{% trans 'Project published, click to unpublish' %}" title="{% trans 'Project published, click to unpublish' %}" class="publishedproject" id="project_{{project.ldt_id}}" />
+        <img src="{{ADMIN_MEDIA_PREFIX}}img/admin/icon-yes.gif" alt="{% trans 'Project published, click to unpublish' %}" title="{% trans 'Project published, click to unpublish' %}" class="publishedproject" id="project_{{project.ldt_id}}" />
         {% else %}
-        <img src="{{BASE_URL}}static/admin/img/admin/icon-no.gif" alt="{% trans 'Project not published, click to publish' %}" title="{% trans 'Project not published, click to publish' %}" class="unpublishedproject" id="project_{{project.ldt_id}}" />
+        <img src="{{ADMIN_MEDIA_PREFIX}}img/admin/icon-no.gif" alt="{% trans 'Project not published, click to publish' %}" title="{% trans 'Project not published, click to publish' %}" class="unpublishedproject" id="project_{{project.ldt_id}}" />
         {% endifequal %}
         </td>
         <td class="projecttitle">
Binary file src/ldt/ldt/media/swf/ldt/LignesDeTempsFlex.swf has changed
--- a/src/ldt/ldt/settings.py	Wed Mar 23 17:34:36 2011 +0100
+++ b/src/ldt/ldt/settings.py	Fri Apr 08 16:33:01 2011 +0200
@@ -28,6 +28,7 @@
     'ldt.user',
     'ldt.management',
     'oauth_provider',
+    'social_auth',
 )
 
 MIDDLEWARE_CLASSES = (
@@ -41,18 +42,6 @@
     'jogging.middleware.LoggingMiddleware',
 )
 
-TEMPLATE_CONTEXT_PROCESSORS = (
-    "django.core.context_processors.request",
-    "django.core.context_processors.auth",
-    "django.core.context_processors.debug",
-    "django.core.context_processors.i18n",
-    "django.core.context_processors.media",
-    "ldt.utils.context_processors.ldt",
-    "ldt.utils.context_processors.base",
-    "ldt.utils.context_processors.web",
-    "ldt.utils.context_processors.version",
-)
-
 
 
 WEB_URL = getattr(settings, 'WEB_URL', '')
--- a/src/ldt/ldt/utils/context_processors.py	Wed Mar 23 17:34:36 2011 +0100
+++ b/src/ldt/ldt/utils/context_processors.py	Fri Apr 08 16:33:01 2011 +0200
@@ -2,15 +2,8 @@
 
 import ldt
 
-def ldt(request):
-    return {'LDT_MEDIA_PREFIX': settings.LDT_MEDIA_PREFIX }
-
-def base(request):
-    return {'BASE_URL': settings.BASE_URL, 'MEDIA_URL': settings.MEDIA_URL }
+def ldtcontext(request):
+    return {'BASE_URL': settings.BASE_URL, 'MEDIA_URL': settings.MEDIA_URL, 'TC1': 'TC1', \
+            'LDT_MEDIA_PREFIX': settings.LDT_MEDIA_PREFIX, 'WEB_URL': settings.WEB_URL, \
+             'ADMIN_MEDIA_PREFIX': settings.ADMIN_MEDIA_PREFIX, 'VERSION': ldt.get_version() }
 
-def web(request):
-    return {'WEB_URL': settings.WEB_URL }
-
-def version(request):
-    return {'VERSION': ldt.get_version() }
-
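
The four single-purpose context processors are collapsed into one ldtcontext callable; in Django 1.2 a context processor is just a function that takes the request and returns a dict of extra template variables, which is how ADMIN_MEDIA_PREFIX becomes available to templates such as projectslist.html above. The TEMPLATE_CONTEXT_PROCESSORS tuple that enabled the old processors is removed from src/ldt/ldt/settings.py in this changeset; presumably the new processor is registered in the project settings instead (web/ldtplatform/settings.py is in the file list, but its hunk is not shown here). A sketch of what that registration would look like, assuming the same Django defaults as before plus the consolidated processor:

    # hypothetical settings fragment, not part of the hunks shown above
    TEMPLATE_CONTEXT_PROCESSORS = (
        "django.core.context_processors.request",
        "django.core.context_processors.auth",
        "django.core.context_processors.debug",
        "django.core.context_processors.i18n",
        "django.core.context_processors.media",
        "ldt.utils.context_processors.ldtcontext",
    )
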
--- a/src/ldt/setup.py	Wed Mar 23 17:34:36 2011 +0100
+++ b/src/ldt/setup.py	Fri Apr 08 16:33:01 2011 +0200
@@ -1,5 +1,9 @@
 import os
-from setuptools import setup, find_packages
+
+from distribute_setup import use_setuptools
+use_setuptools()
+
+from setuptools import setup
 
 ROOT_DIR = os.path.dirname(__file__)
 SOURCE_DIR = os.path.join(ROOT_DIR, 'ldt')
@@ -43,13 +47,22 @@
             new_data_files.extend([os.path.join(ldirpath[len(base_path):], f) for f in lfilenames])
         data_files.setdefault(key,[]).extend(new_data_files)
 
+#write MANIFEST.in
+
+m = open("MANIFEST.in", "w")
+
+m.write("exclude MANIFEST.in\n")
+for key,file_list in data_files.iteritems():
+    for filename in file_list:
+        m.write("include %s/%s\n" % (key.replace(".","/"), filename))
+m.close()
 
 setup(
     name='ldt',
     version=version,
     author='Yves-Marie Haussonne (IRI)',
     author_email='contact@iri.centrepompidou.fr',
-    packages = find_packages(),
+    packages = packages,
     package_data = data_files,
     scripts=[],
     url='https://www.iri.centrepompidou.fr/dev/hg/platform',
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/virtualenv/res/lib/lib_create_env.py	Fri Apr 08 16:33:01 2011 +0200
@@ -0,0 +1,292 @@
+import sys
+import os
+import os.path
+import shutil
+import tarfile
+import zipfile
+import urllib
+import platform
+import patch
+
+join = os.path.join
+system_str = platform.system()
+
+
+URLS = {
+    'DISTRIBUTE': {'setup': 'distribute', 'url':'http://pypi.python.org/packages/source/d/distribute/distribute-0.6.14.tar.gz', 'local':"distribute-0.6.14.tar.gz"},
+    'DJANGO': {'setup': 'django', 'url': 'http://www.djangoproject.com/download/1.2.4/tarball/', 'local':"Django-1.2.4.tar.gz"},
+    'JOGGING': {'setup': 'jogging', 'url': 'http://github.com/zain/jogging/tarball/v0.2.2', 'local':"jogging-0.2.2.tar.gz"},
+    'DJANGO-EXTENSIONS': { 'setup': 'django-extensions', 'url':'https://github.com/django-extensions/django-extensions/tarball/0.6', 'local':"django-extensions-0.6.tar.gz"},
+    'DJANGO-REGISTRATION': { 'setup': 'django-registration', 'url':'http://bitbucket.org/ubernostrum/django-registration/get/tip.tar.gz', 'local':"django-registration.tar.gz"},
+    'DJANGO-TAGGING': { 'setup': 'django-tagging', 'url':'http://django-tagging.googlecode.com/files/django-tagging-0.3.1.tar.gz', 'local':"django-tagging-0.3.1.tar.gz"},
+    'DJANGO-PISTON': { 'setup': 'django-piston', 'url':"django-piston-0.2.2-modified.tar.gz", 'local':"django-piston-0.2.2-modified.tar.gz"},
+    'OAUTH2': { 'setup': 'python-oauth2', 'url':"python-oauth2-1.2.1-modified.tar.gz", 'local':"python-oauth2-1.2.1-modified.tar.gz"},
+    'HTTPLIB2': { 'setup': 'python-oauth2', 'url':'http://httplib2.googlecode.com/files/httplib2-0.6.0.tar.gz', 'local':"httplib2-0.6.0.tar.gz"},
+    'DJANGO-OAUTH-PLUS': { 'setup': 'django-oauth-plus', 'url':'http://bitbucket.org/david/django-oauth-plus/get/f314f018e473.gz', 'local':"django-oauth-plus.tar.gz"},
+    'MYSQL': { 'setup': 'mysql-python', 'url': 'http://sourceforge.net/projects/mysql-python/files/mysql-python/1.2.3/MySQL-python-1.2.3.tar.gz/download', 'local':"MySQL-python-1.2.3.tar.gz"},
+    'SETUPTOOLS-HG': { 'setup':'mercurial_hg', 'url':'http://pypi.python.org/packages/source/s/setuptools_hg/setuptools_hg-0.2.tar.gz', 'local':"setuptools_hg-0.2.tar.gz"},
+    'MERCURIAL': {'setup':'mercurial', 'url':'http://pypi.python.org/packages/source/d/mercurial/mercurial-1.7.5.tar.gz', 'local':"mercurial-1.7.5.tar.gz"}
+}
+
+if system_str == 'Windows':
+    URLS.update({
+        'PSYCOPG2': {'setup': 'psycopg2','url': 'psycopg2-2.0.10.win32-py2.6-pg8.3.7-release.zip', 'local':"psycopg2-2.0.10.win32-py2.6-pg8.3.7-release.zip"},
+        'JCC': {'setup': 'http://pylucene-win32-binary.googlecode.com/files/JCC-2.6-py2.6-win32.egg', 'local':"JCC-2.6-py2.6-win32.egg"},
+        'PYLUCENE': {'setup': 'http://pylucene-win32-binary.googlecode.com/files/lucene-3.0.2-py2.6-win32.egg', 'local':"lucene-3.0.2-py2.6-win32.egg"},
+        'PIL': {'setup': 'pil', 'url': 'http://effbot.org/media/downloads/PIL-1.1.7.win32-py2.6.exe', 'local':"PIL-1.1.7.win32-py2.6.exe"},
+        'LXML': {'setup': 'lxml', 'url': 'http://pypi.python.org/packages/2.6/l/lxml/lxml-2.2.8-py2.6-win32.egg', 'local':"lxml-2.2.8-py2.6-win32.egg"}
+    })
+else:
+    URLS.update({
+        'PSYCOPG2': {'setup': 'psycopg2','url': 'http://initd.org/psycopg/tarballs/PSYCOPG-2-3/psycopg2-2.3.2.tar.gz', 'local':"psycopg2-2.3.2.tar.gz"},
+        'PYLUCENE': {'setup': 'http://apache.crihan.fr/dist/lucene/pylucene/pylucene-3.0.3-1-src.tar.gz', 'url': 'http://apache.crihan.fr/dist/lucene/pylucene/pylucene-3.0.3-1-src.tar.gz', 'local':"pylucene-3.0.3-1-src.tar.gz"},
+        'PIL': {'setup': 'pil', 'url': 'http://effbot.org/downloads/Imaging-1.1.7.tar.gz', 'local':"Imaging-1.1.7.tar.gz"},
+        'LXML': {'setup': 'lxml', 'url':"lxml_2.2.8.tar.gz", 'local':"lxml-2.2.8.tar.gz"}
+    })
+
+
+
+class ResourcesEnv(object):
+
+    def __init__(self, src_base, urls, normal_installs):
+        self.src_base = src_base
+        self.URLS = {}
+        self.__init_url(urls)
+        self.NORMAL_INSTALL = normal_installs
+
+    def get_src_base_path(self, fpath):
+        return os.path.abspath(os.path.join(self.src_base, fpath)).replace("\\","/")
+    
+    def __add_package_def(self, key, setup, url, local):
+        self.URLS[key] = {'setup':setup, 'url':url, 'local':self.get_src_base_path(local)}
+
+    def __init_url(self, urls):
+        for key, url_dict in urls.items():
+            url = url_dict['url']
+            if not url.startswith("http://"):
+                url = self.get_src_base_path(url)
+            self.__add_package_def(key, url_dict["setup"], url, url_dict["local"])
+
+def ensure_dir(dir, logger):
+    if not os.path.exists(dir):
+        logger.notify('Creating directory %s' % dir)
+        os.makedirs(dir)
+
+def extend_parser(parser):    
+    parser.add_option(
+        '--index-url',
+        metavar='INDEX_URL',
+        dest='index_url',
+        default='http://pypi.python.org/simple/',
+        help='base URL of Python Package Index')
+    parser.add_option(
+        '--type-install',
+        metavar='type_install',
+        dest='type_install',
+        default='local',
+        help='type install : local, url, setup')
+    parser.add_option(
+        '--ignore-packages',
+        metavar='ignore_packages',
+        dest='ignore_packages',
+        default=None,
+        help='list of comma separated keys for package to ignore')
+
+def adjust_options(options, args):
+    pass
+
+
+def install_pylucene(option_str, extra_env, res_source_key, home_dir, tmp_dir, src_dir, res_env, logger, call_subprocess, filter_python_develop):
+    
+    logger.notify("Get Pylucene from %s " % res_env.URLS['PYLUCENE'][res_source_key])
+    pylucene_src = os.path.join(src_dir,"pylucene.tar.gz")
+    if res_source_key == 'local':
+        shutil.copy(res_env.URLS['PYLUCENE'][res_source_key], pylucene_src)
+    else:
+        urllib.urlretrieve(res_env.URLS['PYLUCENE'][res_source_key], pylucene_src)
+    tf = tarfile.open(pylucene_src,'r:gz')
+    pylucene_base_path = os.path.join(src_dir,"pylucene") 
+    logger.notify("Extract Pylucene to %s " % pylucene_base_path)
+    tf.extractall(pylucene_base_path)
+    tf.close()
+    
+    pylucene_src_path = os.path.join(pylucene_base_path, os.listdir(pylucene_base_path)[0])
+    jcc_src_path = os.path.abspath(os.path.join(pylucene_src_path,"jcc"))
+    
+    #install jcc
+
+    #patch for linux
+    if system_str == 'Linux' :
+        olddir = os.getcwd()
+        patch_dest_path = os.path.join(lib_dir,'site-packages','setuptools-0.6c11-py'+'%s.%s' % (sys.version_info[0], sys.version_info[1])+'.egg')
+        if os.path.isfile(patch_dest_path):
+            # must unzip egg
+            # rename file and extract all
+            shutil.move(patch_dest_path, patch_dest_path + ".zip")
+            zf = zipfile.ZipFile(patch_dest_path + ".zip",'r')
+            zf.extractall(patch_dest_path)
+            os.remove(patch_dest_path + ".zip")
+        logger.notify("Patch jcc : %s " % (patch_dest_path))
+        os.chdir(patch_dest_path)
+        p = patch.fromfile(os.path.join(jcc_src_path,"jcc","patches","patch.43.0.6c11"))
+        p.apply()
+        os.chdir(olddir)
+
+    logger.notify("Install jcc")
+    call_subprocess([os.path.abspath(os.path.join(home_dir, 'bin', 'python')), 'setup.py', 'install'],
+                    cwd=jcc_src_path,
+                    filter_stdout=filter_python_develop,
+                    show_stdout=True)
+    #install pylucene
+    
+    logger.notify("Install pylucene")
+    #modify makefile
+    makefile_path = os.path.join(pylucene_src_path,"Makefile")
+    logger.notify("Modify makefile %s " % makefile_path)
+    shutil.move( makefile_path, makefile_path+"~" )
+
+    destination= open( makefile_path, "w" )
+    source= open( makefile_path+"~", "r" )
+    destination.write("PREFIX_PYTHON="+os.path.abspath(home_dir)+"\n")
+    destination.write("ANT=ant\n")
+    destination.write("PYTHON=$(PREFIX_PYTHON)/bin/python\n")
+    
+    if system_str == "Darwin":
+        if sys.version_info >= (2,6):
+            destination.write("JCC=$(PYTHON) -m jcc.__main__ --shared --arch x86_64 --arch i386\n")
+        else:
+            destination.write("JCC=$(PYTHON) -m jcc --shared --arch x86_64 --arch i386\n")
+        destination.write("NUM_FILES=2\n")
+    elif system_str == "Windows":
+        destination.write("JCC=$(PYTHON) -m jcc.__main__ --shared --arch x86_64 --arch i386\n")
+        destination.write("NUM_FILES=2\n")
+    else:
+        if sys.version_info >= (2,6) and sys.version_info <= (2,7):
+            destination.write("JCC=$(PYTHON) -m jcc.__main__ --shared\n")
+        else:
+            destination.write("JCC=$(PYTHON) -m jcc --shared\n")
+        destination.write("NUM_FILES=2\n")
+    for line in source:
+        destination.write( line )
+    source.close()
+    destination.close()
+    os.remove(makefile_path+"~" )
+
+    logger.notify("pylucene make")
+    call_subprocess(['make'],
+                    cwd=os.path.abspath(pylucene_src_path),
+                    filter_stdout=filter_python_develop,
+                    show_stdout=True)
+
+    logger.notify("pylucene make install")
+    call_subprocess(['make', 'install'],
+                    cwd=os.path.abspath(pylucene_src_path),
+                    filter_stdout=filter_python_develop,
+                    show_stdout=True)
+    
+
+def install_psycopg2(option_str, extra_env, res_source_key, home_dir, tmp_dir, src_dir, res_env, logger, call_subprocess, filter_python_develop):
+    psycopg2_src = os.path.join(src_dir,"psycopg2.zip")
+    shutil.copy(res_env.URLS['PSYCOPG2'][res_source_key], psycopg2_src)
+    #extract psycopg2
+    zf = zipfile.ZipFile(psycopg2_src)
+    psycopg2_base_path = os.path.join(src_dir,"psycopg2")
+    zf.extractall(psycopg2_base_path)
+    zf.close()
+    
+    psycopg2_src_path = os.path.join(psycopg2_base_path, os.listdir(psycopg2_base_path)[0])
+    shutil.copytree(os.path.join(psycopg2_src_path, 'psycopg2'), os.path.abspath(os.path.join(home_dir, 'Lib', 'psycopg2')))
+    shutil.copy(os.path.join(psycopg2_src_path, 'psycopg2-2.0.10-py2.6.egg-info'), os.path.abspath(os.path.join(home_dir, 'Lib', 'site-packages')))
+
+
+
+def lib_generate_install_methods(path_locations, src_base, Logger, call_subprocess, normal_installs, urls=None):
+    
+    all_urls = URLS.copy()
+    if urls is not None:
+        all_urls.update(urls)
+        
+    res_env = ResourcesEnv(src_base, all_urls, normal_installs)
+
+    def filter_python_develop(line):
+        if not line.strip():
+            return Logger.DEBUG
+        for prefix in ['Searching for', 'Reading ', 'Best match: ', 'Processing ',
+                       'Moving ', 'Adding ', 'running ', 'writing ', 'Creating ',
+                       'creating ', 'Copying ']:
+            if line.startswith(prefix):
+                return Logger.DEBUG
+        return Logger.NOTIFY
+    
+    
+    def normal_install(key, method, option_str, extra_env, res_source_key, home_dir, tmp_dir, res_env, logger, call_subprocess):
+        logger.notify("Install %s from %s with %s" % (key,res_env.URLS[key][res_source_key],method))
+        if method == 'pip':
+            if sys.platform == 'win32':
+                args = [os.path.abspath(os.path.join(home_dir, 'Scripts', 'pip')), 'install', '-E', os.path.abspath(home_dir), res_env.URLS[key][res_source_key]]
+            else:
+                args = [os.path.abspath(os.path.join(home_dir, 'bin', 'pip')), 'install', '-E', os.path.abspath(home_dir), res_env.URLS[key][res_source_key]]
+            if option_str :
+                args.insert(4,option_str)
+            call_subprocess(args,
+                    cwd=os.path.abspath(tmp_dir),
+                    filter_stdout=filter_python_develop,
+                    show_stdout=True,
+                    extra_env=extra_env)
+        else:
+            if sys.platform == 'win32':
+                args = [os.path.abspath(os.path.join(home_dir, 'Scripts', 'easy_install')), res_env.URLS[key][res_source_key]]
+            else:
+                args = [os.path.abspath(os.path.join(home_dir, 'bin', 'easy_install')), res_env.URLS[key][res_source_key]]
+            if option_str :
+                args.insert(1,option_str)
+            call_subprocess(args,
+                    cwd=os.path.abspath(tmp_dir),
+                    filter_stdout=filter_python_develop,
+                    show_stdout=True,
+                    extra_env=extra_env)            
+ 
+    
+    def after_install(options, home_dir):
+        
+        global logger
+        
+        verbosity = options.verbose - options.quiet
+        logger = Logger([(Logger.level_for_integer(2-verbosity), sys.stdout)])
+
+        
+        home_dir, lib_dir, inc_dir, bin_dir = path_locations(home_dir)
+        base_dir = os.path.dirname(home_dir)
+        src_dir = os.path.join(home_dir, 'src')
+        tmp_dir = os.path.join(home_dir, 'tmp')
+        ensure_dir(src_dir, logger)
+        ensure_dir(tmp_dir, logger)
+        system_str = platform.system()
+        
+        res_source_key = options.type_install
+        
+        ignore_packages = []
+        
+        if options.ignore_packages :
+            ignore_packages = options.ignore_packages.split(",")
+        
+        logger.indent += 2
+        try:    
+            for key, method, option_str, extra_env in res_env.NORMAL_INSTALL:
+                if key not in ignore_packages:
+                    if callable(method):
+                        method(option_str, extra_env, res_source_key, home_dir, tmp_dir, src_dir, res_env, logger, call_subprocess, filter_python_develop)
+                    else:
+                        normal_install(key, method, option_str, extra_env, res_source_key, home_dir, tmp_dir, res_env, logger, call_subprocess)
+                            
+            logger.notify("Clear source dir")
+            shutil.rmtree(src_dir)
+    
+        finally:
+            logger.indent -= 2
+        script_dir = join(base_dir, bin_dir)
+        logger.notify('Run "%s Package" to install new packages that provide builds'
+                      % join(script_dir, 'easy_install'))
+    
+
+    return adjust_options, extend_parser, after_install
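
lib_generate_install_methods() returns the three hooks (adjust_options, extend_parser, after_install) that a generated virtualenv bootstrap script is expected to define. The caller supplies virtualenv's own helpers plus an ordered install list whose entries are unpacked as (key, method, option_str, extra_env), where method is either 'pip'/'easy_install' or a callable such as install_pylucene. A hypothetical caller is sketched below; the virtualenv helper names and the install list are assumptions, since the actual create_env script is not part of the hunks shown here:

    import virtualenv  # assumed: classic virtualenv exposing path_locations, Logger, call_subprocess
    from lib_create_env import lib_generate_install_methods, install_pylucene

    INSTALLS = [
        # (key in URLS, method, extra pip/easy_install option, extra env) -- assumed contents
        ('DISTRIBUTE', 'easy_install', None, None),
        ('DJANGO', 'pip', None, None),
        ('PYLUCENE', install_pylucene, None, None),
    ]

    adjust_options, extend_parser, after_install = lib_generate_install_methods(
        virtualenv.path_locations, 'res/src', virtualenv.Logger,
        virtualenv.call_subprocess, INSTALLS)
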
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/virtualenv/res/lib/patch.py	Fri Apr 08 16:33:01 2011 +0200
@@ -0,0 +1,589 @@
+""" Patch utility to apply unified diffs
+
+    Brute-force line-by-line non-recursive parsing 
+
+    Copyright (c) 2008-2010 anatoly techtonik
+    Available under the terms of MIT license
+
+    Project home: http://code.google.com/p/python-patch/
+
+
+    $Id: patch.py 76 2010-04-08 19:10:21Z techtonik $
+    $HeadURL: https://python-patch.googlecode.com/svn/trunk/patch.py $
+"""
+
+__author__ = "techtonik.rainforce.org"
+__version__ = "10.04"
+
+import copy
+import logging
+import re
+# cStringIO doesn't support unicode in 2.5
+from StringIO import StringIO
+from logging import debug, info, warning
+
+from os.path import exists, isfile, abspath
+from os import unlink
+
+
+#------------------------------------------------
+# Logging is controlled by "python_patch" logger
+
+debugmode = False
+
+logger = logging.getLogger("python_patch")
+loghandler = logging.StreamHandler()
+logger.addHandler(loghandler)
+
+debug = logger.debug
+info = logger.info
+warning = logger.warning
+
+#: disable library logging by default
+logger.setLevel(logging.CRITICAL)
+
+#------------------------------------------------
+
+
+def fromfile(filename):
+  """ Parse patch file and return Patch() object
+  """
+
+  info("reading patch from file %s" % filename)
+  fp = open(filename, "rb")
+  patch = Patch(fp)
+  fp.close()
+  return patch
+
+
+def fromstring(s):
+  """ Parse text string and return Patch() object
+  """
+
+  return Patch(
+           StringIO(s)
+         )
+
+
+
+class HunkInfo(object):
+  """ Parsed hunk data container (hunk starts with @@ -R +R @@) """
+
+  def __init__(self):
+    self.startsrc=None #: line count starts with 1
+    self.linessrc=None
+    self.starttgt=None
+    self.linestgt=None
+    self.invalid=False
+    self.text=[]
+
+  def copy(self):
+    return copy.copy(self)
+
+#  def apply(self, estream):
+#    """ write hunk data into enumerable stream
+#        return strings one by one until hunk is
+#        over
+#
+#        enumerable stream are tuples (lineno, line)
+#        where lineno starts with 0
+#    """
+#    pass
+
+
+
+class Patch(object):
+
+  def __init__(self, stream=None):
+
+    # define Patch data members
+    # table with a row for every source file
+
+    #: list of source filenames
+    self.source=None
+    self.target=None
+    #: list of lists of hunks
+    self.hunks=None
+    #: file endings statistics for every hunk
+    self.hunkends=None
+
+    if stream:
+      self.parse(stream)
+
+  def copy(self):
+    return copy.copy(self)
+
+  def parse(self, stream):
+    """ parse unified diff """
+    self.source = []
+    self.target = []
+    self.hunks = []
+    self.hunkends = []
+
+    # define possible file regions that will direct the parser flow
+    header = False    # comments before the patch body
+    filenames = False # lines starting with --- and +++
+
+    hunkhead = False  # @@ -R +R @@ sequence
+    hunkbody = False  #
+    hunkskip = False  # skipping invalid hunk mode
+
+    header = True
+    lineends = dict(lf=0, crlf=0, cr=0)
+    nextfileno = 0
+    nexthunkno = 0    #: even if index starts with 0 user messages number hunks from 1
+
+    # hunkinfo holds parsed values, hunkactual - calculated
+    hunkinfo = HunkInfo()
+    hunkactual = dict(linessrc=None, linestgt=None)
+
+    fe = enumerate(stream)
+    for lineno, line in fe:
+
+      # analyze state
+      if header and line.startswith("--- "):
+        header = False
+        # switch to filenames state
+        filenames = True
+      #: skip hunkskip and hunkbody code until you read definition of hunkhead
+      if hunkbody:
+        # process line first
+        if re.match(r"^[- \+\\]", line):
+            # gather stats about line endings
+            if line.endswith("\r\n"):
+              self.hunkends[nextfileno-1]["crlf"] += 1
+            elif line.endswith("\n"):
+              self.hunkends[nextfileno-1]["lf"] += 1
+            elif line.endswith("\r"):
+              self.hunkends[nextfileno-1]["cr"] += 1
+              
+            if line.startswith("-"):
+              hunkactual["linessrc"] += 1
+            elif line.startswith("+"):
+              hunkactual["linestgt"] += 1
+            elif not line.startswith("\\"):
+              hunkactual["linessrc"] += 1
+              hunkactual["linestgt"] += 1
+            hunkinfo.text.append(line)
+            # todo: handle \ No newline cases
+        else:
+            warning("invalid hunk no.%d at %d for target file %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
+            # add hunk status node
+            self.hunks[nextfileno-1].append(hunkinfo.copy())
+            self.hunks[nextfileno-1][nexthunkno-1]["invalid"] = True
+            # switch to hunkskip state
+            hunkbody = False
+            hunkskip = True
+
+        # check exit conditions
+        if hunkactual["linessrc"] > hunkinfo.linessrc or hunkactual["linestgt"] > hunkinfo.linestgt:
+            warning("extra hunk no.%d lines at %d for target %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
+            # add hunk status node
+            self.hunks[nextfileno-1].append(hunkinfo.copy())
+            self.hunks[nextfileno-1][nexthunkno-1]["invalid"] = True
+            # switch to hunkskip state
+            hunkbody = False
+            hunkskip = True
+        elif hunkinfo.linessrc == hunkactual["linessrc"] and hunkinfo.linestgt == hunkactual["linestgt"]:
+            self.hunks[nextfileno-1].append(hunkinfo.copy())
+            # switch to hunkskip state
+            hunkbody = False
+            hunkskip = True
+
+            # detect mixed window/unix line ends
+            ends = self.hunkends[nextfileno-1]
+            if ((ends["cr"]!=0) + (ends["crlf"]!=0) + (ends["lf"]!=0)) > 1:
+              warning("inconsistent line ends in patch hunks for %s" % self.source[nextfileno-1])
+            if debugmode:
+              debuglines = dict(ends)
+              debuglines.update(file=self.target[nextfileno-1], hunk=nexthunkno)
+              debug("crlf: %(crlf)d  lf: %(lf)d  cr: %(cr)d\t - file: %(file)s hunk: %(hunk)d" % debuglines)
+
+      if hunkskip:
+        match = re.match("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?", line)
+        if match:
+          # switch to hunkhead state
+          hunkskip = False
+          hunkhead = True
+        elif line.startswith("--- "):
+          # switch to filenames state
+          hunkskip = False
+          filenames = True
+          if debugmode and len(self.source) > 0:
+            debug("- %2d hunks for %s" % (len(self.hunks[nextfileno-1]), self.source[nextfileno-1]))
+
+      if filenames:
+        if line.startswith("--- "):
+          if nextfileno in self.source:
+            warning("skipping invalid patch for %s" % self.source[nextfileno])
+            del self.source[nextfileno]
+            # double source filename line is encountered
+            # attempt to restart from this second line
+          re_filename = "^--- ([^\t]+)"
+          match = re.match(re_filename, line)
+          # todo: support spaces in filenames
+          if match:
+            self.source.append(match.group(1).strip())
+          else:
+            warning("skipping invalid filename at line %d" % lineno)
+            # switch back to header state
+            filenames = False
+            header = True
+        elif not line.startswith("+++ "):
+          if nextfileno in self.source:
+            warning("skipping invalid patch with no target for %s" % self.source[nextfileno])
+            del self.source[nextfileno]
+          else:
+            # this should be unreachable
+            warning("skipping invalid target patch")
+          filenames = False
+          header = True
+        else:
+          if nextfileno in self.target:
+            warning("skipping invalid patch - double target at line %d" % lineno)
+            del self.source[nextfileno]
+            del self.target[nextfileno]
+            nextfileno -= 1
+            # double target filename line is encountered
+            # switch back to header state
+            filenames = False
+            header = True
+          else:
+            re_filename = "^\+\+\+ ([^\t]+)"
+            match = re.match(re_filename, line)
+            if not match:
+              warning("skipping invalid patch - no target filename at line %d" % lineno)
+              # switch back to header state
+              filenames = False
+              header = True
+            else:
+              self.target.append(match.group(1).strip())
+              nextfileno += 1
+              # switch to hunkhead state
+              filenames = False
+              hunkhead = True
+              nexthunkno = 0
+              self.hunks.append([])
+              self.hunkends.append(lineends.copy())
+              continue
+
+      if hunkhead:
+        match = re.match("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?", line)
+        if not match:
+          if nextfileno-1 not in self.hunks:
+            warning("skipping invalid patch with no hunks for file %s" % self.target[nextfileno-1])
+            # switch to header state
+            hunkhead = False
+            header = True
+            continue
+          else:
+            # switch to header state
+            hunkhead = False
+            header = True
+        else:
+          hunkinfo.startsrc = int(match.group(1))
+          hunkinfo.linessrc = 1
+          if match.group(3): hunkinfo.linessrc = int(match.group(3))
+          hunkinfo.starttgt = int(match.group(4))
+          hunkinfo.linestgt = 1
+          if match.group(6): hunkinfo.linestgt = int(match.group(6))
+          hunkinfo.invalid = False
+          hunkinfo.text = []
+
+          hunkactual["linessrc"] = hunkactual["linestgt"] = 0
+
+          # switch to hunkbody state
+          hunkhead = False
+          hunkbody = True
+          nexthunkno += 1
+          continue
+    else:
+      if not hunkskip:
+        warning("patch file incomplete - %s" % filename)
+        # sys.exit(?)
+      else:
+        # duplicated message when an eof is reached
+        if debugmode and len(self.source) > 0:
+            debug("- %2d hunks for %s" % (len(self.hunks[nextfileno-1]), self.source[nextfileno-1]))
+
+    info("total files: %d  total hunks: %d" % (len(self.source), sum(len(hset) for hset in self.hunks)))
+
+
+  def apply(self):
+    """ apply parsed patch """
+
+    total = len(self.source)
+    for fileno, filename in enumerate(self.source):
+
+      f2patch = filename
+      if not exists(f2patch):
+        f2patch = self.target[fileno]
+        if not exists(f2patch):
+          warning("source/target file does not exist\n--- %s\n+++ %s" % (filename, f2patch))
+          continue
+      if not isfile(f2patch):
+        warning("not a file - %s" % f2patch)
+        continue
+      filename = f2patch
+
+      info("processing %d/%d:\t %s" % (fileno+1, total, filename))
+
+      # validate before patching
+      f2fp = open(filename)
+      hunkno = 0
+      hunk = self.hunks[fileno][hunkno]
+      hunkfind = []
+      hunkreplace = []
+      validhunks = 0
+      canpatch = False
+      for lineno, line in enumerate(f2fp):
+        if lineno+1 < hunk.startsrc:
+          continue
+        elif lineno+1 == hunk.startsrc:
+          hunkfind = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " -"]
+          hunkreplace = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " +"]
+          #pprint(hunkreplace)
+          hunklineno = 0
+
+          # todo \ No newline at end of file
+
+        # check hunks in source file
+        if lineno+1 < hunk.startsrc+len(hunkfind)-1:
+          if line.rstrip("\r\n") == hunkfind[hunklineno]:
+            hunklineno+=1
+          else:
+            debug("hunk no.%d doesn't match source file %s" % (hunkno+1, filename))
+            # file may be already patched, but we will check other hunks anyway
+            hunkno += 1
+            if hunkno < len(self.hunks[fileno]):
+              hunk = self.hunks[fileno][hunkno]
+              continue
+            else:
+              break
+
+        # check if processed line is the last line
+        if lineno+1 == hunk.startsrc+len(hunkfind)-1:
+          debug("file %s hunk no.%d -- is ready to be patched" % (filename, hunkno+1))
+          hunkno+=1
+          validhunks+=1
+          if hunkno < len(self.hunks[fileno]):
+            hunk = self.hunks[fileno][hunkno]
+          else:
+            if validhunks == len(self.hunks[fileno]):
+              # patch file
+              canpatch = True
+              break
+      else:
+        if hunkno < len(self.hunks[fileno]):
+          warning("premature end of source file %s at hunk %d" % (filename, hunkno+1))
+
+      f2fp.close()
+
+      if validhunks < len(self.hunks[fileno]):
+        if self._match_file_hunks(filename, self.hunks[fileno]):
+          warning("already patched  %s" % filename)
+        else:
+          warning("source file is different - %s" % filename)
+      if canpatch:
+        backupname = filename+".orig"
+        if exists(backupname):
+          warning("can't backup original file to %s - aborting" % backupname)
+        else:
+          import shutil
+          shutil.move(filename, backupname)
+          if self.write_hunks(backupname, filename, self.hunks[fileno]):
+            warning("successfully patched %s" % filename)
+            unlink(backupname)
+          else:
+            warning("error patching file %s" % filename)
+            shutil.copy(filename, filename+".invalid")
+            warning("invalid version is saved to %s" % filename+".invalid")
+            # todo: proper rejects
+            shutil.move(backupname, filename)
+
+    # todo: check for premature eof
+
+
+  def can_patch(self, filename):
+    """ Check if specified filename can be patched. Returns None if file can
+    not be found among source filenames. False if patch can not be applied
+    clearly. True otherwise.
+
+    :returns: True, False or None
+    """
+    idx = self._get_file_idx(filename, source=True)
+    if idx == None:
+      return None
+    return self._match_file_hunks(filename, self.hunks[idx])
+    
+
+  def _match_file_hunks(self, filepath, hunks):
+    matched = True
+    fp = open(abspath(filepath))
+
+    class NoMatch(Exception):
+      pass
+
+    lineno = 1
+    line = fp.readline()
+    hno = None
+    try:
+      for hno, h in enumerate(hunks):
+        # skip to first line of the hunk
+        while lineno < h.starttgt:
+          if not len(line): # eof
+            debug("check failed - premature eof before hunk: %d" % (hno+1))
+            raise NoMatch
+          line = fp.readline()
+          lineno += 1
+        for hline in h.text:
+          if hline.startswith("-"):
+            continue
+          if not len(line):
+            debug("check failed - premature eof on hunk: %d" % (hno+1))
+            # todo: \ No newline at the end of file
+            raise NoMatch
+          if line.rstrip("\r\n") != hline[1:].rstrip("\r\n"):
+            debug("file is not patched - failed hunk: %d" % (hno+1))
+            raise NoMatch
+          line = fp.readline()
+          lineno += 1
+
+    except NoMatch:
+      matched = False
+      # todo: display failed hunk, i.e. expected/found
+
+    fp.close()
+    return matched
+
+
+  def patch_stream(self, instream, hunks):
+    """ Generator that yields stream patched with hunks iterable
+    
+        Converts lineends in hunk lines to the best suitable format
+        autodetected from input
+    """
+
+    # todo: At the moment substituted lineends may not be the same
+    #       at the start and at the end of patching. Also issue a
+    #       warning/throw about mixed lineends (is it really needed?)
+
+    hunks = iter(hunks)
+
+    srclineno = 1
+
+    lineends = {'\n':0, '\r\n':0, '\r':0}
+    def get_line():
+      """
+      local utility function - return line from source stream
+      collecting line end statistics on the way
+      """
+      line = instream.readline()
+      # 'U' mode works only with text files
+      if line.endswith("\r\n"):
+        lineends["\r\n"] += 1
+      elif line.endswith("\n"):
+        lineends["\n"] += 1
+      elif line.endswith("\r"):
+        lineends["\r"] += 1
+      return line
+
+    for hno, h in enumerate(hunks):
+      debug("hunk %d" % (hno+1))
+      # skip to line just before hunk starts
+      while srclineno < h.startsrc:
+        yield get_line()
+        srclineno += 1
+
+      for hline in h.text:
+        # todo: check \ No newline at the end of file
+        if hline.startswith("-") or hline.startswith("\\"):
+          get_line()
+          srclineno += 1
+          continue
+        else:
+          if not hline.startswith("+"):
+            get_line()
+            srclineno += 1
+          line2write = hline[1:]
+          # detect if line ends are consistent in source file
+          if sum([bool(lineends[x]) for x in lineends]) == 1:
+            newline = [x for x in lineends if lineends[x] != 0][0]
+            yield line2write.rstrip("\r\n")+newline
+          else: # newlines are mixed
+            yield line2write
+     
+    for line in instream:
+      yield line
+
+
+  def write_hunks(self, srcname, tgtname, hunks):
+    src = open(srcname, "rb")
+    tgt = open(tgtname, "wb")
+
+    debug("processing target file %s" % tgtname)
+
+    tgt.writelines(self.patch_stream(src, hunks))
+
+    tgt.close()
+    src.close()
+    return True
+  
+
+  def _get_file_idx(self, filename, source=None):
+    """ Detect index of given filename within patch.
+
+        :param filename:
+        :param source: search filename among sources (True),
+                       targets (False), or both (None)
+        :returns: int or None
+    """
+    filename = abspath(filename)
+    if source == True or source == None:
+      for i,fnm in enumerate(self.source):
+        if filename == abspath(fnm):
+          return i  
+    if source == False or source == None:
+      for i,fnm in enumerate(self.target):
+        if filename == abspath(fnm):
+          return i  
+
+
+
+
+from optparse import OptionParser
+from os.path import exists
+import sys
+
+if __name__ == "__main__":
+  opt = OptionParser(usage="%prog [options] unipatch-file", version="python-patch %s" % __version__)
+  opt.add_option("--debug", action="store_true", dest="debugmode", help="debug mode")
+  (options, args) = opt.parse_args()
+
+  if not args:
+    opt.print_version()
+    opt.print_help()
+    sys.exit()
+  debugmode = options.debugmode
+  patchfile = args[0]
+  if not exists(patchfile) or not isfile(patchfile):
+    sys.exit("patch file does not exist - %s" % patchfile)
+
+
+  if debugmode:
+    loglevel = logging.DEBUG
+    logformat = "%(levelname)8s %(message)s"
+  else:
+    loglevel = logging.INFO
+    logformat = "%(message)s"
+  logger.setLevel(loglevel)
+  loghandler.setFormatter(logging.Formatter(logformat))
+
+
+
+  patch = fromfile(patchfile)
+  #pprint(patch)
+  patch.apply()
+
+  # todo: document and test line ends handling logic - patch.py detects proper line-endings
+  #       for inserted hunks and issues a warning if patched file has inconsistent line ends
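
The vendored patch module above is driven through fromfile()/fromstring(), can_patch() and apply(). A minimal sketch of using it programmatically, assuming a unified diff saved as "fix.patch" and a source path taken from that diff (both names are illustrative, not taken from this changeset):

    import patch

    p = patch.fromfile("fix.patch")          # parse the unified diff into a Patch object
    if p.can_patch("path/in/diff/file.py"):  # True/False, or None if the file is not a source in the diff
        p.apply()                            # apply all hunks, backing each file up to *.orig first
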
Binary file virtualenv/res/src/Django-1.2.4.tar.gz has changed
Binary file virtualenv/res/src/Imaging-1.1.7.tar.gz has changed
Binary file virtualenv/res/src/JCC-2.6-py2.6-win32.egg has changed
Binary file virtualenv/res/src/MySQL-python-1.2.3.tar.gz has changed
Binary file virtualenv/res/src/PIL-1.1.7.win32-py2.6.exe has changed
Binary file virtualenv/res/src/distribute-0.6.14.tar.gz has changed
Binary file virtualenv/res/src/django-extensions-0.6.tar.gz has changed
Binary file virtualenv/res/src/django-oauth-plus.tar.gz has changed
Binary file virtualenv/res/src/django-piston-0.2.2-modified.tar.gz has changed
Binary file virtualenv/res/src/django-registration.tar.gz has changed
Binary file virtualenv/res/src/django-tagging-0.3.1.tar.gz has changed
Binary file virtualenv/res/src/facebook-python-sdk-322930c.tar.gz has changed
Binary file virtualenv/res/src/httplib2-0.6.0.tar.gz has changed
Binary file virtualenv/res/src/jogging-0.2.2.tar.gz has changed
Binary file virtualenv/res/src/lucene-3.0.2-py2.6-win32.egg has changed
Binary file virtualenv/res/src/lxml-2.2.8-py2.6-win32.egg has changed
Binary file virtualenv/res/src/lxml-2.2.8.tar.gz has changed
Binary file virtualenv/res/src/mercurial-1.7.5.tar.gz has changed
Binary file virtualenv/res/src/oauth-1.0.1.tar.gz has changed
Binary file virtualenv/res/src/psycopg2-2.0.10.win32-py2.6-pg8.3.7-release.zip has changed
Binary file virtualenv/res/src/psycopg2-2.3.2.tar.gz has changed
Binary file virtualenv/res/src/pylinkedin-0.3.tar.bz2 has changed
Binary file virtualenv/res/src/pylucene-3.0.3-1-src.tar.gz has changed
Binary file virtualenv/res/src/python-oauth2-1.2.1-modified.tar.gz has changed
Binary file virtualenv/res/src/python-openid-2.2.5.tar.gz has changed
Binary file virtualenv/res/src/setuptools_hg-0.2.tar.gz has changed
Binary file virtualenv/res/src/uswaretech-Django-Socialauth-1e22872-modified.tar.gz has changed
--- a/virtualenv/setup/create_python_env.py	Wed Mar 23 17:34:36 2011 +0100
+++ b/virtualenv/setup/create_python_env.py	Fri Apr 08 16:33:01 2011 +0200
@@ -29,137 +29,17 @@
 # - 4Suite-xml - easy_install ftp://ftp.4suite.org/pub/4Suite/4Suite-XML-1.0.2.tar.bz2
 # - pylucene  - script
 
-src_base = os.path.join(here,"res","src").replace("\\","/")
-lib_path = os.path.abspath(os.path.join(here,"res","lib")).replace("\\","/")
-
-EXTRA_TEXT  = "URLS = { \n"
-
-EXTRA_TEXT += "    'DISTRIBUTE' : { 'setup': 'distribute', 'url': 'http://pypi.python.org/packages/source/d/distribute/distribute-0.6.14.tar.gz', 'local': '"+ os.path.abspath(os.path.join(src_base,"distribute-0.6.14.tar.gz")).replace("\\","/")+"'},\n"
-EXTRA_TEXT += "    'MERCURIAL' : { 'setup': 'distribute', 'url': 'http://pypi.python.org/packages/source/d/mercurial/mercurial-1.7.5.tar.gz', 'local': '"+ os.path.abspath(os.path.join(src_base,"mercurial-1.7.5.tar.gz")).replace("\\","/")+"'},\n"
-EXTRA_TEXT += "}\n"
-
-EXTRA_TEXT += "import sys\n"
-EXTRA_TEXT += "sys.path.append('"+lib_path+"')\n"
-
-EXTRA_TEXT += """
-
-import shutil
-import tarfile
-import zipfile
-import urllib
-import platform
-
-
-INDEX_URL = 'http://pypi.python.org/simple/'
-
-
-def extend_parser(parser):
-    parser.add_option(
-        '--index-url',
-        metavar='INDEX_URL',
-        dest='index_url',
-        default='',
-        help='base URL of Python Package Index')
-    parser.add_option(
-        '--type-install',
-        metavar='type_install',
-        dest='type_install',
-        default='local',
-        help='type install : local, url, setup')
-    parser.add_option(
-        '--ignore-packages',
-        metavar='ignore_packages',
-        dest='ignore_packages',
-        default=None,
-        help='list of comma separated keys for package to ignore')
-
-
-
-def adjust_options(options, args):
-    pass
+src_base = os.path.abspath(os.path.join(here,"..","res","src")).replace("\\","/")
+lib_path = os.path.abspath(os.path.join(here,"..","res","lib")).replace("\\","/")
+patch_path = os.path.abspath(os.path.join(here,"res","patch")).replace("\\","/")
 
 
-def after_install(options, home_dir):
-    home_dir, lib_dir, inc_dir, bin_dir = path_locations(home_dir)
-    base_dir = os.path.dirname(home_dir)
-    src_dir = join(home_dir, 'src')
-    tmp_dir = join(home_dir, 'tmp')
-    ensure_dir(src_dir)
-    ensure_dir(tmp_dir)
-    system_str = platform.system()
-    
-    res_source_key = options.type_install
-    
-    ignore_packages = []
-    
-    if options.ignore_packages :
-        ignore_packages = options.ignore_packages.split(",")
-    
-    logger.indent += 2
-    try:
-        NORMAL_INSTALL = [ #(key,method, option_str, extra_env)
-            ('DISTRIBUTE', 'pip', None, None),
-            ('MERCURIAL', 'pip', None, None),
-            ]
-
-            
-        for key, method, option_str, extra_env in NORMAL_INSTALL:
-            if key not in ignore_packages:
-                normal_install(key, method, option_str, extra_env, res_source_key, home_dir, tmp_dir)
-                        
-        logger.notify("Clear source dir")
-        shutil.rmtree(src_dir)
-
-    finally:
-        logger.indent -= 2
-    script_dir = join(base_dir, bin_dir)
-    logger.notify('Run "%s Package" to install new packages that provide builds'
-                  % join(script_dir, 'easy_install'))
-
+EXTRA_TEXT  = "import sys\n"
+EXTRA_TEXT += "sys.path.append('%s')\n" % (lib_path)
+EXTRA_TEXT += "sys.path.append('%s')\n" % (os.path.abspath(os.path.join(here,"res")).replace("\\","/"))
+EXTRA_TEXT += "from res_create_env import generate_install_methods\n"
+EXTRA_TEXT += "adjust_options, extend_parser, after_install = generate_install_methods(path_locations, '%s', Logger, call_subprocess)\n" % (src_base)
 
-def normal_install(key, method, option_str, extra_env, res_source_key, home_dir, tmp_dir):
-    logger.notify("Install %s from %s with %s" % (key,URLS[key][res_source_key],method))
-    if method == 'pip':
-        if sys.platform == 'win32':
-            args = [os.path.abspath(os.path.join(home_dir, 'Scripts', 'pip')), 'install', '-E', os.path.abspath(home_dir), URLS[key][res_source_key]]
-        else:
-            args = [os.path.abspath(os.path.join(home_dir, 'bin', 'pip')), 'install', '-E', os.path.abspath(home_dir), URLS[key][res_source_key]]
-        if option_str :
-            args.insert(4,option_str)
-        call_subprocess(args,
-                cwd=os.path.abspath(tmp_dir),
-                filter_stdout=filter_python_develop,
-                show_stdout=True,
-                extra_env=extra_env)
-    else:
-        if sys.platform == 'win32':
-            args = [os.path.abspath(os.path.join(home_dir, 'Scripts', 'easy_install')), URLS[key][res_source_key]]
-        else:
-            args = [os.path.abspath(os.path.join(home_dir, 'bin', 'easy_install')), URLS[key][res_source_key]]
-        if option_str :
-            args.insert(1,option_str)
-        call_subprocess(args,
-                cwd=os.path.abspath(tmp_dir),
-                filter_stdout=filter_python_develop,
-                show_stdout=True,
-                extra_env=extra_env)
-    
-
-def ensure_dir(dir):
-    if not os.path.exists(dir):
-        logger.notify('Creating directory %s' % dir)
-        os.makedirs(dir)
-
-def filter_python_develop(line):
-    if not line.strip():
-        return Logger.DEBUG
-    for prefix in ['Searching for', 'Reading ', 'Best match: ', 'Processing ',
-                   'Moving ', 'Adding ', 'running ', 'writing ', 'Creating ',
-                   'creating ', 'Copying ']:
-        if line.startswith(prefix):
-            return Logger.DEBUG
-    return Logger.NOTIFY
-"""
 
 def main():
     python_version = ".".join(map(str,sys.version_info[0:2]))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/virtualenv/setup/res/res_create_env.py	Fri Apr 08 16:33:01 2011 +0200
@@ -0,0 +1,15 @@
+import platform
+
+from lib_create_env import lib_generate_install_methods, install_pylucene, install_psycopg2
+
+system_str = platform.system()
+
+
+INSTALLS = [#(key,method, option_str, dict_extra_env)
+    ('DISTRIBUTE', 'pip', None, None),
+    ('MERCURIAL', 'pip', None, None),
+    ('SETUPTOOLS-HG', 'pip', None, None), 
+]
+
+def generate_install_methods(path_locations, src_base, Logger, call_subprocess):    
+    return lib_generate_install_methods(path_locations, src_base, Logger, call_subprocess, INSTALLS)
Binary file virtualenv/setup/res/src/distribute-0.6.14.tar.gz has changed
Binary file virtualenv/setup/res/src/mercurial-1.7.5.tar.gz has changed
--- a/virtualenv/web/create_python_env.py	Wed Mar 23 17:34:36 2011 +0100
+++ b/virtualenv/web/create_python_env.py	Fri Apr 08 16:33:01 2011 +0200
@@ -8,7 +8,8 @@
 - distribute
 - psycopg2 requires the PostgreSQL libpq libraries and the pg_config utility
 
-- python project-boot.py --distribute --no-site-packages --index-url=http://pypi.websushi.org/ --clear --type-install=local <path_to_venv>
+- python project-boot.py --distribute --no-site-packages --index-url=http://pypi.websushi.org/ --clear --type-install=local --ignore-packages=MYSQL <path_to_venv>
+- python project-boot.py --no-site-packages --clear --ignore-packages=MYSQL  --type-install=local <path_to_venv>
 - For Linux :
 python project-boot.py --unzip-setuptools --no-site-packages --index-url=http://pypi.websushi.org/ --clear --type-install=local <path_to_venv>
 
@@ -33,293 +34,23 @@
 # - 4Suite-xml - easy_install ftp://ftp.4suite.org/pub/4Suite/4Suite-XML-1.0.2.tar.bz2
 # - pylucene  - script
 
-src_base = os.path.join(here,"res","src").replace("\\","/")
-lib_path = os.path.abspath(os.path.join(here,"res","lib")).replace("\\","/")
+src_base = os.path.abspath(os.path.join(here,"..","res","src")).replace("\\","/")
+lib_path = os.path.abspath(os.path.join(here,"..","res","lib")).replace("\\","/")
 patch_path = os.path.abspath(os.path.join(here,"res","patch")).replace("\\","/")
 
-EXTRA_TEXT  = "URLS = { \n"
 
-EXTRA_TEXT += "    'DISTRIBUTE' : { 'setup': 'distribute', 'url': 'http://pypi.python.org/packages/source/d/distribute/distribute-0.6.14.tar.gz', 'local': '"+ os.path.abspath(os.path.join(src_base,"distribute-0.6.14.tar.gz")).replace("\\","/")+"'},\n"
-if sys.platform == 'win32':
-    EXTRA_TEXT += "    'PSYCOPG2' : { 'setup': 'psycopg2','url': 'D:/wakimd/dev/hg/platform/virtualenv/web/res/src/psycopg2-2.0.10.win32-py2.6-pg8.3.7-release.zip', 'local': '"+ os.path.abspath(os.path.join(src_base,"psycopg2-2.0.10.win32-py2.6-pg8.3.7-release.zip")).replace("\\","/")+"'},\n"
-else:
-    EXTRA_TEXT += "    'PSYCOPG2' : { 'setup': 'psycopg2','url': 'http://initd.org/psycopg/tarballs/PSYCOPG-2-3/psycopg2-2.3.2.tar.gz', 'local': '"+ os.path.abspath(os.path.join(src_base,"psycopg2-2.3.2.tar.gz")).replace("\\","/")+"'},\n"
-EXTRA_TEXT += "    'MYSQL' : { 'setup': 'mysql-python', 'url': 'http://sourceforge.net/projects/mysql-python/files/mysql-python/1.2.3/MySQL-python-1.2.3.tar.gz/download', 'local' : '"+ os.path.abspath(os.path.join(src_base,"MySQL-python-1.2.3.tar.gz")).replace("\\","/")+"'},\n"
-if sys.platform == 'win32':
-    EXTRA_TEXT += "    'PYLUCENE' : { 'setup': 'http://pylucene-win32-binary.googlecode.com/files/lucene-3.0.2-py2.6-win32.egg', 'local': '"+ os.path.abspath(os.path.join(src_base,"lucene-3.0.2-py2.6-win32.egg")).replace("\\","/")+"'},\n"
-    EXTRA_TEXT += "    'JCC' : { 'setup': 'http://pylucene-win32-binary.googlecode.com/files/JCC-2.6-py2.6-win32.egg', 'local': '"+ os.path.abspath(os.path.join(src_base,"JCC-2.6-py2.6-win32.egg")).replace("\\","/")+"'},\n"
-else:
-    EXTRA_TEXT += "    'PYLUCENE' : { 'setup': 'http://apache.crihan.fr/dist/lucene/pylucene/pylucene-3.0.3-1-src.tar.gz', 'url': 'http://apache.crihan.fr/dist/lucene/pylucene/pylucene-3.0.3-1-src.tar.gz', 'local': '"+ os.path.abspath(os.path.join(src_base,"pylucene-3.0.3-1-src.tar.gz")).replace("\\","/")+"'},\n"
-if sys.platform == 'win32':
-    EXTRA_TEXT += "    'PIL' : { 'setup': 'pil', 'url': 'http://effbot.org/media/downloads/PIL-1.1.7.win32-py2.6.exe', 'local': '"+ os.path.abspath(os.path.join(src_base,"PIL-1.1.7.win32-py2.6.exe")).replace("\\","/")+"'},\n"
-else:
-    EXTRA_TEXT += "    'PIL' : { 'setup': 'pil', 'url': 'http://effbot.org/downloads/Imaging-1.1.7.tar.gz', 'local': '"+ os.path.abspath(os.path.join(src_base,"Imaging-1.1.7.tar.gz")).replace("\\","/")+"'},\n"
-EXTRA_TEXT += "    'DJANGO' : { 'setup': 'django', 'url': 'http://www.djangoproject.com/download/1.2.4/tarball/', 'local': '"+ os.path.abspath(os.path.join(src_base,"Django-1.2.4.tar.gz")).replace("\\","/")+"'},\n"
-EXTRA_TEXT += "    'JOGGING' : { 'setup': 'jogging', 'url': 'http://github.com/zain/jogging/tarball/v0.2.2', 'local': '"+ os.path.abspath(os.path.join(src_base,"jogging-0.2.2.tar.gz")).replace("\\","/")+"'},\n"
-EXTRA_TEXT += "    'DJANGO-EXTENSIONS' : { 'setup': 'django-extensions', 'url':'https://github.com/django-extensions/django-extensions/tarball/0.6', 'local':'"+ os.path.abspath(os.path.join(src_base,"django-extensions-0.6.tar.gz")).replace("\\","/")+"' },\n"
-EXTRA_TEXT += "    'DJANGO-REGISTRATION' : { 'setup': 'django-registration', 'url':'http://bitbucket.org/ubernostrum/django-registration/get/tip.tar.gz', 'local':'"+ os.path.abspath(os.path.join(src_base,"django-registration.tar.gz")).replace("\\","/")+"' },\n"
-EXTRA_TEXT += "    'DJANGO-TAGGING' : { 'setup': 'django-tagging', 'url':'http://django-tagging.googlecode.com/files/django-tagging-0.3.1.tar.gz', 'local':'"+ os.path.abspath(os.path.join(src_base,"django-tagging-0.3.1.tar.gz")).replace("\\","/")+"' },\n"
-EXTRA_TEXT += "    'DJANGO-PISTON' : { 'setup': 'django-piston', 'url':'"+ os.path.abspath(os.path.join(src_base,"django-piston-0.2.2-modified.tar.gz")).replace("\\","/")+"', 'local':'"+ os.path.abspath(os.path.join(src_base,"django-piston-0.2.2-modified.tar.gz")).replace("\\","/")+"' },\n"
-if sys.platform == 'win32':
-    EXTRA_TEXT += "    'LXML' : { 'setup': 'lxml', 'url': 'http://pypi.python.org/packages/2.6/l/lxml/lxml-2.2.8-py2.6-win32.egg', 'local': '"+ os.path.abspath(os.path.join(src_base,"lxml-2.2.8-py2.6-win32.egg")).replace("\\","/")+"'},\n"
-else:
-    EXTRA_TEXT += "    'LXML' : { 'setup': 'lxml', 'url': '"+ os.path.abspath(os.path.join(src_base,"lxml_2.2.8.tar.gz"))+"', 'local': '"+ os.path.abspath(os.path.join(src_base,"lxml-2.2.8.tar.gz")).replace("\\","/")+"'},\n"
-EXTRA_TEXT += "    'SETUPTOOLS-HG' : { 'setup': 'setuptools-hg', 'url':'http://bitbucket.org/jezdez/setuptools_hg/downloads/setuptools_hg-0.2.tar.gz', 'local':'"+ os.path.abspath(os.path.join(src_base,"setuptools_hg-0.2.tar.gz")).replace("\\","/")+"' },\n"
-EXTRA_TEXT += "    'OAUTH2' : { 'setup': 'python-oauth2', 'url':'"+ os.path.abspath(os.path.join(src_base,"python-oauth2-1.2.1-modified.tar.gz")).replace("\\","/")+"', 'local':'"+ os.path.abspath(os.path.join(src_base,"python-oauth2-1.2.1-modified.tar.gz")).replace("\\","/")+"' },\n"
-EXTRA_TEXT += "    'HTTPLIB2' : { 'setup': 'python-oauth2', 'url':'http://httplib2.googlecode.com/files/httplib2-0.6.0.tar.gz', 'local':'"+ os.path.abspath(os.path.join(src_base,"httplib2-0.6.0.tar.gz")).replace("\\","/")+"' },\n"
-EXTRA_TEXT += "    'DJANGO-OAUTH-PLUS' : { 'setup': 'django-oauth-plus', 'url':'http://bitbucket.org/david/django-oauth-plus/get/f314f018e473.gz', 'local':'"+ os.path.abspath(os.path.join(src_base,"django-oauth-plus.tar.gz")).replace("\\","/")+"' },\n"
-EXTRA_TEXT += "}\n"
-
-EXTRA_TEXT += "import sys\n"
-EXTRA_TEXT += "sys.path.append('"+lib_path+"')\n"
-
-EXTRA_TEXT += """
-
-import shutil
-import tarfile
-import zipfile
-import urllib
-import platform
-import patch
-
-
-INDEX_URL = 'http://pypi.python.org/simple/'
-
-
-def extend_parser(parser):
-    parser.add_option(
-        '--index-url',
-        metavar='INDEX_URL',
-        dest='index_url',
-        default='',
-        help='base URL of Python Package Index')
-    parser.add_option(
-        '--type-install',
-        metavar='type_install',
-        dest='type_install',
-        default='local',
-        help='type install : local, url, setup')
-    parser.add_option(
-        '--ignore-packages',
-        metavar='ignore_packages',
-        dest='ignore_packages',
-        default=None,
-        help='list of comma separated keys for package to ignore')
+EXTRA_TEXT  = "import sys\n"
+EXTRA_TEXT += "sys.path.append('%s')\n" % (lib_path)
+EXTRA_TEXT += "sys.path.append('%s')\n" % (os.path.abspath(os.path.join(here,"res")).replace("\\","/"))
+EXTRA_TEXT += "from res_create_env import generate_install_methods\n"
+EXTRA_TEXT += "adjust_options, extend_parser, after_install = generate_install_methods(path_locations, '%s', Logger, call_subprocess)\n" % (src_base)
 
 
 
-def adjust_options(options, args):
-    pass
-
-
-def after_install(options, home_dir):
-    home_dir, lib_dir, inc_dir, bin_dir = path_locations(home_dir)
-    base_dir = os.path.dirname(home_dir)
-    src_dir = join(home_dir, 'src')
-    tmp_dir = join(home_dir, 'tmp')
-    ensure_dir(src_dir)
-    ensure_dir(tmp_dir)
-    system_str = platform.system()
-    
-    res_source_key = options.type_install
-    
-    ignore_packages = []
-    
-    if options.ignore_packages :
-        ignore_packages = options.ignore_packages.split(",")
-    
-    logger.indent += 2
-    try:
-        
-        if 'PYLUCENE' not in ignore_packages and system_str != "Windows":
-            #get pylucene
-            logger.notify("Get Pylucene from %s " % URLS['PYLUCENE'][res_source_key])
-            pylucene_src = os.path.join(src_dir,"pylucene.tar.gz")
-            if res_source_key == 'local':
-                shutil.copy(URLS['PYLUCENE'][res_source_key], pylucene_src)
-            else:
-                urllib.urlretrieve(URLS['PYLUCENE'][res_source_key], pylucene_src)
-            tf = tarfile.open(pylucene_src,'r:gz')
-            pylucene_base_path = os.path.join(src_dir,"pylucene") 
-            logger.notify("Extract Pylucene to %s " % pylucene_base_path)
-            tf.extractall(pylucene_base_path)
-            tf.close()
-            
-            pylucene_src_path = os.path.join(pylucene_base_path, os.listdir(pylucene_base_path)[0])
-            jcc_src_path = os.path.abspath(os.path.join(pylucene_src_path,"jcc"))
-            
-            #install jcc
-    
-            #patch for linux
-            if system_str == 'Linux' :
-                olddir = os.getcwd()
-                patch_dest_path = os.path.join(lib_dir,'site-packages','setuptools-0.6c11-py'+'%s.%s' % (sys.version_info[0], sys.version_info[1])+'.egg')
-                if os.path.isfile(patch_dest_path):
-                    # must unzip egg
-                    # rename file and etract all
-                    shutil.move(patch_dest_path, patch_dest_path + ".zip")
-                    zf = zipfile.ZipFile(patch_dest_path + ".zip",'r')
-                    zf.extractall(patch_dest_path)
-                    os.remove(patch_dest_path + ".zip")
-                logger.notify("Patch jcc : %s " % (patch_dest_path))
-                os.chdir(patch_dest_path)
-                p = patch.fromfile(os.path.join(jcc_src_path,"jcc","patches","patch.43.0.6c11"))
-                p.apply()
-                os.chdir(olddir)
-    
-            logger.notify("Install jcc")
-            call_subprocess([os.path.abspath(os.path.join(home_dir, 'bin', 'python')), 'setup.py', 'install'],
-                            cwd=jcc_src_path,
-                            filter_stdout=filter_python_develop,
-                            show_stdout=True)
-            #install pylucene
-            
-            logger.notify("Install pylucene")
-            #modify makefile
-            makefile_path = os.path.join(pylucene_src_path,"Makefile")
-            logger.notify("Modify makefile %s " % makefile_path)
-            shutil.move( makefile_path, makefile_path+"~" )
-    
-            destination= open( makefile_path, "w" )
-            source= open( makefile_path+"~", "r" )
-            destination.write("PREFIX_PYTHON="+os.path.abspath(home_dir)+"\\n")
-            destination.write("ANT=ant\\n")
-            destination.write("PYTHON=$(PREFIX_PYTHON)/bin/python\\n")
-            
-            if system_str == "Darwin":
-                if sys.version_info >= (2,6):
-                    destination.write("JCC=$(PYTHON) -m jcc.__main__ --shared --arch x86_64 --arch i386\\n")
-                else:
-                    destination.write("JCC=$(PYTHON) -m jcc --shared --arch x86_64 --arch i386\\n")
-                destination.write("NUM_FILES=2\\n")
-            elif system_str == "Windows":
-                destination.write("JCC=$(PYTHON) -m jcc.__main__ --shared --arch x86_64 --arch i386\\n")
-                destination.write("NUM_FILES=2\\n")
-            else:
-                if sys.version_info >= (2,6) and sys.version_info < (2,7):
-                    destination.write("JCC=$(PYTHON) -m jcc.__main__ --shared\\n")
-                else:
-                    destination.write("JCC=$(PYTHON) -m jcc --shared\\n")
-                destination.write("NUM_FILES=2\\n")
-            for line in source:
-                destination.write( line )
-            source.close()
-            destination.close()
-            os.remove(makefile_path+"~" )
-    
-            logger.notify("pylucene make")
-            call_subprocess(['make'],
-                            cwd=os.path.abspath(pylucene_src_path),
-                            filter_stdout=filter_python_develop,
-                            show_stdout=True)
-    
-            logger.notify("pylucene make install")
-            call_subprocess(['make', 'install'],
-                            cwd=os.path.abspath(pylucene_src_path),
-                            filter_stdout=filter_python_develop,
-                            show_stdout=True)
-
-        if system_str == 'Linux'  and 'DISTRIBUTE' not in ignore_packages:
-            normal_install('DISTRIBUTE', 'pip', None, None, res_source_key, home_dir, tmp_dir)
-
-        NORMAL_INSTALL = [ #(key,method, option_str, extra_env)
-            ('MYSQL', 'pip', None, None),
-            ('PIL', 'easy_install', None, None), 
-            ('DJANGO','pip', None, None),
-            ('JOGGING','pip', None, None),
-            ('DJANGO-EXTENSIONS', 'pip', None, None),
-            ('DJANGO-REGISTRATION', 'easy_install', '-Z', None),
-            ('DJANGO-TAGGING', 'pip', None, None),
-            ('DJANGO-PISTON', 'pip', None, None),
-            ('SETUPTOOLS-HG', 'pip', None, None), 
-            ('HTTPLIB2', 'pip', None, None),
-            ('OAUTH2', 'pip', None, None),
-            ('DJANGO-OAUTH-PLUS', 'pip', None, None),
-            ]
-
-        if 'PYLUCENE' not in ignore_packages and system_str == "Windows":
-            NORMAL_INSTALL.append(('JCC','easy_install',None,None))
-            NORMAL_INSTALL.append(('PYLUCENE','easy_install',None,None))
-             
-        if system_str == "Darwin":
-            NORMAL_INSTALL.append(('LXML', 'easy_install', None, {'STATIC_DEPS': 'true'}))
-        else:
-            NORMAL_INSTALL.append(('LXML', 'easy_install', None, None))
-        
-        if system_str == "Windows":
-            #get psycopg2
-            psycopg2_src = os.path.join(src_dir,"psycopg2.zip")
-            shutil.copy(URLS['PSYCOPG2'][res_source_key], psycopg2_src)
-            #extract psycopg2
-            zf = zipfile.ZipFile(psycopg2_src)
-            psycopg2_base_path = os.path.join(src_dir,"psycopg2")
-            zf.extractall(psycopg2_base_path)
-            zf.close()
-            
-            psycopg2_src_path = os.path.join(psycopg2_base_path, os.listdir(psycopg2_base_path)[0])
-            shutil.copytree(os.path.join(psycopg2_src_path, 'psycopg2'), os.path.abspath(os.path.join(home_dir, 'Lib', 'psycopg2')))
-            shutil.copy(os.path.join(psycopg2_src_path, 'psycopg2-2.0.10-py2.6.egg-info'), os.path.abspath(os.path.join(home_dir, 'Lib', 'site-packages')))
-        else:
-            NORMAL_INSTALL.append(('PSYCOPG2', 'pip', None, None))
-
-            
-        for key, method, option_str, extra_env in NORMAL_INSTALL:
-            if key not in ignore_packages:
-                normal_install(key, method, option_str, extra_env, res_source_key, home_dir, tmp_dir)
-                        
-        logger.notify("Clear source dir")
-        shutil.rmtree(src_dir)
-
-    finally:
-        logger.indent -= 2
-    script_dir = join(base_dir, bin_dir)
-    logger.notify('Run "%s Package" to install new packages that provide builds'
-                  % join(script_dir, 'easy_install'))
-
-
-def normal_install(key, method, option_str, extra_env, res_source_key, home_dir, tmp_dir):
-    logger.notify("Install %s from %s with %s" % (key,URLS[key][res_source_key],method))
-    if method == 'pip':
-        if sys.platform == 'win32':
-            args = [os.path.abspath(os.path.join(home_dir, 'Scripts', 'pip')), 'install', '-E', os.path.abspath(home_dir), URLS[key][res_source_key]]
-        else:
-            args = [os.path.abspath(os.path.join(home_dir, 'bin', 'pip')), 'install', '-E', os.path.abspath(home_dir), URLS[key][res_source_key]]
-        if option_str :
-            args.insert(4,option_str)
-        call_subprocess(args,
-                cwd=os.path.abspath(tmp_dir),
-                filter_stdout=filter_python_develop,
-                show_stdout=True,
-                extra_env=extra_env)
-    else:
-        if sys.platform == 'win32':
-            args = [os.path.abspath(os.path.join(home_dir, 'Scripts', 'easy_install')), URLS[key][res_source_key]]
-        else:
-            args = [os.path.abspath(os.path.join(home_dir, 'bin', 'easy_install')), URLS[key][res_source_key]]
-        if option_str :
-            args.insert(1,option_str)
-        call_subprocess(args,
-                cwd=os.path.abspath(tmp_dir),
-                filter_stdout=filter_python_develop,
-                show_stdout=True,
-                extra_env=extra_env)
-    
-
-def ensure_dir(dir):
-    if not os.path.exists(dir):
-        logger.notify('Creating directory %s' % dir)
-        os.makedirs(dir)
-
-def filter_python_develop(line):
-    if not line.strip():
-        return Logger.DEBUG
-    for prefix in ['Searching for', 'Reading ', 'Best match: ', 'Processing ',
-                   'Moving ', 'Adding ', 'running ', 'writing ', 'Creating ',
-                   'creating ', 'Copying ']:
-        if line.startswith(prefix):
-            return Logger.DEBUG
-    return Logger.NOTIFY
-"""
+#f = open(os.path.join(os.path.join(os.path.dirname(os.path.abspath(__file__)),"res"),'res_create_env.py'), 'r')
+#EXTRA_TEXT += f.read()
+#EXTRA_TEXT += "\n"
+#EXTRA_TEXT += "RES_ENV = ResourcesEnv('%s')\n" % (src_base)
 
 def main():
     python_version = ".".join(map(str,sys.version_info[0:2]))
--- a/virtualenv/web/res/lib/patch.py	Wed Mar 23 17:34:36 2011 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,589 +0,0 @@
-""" Patch utility to apply unified diffs
-
-    Brute-force line-by-line non-recursive parsing 
-
-    Copyright (c) 2008-2010 anatoly techtonik
-    Available under the terms of MIT license
-
-    Project home: http://code.google.com/p/python-patch/
-
-
-    $Id: patch.py 76 2010-04-08 19:10:21Z techtonik $
-    $HeadURL: https://python-patch.googlecode.com/svn/trunk/patch.py $
-"""
-
-__author__ = "techtonik.rainforce.org"
-__version__ = "10.04"
-
-import copy
-import logging
-import re
-# cStringIO doesn't support unicode in 2.5
-from StringIO import StringIO
-from logging import debug, info, warning
-
-from os.path import exists, isfile, abspath
-from os import unlink
-
-
-#------------------------------------------------
-# Logging is controlled by "python_patch" logger
-
-debugmode = False
-
-logger = logging.getLogger("python_patch")
-loghandler = logging.StreamHandler()
-logger.addHandler(loghandler)
-
-debug = logger.debug
-info = logger.info
-warning = logger.warning
-
-#: disable library logging by default
-logger.setLevel(logging.CRITICAL)
-
-#------------------------------------------------
-
-
-def fromfile(filename):
-  """ Parse patch file and return Patch() object
-  """
-
-  info("reading patch from file %s" % filename)
-  fp = open(filename, "rb")
-  patch = Patch(fp)
-  fp.close()
-  return patch
-
-
-def fromstring(s):
-  """ Parse text string and return Patch() object
-  """
-
-  return Patch(
-           StringIO.StringIO(s)    
-         )
-
-
-
-class HunkInfo(object):
-  """ Parsed hunk data container (hunk starts with @@ -R +R @@) """
-
-  def __init__(self):
-    self.startsrc=None #: line count starts with 1
-    self.linessrc=None
-    self.starttgt=None
-    self.linestgt=None
-    self.invalid=False
-    self.text=[]
-
-  def copy(self):
-    return copy.copy(self)
-
-#  def apply(self, estream):
-#    """ write hunk data into enumerable stream
-#        return strings one by one until hunk is
-#        over
-#
-#        enumerable stream are tuples (lineno, line)
-#        where lineno starts with 0
-#    """
-#    pass
-
-
-
-class Patch(object):
-
-  def __init__(self, stream=None):
-
-    # define Patch data members
-    # table with a row for every source file
-
-    #: list of source filenames
-    self.source=None
-    self.target=None
-    #: list of lists of hunks
-    self.hunks=None
-    #: file endings statistics for every hunk
-    self.hunkends=None
-
-    if stream:
-      self.parse(stream)
-
-  def copy(self):
-    return copy.copy(self)
-
-  def parse(self, stream):
-    """ parse unified diff """
-    self.source = []
-    self.target = []
-    self.hunks = []
-    self.hunkends = []
-
-    # define possible file regions that will direct the parser flow
-    header = False    # comments before the patch body
-    filenames = False # lines starting with --- and +++
-
-    hunkhead = False  # @@ -R +R @@ sequence
-    hunkbody = False  #
-    hunkskip = False  # skipping invalid hunk mode
-
-    header = True
-    lineends = dict(lf=0, crlf=0, cr=0)
-    nextfileno = 0
-    nexthunkno = 0    #: even if index starts with 0 user messages number hunks from 1
-
-    # hunkinfo holds parsed values, hunkactual - calculated
-    hunkinfo = HunkInfo()
-    hunkactual = dict(linessrc=None, linestgt=None)
-
-    fe = enumerate(stream)
-    for lineno, line in fe:
-
-      # analyze state
-      if header and line.startswith("--- "):
-        header = False
-        # switch to filenames state
-        filenames = True
-      #: skip hunkskip and hunkbody code until you read definition of hunkhead
-      if hunkbody:
-        # process line first
-        if re.match(r"^[- \+\\]", line):
-            # gather stats about line endings
-            if line.endswith("\r\n"):
-              self.hunkends[nextfileno-1]["crlf"] += 1
-            elif line.endswith("\n"):
-              self.hunkends[nextfileno-1]["lf"] += 1
-            elif line.endswith("\r"):
-              self.hunkends[nextfileno-1]["cr"] += 1
-              
-            if line.startswith("-"):
-              hunkactual["linessrc"] += 1
-            elif line.startswith("+"):
-              hunkactual["linestgt"] += 1
-            elif not line.startswith("\\"):
-              hunkactual["linessrc"] += 1
-              hunkactual["linestgt"] += 1
-            hunkinfo.text.append(line)
-            # todo: handle \ No newline cases
-        else:
-            warning("invalid hunk no.%d at %d for target file %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
-            # add hunk status node
-            self.hunks[nextfileno-1].append(hunkinfo.copy())
-            self.hunks[nextfileno-1][nexthunkno-1]["invalid"] = True
-            # switch to hunkskip state
-            hunkbody = False
-            hunkskip = True
-
-        # check exit conditions
-        if hunkactual["linessrc"] > hunkinfo.linessrc or hunkactual["linestgt"] > hunkinfo.linestgt:
-            warning("extra hunk no.%d lines at %d for target %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
-            # add hunk status node
-            self.hunks[nextfileno-1].append(hunkinfo.copy())
-            self.hunks[nextfileno-1][nexthunkno-1]["invalid"] = True
-            # switch to hunkskip state
-            hunkbody = False
-            hunkskip = True
-        elif hunkinfo.linessrc == hunkactual["linessrc"] and hunkinfo.linestgt == hunkactual["linestgt"]:
-            self.hunks[nextfileno-1].append(hunkinfo.copy())
-            # switch to hunkskip state
-            hunkbody = False
-            hunkskip = True
-
-            # detect mixed window/unix line ends
-            ends = self.hunkends[nextfileno-1]
-            if ((ends["cr"]!=0) + (ends["crlf"]!=0) + (ends["lf"]!=0)) > 1:
-              warning("inconsistent line ends in patch hunks for %s" % self.source[nextfileno-1])
-            if debugmode:
-              debuglines = dict(ends)
-              debuglines.update(file=self.target[nextfileno-1], hunk=nexthunkno)
-              debug("crlf: %(crlf)d  lf: %(lf)d  cr: %(cr)d\t - file: %(file)s hunk: %(hunk)d" % debuglines)
-
-      if hunkskip:
-        match = re.match("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?", line)
-        if match:
-          # switch to hunkhead state
-          hunkskip = False
-          hunkhead = True
-        elif line.startswith("--- "):
-          # switch to filenames state
-          hunkskip = False
-          filenames = True
-          if debugmode and len(self.source) > 0:
-            debug("- %2d hunks for %s" % (len(self.hunks[nextfileno-1]), self.source[nextfileno-1]))
-
-      if filenames:
-        if line.startswith("--- "):
-          if nextfileno in self.source:
-            warning("skipping invalid patch for %s" % self.source[nextfileno])
-            del self.source[nextfileno]
-            # double source filename line is encountered
-            # attempt to restart from this second line
-          re_filename = "^--- ([^\t]+)"
-          match = re.match(re_filename, line)
-          # todo: support spaces in filenames
-          if match:
-            self.source.append(match.group(1).strip())
-          else:
-            warning("skipping invalid filename at line %d" % lineno)
-            # switch back to header state
-            filenames = False
-            header = True
-        elif not line.startswith("+++ "):
-          if nextfileno in self.source:
-            warning("skipping invalid patch with no target for %s" % self.source[nextfileno])
-            del self.source[nextfileno]
-          else:
-            # this should be unreachable
-            warning("skipping invalid target patch")
-          filenames = False
-          header = True
-        else:
-          if nextfileno in self.target:
-            warning("skipping invalid patch - double target at line %d" % lineno)
-            del self.source[nextfileno]
-            del self.target[nextfileno]
-            nextfileno -= 1
-            # double target filename line is encountered
-            # switch back to header state
-            filenames = False
-            header = True
-          else:
-            re_filename = "^\+\+\+ ([^\t]+)"
-            match = re.match(re_filename, line)
-            if not match:
-              warning("skipping invalid patch - no target filename at line %d" % lineno)
-              # switch back to header state
-              filenames = False
-              header = True
-            else:
-              self.target.append(match.group(1).strip())
-              nextfileno += 1
-              # switch to hunkhead state
-              filenames = False
-              hunkhead = True
-              nexthunkno = 0
-              self.hunks.append([])
-              self.hunkends.append(lineends.copy())
-              continue
-
-      if hunkhead:
-        match = re.match("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?", line)
-        if not match:
-          if nextfileno-1 not in self.hunks:
-            warning("skipping invalid patch with no hunks for file %s" % self.target[nextfileno-1])
-            # switch to header state
-            hunkhead = False
-            header = True
-            continue
-          else:
-            # switch to header state
-            hunkhead = False
-            header = True
-        else:
-          hunkinfo.startsrc = int(match.group(1))
-          hunkinfo.linessrc = 1
-          if match.group(3): hunkinfo.linessrc = int(match.group(3))
-          hunkinfo.starttgt = int(match.group(4))
-          hunkinfo.linestgt = 1
-          if match.group(6): hunkinfo.linestgt = int(match.group(6))
-          hunkinfo.invalid = False
-          hunkinfo.text = []
-
-          hunkactual["linessrc"] = hunkactual["linestgt"] = 0
-
-          # switch to hunkbody state
-          hunkhead = False
-          hunkbody = True
-          nexthunkno += 1
-          continue
-    else:
-      if not hunkskip:
-        warning("patch file incomplete - %s" % filename)
-        # sys.exit(?)
-      else:
-        # duplicated message when an eof is reached
-        if debugmode and len(self.source) > 0:
-            debug("- %2d hunks for %s" % (len(self.hunks[nextfileno-1]), self.source[nextfileno-1]))
-
-    info("total files: %d  total hunks: %d" % (len(self.source), sum(len(hset) for hset in self.hunks)))
-
-
-  def apply(self):
-    """ apply parsed patch """
-
-    total = len(self.source)
-    for fileno, filename in enumerate(self.source):
-
-      f2patch = filename
-      if not exists(f2patch):
-        f2patch = self.target[fileno]
-        if not exists(f2patch):
-          warning("source/target file does not exist\n--- %s\n+++ %s" % (filename, f2patch))
-          continue
-      if not isfile(f2patch):
-        warning("not a file - %s" % f2patch)
-        continue
-      filename = f2patch
-
-      info("processing %d/%d:\t %s" % (fileno+1, total, filename))
-
-      # validate before patching
-      f2fp = open(filename)
-      hunkno = 0
-      hunk = self.hunks[fileno][hunkno]
-      hunkfind = []
-      hunkreplace = []
-      validhunks = 0
-      canpatch = False
-      for lineno, line in enumerate(f2fp):
-        if lineno+1 < hunk.startsrc:
-          continue
-        elif lineno+1 == hunk.startsrc:
-          hunkfind = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " -"]
-          hunkreplace = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " +"]
-          #pprint(hunkreplace)
-          hunklineno = 0
-
-          # todo \ No newline at end of file
-
-        # check hunks in source file
-        if lineno+1 < hunk.startsrc+len(hunkfind)-1:
-          if line.rstrip("\r\n") == hunkfind[hunklineno]:
-            hunklineno+=1
-          else:
-            debug("hunk no.%d doesn't match source file %s" % (hunkno+1, filename))
-            # file may be already patched, but we will check other hunks anyway
-            hunkno += 1
-            if hunkno < len(self.hunks[fileno]):
-              hunk = self.hunks[fileno][hunkno]
-              continue
-            else:
-              break
-
-        # check if processed line is the last line
-        if lineno+1 == hunk.startsrc+len(hunkfind)-1:
-          debug("file %s hunk no.%d -- is ready to be patched" % (filename, hunkno+1))
-          hunkno+=1
-          validhunks+=1
-          if hunkno < len(self.hunks[fileno]):
-            hunk = self.hunks[fileno][hunkno]
-          else:
-            if validhunks == len(self.hunks[fileno]):
-              # patch file
-              canpatch = True
-              break
-      else:
-        if hunkno < len(self.hunks[fileno]):
-          warning("premature end of source file %s at hunk %d" % (filename, hunkno+1))
-
-      f2fp.close()
-
-      if validhunks < len(self.hunks[fileno]):
-        if self._match_file_hunks(filename, self.hunks[fileno]):
-          warning("already patched  %s" % filename)
-        else:
-          warning("source file is different - %s" % filename)
-      if canpatch:
-        backupname = filename+".orig"
-        if exists(backupname):
-          warning("can't backup original file to %s - aborting" % backupname)
-        else:
-          import shutil
-          shutil.move(filename, backupname)
-          if self.write_hunks(backupname, filename, self.hunks[fileno]):
-            warning("successfully patched %s" % filename)
-            unlink(backupname)
-          else:
-            warning("error patching file %s" % filename)
-            shutil.copy(filename, filename+".invalid")
-            warning("invalid version is saved to %s" % filename+".invalid")
-            # todo: proper rejects
-            shutil.move(backupname, filename)
-
-    # todo: check for premature eof
-
-
-  def can_patch(self, filename):
-    """ Check if specified filename can be patched. Returns None if file can
-    not be found among source filenames. False if patch can not be applied
-    clearly. True otherwise.
-
-    :returns: True, False or None
-    """
-    idx = self._get_file_idx(filename, source=True)
-    if idx == None:
-      return None
-    return self._match_file_hunks(filename, self.hunks[idx])
-    
-
-  def _match_file_hunks(self, filepath, hunks):
-    matched = True
-    fp = open(abspath(filepath))
-
-    class NoMatch(Exception):
-      pass
-
-    lineno = 1
-    line = fp.readline()
-    hno = None
-    try:
-      for hno, h in enumerate(hunks):
-        # skip to first line of the hunk
-        while lineno < h.starttgt:
-          if not len(line): # eof
-            debug("check failed - premature eof before hunk: %d" % (hno+1))
-            raise NoMatch
-          line = fp.readline()
-          lineno += 1
-        for hline in h.text:
-          if hline.startswith("-"):
-            continue
-          if not len(line):
-            debug("check failed - premature eof on hunk: %d" % (hno+1))
-            # todo: \ No newline at the end of file
-            raise NoMatch
-          if line.rstrip("\r\n") != hline[1:].rstrip("\r\n"):
-            debug("file is not patched - failed hunk: %d" % (hno+1))
-            raise NoMatch
-          line = fp.readline()
-          lineno += 1
-
-    except NoMatch:
-      matched = False
-      # todo: display failed hunk, i.e. expected/found
-
-    fp.close()
-    return matched
-
-
-  def patch_stream(self, instream, hunks):
-    """ Generator that yields stream patched with hunks iterable
-    
-        Converts lineends in hunk lines to the best suitable format
-        autodetected from input
-    """
-
-    # todo: At the moment substituted lineends may not be the same
-    #       at the start and at the end of patching. Also issue a
-    #       warning/throw about mixed lineends (is it really needed?)
-
-    hunks = iter(hunks)
-
-    srclineno = 1
-
-    lineends = {'\n':0, '\r\n':0, '\r':0}
-    def get_line():
-      """
-      local utility function - return line from source stream
-      collecting line end statistics on the way
-      """
-      line = instream.readline()
-        # 'U' mode works only with text files
-      if line.endswith("\r\n"):
-        lineends["\r\n"] += 1
-      elif line.endswith("\n"):
-        lineends["\n"] += 1
-      elif line.endswith("\r"):
-        lineends["\r"] += 1
-      return line
-
-    for hno, h in enumerate(hunks):
-      debug("hunk %d" % (hno+1))
-      # skip to line just before hunk starts
-      while srclineno < h.startsrc:
-        yield get_line()
-        srclineno += 1
-
-      for hline in h.text:
-        # todo: check \ No newline at the end of file
-        if hline.startswith("-") or hline.startswith("\\"):
-          get_line()
-          srclineno += 1
-          continue
-        else:
-          if not hline.startswith("+"):
-            get_line()
-            srclineno += 1
-          line2write = hline[1:]
-          # detect if line ends are consistent in source file
-          if sum([bool(lineends[x]) for x in lineends]) == 1:
-            newline = [x for x in lineends if lineends[x] != 0][0]
-            yield line2write.rstrip("\r\n")+newline
-          else: # newlines are mixed
-            yield line2write
-     
-    for line in instream:
-      yield line
-
-
-  def write_hunks(self, srcname, tgtname, hunks):
-    src = open(srcname, "rb")
-    tgt = open(tgtname, "wb")
-
-    debug("processing target file %s" % tgtname)
-
-    tgt.writelines(self.patch_stream(src, hunks))
-
-    tgt.close()
-    src.close()
-    return True
-  
-
-  def _get_file_idx(self, filename, source=None):
-    """ Detect index of given filename within patch.
-
-        :param filename:
-        :param source: search filename among sources (True),
-                       targets (False), or both (None)
-        :returns: int or None
-    """
-    filename = abspath(filename)
-    if source == True or source == None:
-      for i,fnm in enumerate(self.source):
-        if filename == abspath(fnm):
-          return i  
-    if source == False or source == None:
-      for i,fnm in enumerate(self.target):
-        if filename == abspath(fnm):
-          return i  
-
-
-
-
-from optparse import OptionParser
-from os.path import exists
-import sys
-
-if __name__ == "__main__":
-  opt = OptionParser(usage="%prog [options] unipatch-file", version="python-patch %s" % __version__)
-  opt.add_option("--debug", action="store_true", dest="debugmode", help="debug mode")
-  (options, args) = opt.parse_args()
-
-  if not args:
-    opt.print_version()
-    opt.print_help()
-    sys.exit()
-  debugmode = options.debugmode
-  patchfile = args[0]
-  if not exists(patchfile) or not isfile(patchfile):
-    sys.exit("patch file does not exist - %s" % patchfile)
-
-
-  if debugmode:
-    loglevel = logging.DEBUG
-    logformat = "%(levelname)8s %(message)s"
-  else:
-    loglevel = logging.INFO
-    logformat = "%(message)s"
-  logger.setLevel(loglevel)
-  loghandler.setFormatter(logging.Formatter(logformat))
-
-
-
-  patch = fromfile(patchfile)
-  #pprint(patch)
-  patch.apply()
-
-  # todo: document and test line ends handling logic - patch.py detects proper line-endings
-  #       for inserted hunks and issues a warning if patched file has incosistent line ends
--- a/virtualenv/web/res/patch/piston.diff	Wed Mar 23 17:34:36 2011 +0100
+++ b/virtualenv/web/res/patch/piston.diff	Fri Apr 08 16:33:01 2011 +0200
@@ -3,6 +3,6 @@
     timestamp = models.IntegerField()
     is_approved = models.BooleanField(default=False)
 
--   user = models.ForeignKey(User, null=True, blank=True, related_name='piston_tokens')
+-   user = models.ForeignKey(User, null=True, blank=True, related_name='tokens')
 +   user = models.ForeignKey(User, null=True, blank=True, related_name='piston_tokens')
     consumer = models.ForeignKey(Consumer)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/virtualenv/web/res/res_create_env.py	Fri Apr 08 16:33:01 2011 +0200
@@ -0,0 +1,54 @@
+import platform
+
+from lib_create_env import lib_generate_install_methods, install_pylucene, install_psycopg2
+
+system_str = platform.system()
+
+
+if system_str == 'Linux':
+    INSTALLS = [
+    ('DISTRIBUTE', 'pip', None, None),
+    ]
+else:
+    INSTALLS = []
+    
+
+INSTALLS.extend([ #(key,method, option_str, dict_extra_env)
+    ('SETUPTOOLS-HG', 'pip', None, None), 
+    ('MYSQL', 'pip', None, None),
+    ('PIL', 'easy_install', None, None), 
+    ('DJANGO','pip', None, None),
+    ('JOGGING','pip', None, None),
+    ('DJANGO-EXTENSIONS', 'pip', None, None),
+    ('DJANGO-REGISTRATION', 'easy_install', '-Z', None),
+    ('DJANGO-TAGGING', 'pip', None, None),
+    ('DJANGO-PISTON', 'pip', None, None),
+    ('HTTPLIB2', 'pip', None, None),
+    ('OAUTH2', 'easy_install', None, None),
+    ('DJANGO-OAUTH-PLUS', 'pip', None, None),
+])
+
+if system_str == 'Windows':
+    INSTALLS.extend([
+    ('JCC','easy_install',None,None),
+    ('PYLUCENE','easy_install',None,None),
+    ('PSYCOPG2',install_psycopg2,None,None),
+    ])
+else:
+    INSTALLS.extend([
+    ('PYLUCENE',install_pylucene,None,None),
+    ('PSYCOPG2', 'pip', None, None),
+    ])
+
+if system_str == "Darwin":
+    INSTALLS.extend([
+    ('LXML', 'pip', None, {'STATIC_DEPS': 'true', 'LIBXML2_VERSION': '2.7.8', 'LIBXSLT_VERSION': '1.1.26'}),
+    ])
+else:
+    INSTALLS.extend([
+    ('LXML', 'pip', None, None),
+    ])
+
+
+def generate_install_methods(path_locations, src_base, Logger, call_subprocess):    
+    return lib_generate_install_methods(path_locations, src_base, Logger, call_subprocess, INSTALLS)
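
For reference, the code that EXTRA_TEXT assembles in create_python_env.py boots this module roughly as follows (a sketch with placeholder absolute paths; path_locations, Logger and call_subprocess are names supplied by virtualenv's generated bootstrap script):

    # sketch of what the generated project-boot.py ends up running
    import sys
    sys.path.append("/abs/path/to/virtualenv/res/lib")   # lib_path, holds lib_create_env
    sys.path.append("/abs/path/to/virtualenv/web/res")   # directory of res_create_env.py

    from res_create_env import generate_install_methods

    adjust_options, extend_parser, after_install = generate_install_methods(
        path_locations, "/abs/path/to/virtualenv/res/src", Logger, call_subprocess)
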
Binary file virtualenv/web/res/src/Django-1.2.4.tar.gz has changed
Binary file virtualenv/web/res/src/Imaging-1.1.7.tar.gz has changed
Binary file virtualenv/web/res/src/JCC-2.6-py2.6-win32.egg has changed
Binary file virtualenv/web/res/src/MySQL-python-1.2.3.tar.gz has changed
Binary file virtualenv/web/res/src/PIL-1.1.7.win32-py2.6.exe has changed
Binary file virtualenv/web/res/src/distribute-0.6.14.tar.gz has changed
Binary file virtualenv/web/res/src/django-extensions-0.6.tar.gz has changed
Binary file virtualenv/web/res/src/django-oauth-plus.tar.gz has changed
Binary file virtualenv/web/res/src/django-piston-0.2.2-modified.tar.gz has changed
Binary file virtualenv/web/res/src/django-registration.tar.gz has changed
Binary file virtualenv/web/res/src/django-tagging-0.3.1.tar.gz has changed
Binary file virtualenv/web/res/src/facebook-python-sdk-322930c.tar.gz has changed
Binary file virtualenv/web/res/src/httplib2-0.6.0.tar.gz has changed
Binary file virtualenv/web/res/src/jogging-0.2.2.tar.gz has changed
Binary file virtualenv/web/res/src/lucene-3.0.2-py2.6-win32.egg has changed
Binary file virtualenv/web/res/src/lxml-2.2.8-py2.6-win32.egg has changed
Binary file virtualenv/web/res/src/lxml-2.2.8.tar.gz has changed
Binary file virtualenv/web/res/src/oauth-1.0.1.tar.gz has changed
Binary file virtualenv/web/res/src/psycopg2-2.0.10.win32-py2.6-pg8.3.7-release.zip has changed
Binary file virtualenv/web/res/src/psycopg2-2.3.2.tar.gz has changed
Binary file virtualenv/web/res/src/pylinkedin-0.3.tar.bz2 has changed
Binary file virtualenv/web/res/src/pylucene-3.0.3-1-src.tar.gz has changed
Binary file virtualenv/web/res/src/python-oauth2-1.2.1-modified.tar.gz has changed
Binary file virtualenv/web/res/src/python-openid-2.2.5.tar.gz has changed
Binary file virtualenv/web/res/src/setuptools_hg-0.2.tar.gz has changed
Binary file virtualenv/web/res/src/uswaretech-Django-Socialauth-1e22872-modified.tar.gz has changed
--- a/web/ldtplatform/settings.py	Wed Mar 23 17:34:36 2011 +0100
+++ b/web/ldtplatform/settings.py	Fri Apr 08 16:33:01 2011 +0200
@@ -1,5 +1,6 @@
 #@PydevCodeAnalysisIgnore
 import os.path
+from social_auth_settings import *
 # Django settings for project.
 
 DEBUG = True
@@ -89,13 +90,15 @@
     "django.core.context_processors.debug",
     "django.core.context_processors.i18n",
     "django.core.context_processors.media",
-    "ldt.utils.context_processors.ldt",
-    "ldt.utils.context_processors.base",
-    "ldt.utils.context_processors.web",
-    "ldtplatform.utils.context_processors.version",
+#    "ldt.utils.context_processors.ldt",
+#    "ldt.utils.context_processors.base",
+#    "ldt.utils.context_processors.web",
+    #"ldtplatform.utils.context_processors.version",
+    "ldt.utils.context_processors.ldtcontext",
 )
 
 
+
 ROOT_URLCONF = 'ldtplatform.urls'
 
 TEMPLATE_DIRS = (
@@ -130,7 +133,8 @@
     'oauth_provider',
     'openid_consumer',
     'piston',
-    'socialauth',    
+    #'socialauth',
+    'social_auth',
 )
 
 DECOUPAGE_BLACKLIST = (
@@ -141,6 +145,23 @@
      "__MACOSX",
 )
 
+AUTHENTICATION_BACKENDS = (
+    'social_auth.backends.twitter.TwitterBackend',
+    'social_auth.backends.facebook.FacebookBackend',
+#    'social_auth.backends.google.GoogleOAuthBackend',
+#    'social_auth.backends.google.GoogleOAuth2Backend',
+    'social_auth.backends.google.GoogleBackend',
+    'social_auth.backends.yahoo.YahooBackend',
+#    'social_auth.backends.contrib.linkedin.LinkedinBackend',
+#    'social_auth.backends.contrib.LiveJournalBackend',
+#    'social_auth.backends.contrib.orkut.OrkutBackend',
+    'social_auth.backends.OpenIDBackend',
+    'django.contrib.auth.backends.ModelBackend',
+)
+SOCIAL_AUTH_IMPORT_BACKENDS = (
+    'myproy.social_auth_extra_services',
+)
+
 ACCOUNT_ACTIVATION_DAYS = 7 
 
 LDT_MAX_SEARCH_NUMBER = 50
@@ -152,17 +173,25 @@
 OAUTH_PROVIDER_CONSUMER_KEY_SIZE = 256
 OAUTH_AUTHORIZE_VIEW = 'oauth_provider.views.fake_authorize_view'
 OAUTH_CALLBACK_VIEW = 'oauth_provider.views.fake_callback_view'
-TEST_WEBSERVER_ADDRPORT = "127.0.0.1:8000"
+TEST_WEBSERVER_ADDRPORT = "127.0.0.1:8888"
 
 from config import *
-from socialauthsettings import *
+#from socialauthsettings import *
 
 LOGIN_URL = BASE_URL + 'accounts/login/'
-LOGOUT_URL = BASE_URL + 'accounts/logout/'
+LOGOUT_URL = BASE_URL + 'accounts/disconnect/'
 #LOGIN_REDIRECT_URL = BASE_URL + 'ldtplatform'
 LOGIN_REDIRECT_URL = BASE_URL + 'ldt/'
 LOGOUT_REDIRECT_URL = BASE_URL + 'accounts/login'
-PROFILE_REDIRECT_URL = BASE_URL + 'accounts/create/profile'
+PROFILE_REDIRECT_URL = BASE_URL + 'auth_accounts/create/profile'
+
+LOGIN_ERROR_URL = BASE_URL + 'accounts/login'
+
+FACEBOOK_APP_ID = '163134140411313'
+FACEBOOK_API_SECRET = 'f25e0754a44f0d90d3f4d9ea961ff012'
+
+SOCIAL_AUTH_COMPLETE_URL_NAME  = 'complete'
+SOCIAL_AUTH_ASSOCIATE_URL_NAME = 'associate_complete'
 
 GLOBAL_LOG_LEVEL = LOG_LEVEL
 GLOBAL_LOG_HANDLERS = [{'handler':logging.FileHandler(LOG_FILE), 'format':"%(asctime)s - %(levelname)s : %(message)s"}]
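
The hunks above move web/ldtplatform/settings.py from the bundled socialauth application to django-social-auth: `social_auth` replaces `socialauth` in INSTALLED_APPS, the old `from socialauthsettings import *` is commented out in favour of `from social_auth_settings import *`, and the authentication backends, Facebook credentials and completion URL names are declared directly in the file. As a minimal sketch (not project code) of how Django consumes the new AUTHENTICATION_BACKENDS list: authenticate() tries each backend in order, so the social backends are consulted first while the stock ModelBackend kept at the end of the list still handles plain username/password logins.

    # Minimal sketch, not part of the changeset: classic credential login keeps
    # working because django.contrib.auth.backends.ModelBackend stays last in
    # AUTHENTICATION_BACKENDS; the social_auth backends only answer for their
    # own OAuth/OpenID completion flows.
    from django.contrib.auth import authenticate, login

    def password_login(request, username, password):
        user = authenticate(username=username, password=password)
        if user is not None and user.is_active:
            login(request, user)
        return user
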
--- a/web/ldtplatform/socialauthsettings.py	Wed Mar 23 17:34:36 2011 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,102 +0,0 @@
-OPENID_REDIRECT_NEXT = '/accounts/openid/done/'
-
-OPENID_SREG = {"required": "nickname, email, fullname",
-               "optional":"postcode, country",
-               "policy_url": ""}
-
-#example should be something more like the real thing, i think
-OPENID_AX = [{"type_uri": "http://axschema.org/contact/email",
-              "count": 1,
-              "required": True,
-              "alias": "email"},
-             {"type_uri": "http://axschema.org/schema/fullname",
-              "count":1 ,
-              "required": False,
-              "alias": "fname"}]
-
-OPENID_AX_PROVIDER_MAP = {'Google': {'email': 'http://axschema.org/contact/email',
-                                     'firstname': 'http://axschema.org/namePerson/first',
-                                     'lastname': 'http://axschema.org/namePerson/last'},
-                          'Default': {'email': 'http://axschema.org/contact/email',
-                                      'fullname': 'http://axschema.org/namePerson',
-                                      'nickname': 'http://axschema.org/namePerson/friendly'}
-                          }
-
-TWITTER_CONSUMER_KEY = 'AeTl6TefgICQCXwDf4gOA'
-TWITTER_CONSUMER_SECRET = 'v7XqozHzQuzjyU9RkpJXrXZdTYpCMCpzZCjli62dOA'
-
-FACEBOOK_APP_ID = '168524599848954'
-FACEBOOK_API_KEY = '5fbfc9bc5171449685e54a6fd33038a4'
-FACEBOOK_SECRET_KEY = 'ffd9012b6d974180f2578c09bcb38d9f'
-
-LINKEDIN_CONSUMER_KEY = ''
-LINKEDIN_CONSUMER_SECRET = ''
-
-## if any of this information is desired for your app
-FACEBOOK_EXTENDED_PERMISSIONS = (
-    #'publish_stream',
-    #'create_event',
-    #'rsvp_event',
-    #'sms',
-    #'offline_access',
-    #'email',
-    #'read_stream',
-    #'user_about_me',
-    #'user_activites',
-    #'user_birthday',
-    #'user_education_history',
-    #'user_events',
-    #'user_groups',
-    #'user_hometown',
-    #'user_interests',
-    #'user_likes',
-    #'user_location',
-    #'user_notes',
-    #'user_online_presence',
-    #'user_photo_video_tags',
-    #'user_photos',
-    #'user_relationships',
-    #'user_religion_politics',
-    #'user_status',
-    #'user_videos',
-    #'user_website',
-    #'user_work_history',
-    #'read_friendlists',
-    #'read_requests',
-    #'friend_about_me',
-    #'friend_activites',
-    #'friend_birthday',
-    #'friend_education_history',
-    #'friend_events',
-    #'friend_groups',
-    #'friend_hometown',
-    #'friend_interests',
-    #'friend_likes',
-    #'friend_location',
-    #'friend_notes',
-    #'friend_online_presence',
-    #'friend_photo_video_tags',
-    #'friend_photos',
-    #'friend_relationships',
-    #'friend_religion_politics',
-    #'friend_status',
-    #'friend_videos',
-    #'friend_website',
-    #'friend_work_history',
-)
-
-
-AUTHENTICATION_BACKENDS = (
-    'django.contrib.auth.backends.ModelBackend',
-    'socialauth.auth_backends.OpenIdBackend',
-    'socialauth.auth_backends.TwitterBackend',
-    'socialauth.auth_backends.FacebookBackend',
-    'socialauth.auth_backends.LinkedInBackend',
-)
-
-TEMPLATE_CONTEXT_PROCESSORS = (
-    "socialauth.context_processors.facebook_api_key",
-    'django.core.context_processors.media',
-    "django.contrib.auth.context_processors.auth",
-    "django.core.context_processors.request",
-)
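
The deleted socialauthsettings.py above carried the per-provider credentials and backend lists used by the old socialauth application. Under django-social-auth the equivalent values now live in settings.py / social_auth_settings; the sketch below is only a rough mapping with placeholder values, and the LinkedIn names are an assumption since that backend is left commented out in this changeset.

    # Placeholder sketch of the credential names django-social-auth reads,
    # compared with the socialauth names removed above.
    TWITTER_CONSUMER_KEY = '...'        # unchanged name
    TWITTER_CONSUMER_SECRET = '...'     # unchanged name
    FACEBOOK_APP_ID = '...'             # was FACEBOOK_APP_ID / FACEBOOK_API_KEY
    FACEBOOK_API_SECRET = '...'         # was FACEBOOK_SECRET_KEY
    LINKEDIN_CONSUMER_KEY = '...'       # assumption: only needed if the LinkedIn backend is enabled
    LINKEDIN_CONSUMER_SECRET = '...'
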
--- a/web/ldtplatform/templates/registration/login.html	Wed Mar 23 17:34:36 2011 +0100
+++ b/web/ldtplatform/templates/registration/login.html	Fri Apr 08 16:33:01 2011 +0200
@@ -32,6 +32,8 @@
 	        <ul id="login_fields_list">
 	        {{form.as_ul}}
 	        </ul>
+            <p>{% trans "Or login with your external account" %}</p>
+            <p>{{social_list|safe}}</p>
 	        </div>
 	       	<div id="login_links" class="span-12 last">
 		       	<div id="login_links_list" class="span-12 last">
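
login.html now renders a `social_list` variable below the regular login form. Nothing in this changeset shows where `social_list` comes from, so the helper below is purely hypothetical: one way a context processor could build it, assuming django-social-auth's login view is reachable under a URL name `begin` exposed by the `social_auth.urls` include added in urls.py.

    # Hypothetical helper, not part of the changeset: builds the social_list
    # markup rendered by login.html. The 'begin' URL name is an assumption.
    from django.core.urlresolvers import reverse

    def social_list(request):
        backends = ('twitter', 'facebook', 'google', 'yahoo')
        links = ['<a href="%s">%s</a>' % (reverse('begin', args=[name]), name.title())
                 for name in backends]
        return {'social_list': ' | '.join(links)}
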
--- a/web/ldtplatform/urls.py	Wed Mar 23 17:34:36 2011 +0100
+++ b/web/ldtplatform/urls.py	Fri Apr 08 16:33:01 2011 +0200
@@ -1,7 +1,8 @@
-from django.conf.urls.defaults import patterns, include, handler500, handler404
+from django.conf.urls.defaults import patterns, include, handler500, handler404, url
 from django.contrib import admin
 from ldt.text import VERSION_STR
 from django.conf import settings
+from django.contrib.auth import views as auth_views
 
 
 # Uncomment the next two lines to enable the admin:
@@ -25,9 +26,13 @@
 
     (r'^auth_accounts/', include('registration.backends.simple.urls')),
 
-    (r'^accounts/', include('socialauth.urls')),
+    #(r'^accounts/', include('socialauth.urls')),
+    (r'^accounts/', include('social_auth.urls')),
+    url(r'^accounts/login/$',auth_views.login,{'template_name': 'registration/login.html'},name='auth_login'),
     (r'^oauth/', include('oauth_provider.urls')),
-    (r'^$', 'socialauth.views.signin_complete'),
+    
+    #(r'^$', 'socialauth.views.signin_complete'),
+    #(r'^$', 'social_auth.views.complete'),
     
     (r'^/?$', 'django.views.generic.simple.redirect_to', {'url': 'ldt'}),
     #(r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT}), 
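
The urls.py hunk swaps the socialauth include for `social_auth.urls` and adds a named `auth_login` pattern that serves registration/login.html via django.contrib.auth.views.login. A minimal sketch of how those names resolve (assuming, as the settings hunk does, that `social_auth.urls` registers `complete` and `associate_complete` patterns taking a backend argument):

    # Minimal sketch, not project code: the named patterns added above can be
    # resolved with reverse(); SOCIAL_AUTH_COMPLETE_URL_NAME = 'complete' in
    # settings.py points django-social-auth at the same name internally.
    from django.core.urlresolvers import reverse

    def debug_auth_urls():
        print reverse('auth_login')                        # the login form wired above
        print reverse('complete', args=['twitter'])        # assumption: pattern takes a backend
        print reverse('associate_complete', args=['twitter'])
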
--- a/web/ldtplatform/utils/context_processors.py	Wed Mar 23 17:34:36 2011 +0100
+++ b/web/ldtplatform/utils/context_processors.py	Fri Apr 08 16:33:01 2011 +0200
@@ -2,11 +2,11 @@
 import ldtplatform #@UnresolvedImport
 import ldt
 
-def version(request):
-    return {'VERSION': "platform: %s - web: %s" % (ldt.get_version(),ldtplatform.get_version()) }
+#def version(request):
+#    return {'VERSION': "platform: %s - web: %s" % (ldt.get_version(),ldtplatform.get_version()) }
 
-def base(request):
-    return {'BASE_URL': settings.BASE_URL, 'MEDIA_URL': settings.MEDIA_URL }
+#def base(request):
+#    return {'BASE_URL': settings.BASE_URL, 'MEDIA_URL': settings.MEDIA_URL, 'TC2': 'TC2' }
 
-def web(request):
-    return {'WEB_URL': settings.WEB_URL }
+#def web(request):
+#    return {'WEB_URL': settings.WEB_URL }
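
With the version/base/web processors commented out here, settings.py now lists a single `ldt.utils.context_processors.ldtcontext` entry. That module is not part of this hunk, so the sketch below is only a guess at what a combined processor would return, based on the variables the disabled processors used to provide.

    # Hypothetical sketch of a combined context processor; the real
    # ldt.utils.context_processors.ldtcontext is not shown in this changeset,
    # so these keys are assumptions taken from the processors disabled above.
    from django.conf import settings
    import ldt
    import ldtplatform

    def ldtcontext(request):
        return {
            'BASE_URL': settings.BASE_URL,
            'MEDIA_URL': settings.MEDIA_URL,
            'WEB_URL': settings.WEB_URL,
            'VERSION': "platform: %s - web: %s" % (ldt.get_version(),
                                                   ldtplatform.get_version()),
        }
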
Binary file web/static/ldt/swf/ldt/LignesDeTempsFlex.swf has changed
--- a/web/static/socialauth/css/openid.css	Wed Mar 23 17:34:36 2011 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,44 +0,0 @@
-#openid_form {
-	width: 400px;	
-}
-#openid_form legend {
-	font-weight: bold;
-}
-#openid_choice {
-	display: none;
-}
-#openid_input_area {
-	clear: both;
-	padding: 10px;
-}
-#openid_btns, #openid_btns br {
-	clear: both;
-}
-#openid_highlight {
-	padding: 3px;
-	background-color: #FFFCC9;
-	float: left;
-}
-.openid_large_btn {
-	width: 100px;
-	height: 60px;
-	border: 1px solid #DDD;
-	margin: 3px;
-	float: left;
-}
-.openid_small_btn {
-	width: 24px;
-	height: 24px;
-	border: 1px solid #DDD;
-	margin: 3px;
-	float: left;
-}	
-a.openid_large_btn:focus {
-	outline: none;
-}
-a.openid_large_btn:focus{
--moz-outline-style: none;
-}
-.openid_selected {
-	border: 4px solid #DDD;
-}	
\ No newline at end of file
--- a/web/static/socialauth/css/socialauth.css	Wed Mar 23 17:34:36 2011 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,73 +0,0 @@
-body {
-    margin-left: auto;
-    margin-right: auto;
-    width: 950px;
-}
-#openid_choice{
-	margin-top:10px;
-	display: none;
-}
-#openid_input_area{
-	padding-top:10px;
-	clear: both;
-}
-#openid_username{
-	margin-right:5px;
-}
-#openid_btns, #openid_btns br{
-	clear: both;
-}
-#openid_btns #facebook{
-	height: 60px;
-	width: 110px;
-	float: left;
-	margin-top: 3px;
-	display: table-cell;
-    text-align: center;
-    vertical-align: middle;
-	border: 1px solid #ccc;
-}
-#openid_highlight{
-	padding: 3px;
-	background-color: #FFFCC9;
-	float: left;
-}
-.openid_large_btn{
-	width: 100px;
-	height: 60px;
-	border: 1px solid #DDD;
-	margin: 3px;
-	float: left;
-}
-.openid_small_btn{
-	width: 24px;
-	height: 24px;
-	border: 1px solid #DDD;
-	margin: 3px;
-	float: left;
-}
-a.openid_large_btn:focus{
-	outline: none;
-}
-a.openid_large_btn:focus{
-	-moz-outline-style: none;
-}
-.openid_selected{
-	border: 4px solid #DDD;
-}
-.linkedin{
-	background: #FFF url(../images/linkedin.jpg) no-repeat center center;
-}
-.yahoo{
-	background: #FFF url(../images/yahoo.gif) no-repeat center center;
-}
-.google{
-	background: #FFF url(../images/google.png) no-repeat center center;
-}
-.openid{
-	background: #FFF url(../images/openid.png) no-repeat center center;
-}
-.twitter{
-	background: #FFF url(../images/twitter.png) no-repeat center center;
-}
-
Binary file web/static/socialauth/images/Thumbs.db has changed
Binary file web/static/socialauth/images/aol.gif has changed
Binary file web/static/socialauth/images/blogger.ico has changed
Binary file web/static/socialauth/images/claimid.ico has changed
Binary file web/static/socialauth/images/facebook.gif has changed
Binary file web/static/socialauth/images/flickr.ico has changed
Binary file web/static/socialauth/images/google.gif has changed
Binary file web/static/socialauth/images/linkedin.jpg has changed
Binary file web/static/socialauth/images/livejournal.ico has changed
Binary file web/static/socialauth/images/myopenid.ico has changed
Binary file web/static/socialauth/images/openid-inputicon.gif has changed
Binary file web/static/socialauth/images/openid.gif has changed
Binary file web/static/socialauth/images/technorati.ico has changed
Binary file web/static/socialauth/images/twitter.png has changed
Binary file web/static/socialauth/images/verisign.ico has changed
Binary file web/static/socialauth/images/vidoop.ico has changed
Binary file web/static/socialauth/images/wordpress.ico has changed
Binary file web/static/socialauth/images/yahoo.gif has changed
--- a/web/static/socialauth/js/jquery-1.2.6.min.js	Wed Mar 23 17:34:36 2011 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,32 +0,0 @@
-/*
- * jQuery 1.2.6 - New Wave Javascript
- *
- * Copyright (c) 2008 John Resig (jquery.com)
- * Dual licensed under the MIT (MIT-LICENSE.txt)
- * and GPL (GPL-LICENSE.txt) licenses.
- *
- * $Date: 2008-05-24 14:22:17 -0400 (Sat, 24 May 2008) $
- * $Rev: 5685 $
- */
-(function(){var _jQuery=window.jQuery,_$=window.$;var jQuery=window.jQuery=window.$=function(selector,context){return new jQuery.fn.init(selector,context);};var quickExpr=/^[^<]*(<(.|\s)+>)[^>]*$|^#(\w+)$/,isSimple=/^.[^:#\[\.]*$/,undefined;jQuery.fn=jQuery.prototype={init:function(selector,context){selector=selector||document;if(selector.nodeType){this[0]=selector;this.length=1;return this;}if(typeof selector=="string"){var match=quickExpr.exec(selector);if(match&&(match[1]||!context)){if(match[1])selector=jQuery.clean([match[1]],context);else{var elem=document.getElementById(match[3]);if(elem){if(elem.id!=match[3])return jQuery().find(selector);return jQuery(elem);}selector=[];}}else
-return jQuery(context).find(selector);}else if(jQuery.isFunction(selector))return jQuery(document)[jQuery.fn.ready?"ready":"load"](selector);return this.setArray(jQuery.makeArray(selector));},jquery:"1.2.6",size:function(){return this.length;},length:0,get:function(num){return num==undefined?jQuery.makeArray(this):this[num];},pushStack:function(elems){var ret=jQuery(elems);ret.prevObject=this;return ret;},setArray:function(elems){this.length=0;Array.prototype.push.apply(this,elems);return this;},each:function(callback,args){return jQuery.each(this,callback,args);},index:function(elem){var ret=-1;return jQuery.inArray(elem&&elem.jquery?elem[0]:elem,this);},attr:function(name,value,type){var options=name;if(name.constructor==String)if(value===undefined)return this[0]&&jQuery[type||"attr"](this[0],name);else{options={};options[name]=value;}return this.each(function(i){for(name in options)jQuery.attr(type?this.style:this,name,jQuery.prop(this,options[name],type,i,name));});},css:function(key,value){if((key=='width'||key=='height')&&parseFloat(value)<0)value=undefined;return this.attr(key,value,"curCSS");},text:function(text){if(typeof text!="object"&&text!=null)return this.empty().append((this[0]&&this[0].ownerDocument||document).createTextNode(text));var ret="";jQuery.each(text||this,function(){jQuery.each(this.childNodes,function(){if(this.nodeType!=8)ret+=this.nodeType!=1?this.nodeValue:jQuery.fn.text([this]);});});return ret;},wrapAll:function(html){if(this[0])jQuery(html,this[0].ownerDocument).clone().insertBefore(this[0]).map(function(){var elem=this;while(elem.firstChild)elem=elem.firstChild;return elem;}).append(this);return this;},wrapInner:function(html){return this.each(function(){jQuery(this).contents().wrapAll(html);});},wrap:function(html){return this.each(function(){jQuery(this).wrapAll(html);});},append:function(){return this.domManip(arguments,true,false,function(elem){if(this.nodeType==1)this.appendChild(elem);});},prepend:function(){return this.domManip(arguments,true,true,function(elem){if(this.nodeType==1)this.insertBefore(elem,this.firstChild);});},before:function(){return this.domManip(arguments,false,false,function(elem){this.parentNode.insertBefore(elem,this);});},after:function(){return this.domManip(arguments,false,true,function(elem){this.parentNode.insertBefore(elem,this.nextSibling);});},end:function(){return this.prevObject||jQuery([]);},find:function(selector){var elems=jQuery.map(this,function(elem){return jQuery.find(selector,elem);});return this.pushStack(/[^+>] [^+>]/.test(selector)||selector.indexOf("..")>-1?jQuery.unique(elems):elems);},clone:function(events){var ret=this.map(function(){if(jQuery.browser.msie&&!jQuery.isXMLDoc(this)){var clone=this.cloneNode(true),container=document.createElement("div");container.appendChild(clone);return jQuery.clean([container.innerHTML])[0];}else
-return this.cloneNode(true);});var clone=ret.find("*").andSelf().each(function(){if(this[expando]!=undefined)this[expando]=null;});if(events===true)this.find("*").andSelf().each(function(i){if(this.nodeType==3)return;var events=jQuery.data(this,"events");for(var type in events)for(var handler in events[type])jQuery.event.add(clone[i],type,events[type][handler],events[type][handler].data);});return ret;},filter:function(selector){return this.pushStack(jQuery.isFunction(selector)&&jQuery.grep(this,function(elem,i){return selector.call(elem,i);})||jQuery.multiFilter(selector,this));},not:function(selector){if(selector.constructor==String)if(isSimple.test(selector))return this.pushStack(jQuery.multiFilter(selector,this,true));else
-selector=jQuery.multiFilter(selector,this);var isArrayLike=selector.length&&selector[selector.length-1]!==undefined&&!selector.nodeType;return this.filter(function(){return isArrayLike?jQuery.inArray(this,selector)<0:this!=selector;});},add:function(selector){return this.pushStack(jQuery.unique(jQuery.merge(this.get(),typeof selector=='string'?jQuery(selector):jQuery.makeArray(selector))));},is:function(selector){return!!selector&&jQuery.multiFilter(selector,this).length>0;},hasClass:function(selector){return this.is("."+selector);},val:function(value){if(value==undefined){if(this.length){var elem=this[0];if(jQuery.nodeName(elem,"select")){var index=elem.selectedIndex,values=[],options=elem.options,one=elem.type=="select-one";if(index<0)return null;for(var i=one?index:0,max=one?index+1:options.length;i<max;i++){var option=options[i];if(option.selected){value=jQuery.browser.msie&&!option.attributes.value.specified?option.text:option.value;if(one)return value;values.push(value);}}return values;}else
-return(this[0].value||"").replace(/\r/g,"");}return undefined;}if(value.constructor==Number)value+='';return this.each(function(){if(this.nodeType!=1)return;if(value.constructor==Array&&/radio|checkbox/.test(this.type))this.checked=(jQuery.inArray(this.value,value)>=0||jQuery.inArray(this.name,value)>=0);else if(jQuery.nodeName(this,"select")){var values=jQuery.makeArray(value);jQuery("option",this).each(function(){this.selected=(jQuery.inArray(this.value,values)>=0||jQuery.inArray(this.text,values)>=0);});if(!values.length)this.selectedIndex=-1;}else
-this.value=value;});},html:function(value){return value==undefined?(this[0]?this[0].innerHTML:null):this.empty().append(value);},replaceWith:function(value){return this.after(value).remove();},eq:function(i){return this.slice(i,i+1);},slice:function(){return this.pushStack(Array.prototype.slice.apply(this,arguments));},map:function(callback){return this.pushStack(jQuery.map(this,function(elem,i){return callback.call(elem,i,elem);}));},andSelf:function(){return this.add(this.prevObject);},data:function(key,value){var parts=key.split(".");parts[1]=parts[1]?"."+parts[1]:"";if(value===undefined){var data=this.triggerHandler("getData"+parts[1]+"!",[parts[0]]);if(data===undefined&&this.length)data=jQuery.data(this[0],key);return data===undefined&&parts[1]?this.data(parts[0]):data;}else
-return this.trigger("setData"+parts[1]+"!",[parts[0],value]).each(function(){jQuery.data(this,key,value);});},removeData:function(key){return this.each(function(){jQuery.removeData(this,key);});},domManip:function(args,table,reverse,callback){var clone=this.length>1,elems;return this.each(function(){if(!elems){elems=jQuery.clean(args,this.ownerDocument);if(reverse)elems.reverse();}var obj=this;if(table&&jQuery.nodeName(this,"table")&&jQuery.nodeName(elems[0],"tr"))obj=this.getElementsByTagName("tbody")[0]||this.appendChild(this.ownerDocument.createElement("tbody"));var scripts=jQuery([]);jQuery.each(elems,function(){var elem=clone?jQuery(this).clone(true)[0]:this;if(jQuery.nodeName(elem,"script"))scripts=scripts.add(elem);else{if(elem.nodeType==1)scripts=scripts.add(jQuery("script",elem).remove());callback.call(obj,elem);}});scripts.each(evalScript);});}};jQuery.fn.init.prototype=jQuery.fn;function evalScript(i,elem){if(elem.src)jQuery.ajax({url:elem.src,async:false,dataType:"script"});else
-jQuery.globalEval(elem.text||elem.textContent||elem.innerHTML||"");if(elem.parentNode)elem.parentNode.removeChild(elem);}function now(){return+new Date;}jQuery.extend=jQuery.fn.extend=function(){var target=arguments[0]||{},i=1,length=arguments.length,deep=false,options;if(target.constructor==Boolean){deep=target;target=arguments[1]||{};i=2;}if(typeof target!="object"&&typeof target!="function")target={};if(length==i){target=this;--i;}for(;i<length;i++)if((options=arguments[i])!=null)for(var name in options){var src=target[name],copy=options[name];if(target===copy)continue;if(deep&&copy&&typeof copy=="object"&&!copy.nodeType)target[name]=jQuery.extend(deep,src||(copy.length!=null?[]:{}),copy);else if(copy!==undefined)target[name]=copy;}return target;};var expando="jQuery"+now(),uuid=0,windowData={},exclude=/z-?index|font-?weight|opacity|zoom|line-?height/i,defaultView=document.defaultView||{};jQuery.extend({noConflict:function(deep){window.$=_$;if(deep)window.jQuery=_jQuery;return jQuery;},isFunction:function(fn){return!!fn&&typeof fn!="string"&&!fn.nodeName&&fn.constructor!=Array&&/^[\s[]?function/.test(fn+"");},isXMLDoc:function(elem){return elem.documentElement&&!elem.body||elem.tagName&&elem.ownerDocument&&!elem.ownerDocument.body;},globalEval:function(data){data=jQuery.trim(data);if(data){var head=document.getElementsByTagName("head")[0]||document.documentElement,script=document.createElement("script");script.type="text/javascript";if(jQuery.browser.msie)script.text=data;else
-script.appendChild(document.createTextNode(data));head.insertBefore(script,head.firstChild);head.removeChild(script);}},nodeName:function(elem,name){return elem.nodeName&&elem.nodeName.toUpperCase()==name.toUpperCase();},cache:{},data:function(elem,name,data){elem=elem==window?windowData:elem;var id=elem[expando];if(!id)id=elem[expando]=++uuid;if(name&&!jQuery.cache[id])jQuery.cache[id]={};if(data!==undefined)jQuery.cache[id][name]=data;return name?jQuery.cache[id][name]:id;},removeData:function(elem,name){elem=elem==window?windowData:elem;var id=elem[expando];if(name){if(jQuery.cache[id]){delete jQuery.cache[id][name];name="";for(name in jQuery.cache[id])break;if(!name)jQuery.removeData(elem);}}else{try{delete elem[expando];}catch(e){if(elem.removeAttribute)elem.removeAttribute(expando);}delete jQuery.cache[id];}},each:function(object,callback,args){var name,i=0,length=object.length;if(args){if(length==undefined){for(name in object)if(callback.apply(object[name],args)===false)break;}else
-for(;i<length;)if(callback.apply(object[i++],args)===false)break;}else{if(length==undefined){for(name in object)if(callback.call(object[name],name,object[name])===false)break;}else
-for(var value=object[0];i<length&&callback.call(value,i,value)!==false;value=object[++i]){}}return object;},prop:function(elem,value,type,i,name){if(jQuery.isFunction(value))value=value.call(elem,i);return value&&value.constructor==Number&&type=="curCSS"&&!exclude.test(name)?value+"px":value;},className:{add:function(elem,classNames){jQuery.each((classNames||"").split(/\s+/),function(i,className){if(elem.nodeType==1&&!jQuery.className.has(elem.className,className))elem.className+=(elem.className?" ":"")+className;});},remove:function(elem,classNames){if(elem.nodeType==1)elem.className=classNames!=undefined?jQuery.grep(elem.className.split(/\s+/),function(className){return!jQuery.className.has(classNames,className);}).join(" "):"";},has:function(elem,className){return jQuery.inArray(className,(elem.className||elem).toString().split(/\s+/))>-1;}},swap:function(elem,options,callback){var old={};for(var name in options){old[name]=elem.style[name];elem.style[name]=options[name];}callback.call(elem);for(var name in options)elem.style[name]=old[name];},css:function(elem,name,force){if(name=="width"||name=="height"){var val,props={position:"absolute",visibility:"hidden",display:"block"},which=name=="width"?["Left","Right"]:["Top","Bottom"];function getWH(){val=name=="width"?elem.offsetWidth:elem.offsetHeight;var padding=0,border=0;jQuery.each(which,function(){padding+=parseFloat(jQuery.curCSS(elem,"padding"+this,true))||0;border+=parseFloat(jQuery.curCSS(elem,"border"+this+"Width",true))||0;});val-=Math.round(padding+border);}if(jQuery(elem).is(":visible"))getWH();else
-jQuery.swap(elem,props,getWH);return Math.max(0,val);}return jQuery.curCSS(elem,name,force);},curCSS:function(elem,name,force){var ret,style=elem.style;function color(elem){if(!jQuery.browser.safari)return false;var ret=defaultView.getComputedStyle(elem,null);return!ret||ret.getPropertyValue("color")=="";}if(name=="opacity"&&jQuery.browser.msie){ret=jQuery.attr(style,"opacity");return ret==""?"1":ret;}if(jQuery.browser.opera&&name=="display"){var save=style.outline;style.outline="0 solid black";style.outline=save;}if(name.match(/float/i))name=styleFloat;if(!force&&style&&style[name])ret=style[name];else if(defaultView.getComputedStyle){if(name.match(/float/i))name="float";name=name.replace(/([A-Z])/g,"-$1").toLowerCase();var computedStyle=defaultView.getComputedStyle(elem,null);if(computedStyle&&!color(elem))ret=computedStyle.getPropertyValue(name);else{var swap=[],stack=[],a=elem,i=0;for(;a&&color(a);a=a.parentNode)stack.unshift(a);for(;i<stack.length;i++)if(color(stack[i])){swap[i]=stack[i].style.display;stack[i].style.display="block";}ret=name=="display"&&swap[stack.length-1]!=null?"none":(computedStyle&&computedStyle.getPropertyValue(name))||"";for(i=0;i<swap.length;i++)if(swap[i]!=null)stack[i].style.display=swap[i];}if(name=="opacity"&&ret=="")ret="1";}else if(elem.currentStyle){var camelCase=name.replace(/\-(\w)/g,function(all,letter){return letter.toUpperCase();});ret=elem.currentStyle[name]||elem.currentStyle[camelCase];if(!/^\d+(px)?$/i.test(ret)&&/^\d/.test(ret)){var left=style.left,rsLeft=elem.runtimeStyle.left;elem.runtimeStyle.left=elem.currentStyle.left;style.left=ret||0;ret=style.pixelLeft+"px";style.left=left;elem.runtimeStyle.left=rsLeft;}}return ret;},clean:function(elems,context){var ret=[];context=context||document;if(typeof context.createElement=='undefined')context=context.ownerDocument||context[0]&&context[0].ownerDocument||document;jQuery.each(elems,function(i,elem){if(!elem)return;if(elem.constructor==Number)elem+='';if(typeof elem=="string"){elem=elem.replace(/(<(\w+)[^>]*?)\/>/g,function(all,front,tag){return tag.match(/^(abbr|br|col|img|input|link|meta|param|hr|area|embed)$/i)?all:front+"></"+tag+">";});var tags=jQuery.trim(elem).toLowerCase(),div=context.createElement("div");var wrap=!tags.indexOf("<opt")&&[1,"<select multiple='multiple'>","</select>"]||!tags.indexOf("<leg")&&[1,"<fieldset>","</fieldset>"]||tags.match(/^<(thead|tbody|tfoot|colg|cap)/)&&[1,"<table>","</table>"]||!tags.indexOf("<tr")&&[2,"<table><tbody>","</tbody></table>"]||(!tags.indexOf("<td")||!tags.indexOf("<th"))&&[3,"<table><tbody><tr>","</tr></tbody></table>"]||!tags.indexOf("<col")&&[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"]||jQuery.browser.msie&&[1,"div<div>","</div>"]||[0,"",""];div.innerHTML=wrap[1]+elem+wrap[2];while(wrap[0]--)div=div.lastChild;if(jQuery.browser.msie){var tbody=!tags.indexOf("<table")&&tags.indexOf("<tbody")<0?div.firstChild&&div.firstChild.childNodes:wrap[1]=="<table>"&&tags.indexOf("<tbody")<0?div.childNodes:[];for(var j=tbody.length-1;j>=0;--j)if(jQuery.nodeName(tbody[j],"tbody")&&!tbody[j].childNodes.length)tbody[j].parentNode.removeChild(tbody[j]);if(/^\s/.test(elem))div.insertBefore(context.createTextNode(elem.match(/^\s*/)[0]),div.firstChild);}elem=jQuery.makeArray(div.childNodes);}if(elem.length===0&&(!jQuery.nodeName(elem,"form")&&!jQuery.nodeName(elem,"select")))return;if(elem[0]==undefined||jQuery.nodeName(elem,"form")||elem.options)ret.push(elem);else
-ret=jQuery.merge(ret,elem);});return ret;},attr:function(elem,name,value){if(!elem||elem.nodeType==3||elem.nodeType==8)return undefined;var notxml=!jQuery.isXMLDoc(elem),set=value!==undefined,msie=jQuery.browser.msie;name=notxml&&jQuery.props[name]||name;if(elem.tagName){var special=/href|src|style/.test(name);if(name=="selected"&&jQuery.browser.safari)elem.parentNode.selectedIndex;if(name in elem&&notxml&&!special){if(set){if(name=="type"&&jQuery.nodeName(elem,"input")&&elem.parentNode)throw"type property can't be changed";elem[name]=value;}if(jQuery.nodeName(elem,"form")&&elem.getAttributeNode(name))return elem.getAttributeNode(name).nodeValue;return elem[name];}if(msie&&notxml&&name=="style")return jQuery.attr(elem.style,"cssText",value);if(set)elem.setAttribute(name,""+value);var attr=msie&&notxml&&special?elem.getAttribute(name,2):elem.getAttribute(name);return attr===null?undefined:attr;}if(msie&&name=="opacity"){if(set){elem.zoom=1;elem.filter=(elem.filter||"").replace(/alpha\([^)]*\)/,"")+(parseInt(value)+''=="NaN"?"":"alpha(opacity="+value*100+")");}return elem.filter&&elem.filter.indexOf("opacity=")>=0?(parseFloat(elem.filter.match(/opacity=([^)]*)/)[1])/100)+'':"";}name=name.replace(/-([a-z])/ig,function(all,letter){return letter.toUpperCase();});if(set)elem[name]=value;return elem[name];},trim:function(text){return(text||"").replace(/^\s+|\s+$/g,"");},makeArray:function(array){var ret=[];if(array!=null){var i=array.length;if(i==null||array.split||array.setInterval||array.call)ret[0]=array;else
-while(i)ret[--i]=array[i];}return ret;},inArray:function(elem,array){for(var i=0,length=array.length;i<length;i++)if(array[i]===elem)return i;return-1;},merge:function(first,second){var i=0,elem,pos=first.length;if(jQuery.browser.msie){while(elem=second[i++])if(elem.nodeType!=8)first[pos++]=elem;}else
-while(elem=second[i++])first[pos++]=elem;return first;},unique:function(array){var ret=[],done={};try{for(var i=0,length=array.length;i<length;i++){var id=jQuery.data(array[i]);if(!done[id]){done[id]=true;ret.push(array[i]);}}}catch(e){ret=array;}return ret;},grep:function(elems,callback,inv){var ret=[];for(var i=0,length=elems.length;i<length;i++)if(!inv!=!callback(elems[i],i))ret.push(elems[i]);return ret;},map:function(elems,callback){var ret=[];for(var i=0,length=elems.length;i<length;i++){var value=callback(elems[i],i);if(value!=null)ret[ret.length]=value;}return ret.concat.apply([],ret);}});var userAgent=navigator.userAgent.toLowerCase();jQuery.browser={version:(userAgent.match(/.+(?:rv|it|ra|ie)[\/: ]([\d.]+)/)||[])[1],safari:/webkit/.test(userAgent),opera:/opera/.test(userAgent),msie:/msie/.test(userAgent)&&!/opera/.test(userAgent),mozilla:/mozilla/.test(userAgent)&&!/(compatible|webkit)/.test(userAgent)};var styleFloat=jQuery.browser.msie?"styleFloat":"cssFloat";jQuery.extend({boxModel:!jQuery.browser.msie||document.compatMode=="CSS1Compat",props:{"for":"htmlFor","class":"className","float":styleFloat,cssFloat:styleFloat,styleFloat:styleFloat,readonly:"readOnly",maxlength:"maxLength",cellspacing:"cellSpacing"}});jQuery.each({parent:function(elem){return elem.parentNode;},parents:function(elem){return jQuery.dir(elem,"parentNode");},next:function(elem){return jQuery.nth(elem,2,"nextSibling");},prev:function(elem){return jQuery.nth(elem,2,"previousSibling");},nextAll:function(elem){return jQuery.dir(elem,"nextSibling");},prevAll:function(elem){return jQuery.dir(elem,"previousSibling");},siblings:function(elem){return jQuery.sibling(elem.parentNode.firstChild,elem);},children:function(elem){return jQuery.sibling(elem.firstChild);},contents:function(elem){return jQuery.nodeName(elem,"iframe")?elem.contentDocument||elem.contentWindow.document:jQuery.makeArray(elem.childNodes);}},function(name,fn){jQuery.fn[name]=function(selector){var ret=jQuery.map(this,fn);if(selector&&typeof selector=="string")ret=jQuery.multiFilter(selector,ret);return this.pushStack(jQuery.unique(ret));};});jQuery.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(name,original){jQuery.fn[name]=function(){var args=arguments;return this.each(function(){for(var i=0,length=args.length;i<length;i++)jQuery(args[i])[original](this);});};});jQuery.each({removeAttr:function(name){jQuery.attr(this,name,"");if(this.nodeType==1)this.removeAttribute(name);},addClass:function(classNames){jQuery.className.add(this,classNames);},removeClass:function(classNames){jQuery.className.remove(this,classNames);},toggleClass:function(classNames){jQuery.className[jQuery.className.has(this,classNames)?"remove":"add"](this,classNames);},remove:function(selector){if(!selector||jQuery.filter(selector,[this]).r.length){jQuery("*",this).add(this).each(function(){jQuery.event.remove(this);jQuery.removeData(this);});if(this.parentNode)this.parentNode.removeChild(this);}},empty:function(){jQuery(">*",this).remove();while(this.firstChild)this.removeChild(this.firstChild);}},function(name,fn){jQuery.fn[name]=function(){return this.each(fn,arguments);};});jQuery.each(["Height","Width"],function(i,name){var type=name.toLowerCase();jQuery.fn[type]=function(size){return 
this[0]==window?jQuery.browser.opera&&document.body["client"+name]||jQuery.browser.safari&&window["inner"+name]||document.compatMode=="CSS1Compat"&&document.documentElement["client"+name]||document.body["client"+name]:this[0]==document?Math.max(Math.max(document.body["scroll"+name],document.documentElement["scroll"+name]),Math.max(document.body["offset"+name],document.documentElement["offset"+name])):size==undefined?(this.length?jQuery.css(this[0],type):null):this.css(type,size.constructor==String?size:size+"px");};});function num(elem,prop){return elem[0]&&parseInt(jQuery.curCSS(elem[0],prop,true),10)||0;}var chars=jQuery.browser.safari&&parseInt(jQuery.browser.version)<417?"(?:[\\w*_-]|\\\\.)":"(?:[\\w\u0128-\uFFFF*_-]|\\\\.)",quickChild=new RegExp("^>\\s*("+chars+"+)"),quickID=new RegExp("^("+chars+"+)(#)("+chars+"+)"),quickClass=new RegExp("^([#.]?)("+chars+"*)");jQuery.extend({expr:{"":function(a,i,m){return m[2]=="*"||jQuery.nodeName(a,m[2]);},"#":function(a,i,m){return a.getAttribute("id")==m[2];},":":{lt:function(a,i,m){return i<m[3]-0;},gt:function(a,i,m){return i>m[3]-0;},nth:function(a,i,m){return m[3]-0==i;},eq:function(a,i,m){return m[3]-0==i;},first:function(a,i){return i==0;},last:function(a,i,m,r){return i==r.length-1;},even:function(a,i){return i%2==0;},odd:function(a,i){return i%2;},"first-child":function(a){return a.parentNode.getElementsByTagName("*")[0]==a;},"last-child":function(a){return jQuery.nth(a.parentNode.lastChild,1,"previousSibling")==a;},"only-child":function(a){return!jQuery.nth(a.parentNode.lastChild,2,"previousSibling");},parent:function(a){return a.firstChild;},empty:function(a){return!a.firstChild;},contains:function(a,i,m){return(a.textContent||a.innerText||jQuery(a).text()||"").indexOf(m[3])>=0;},visible:function(a){return"hidden"!=a.type&&jQuery.css(a,"display")!="none"&&jQuery.css(a,"visibility")!="hidden";},hidden:function(a){return"hidden"==a.type||jQuery.css(a,"display")=="none"||jQuery.css(a,"visibility")=="hidden";},enabled:function(a){return!a.disabled;},disabled:function(a){return a.disabled;},checked:function(a){return a.checked;},selected:function(a){return a.selected||jQuery.attr(a,"selected");},text:function(a){return"text"==a.type;},radio:function(a){return"radio"==a.type;},checkbox:function(a){return"checkbox"==a.type;},file:function(a){return"file"==a.type;},password:function(a){return"password"==a.type;},submit:function(a){return"submit"==a.type;},image:function(a){return"image"==a.type;},reset:function(a){return"reset"==a.type;},button:function(a){return"button"==a.type||jQuery.nodeName(a,"button");},input:function(a){return/input|select|textarea|button/i.test(a.nodeName);},has:function(a,i,m){return jQuery.find(m[3],a).length;},header:function(a){return/h\d/i.test(a.nodeName);},animated:function(a){return jQuery.grep(jQuery.timers,function(fn){return a==fn.elem;}).length;}}},parse:[/^(\[) *@?([\w-]+) *([!*$^~=]*) *('?"?)(.*?)\4 *\]/,/^(:)([\w-]+)\("?'?(.*?(\(.*?\))?[^(]*?)"?'?\)/,new RegExp("^([:.#]*)("+chars+"+)")],multiFilter:function(expr,elems,not){var old,cur=[];while(expr&&expr!=old){old=expr;var f=jQuery.filter(expr,elems,not);expr=f.t.replace(/^\s*,\s*/,"");cur=not?elems=f.r:jQuery.merge(cur,f.r);}return cur;},find:function(t,context){if(typeof t!="string")return[t];if(context&&context.nodeType!=1&&context.nodeType!=9)return[];context=context||document;var ret=[context],done=[],last,nodeName;while(t&&last!=t){var r=[];last=t;t=jQuery.trim(t);var 
foundToken=false,re=quickChild,m=re.exec(t);if(m){nodeName=m[1].toUpperCase();for(var i=0;ret[i];i++)for(var c=ret[i].firstChild;c;c=c.nextSibling)if(c.nodeType==1&&(nodeName=="*"||c.nodeName.toUpperCase()==nodeName))r.push(c);ret=r;t=t.replace(re,"");if(t.indexOf(" ")==0)continue;foundToken=true;}else{re=/^([>+~])\s*(\w*)/i;if((m=re.exec(t))!=null){r=[];var merge={};nodeName=m[2].toUpperCase();m=m[1];for(var j=0,rl=ret.length;j<rl;j++){var n=m=="~"||m=="+"?ret[j].nextSibling:ret[j].firstChild;for(;n;n=n.nextSibling)if(n.nodeType==1){var id=jQuery.data(n);if(m=="~"&&merge[id])break;if(!nodeName||n.nodeName.toUpperCase()==nodeName){if(m=="~")merge[id]=true;r.push(n);}if(m=="+")break;}}ret=r;t=jQuery.trim(t.replace(re,""));foundToken=true;}}if(t&&!foundToken){if(!t.indexOf(",")){if(context==ret[0])ret.shift();done=jQuery.merge(done,ret);r=ret=[context];t=" "+t.substr(1,t.length);}else{var re2=quickID;var m=re2.exec(t);if(m){m=[0,m[2],m[3],m[1]];}else{re2=quickClass;m=re2.exec(t);}m[2]=m[2].replace(/\\/g,"");var elem=ret[ret.length-1];if(m[1]=="#"&&elem&&elem.getElementById&&!jQuery.isXMLDoc(elem)){var oid=elem.getElementById(m[2]);if((jQuery.browser.msie||jQuery.browser.opera)&&oid&&typeof oid.id=="string"&&oid.id!=m[2])oid=jQuery('[@id="'+m[2]+'"]',elem)[0];ret=r=oid&&(!m[3]||jQuery.nodeName(oid,m[3]))?[oid]:[];}else{for(var i=0;ret[i];i++){var tag=m[1]=="#"&&m[3]?m[3]:m[1]!=""||m[0]==""?"*":m[2];if(tag=="*"&&ret[i].nodeName.toLowerCase()=="object")tag="param";r=jQuery.merge(r,ret[i].getElementsByTagName(tag));}if(m[1]==".")r=jQuery.classFilter(r,m[2]);if(m[1]=="#"){var tmp=[];for(var i=0;r[i];i++)if(r[i].getAttribute("id")==m[2]){tmp=[r[i]];break;}r=tmp;}ret=r;}t=t.replace(re2,"");}}if(t){var val=jQuery.filter(t,r);ret=r=val.r;t=jQuery.trim(val.t);}}if(t)ret=[];if(ret&&context==ret[0])ret.shift();done=jQuery.merge(done,ret);return done;},classFilter:function(r,m,not){m=" "+m+" ";var tmp=[];for(var i=0;r[i];i++){var pass=(" "+r[i].className+" ").indexOf(m)>=0;if(!not&&pass||not&&!pass)tmp.push(r[i]);}return tmp;},filter:function(t,r,not){var last;while(t&&t!=last){last=t;var p=jQuery.parse,m;for(var i=0;p[i];i++){m=p[i].exec(t);if(m){t=t.substring(m[0].length);m[2]=m[2].replace(/\\/g,"");break;}}if(!m)break;if(m[1]==":"&&m[2]=="not")r=isSimple.test(m[3])?jQuery.filter(m[3],r,true).r:jQuery(r).not(m[3]);else if(m[1]==".")r=jQuery.classFilter(r,m[2],not);else if(m[1]=="["){var tmp=[],type=m[3];for(var i=0,rl=r.length;i<rl;i++){var a=r[i],z=a[jQuery.props[m[2]]||m[2]];if(z==null||/href|src|selected/.test(m[2]))z=jQuery.attr(a,m[2])||'';if((type==""&&!!z||type=="="&&z==m[5]||type=="!="&&z!=m[5]||type=="^="&&z&&!z.indexOf(m[5])||type=="$="&&z.substr(z.length-m[5].length)==m[5]||(type=="*="||type=="~=")&&z.indexOf(m[5])>=0)^not)tmp.push(a);}r=tmp;}else if(m[1]==":"&&m[2]=="nth-child"){var merge={},tmp=[],test=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(m[3]=="even"&&"2n"||m[3]=="odd"&&"2n+1"||!/\D/.test(m[3])&&"0n+"+m[3]||m[3]),first=(test[1]+(test[2]||1))-0,last=test[3]-0;for(var i=0,rl=r.length;i<rl;i++){var node=r[i],parentNode=node.parentNode,id=jQuery.data(parentNode);if(!merge[id]){var c=1;for(var n=parentNode.firstChild;n;n=n.nextSibling)if(n.nodeType==1)n.nodeIndex=c++;merge[id]=true;}var add=false;if(first==0){if(node.nodeIndex==last)add=true;}else if((node.nodeIndex-last)%first==0&&(node.nodeIndex-last)/first>=0)add=true;if(add^not)tmp.push(node);}r=tmp;}else{var fn=jQuery.expr[m[1]];if(typeof fn=="object")fn=fn[m[2]];if(typeof fn=="string")fn=eval("false||function(a,i){return 
"+fn+";}");r=jQuery.grep(r,function(elem,i){return fn(elem,i,m,r);},not);}}return{r:r,t:t};},dir:function(elem,dir){var matched=[],cur=elem[dir];while(cur&&cur!=document){if(cur.nodeType==1)matched.push(cur);cur=cur[dir];}return matched;},nth:function(cur,result,dir,elem){result=result||1;var num=0;for(;cur;cur=cur[dir])if(cur.nodeType==1&&++num==result)break;return cur;},sibling:function(n,elem){var r=[];for(;n;n=n.nextSibling){if(n.nodeType==1&&n!=elem)r.push(n);}return r;}});jQuery.event={add:function(elem,types,handler,data){if(elem.nodeType==3||elem.nodeType==8)return;if(jQuery.browser.msie&&elem.setInterval)elem=window;if(!handler.guid)handler.guid=this.guid++;if(data!=undefined){var fn=handler;handler=this.proxy(fn,function(){return fn.apply(this,arguments);});handler.data=data;}var events=jQuery.data(elem,"events")||jQuery.data(elem,"events",{}),handle=jQuery.data(elem,"handle")||jQuery.data(elem,"handle",function(){if(typeof jQuery!="undefined"&&!jQuery.event.triggered)return jQuery.event.handle.apply(arguments.callee.elem,arguments);});handle.elem=elem;jQuery.each(types.split(/\s+/),function(index,type){var parts=type.split(".");type=parts[0];handler.type=parts[1];var handlers=events[type];if(!handlers){handlers=events[type]={};if(!jQuery.event.special[type]||jQuery.event.special[type].setup.call(elem)===false){if(elem.addEventListener)elem.addEventListener(type,handle,false);else if(elem.attachEvent)elem.attachEvent("on"+type,handle);}}handlers[handler.guid]=handler;jQuery.event.global[type]=true;});elem=null;},guid:1,global:{},remove:function(elem,types,handler){if(elem.nodeType==3||elem.nodeType==8)return;var events=jQuery.data(elem,"events"),ret,index;if(events){if(types==undefined||(typeof types=="string"&&types.charAt(0)=="."))for(var type in events)this.remove(elem,type+(types||""));else{if(types.type){handler=types.handler;types=types.type;}jQuery.each(types.split(/\s+/),function(index,type){var parts=type.split(".");type=parts[0];if(events[type]){if(handler)delete events[type][handler.guid];else
-for(handler in events[type])if(!parts[1]||events[type][handler].type==parts[1])delete events[type][handler];for(ret in events[type])break;if(!ret){if(!jQuery.event.special[type]||jQuery.event.special[type].teardown.call(elem)===false){if(elem.removeEventListener)elem.removeEventListener(type,jQuery.data(elem,"handle"),false);else if(elem.detachEvent)elem.detachEvent("on"+type,jQuery.data(elem,"handle"));}ret=null;delete events[type];}}});}for(ret in events)break;if(!ret){var handle=jQuery.data(elem,"handle");if(handle)handle.elem=null;jQuery.removeData(elem,"events");jQuery.removeData(elem,"handle");}}},trigger:function(type,data,elem,donative,extra){data=jQuery.makeArray(data);if(type.indexOf("!")>=0){type=type.slice(0,-1);var exclusive=true;}if(!elem){if(this.global[type])jQuery("*").add([window,document]).trigger(type,data);}else{if(elem.nodeType==3||elem.nodeType==8)return undefined;var val,ret,fn=jQuery.isFunction(elem[type]||null),event=!data[0]||!data[0].preventDefault;if(event){data.unshift({type:type,target:elem,preventDefault:function(){},stopPropagation:function(){},timeStamp:now()});data[0][expando]=true;}data[0].type=type;if(exclusive)data[0].exclusive=true;var handle=jQuery.data(elem,"handle");if(handle)val=handle.apply(elem,data);if((!fn||(jQuery.nodeName(elem,'a')&&type=="click"))&&elem["on"+type]&&elem["on"+type].apply(elem,data)===false)val=false;if(event)data.shift();if(extra&&jQuery.isFunction(extra)){ret=extra.apply(elem,val==null?data:data.concat(val));if(ret!==undefined)val=ret;}if(fn&&donative!==false&&val!==false&&!(jQuery.nodeName(elem,'a')&&type=="click")){this.triggered=true;try{elem[type]();}catch(e){}}this.triggered=false;}return val;},handle:function(event){var val,ret,namespace,all,handlers;event=arguments[0]=jQuery.event.fix(event||window.event);namespace=event.type.split(".");event.type=namespace[0];namespace=namespace[1];all=!namespace&&!event.exclusive;handlers=(jQuery.data(this,"events")||{})[event.type];for(var j in handlers){var handler=handlers[j];if(all||handler.type==namespace){event.handler=handler;event.data=handler.data;ret=handler.apply(this,arguments);if(val!==false)val=ret;if(ret===false){event.preventDefault();event.stopPropagation();}}}return val;},fix:function(event){if(event[expando]==true)return event;var originalEvent=event;event={originalEvent:originalEvent};var props="altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode metaKey newValue originalTarget pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target timeStamp toElement type view wheelDelta which".split(" ");for(var i=props.length;i;i--)event[props[i]]=originalEvent[props[i]];event[expando]=true;event.preventDefault=function(){if(originalEvent.preventDefault)originalEvent.preventDefault();originalEvent.returnValue=false;};event.stopPropagation=function(){if(originalEvent.stopPropagation)originalEvent.stopPropagation();originalEvent.cancelBubble=true;};event.timeStamp=event.timeStamp||now();if(!event.target)event.target=event.srcElement||document;if(event.target.nodeType==3)event.target=event.target.parentNode;if(!event.relatedTarget&&event.fromElement)event.relatedTarget=event.fromElement==event.target?event.toElement:event.fromElement;if(event.pageX==null&&event.clientX!=null){var 
doc=document.documentElement,body=document.body;event.pageX=event.clientX+(doc&&doc.scrollLeft||body&&body.scrollLeft||0)-(doc.clientLeft||0);event.pageY=event.clientY+(doc&&doc.scrollTop||body&&body.scrollTop||0)-(doc.clientTop||0);}if(!event.which&&((event.charCode||event.charCode===0)?event.charCode:event.keyCode))event.which=event.charCode||event.keyCode;if(!event.metaKey&&event.ctrlKey)event.metaKey=event.ctrlKey;if(!event.which&&event.button)event.which=(event.button&1?1:(event.button&2?3:(event.button&4?2:0)));return event;},proxy:function(fn,proxy){proxy.guid=fn.guid=fn.guid||proxy.guid||this.guid++;return proxy;},special:{ready:{setup:function(){bindReady();return;},teardown:function(){return;}},mouseenter:{setup:function(){if(jQuery.browser.msie)return false;jQuery(this).bind("mouseover",jQuery.event.special.mouseenter.handler);return true;},teardown:function(){if(jQuery.browser.msie)return false;jQuery(this).unbind("mouseover",jQuery.event.special.mouseenter.handler);return true;},handler:function(event){if(withinElement(event,this))return true;event.type="mouseenter";return jQuery.event.handle.apply(this,arguments);}},mouseleave:{setup:function(){if(jQuery.browser.msie)return false;jQuery(this).bind("mouseout",jQuery.event.special.mouseleave.handler);return true;},teardown:function(){if(jQuery.browser.msie)return false;jQuery(this).unbind("mouseout",jQuery.event.special.mouseleave.handler);return true;},handler:function(event){if(withinElement(event,this))return true;event.type="mouseleave";return jQuery.event.handle.apply(this,arguments);}}}};jQuery.fn.extend({bind:function(type,data,fn){return type=="unload"?this.one(type,data,fn):this.each(function(){jQuery.event.add(this,type,fn||data,fn&&data);});},one:function(type,data,fn){var one=jQuery.event.proxy(fn||data,function(event){jQuery(this).unbind(event,one);return(fn||data).apply(this,arguments);});return this.each(function(){jQuery.event.add(this,type,one,fn&&data);});},unbind:function(type,fn){return this.each(function(){jQuery.event.remove(this,type,fn);});},trigger:function(type,data,fn){return this.each(function(){jQuery.event.trigger(type,data,this,true,fn);});},triggerHandler:function(type,data,fn){return this[0]&&jQuery.event.trigger(type,data,this[0],false,fn);},toggle:function(fn){var args=arguments,i=1;while(i<args.length)jQuery.event.proxy(fn,args[i++]);return this.click(jQuery.event.proxy(fn,function(event){this.lastToggle=(this.lastToggle||0)%i;event.preventDefault();return args[this.lastToggle++].apply(this,arguments)||false;}));},hover:function(fnOver,fnOut){return this.bind('mouseenter',fnOver).bind('mouseleave',fnOut);},ready:function(fn){bindReady();if(jQuery.isReady)fn.call(document,jQuery);else
-jQuery.readyList.push(function(){return fn.call(this,jQuery);});return this;}});jQuery.extend({isReady:false,readyList:[],ready:function(){if(!jQuery.isReady){jQuery.isReady=true;if(jQuery.readyList){jQuery.each(jQuery.readyList,function(){this.call(document);});jQuery.readyList=null;}jQuery(document).triggerHandler("ready");}}});var readyBound=false;function bindReady(){if(readyBound)return;readyBound=true;if(document.addEventListener&&!jQuery.browser.opera)document.addEventListener("DOMContentLoaded",jQuery.ready,false);if(jQuery.browser.msie&&window==top)(function(){if(jQuery.isReady)return;try{document.documentElement.doScroll("left");}catch(error){setTimeout(arguments.callee,0);return;}jQuery.ready();})();if(jQuery.browser.opera)document.addEventListener("DOMContentLoaded",function(){if(jQuery.isReady)return;for(var i=0;i<document.styleSheets.length;i++)if(document.styleSheets[i].disabled){setTimeout(arguments.callee,0);return;}jQuery.ready();},false);if(jQuery.browser.safari){var numStyles;(function(){if(jQuery.isReady)return;if(document.readyState!="loaded"&&document.readyState!="complete"){setTimeout(arguments.callee,0);return;}if(numStyles===undefined)numStyles=jQuery("style, link[rel=stylesheet]").length;if(document.styleSheets.length!=numStyles){setTimeout(arguments.callee,0);return;}jQuery.ready();})();}jQuery.event.add(window,"load",jQuery.ready);}jQuery.each(("blur,focus,load,resize,scroll,unload,click,dblclick,"+"mousedown,mouseup,mousemove,mouseover,mouseout,change,select,"+"submit,keydown,keypress,keyup,error").split(","),function(i,name){jQuery.fn[name]=function(fn){return fn?this.bind(name,fn):this.trigger(name);};});var withinElement=function(event,elem){var parent=event.relatedTarget;while(parent&&parent!=elem)try{parent=parent.parentNode;}catch(error){parent=elem;}return parent==elem;};jQuery(window).bind("unload",function(){jQuery("*").add(document).unbind();});jQuery.fn.extend({_load:jQuery.fn.load,load:function(url,params,callback){if(typeof url!='string')return this._load(url);var off=url.indexOf(" ");if(off>=0){var selector=url.slice(off,url.length);url=url.slice(0,off);}callback=callback||function(){};var type="GET";if(params)if(jQuery.isFunction(params)){callback=params;params=null;}else{params=jQuery.param(params);type="POST";}var self=this;jQuery.ajax({url:url,type:type,dataType:"html",data:params,complete:function(res,status){if(status=="success"||status=="notmodified")self.html(selector?jQuery("<div/>").append(res.responseText.replace(/<script(.|\s)*?\/script>/g,"")).find(selector):res.responseText);self.each(callback,[res.responseText,status,res]);}});return this;},serialize:function(){return jQuery.param(this.serializeArray());},serializeArray:function(){return this.map(function(){return jQuery.nodeName(this,"form")?jQuery.makeArray(this.elements):this;}).filter(function(){return this.name&&!this.disabled&&(this.checked||/select|textarea/i.test(this.nodeName)||/text|hidden|password/i.test(this.type));}).map(function(i,elem){var val=jQuery(this).val();return val==null?null:val.constructor==Array?jQuery.map(val,function(val,i){return{name:elem.name,value:val};}):{name:elem.name,value:val};}).get();}});jQuery.each("ajaxStart,ajaxStop,ajaxComplete,ajaxError,ajaxSuccess,ajaxSend".split(","),function(i,o){jQuery.fn[o]=function(f){return this.bind(o,f);};});var jsc=now();jQuery.extend({get:function(url,data,callback,type){if(jQuery.isFunction(data)){callback=data;data=null;}return 
jQuery.ajax({type:"GET",url:url,data:data,success:callback,dataType:type});},getScript:function(url,callback){return jQuery.get(url,null,callback,"script");},getJSON:function(url,data,callback){return jQuery.get(url,data,callback,"json");},post:function(url,data,callback,type){if(jQuery.isFunction(data)){callback=data;data={};}return jQuery.ajax({type:"POST",url:url,data:data,success:callback,dataType:type});},ajaxSetup:function(settings){jQuery.extend(jQuery.ajaxSettings,settings);},ajaxSettings:{url:location.href,global:true,type:"GET",timeout:0,contentType:"application/x-www-form-urlencoded",processData:true,async:true,data:null,username:null,password:null,accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},ajax:function(s){s=jQuery.extend(true,s,jQuery.extend(true,{},jQuery.ajaxSettings,s));var jsonp,jsre=/=\?(&|$)/g,status,data,type=s.type.toUpperCase();if(s.data&&s.processData&&typeof s.data!="string")s.data=jQuery.param(s.data);if(s.dataType=="jsonp"){if(type=="GET"){if(!s.url.match(jsre))s.url+=(s.url.match(/\?/)?"&":"?")+(s.jsonp||"callback")+"=?";}else if(!s.data||!s.data.match(jsre))s.data=(s.data?s.data+"&":"")+(s.jsonp||"callback")+"=?";s.dataType="json";}if(s.dataType=="json"&&(s.data&&s.data.match(jsre)||s.url.match(jsre))){jsonp="jsonp"+jsc++;if(s.data)s.data=(s.data+"").replace(jsre,"="+jsonp+"$1");s.url=s.url.replace(jsre,"="+jsonp+"$1");s.dataType="script";window[jsonp]=function(tmp){data=tmp;success();complete();window[jsonp]=undefined;try{delete window[jsonp];}catch(e){}if(head)head.removeChild(script);};}if(s.dataType=="script"&&s.cache==null)s.cache=false;if(s.cache===false&&type=="GET"){var ts=now();var ret=s.url.replace(/(\?|&)_=.*?(&|$)/,"$1_="+ts+"$2");s.url=ret+((ret==s.url)?(s.url.match(/\?/)?"&":"?")+"_="+ts:"");}if(s.data&&type=="GET"){s.url+=(s.url.match(/\?/)?"&":"?")+s.data;s.data=null;}if(s.global&&!jQuery.active++)jQuery.event.trigger("ajaxStart");var remote=/^(?:\w+:)?\/\/([^\/?#]+)/;if(s.dataType=="script"&&type=="GET"&&remote.test(s.url)&&remote.exec(s.url)[1]!=location.host){var head=document.getElementsByTagName("head")[0];var script=document.createElement("script");script.src=s.url;if(s.scriptCharset)script.charset=s.scriptCharset;if(!jsonp){var done=false;script.onload=script.onreadystatechange=function(){if(!done&&(!this.readyState||this.readyState=="loaded"||this.readyState=="complete")){done=true;success();complete();head.removeChild(script);}};}head.appendChild(script);return undefined;}var requestDone=false;var xhr=window.ActiveXObject?new ActiveXObject("Microsoft.XMLHTTP"):new XMLHttpRequest();if(s.username)xhr.open(type,s.url,s.async,s.username,s.password);else
-xhr.open(type,s.url,s.async);try{if(s.data)xhr.setRequestHeader("Content-Type",s.contentType);if(s.ifModified)xhr.setRequestHeader("If-Modified-Since",jQuery.lastModified[s.url]||"Thu, 01 Jan 1970 00:00:00 GMT");xhr.setRequestHeader("X-Requested-With","XMLHttpRequest");xhr.setRequestHeader("Accept",s.dataType&&s.accepts[s.dataType]?s.accepts[s.dataType]+", */*":s.accepts._default);}catch(e){}if(s.beforeSend&&s.beforeSend(xhr,s)===false){s.global&&jQuery.active--;xhr.abort();return false;}if(s.global)jQuery.event.trigger("ajaxSend",[xhr,s]);var onreadystatechange=function(isTimeout){if(!requestDone&&xhr&&(xhr.readyState==4||isTimeout=="timeout")){requestDone=true;if(ival){clearInterval(ival);ival=null;}status=isTimeout=="timeout"&&"timeout"||!jQuery.httpSuccess(xhr)&&"error"||s.ifModified&&jQuery.httpNotModified(xhr,s.url)&&"notmodified"||"success";if(status=="success"){try{data=jQuery.httpData(xhr,s.dataType,s.dataFilter);}catch(e){status="parsererror";}}if(status=="success"){var modRes;try{modRes=xhr.getResponseHeader("Last-Modified");}catch(e){}if(s.ifModified&&modRes)jQuery.lastModified[s.url]=modRes;if(!jsonp)success();}else
-jQuery.handleError(s,xhr,status);complete();if(s.async)xhr=null;}};if(s.async){var ival=setInterval(onreadystatechange,13);if(s.timeout>0)setTimeout(function(){if(xhr){xhr.abort();if(!requestDone)onreadystatechange("timeout");}},s.timeout);}try{xhr.send(s.data);}catch(e){jQuery.handleError(s,xhr,null,e);}if(!s.async)onreadystatechange();function success(){if(s.success)s.success(data,status);if(s.global)jQuery.event.trigger("ajaxSuccess",[xhr,s]);}function complete(){if(s.complete)s.complete(xhr,status);if(s.global)jQuery.event.trigger("ajaxComplete",[xhr,s]);if(s.global&&!--jQuery.active)jQuery.event.trigger("ajaxStop");}return xhr;},handleError:function(s,xhr,status,e){if(s.error)s.error(xhr,status,e);if(s.global)jQuery.event.trigger("ajaxError",[xhr,s,e]);},active:0,httpSuccess:function(xhr){try{return!xhr.status&&location.protocol=="file:"||(xhr.status>=200&&xhr.status<300)||xhr.status==304||xhr.status==1223||jQuery.browser.safari&&xhr.status==undefined;}catch(e){}return false;},httpNotModified:function(xhr,url){try{var xhrRes=xhr.getResponseHeader("Last-Modified");return xhr.status==304||xhrRes==jQuery.lastModified[url]||jQuery.browser.safari&&xhr.status==undefined;}catch(e){}return false;},httpData:function(xhr,type,filter){var ct=xhr.getResponseHeader("content-type"),xml=type=="xml"||!type&&ct&&ct.indexOf("xml")>=0,data=xml?xhr.responseXML:xhr.responseText;if(xml&&data.documentElement.tagName=="parsererror")throw"parsererror";if(filter)data=filter(data,type);if(type=="script")jQuery.globalEval(data);if(type=="json")data=eval("("+data+")");return data;},param:function(a){var s=[];if(a.constructor==Array||a.jquery)jQuery.each(a,function(){s.push(encodeURIComponent(this.name)+"="+encodeURIComponent(this.value));});else
-for(var j in a)if(a[j]&&a[j].constructor==Array)jQuery.each(a[j],function(){s.push(encodeURIComponent(j)+"="+encodeURIComponent(this));});else
-s.push(encodeURIComponent(j)+"="+encodeURIComponent(jQuery.isFunction(a[j])?a[j]():a[j]));return s.join("&").replace(/%20/g,"+");}});jQuery.fn.extend({show:function(speed,callback){return speed?this.animate({height:"show",width:"show",opacity:"show"},speed,callback):this.filter(":hidden").each(function(){this.style.display=this.oldblock||"";if(jQuery.css(this,"display")=="none"){var elem=jQuery("<"+this.tagName+" />").appendTo("body");this.style.display=elem.css("display");if(this.style.display=="none")this.style.display="block";elem.remove();}}).end();},hide:function(speed,callback){return speed?this.animate({height:"hide",width:"hide",opacity:"hide"},speed,callback):this.filter(":visible").each(function(){this.oldblock=this.oldblock||jQuery.css(this,"display");this.style.display="none";}).end();},_toggle:jQuery.fn.toggle,toggle:function(fn,fn2){return jQuery.isFunction(fn)&&jQuery.isFunction(fn2)?this._toggle.apply(this,arguments):fn?this.animate({height:"toggle",width:"toggle",opacity:"toggle"},fn,fn2):this.each(function(){jQuery(this)[jQuery(this).is(":hidden")?"show":"hide"]();});},slideDown:function(speed,callback){return this.animate({height:"show"},speed,callback);},slideUp:function(speed,callback){return this.animate({height:"hide"},speed,callback);},slideToggle:function(speed,callback){return this.animate({height:"toggle"},speed,callback);},fadeIn:function(speed,callback){return this.animate({opacity:"show"},speed,callback);},fadeOut:function(speed,callback){return this.animate({opacity:"hide"},speed,callback);},fadeTo:function(speed,to,callback){return this.animate({opacity:to},speed,callback);},animate:function(prop,speed,easing,callback){var optall=jQuery.speed(speed,easing,callback);return this[optall.queue===false?"each":"queue"](function(){if(this.nodeType!=1)return false;var opt=jQuery.extend({},optall),p,hidden=jQuery(this).is(":hidden"),self=this;for(p in prop){if(prop[p]=="hide"&&hidden||prop[p]=="show"&&!hidden)return opt.complete.call(this);if(p=="height"||p=="width"){opt.display=jQuery.css(this,"display");opt.overflow=this.style.overflow;}}if(opt.overflow!=null)this.style.overflow="hidden";opt.curAnim=jQuery.extend({},prop);jQuery.each(prop,function(name,val){var e=new jQuery.fx(self,opt,name);if(/toggle|show|hide/.test(val))e[val=="toggle"?hidden?"show":"hide":val](prop);else{var parts=val.toString().match(/^([+-]=)?([\d+-.]+)(.*)$/),start=e.cur(true)||0;if(parts){var end=parseFloat(parts[2]),unit=parts[3]||"px";if(unit!="px"){self.style[name]=(end||1)+unit;start=((end||1)/e.cur(true))*start;self.style[name]=start+unit;}if(parts[1])end=((parts[1]=="-="?-1:1)*end)+start;e.custom(start,end,unit);}else
-e.custom(start,val,"");}});return true;});},queue:function(type,fn){if(jQuery.isFunction(type)||(type&&type.constructor==Array)){fn=type;type="fx";}if(!type||(typeof type=="string"&&!fn))return queue(this[0],type);return this.each(function(){if(fn.constructor==Array)queue(this,type,fn);else{queue(this,type).push(fn);if(queue(this,type).length==1)fn.call(this);}});},stop:function(clearQueue,gotoEnd){var timers=jQuery.timers;if(clearQueue)this.queue([]);this.each(function(){for(var i=timers.length-1;i>=0;i--)if(timers[i].elem==this){if(gotoEnd)timers[i](true);timers.splice(i,1);}});if(!gotoEnd)this.dequeue();return this;}});var queue=function(elem,type,array){if(elem){type=type||"fx";var q=jQuery.data(elem,type+"queue");if(!q||array)q=jQuery.data(elem,type+"queue",jQuery.makeArray(array));}return q;};jQuery.fn.dequeue=function(type){type=type||"fx";return this.each(function(){var q=queue(this,type);q.shift();if(q.length)q[0].call(this);});};jQuery.extend({speed:function(speed,easing,fn){var opt=speed&&speed.constructor==Object?speed:{complete:fn||!fn&&easing||jQuery.isFunction(speed)&&speed,duration:speed,easing:fn&&easing||easing&&easing.constructor!=Function&&easing};opt.duration=(opt.duration&&opt.duration.constructor==Number?opt.duration:jQuery.fx.speeds[opt.duration])||jQuery.fx.speeds.def;opt.old=opt.complete;opt.complete=function(){if(opt.queue!==false)jQuery(this).dequeue();if(jQuery.isFunction(opt.old))opt.old.call(this);};return opt;},easing:{linear:function(p,n,firstNum,diff){return firstNum+diff*p;},swing:function(p,n,firstNum,diff){return((-Math.cos(p*Math.PI)/2)+0.5)*diff+firstNum;}},timers:[],timerId:null,fx:function(elem,options,prop){this.options=options;this.elem=elem;this.prop=prop;if(!options.orig)options.orig={};}});jQuery.fx.prototype={update:function(){if(this.options.step)this.options.step.call(this.elem,this.now,this);(jQuery.fx.step[this.prop]||jQuery.fx.step._default)(this);if(this.prop=="height"||this.prop=="width")this.elem.style.display="block";},cur:function(force){if(this.elem[this.prop]!=null&&this.elem.style[this.prop]==null)return this.elem[this.prop];var r=parseFloat(jQuery.css(this.elem,this.prop,force));return r&&r>-10000?r:parseFloat(jQuery.curCSS(this.elem,this.prop))||0;},custom:function(from,to,unit){this.startTime=now();this.start=from;this.end=to;this.unit=unit||this.unit||"px";this.now=this.start;this.pos=this.state=0;this.update();var self=this;function t(gotoEnd){return self.step(gotoEnd);}t.elem=this.elem;jQuery.timers.push(t);if(jQuery.timerId==null){jQuery.timerId=setInterval(function(){var timers=jQuery.timers;for(var i=0;i<timers.length;i++)if(!timers[i]())timers.splice(i--,1);if(!timers.length){clearInterval(jQuery.timerId);jQuery.timerId=null;}},13);}},show:function(){this.options.orig[this.prop]=jQuery.attr(this.elem.style,this.prop);this.options.show=true;this.custom(0,this.cur());if(this.prop=="width"||this.prop=="height")this.elem.style[this.prop]="1px";jQuery(this.elem).show();},hide:function(){this.options.orig[this.prop]=jQuery.attr(this.elem.style,this.prop);this.options.hide=true;this.custom(this.cur(),0);},step:function(gotoEnd){var t=now();if(gotoEnd||t>this.options.duration+this.startTime){this.now=this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;var done=true;for(var i in 
this.options.curAnim)if(this.options.curAnim[i]!==true)done=false;if(done){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;this.elem.style.display=this.options.display;if(jQuery.css(this.elem,"display")=="none")this.elem.style.display="block";}if(this.options.hide)this.elem.style.display="none";if(this.options.hide||this.options.show)for(var p in this.options.curAnim)jQuery.attr(this.elem.style,p,this.options.orig[p]);}if(done)this.options.complete.call(this.elem);return false;}else{var n=t-this.startTime;this.state=n/this.options.duration;this.pos=jQuery.easing[this.options.easing||(jQuery.easing.swing?"swing":"linear")](this.state,n,0,1,this.options.duration);this.now=this.start+((this.end-this.start)*this.pos);this.update();}return true;}};jQuery.extend(jQuery.fx,{speeds:{slow:600,fast:200,def:400},step:{scrollLeft:function(fx){fx.elem.scrollLeft=fx.now;},scrollTop:function(fx){fx.elem.scrollTop=fx.now;},opacity:function(fx){jQuery.attr(fx.elem.style,"opacity",fx.now);},_default:function(fx){fx.elem.style[fx.prop]=fx.now+fx.unit;}}});jQuery.fn.offset=function(){var left=0,top=0,elem=this[0],results;if(elem)with(jQuery.browser){var parent=elem.parentNode,offsetChild=elem,offsetParent=elem.offsetParent,doc=elem.ownerDocument,safari2=safari&&parseInt(version)<522&&!/adobeair/i.test(userAgent),css=jQuery.curCSS,fixed=css(elem,"position")=="fixed";if(elem.getBoundingClientRect){var box=elem.getBoundingClientRect();add(box.left+Math.max(doc.documentElement.scrollLeft,doc.body.scrollLeft),box.top+Math.max(doc.documentElement.scrollTop,doc.body.scrollTop));add(-doc.documentElement.clientLeft,-doc.documentElement.clientTop);}else{add(elem.offsetLeft,elem.offsetTop);while(offsetParent){add(offsetParent.offsetLeft,offsetParent.offsetTop);if(mozilla&&!/^t(able|d|h)$/i.test(offsetParent.tagName)||safari&&!safari2)border(offsetParent);if(!fixed&&css(offsetParent,"position")=="fixed")fixed=true;offsetChild=/^body$/i.test(offsetParent.tagName)?offsetChild:offsetParent;offsetParent=offsetParent.offsetParent;}while(parent&&parent.tagName&&!/^body|html$/i.test(parent.tagName)){if(!/^inline|table.*$/i.test(css(parent,"display")))add(-parent.scrollLeft,-parent.scrollTop);if(mozilla&&css(parent,"overflow")!="visible")border(parent);parent=parent.parentNode;}if((safari2&&(fixed||css(offsetChild,"position")=="absolute"))||(mozilla&&css(offsetChild,"position")!="absolute"))add(-doc.body.offsetLeft,-doc.body.offsetTop);if(fixed)add(Math.max(doc.documentElement.scrollLeft,doc.body.scrollLeft),Math.max(doc.documentElement.scrollTop,doc.body.scrollTop));}results={top:top,left:left};}function border(elem){add(jQuery.curCSS(elem,"borderLeftWidth",true),jQuery.curCSS(elem,"borderTopWidth",true));}function add(l,t){left+=parseInt(l,10)||0;top+=parseInt(t,10)||0;}return results;};jQuery.fn.extend({position:function(){var left=0,top=0,results;if(this[0]){var offsetParent=this.offsetParent(),offset=this.offset(),parentOffset=/^body|html$/i.test(offsetParent[0].tagName)?{top:0,left:0}:offsetParent.offset();offset.top-=num(this,'marginTop');offset.left-=num(this,'marginLeft');parentOffset.top+=num(offsetParent,'borderTopWidth');parentOffset.left+=num(offsetParent,'borderLeftWidth');results={top:offset.top-parentOffset.top,left:offset.left-parentOffset.left};}return results;},offsetParent:function(){var offsetParent=this[0].offsetParent;while(offsetParent&&(!/^body|html$/i.test(offsetParent.tagName)&&jQuery.css(offsetParent,'position')=='static'))offsetParent=offsetParent.offsetParent;return 
jQuery(offsetParent);}});jQuery.each(['Left','Top'],function(i,name){var method='scroll'+name;jQuery.fn[method]=function(val){if(!this[0])return;return val!=undefined?this.each(function(){this==window||this==document?window.scrollTo(!i?val:jQuery(window).scrollLeft(),i?val:jQuery(window).scrollTop()):this[method]=val;}):this[0]==window||this[0]==document?self[i?'pageYOffset':'pageXOffset']||jQuery.boxModel&&document.documentElement[method]||document.body[method]:this[0][method];};});jQuery.each(["Height","Width"],function(i,name){var tl=i?"Left":"Top",br=i?"Right":"Bottom";jQuery.fn["inner"+name]=function(){return this[name.toLowerCase()]()+num(this,"padding"+tl)+num(this,"padding"+br);};jQuery.fn["outer"+name]=function(margin){return this["inner"+name]()+num(this,"border"+tl+"Width")+num(this,"border"+br+"Width")+(margin?num(this,"margin"+tl)+num(this,"margin"+br):0);};});})();
\ No newline at end of file
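
The hunk above drops the tail of the bundled jquery-1.2.6.min.js: the ajax success/error/complete plumbing, jQuery.param serialization, the fx/animation queue (show/hide/animate stepped by a 13 ms setInterval), and the offset/scroll/dimension helpers. For illustration only, a minimal usage sketch of that API in jQuery 1.2.x idioms follows; the '#openid_input_area' id is borrowed from the openid widget's markup and is just an example element, not something this changeset adds.

// Hedged usage sketch (not part of the changeset): typical calls into the
// effects and serialization helpers implemented by the minified block above.
jQuery(document).ready(function () {
    // slideDown() queues an fx animation; the 13 ms setInterval in the
    // removed code steps it until the element reaches its natural height.
    jQuery('#openid_input_area').slideDown('fast', function () {
        // jQuery.fn.offset() (also defined above) returns document-relative
        // pixel coordinates of the animated element.
        var pos = jQuery(this).offset();
        // pos.left / pos.top can now be used for positioning decisions.
    });

    // jQuery.param() serializes a plain object into a query string: "a=1&b=two".
    var qs = jQuery.param({ a: 1, b: 'two' });
});
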
--- a/web/static/socialauth/js/openid-jquery.js	Wed Mar 23 17:34:36 2011 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,228 +0,0 @@
-/*
-Simple OpenID Plugin
-http://code.google.com/p/openid-selector/
-
-This code is licenced under the New BSD License.
-*/
-
-var providers_large = {
-    google: {
-        name: 'Google',
-        url: 'https://www.google.com/accounts/o8/id'
-    },
-    yahoo: {
-        name: 'Yahoo',      
-        url: 'https://me.yahoo.com/'
-    },    
-    aol: {
-        name: 'AOL',     
-        label: 'Enter your AOL screenname.',
-        url: 'http://openid.aol.com/{username}/'
-    },
-    openid: {
-        name: 'OpenID',     
-        label: 'Enter your OpenID.',
-        url: null
-    }
-};
-var providers_small = {
-    myopenid: {
-        name: 'MyOpenID',
-        label: 'Enter your MyOpenID username.',
-        url: 'http://{username}.myopenid.com/'
-    },
-    livejournal: {
-        name: 'LiveJournal',
-        label: 'Enter your Livejournal username.',
-        url: 'http://{username}.livejournal.com/'
-    },
-    flickr: {
-        name: 'Flickr',        
-        label: 'Enter your Flickr username.',
-        url: 'http://flickr.com/photos/{username}/'
-    },
-    technorati: {
-        name: 'Technorati',
-        label: 'Enter your Technorati username.',
-        url: 'http://technorati.com/people/technorati/{username}/'
-    },
-    wordpress: {
-        name: 'Wordpress',
-        label: 'Enter your Wordpress.com username.',
-        url: 'http://{username}.wordpress.com/'
-    },
-    blogger: {
-        name: 'Blogger',
-        label: 'Your Blogger account',
-        url: 'http://{username}.blogspot.com/'
-    },
-    verisign: {
-        name: 'Verisign',
-        label: 'Your Verisign username',
-        url: 'http://{username}.pip.verisignlabs.com/'
-    },
-    vidoop: {
-        name: 'Vidoop',
-        label: 'Your Vidoop username',
-        url: 'http://{username}.myvidoop.com/'
-    },
-    claimid: {
-        name: 'ClaimID',
-        label: 'Your ClaimID username',
-        url: 'http://claimid.com/{username}'
-    }
-};
-var providers = $.extend({}, providers_large, providers_small);
-
-var openid = {
-
-	cookie_expires: 6*30,	// 6 months.
-	cookie_name: 'openid_provider',
-	cookie_path: '/',
-	
-	img_path: '/platform/static/images/',
-	
-	input_id: null,
-	provider_url: null,
-	
-    init: function(input_id) {
-        
-        var openid_btns = $('#openid_btns');
-        
-        this.input_id = input_id;
-        
-        $('#openid_choice').show();
-        $('#openid_input_area').empty();
-        // add box for each provider
-        for (id in providers_large) {
-        
-           	openid_btns.append(this.getBoxHTML(providers_large[id], 'large', '.gif'));
-        }
-        if (providers_small) {
-        	openid_btns.append('<br/>');
-        	
-	        for (id in providers_small) {
-	        
-	           	openid_btns.append(this.getBoxHTML(providers_small[id], 'small', '.ico'));
-	        }
-        }
-        
-        $('#openid_form').submit(this.submit);
-        
-        var box_id = this.readCookie();
-        if (box_id) {
-        	this.signin(box_id, true);
-        }  
-    },
-    getBoxHTML: function(provider, box_size, image_ext) {
-            
-        var box_id = provider["name"].toLowerCase();
-        return '<a title="'+provider["name"]+'" href="javascript: openid.signin(\''+ box_id +'\');"' +
-        		' style="background: #FFF url(' + this.img_path + box_id + image_ext+') no-repeat center center" ' + 
-        		'class="' + box_id + ' openid_' + box_size + '_btn"></a>';    
-    
-    },
-    /* Provider image click */
-    signin: function(box_id, onload) {
-    
-    	var provider = providers[box_id];
-  		if (! provider) {
-  			return;
-  		}
-		
-		this.highlight(box_id);
-		this.setCookie(box_id);
-		
-		// prompt user for input?
-		if (provider['label']) {
-			
-			this.useInputBox(provider);
-			this.provider_url = provider['url'];
-			
-		} else {
-			
-			this.setOpenIdUrl(provider['url']);
-			if (! onload) {
-				$('#openid_form').submit();
-			}	
-		}
-    },
-    /* Sign-in button click */
-    submit: function() {
-        
-    	var url = openid.provider_url; 
-    	if (url) {
-    		url = url.replace('{username}', $('#openid_username').val());
-    		openid.setOpenIdUrl(url);
-    	}
-    	return true;
-    },
-    setOpenIdUrl: function (url) {
-    
-    	var hidden = $('#'+this.input_id);
-    	if (hidden.length > 0) {
-    		hidden.value = url;
-    	} else {
-    		$('#openid_form').append('<input type="hidden" id="' + this.input_id + '" name="' + this.input_id + '" value="'+url+'"/>');
-    	}
-    },
-    highlight: function (box_id) {
-    	
-    	// remove previous highlight.
-    	var highlight = $('#openid_highlight');
-    	if (highlight) {
-    		highlight.replaceWith($('#openid_highlight a')[0]);
-    	}
-    	// add new highlight.
-    	$('.'+box_id).wrap('<div id="openid_highlight"></div>');
-    },
-    setCookie: function (value) {
-    
-		var date = new Date();
-		date.setTime(date.getTime()+(this.cookie_expires*24*60*60*1000));
-		var expires = "; expires="+date.toGMTString();
-		
-		document.cookie = this.cookie_name+"="+value+expires+"; path=" + this.cookie_path;
-    },
-    readCookie: function () {
-		var nameEQ = this.cookie_name + "=";
-		var ca = document.cookie.split(';');
-		for(var i=0;i < ca.length;i++) {
-			var c = ca[i];
-			while (c.charAt(0)==' ') c = c.substring(1,c.length);
-			if (c.indexOf(nameEQ) == 0) return c.substring(nameEQ.length,c.length);
-		}
-		return null;
-    },
-    useInputBox: function (provider) {
-   	
-		var input_area = $('#openid_input_area');
-		
-		var html = '';
-		var id = 'openid_username';
-		var value = '';
-		var label = provider['label'];
-		var style = '';
-		
-		if (label) {
-			html = '<p>' + label + '</p>';
-		}
-		if (provider['name'] == 'OpenID') {
-			id = this.input_id;
-			value = 'http://';
-			style = 'background:#FFF url('+this.img_path+'openid-inputicon.gif) no-repeat scroll 0 50%; padding-left:18px;';
-		}
-		html += '<input id="'+id+'" type="text" style="'+style+'" name="'+id+'" value="'+value+'" />' + 
-					'<input id="openid_submit" type="submit" value="Sign-In"/>';
-		
-		input_area.empty();
-		input_area.append(html);
-
-		$('#'+id).focus();
-    }
-};
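
The file removed above is the openid-selector widget: it maps provider ids to {username} URL templates, remembers the last chosen provider in a cookie, and writes the resolved identifier into a hidden input before the login form submits. A minimal sketch of that core step follows, for illustration only; the helper name fillHiddenField, the example username and the 'openid_identifier' field name are assumptions, while 'openid_form' and the provider URL template mirror the removed file.

// Hedged sketch (not part of the changeset): the plugin's core mechanism,
// reduced to its essentials.
var openidSketch = {
    providers: {
        wordpress: { name: 'Wordpress', url: 'http://{username}.wordpress.com/' }
    },
    // Substitute the typed username into the provider's URL template, then
    // store the resulting identifier in a hidden field so the normal form
    // POST carries it to the server-side OpenID handler.
    fillHiddenField: function (providerId, username, inputId) {
        var url = this.providers[providerId].url.replace('{username}', username);
        var hidden = jQuery('#' + inputId);
        if (hidden.length > 0) {
            // Use val(); note the removed code assigned hidden.value, which
            // has no effect on a jQuery wrapper object.
            hidden.val(url);
        } else {
            jQuery('#openid_form').append(
                '<input type="hidden" id="' + inputId + '" name="' + inputId +
                '" value="' + url + '"/>');
        }
        return url;
    }
};
// Example call: openidSketch.fillHiddenField('wordpress', 'alice', 'openid_identifier');
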