# HG changeset patch
# User cavaliet
# Date 1302273181 -7200
# Node ID 125fc6df230d752dcda392fb8cb093098cc8c9cc
# Parent 2e2989c3072c88a1df669c1b5aa5140d91d3ac30# Parent 0556c56ef5ebf7cafd290c95d473814ec09b0143
fusionner
diff -r 2e2989c3072c -r 125fc6df230d .hgignore
--- a/.hgignore Wed Mar 23 17:34:36 2011 +0100
+++ b/.hgignore Fri Apr 08 16:33:01 2011 +0200
@@ -32,4 +32,10 @@
syntax: regexp
^src/ldt/build$
syntax: regexp
-^src/ldt/ldt\.egg-info$
\ No newline at end of file
+^src/ldt/ldt\.egg-info$
+syntax: regexp
+^src/ldt/distribute-0\.6\.14\.tar\.gz$
+syntax: regexp
+^src/ldt/distribute-0\.6\.14-py2\.6\.egg$
+syntax: regexp
+^src/ldt/MANIFEST\.in$
\ No newline at end of file
diff -r 2e2989c3072c -r 125fc6df230d .pydevproject
--- a/.pydevproject Wed Mar 23 17:34:36 2011 +0100
+++ b/.pydevproject Fri Apr 08 16:33:01 2011 +0200
@@ -1,11 +1,11 @@
-
-
-
-
-python_platform
-python 2.6
-
-/platform/src/ldt
-/platform/web
-
-
+
+
+
+
+python_live
+python 2.6
+
+/platform/src/ldt
+/platform/web
+
+
diff -r 2e2989c3072c -r 125fc6df230d src/ldt/distribute_setup.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/ldt/distribute_setup.py Fri Apr 08 16:33:01 2011 +0200
@@ -0,0 +1,485 @@
+#!python
+"""Bootstrap distribute installation
+
+If you want to use setuptools in your package's setup.py, just include this
+file in the same directory with it, and add this to the top of your setup.py::
+
+ from distribute_setup import use_setuptools
+ use_setuptools()
+
+If you want to require a specific version of setuptools, set a download
+mirror, or use an alternate download directory, you can do so by supplying
+the appropriate options to ``use_setuptools()``.
+
+This file can also be run as a script to install or upgrade setuptools.
+"""
+import os
+import sys
+import time
+import fnmatch
+import tempfile
+import tarfile
+from distutils import log
+
+try:
+ from site import USER_SITE
+except ImportError:
+ USER_SITE = None
+
+try:
+ import subprocess
+
+ def _python_cmd(*args):
+ args = (sys.executable,) + args
+ return subprocess.call(args) == 0
+
+except ImportError:
+ # will be used for python 2.3
+ def _python_cmd(*args):
+ args = (sys.executable,) + args
+ # quoting arguments if windows
+ if sys.platform == 'win32':
+ def quote(arg):
+ if ' ' in arg:
+ return '"%s"' % arg
+ return arg
+ args = [quote(arg) for arg in args]
+ return os.spawnl(os.P_WAIT, sys.executable, *args) == 0
+
+DEFAULT_VERSION = "0.6.14"
+DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/"
+SETUPTOOLS_FAKED_VERSION = "0.6c11"
+
+SETUPTOOLS_PKG_INFO = """\
+Metadata-Version: 1.0
+Name: setuptools
+Version: %s
+Summary: xxxx
+Home-page: xxx
+Author: xxx
+Author-email: xxx
+License: xxx
+Description: xxx
+""" % SETUPTOOLS_FAKED_VERSION
+
+
+def _install(tarball):
+ # extracting the tarball
+ tmpdir = tempfile.mkdtemp()
+ log.warn('Extracting in %s', tmpdir)
+ old_wd = os.getcwd()
+ try:
+ os.chdir(tmpdir)
+ tar = tarfile.open(tarball)
+ _extractall(tar)
+ tar.close()
+
+ # going in the directory
+ subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
+ os.chdir(subdir)
+ log.warn('Now working in %s', subdir)
+
+ # installing
+ log.warn('Installing Distribute')
+ if not _python_cmd('setup.py', 'install'):
+ log.warn('Something went wrong during the installation.')
+ log.warn('See the error message above.')
+ finally:
+ os.chdir(old_wd)
+
+
+def _build_egg(egg, tarball, to_dir):
+ # extracting the tarball
+ tmpdir = tempfile.mkdtemp()
+ log.warn('Extracting in %s', tmpdir)
+ old_wd = os.getcwd()
+ try:
+ os.chdir(tmpdir)
+ tar = tarfile.open(tarball)
+ _extractall(tar)
+ tar.close()
+
+ # going in the directory
+ subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
+ os.chdir(subdir)
+ log.warn('Now working in %s', subdir)
+
+ # building an egg
+ log.warn('Building a Distribute egg in %s', to_dir)
+ _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
+
+ finally:
+ os.chdir(old_wd)
+ # returning the result
+ log.warn(egg)
+ if not os.path.exists(egg):
+ raise IOError('Could not build the egg.')
+
+
+def _do_download(version, download_base, to_dir, download_delay):
+ egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg'
+ % (version, sys.version_info[0], sys.version_info[1]))
+ if not os.path.exists(egg):
+ tarball = download_setuptools(version, download_base,
+ to_dir, download_delay)
+ _build_egg(egg, tarball, to_dir)
+ sys.path.insert(0, egg)
+ import setuptools
+ setuptools.bootstrap_install_from = egg
+
+
+def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
+ to_dir=os.curdir, download_delay=15, no_fake=True):
+ # making sure we use the absolute path
+ to_dir = os.path.abspath(to_dir)
+ was_imported = 'pkg_resources' in sys.modules or \
+ 'setuptools' in sys.modules
+ try:
+ try:
+ import pkg_resources
+ if not hasattr(pkg_resources, '_distribute'):
+ if not no_fake:
+ _fake_setuptools()
+ raise ImportError
+ except ImportError:
+ return _do_download(version, download_base, to_dir, download_delay)
+ try:
+ pkg_resources.require("distribute>="+version)
+ return
+ except pkg_resources.VersionConflict:
+ e = sys.exc_info()[1]
+ if was_imported:
+ sys.stderr.write(
+ "The required version of distribute (>=%s) is not available,\n"
+ "and can't be installed while this script is running. Please\n"
+ "install a more recent version first, using\n"
+ "'easy_install -U distribute'."
+ "\n\n(Currently using %r)\n" % (version, e.args[0]))
+ sys.exit(2)
+ else:
+ del pkg_resources, sys.modules['pkg_resources'] # reload ok
+ return _do_download(version, download_base, to_dir,
+ download_delay)
+ except pkg_resources.DistributionNotFound:
+ return _do_download(version, download_base, to_dir,
+ download_delay)
+ finally:
+ if not no_fake:
+ _create_fake_setuptools_pkg_info(to_dir)
+
+def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
+ to_dir=os.curdir, delay=15):
+ """Download distribute from a specified location and return its filename
+
+ `version` should be a valid distribute version number that is available
+ as an egg for download under the `download_base` URL (which should end
+ with a '/'). `to_dir` is the directory where the egg will be downloaded.
+ `delay` is the number of seconds to pause before an actual download
+ attempt.
+ """
+ # making sure we use the absolute path
+ to_dir = os.path.abspath(to_dir)
+ try:
+ from urllib.request import urlopen
+ except ImportError:
+ from urllib2 import urlopen
+ tgz_name = "distribute-%s.tar.gz" % version
+ url = download_base + tgz_name
+ saveto = os.path.join(to_dir, tgz_name)
+ src = dst = None
+ if not os.path.exists(saveto): # Avoid repeated downloads
+ try:
+ log.warn("Downloading %s", url)
+ src = urlopen(url)
+ # Read/write all in one block, so we don't create a corrupt file
+ # if the download is interrupted.
+ data = src.read()
+ dst = open(saveto, "wb")
+ dst.write(data)
+ finally:
+ if src:
+ src.close()
+ if dst:
+ dst.close()
+ return os.path.realpath(saveto)
+
+def _no_sandbox(function):
+ def __no_sandbox(*args, **kw):
+ try:
+ from setuptools.sandbox import DirectorySandbox
+ if not hasattr(DirectorySandbox, '_old'):
+ def violation(*args):
+ pass
+ DirectorySandbox._old = DirectorySandbox._violation
+ DirectorySandbox._violation = violation
+ patched = True
+ else:
+ patched = False
+ except ImportError:
+ patched = False
+
+ try:
+ return function(*args, **kw)
+ finally:
+ if patched:
+ DirectorySandbox._violation = DirectorySandbox._old
+ del DirectorySandbox._old
+
+ return __no_sandbox
+
+def _patch_file(path, content):
+ """Will backup the file then patch it"""
+ existing_content = open(path).read()
+ if existing_content == content:
+ # already patched
+ log.warn('Already patched.')
+ return False
+ log.warn('Patching...')
+ _rename_path(path)
+ f = open(path, 'w')
+ try:
+ f.write(content)
+ finally:
+ f.close()
+ return True
+
+_patch_file = _no_sandbox(_patch_file)
+
+def _same_content(path, content):
+ return open(path).read() == content
+
+def _rename_path(path):
+ new_name = path + '.OLD.%s' % time.time()
+ log.warn('Renaming %s into %s', path, new_name)
+ os.rename(path, new_name)
+ return new_name
+
+def _remove_flat_installation(placeholder):
+ if not os.path.isdir(placeholder):
+ log.warn('Unkown installation at %s', placeholder)
+ return False
+ found = False
+ for file in os.listdir(placeholder):
+ if fnmatch.fnmatch(file, 'setuptools*.egg-info'):
+ found = True
+ break
+ if not found:
+ log.warn('Could not locate setuptools*.egg-info')
+ return
+
+ log.warn('Removing elements out of the way...')
+ pkg_info = os.path.join(placeholder, file)
+ if os.path.isdir(pkg_info):
+ patched = _patch_egg_dir(pkg_info)
+ else:
+ patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO)
+
+ if not patched:
+ log.warn('%s already patched.', pkg_info)
+ return False
+ # now let's move the files out of the way
+ for element in ('setuptools', 'pkg_resources.py', 'site.py'):
+ element = os.path.join(placeholder, element)
+ if os.path.exists(element):
+ _rename_path(element)
+ else:
+ log.warn('Could not find the %s element of the '
+ 'Setuptools distribution', element)
+ return True
+
+_remove_flat_installation = _no_sandbox(_remove_flat_installation)
+
+def _after_install(dist):
+ log.warn('After install bootstrap.')
+ placeholder = dist.get_command_obj('install').install_purelib
+ _create_fake_setuptools_pkg_info(placeholder)
+
+def _create_fake_setuptools_pkg_info(placeholder):
+ if not placeholder or not os.path.exists(placeholder):
+ log.warn('Could not find the install location')
+ return
+ pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1])
+ setuptools_file = 'setuptools-%s-py%s.egg-info' % \
+ (SETUPTOOLS_FAKED_VERSION, pyver)
+ pkg_info = os.path.join(placeholder, setuptools_file)
+ if os.path.exists(pkg_info):
+ log.warn('%s already exists', pkg_info)
+ return
+
+ log.warn('Creating %s', pkg_info)
+ f = open(pkg_info, 'w')
+ try:
+ f.write(SETUPTOOLS_PKG_INFO)
+ finally:
+ f.close()
+
+ pth_file = os.path.join(placeholder, 'setuptools.pth')
+ log.warn('Creating %s', pth_file)
+ f = open(pth_file, 'w')
+ try:
+ f.write(os.path.join(os.curdir, setuptools_file))
+ finally:
+ f.close()
+
+_create_fake_setuptools_pkg_info = _no_sandbox(_create_fake_setuptools_pkg_info)
+
+def _patch_egg_dir(path):
+ # let's check if it's already patched
+ pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
+ if os.path.exists(pkg_info):
+ if _same_content(pkg_info, SETUPTOOLS_PKG_INFO):
+ log.warn('%s already patched.', pkg_info)
+ return False
+ _rename_path(path)
+ os.mkdir(path)
+ os.mkdir(os.path.join(path, 'EGG-INFO'))
+ pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
+ f = open(pkg_info, 'w')
+ try:
+ f.write(SETUPTOOLS_PKG_INFO)
+ finally:
+ f.close()
+ return True
+
+_patch_egg_dir = _no_sandbox(_patch_egg_dir)
+
+def _before_install():
+ log.warn('Before install bootstrap.')
+ _fake_setuptools()
+
+
+def _under_prefix(location):
+ if 'install' not in sys.argv:
+ return True
+ args = sys.argv[sys.argv.index('install')+1:]
+ for index, arg in enumerate(args):
+ for option in ('--root', '--prefix'):
+ if arg.startswith('%s=' % option):
+ top_dir = arg.split('root=')[-1]
+ return location.startswith(top_dir)
+ elif arg == option:
+ if len(args) > index:
+ top_dir = args[index+1]
+ return location.startswith(top_dir)
+ if arg == '--user' and USER_SITE is not None:
+ return location.startswith(USER_SITE)
+ return True
+
+
+def _fake_setuptools():
+ log.warn('Scanning installed packages')
+ try:
+ import pkg_resources
+ except ImportError:
+ # we're cool
+ log.warn('Setuptools or Distribute does not seem to be installed.')
+ return
+ ws = pkg_resources.working_set
+ try:
+ setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools',
+ replacement=False))
+ except TypeError:
+ # old distribute API
+ setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools'))
+
+ if setuptools_dist is None:
+ log.warn('No setuptools distribution found')
+ return
+ # detecting if it was already faked
+ setuptools_location = setuptools_dist.location
+ log.warn('Setuptools installation detected at %s', setuptools_location)
+
+    # if --root or --prefix was provided, and if
+ # setuptools is not located in them, we don't patch it
+ if not _under_prefix(setuptools_location):
+ log.warn('Not patching, --root or --prefix is installing Distribute'
+ ' in another location')
+ return
+
+    # let's see if it's an egg
+ if not setuptools_location.endswith('.egg'):
+ log.warn('Non-egg installation')
+ res = _remove_flat_installation(setuptools_location)
+ if not res:
+ return
+ else:
+ log.warn('Egg installation')
+ pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO')
+ if (os.path.exists(pkg_info) and
+ _same_content(pkg_info, SETUPTOOLS_PKG_INFO)):
+ log.warn('Already patched.')
+ return
+ log.warn('Patching...')
+ # let's create a fake egg replacing setuptools one
+ res = _patch_egg_dir(setuptools_location)
+ if not res:
+ return
+ log.warn('Patched done.')
+ _relaunch()
+
+
+def _relaunch():
+ log.warn('Relaunching...')
+ # we have to relaunch the process
+ # pip marker to avoid a relaunch bug
+ if sys.argv[:3] == ['-c', 'install', '--single-version-externally-managed']:
+ sys.argv[0] = 'setup.py'
+ args = [sys.executable] + sys.argv
+ sys.exit(subprocess.call(args))
+
+
+def _extractall(self, path=".", members=None):
+ """Extract all members from the archive to the current working
+ directory and set owner, modification time and permissions on
+ directories afterwards. `path' specifies a different directory
+ to extract to. `members' is optional and must be a subset of the
+ list returned by getmembers().
+ """
+ import copy
+ import operator
+ from tarfile import ExtractError
+ directories = []
+
+ if members is None:
+ members = self
+
+ for tarinfo in members:
+ if tarinfo.isdir():
+ # Extract directories with a safe mode.
+ directories.append(tarinfo)
+ tarinfo = copy.copy(tarinfo)
+ tarinfo.mode = 448 # decimal for oct 0700
+ self.extract(tarinfo, path)
+
+ # Reverse sort directories.
+ if sys.version_info < (2, 4):
+ def sorter(dir1, dir2):
+ return cmp(dir1.name, dir2.name)
+ directories.sort(sorter)
+ directories.reverse()
+ else:
+ directories.sort(key=operator.attrgetter('name'), reverse=True)
+
+ # Set correct owner, mtime and filemode on directories.
+ for tarinfo in directories:
+ dirpath = os.path.join(path, tarinfo.name)
+ try:
+ self.chown(tarinfo, dirpath)
+ self.utime(tarinfo, dirpath)
+ self.chmod(tarinfo, dirpath)
+ except ExtractError:
+ e = sys.exc_info()[1]
+ if self.errorlevel > 1:
+ raise
+ else:
+ self._dbg(1, "tarfile: %s" % e)
+
+
+def main(argv, version=DEFAULT_VERSION):
+ """Install or upgrade setuptools and EasyInstall"""
+ tarball = download_setuptools()
+ _install(tarball)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff -r 2e2989c3072c -r 125fc6df230d src/ldt/ldt/ldt_utils/projectserializer.py
--- a/src/ldt/ldt/ldt_utils/projectserializer.py Wed Mar 23 17:34:36 2011 +0100
+++ b/src/ldt/ldt/ldt_utils/projectserializer.py Fri Apr 08 16:33:01 2011 +0200
@@ -147,6 +147,7 @@
element_duration = element_node.attrib[u"dur"]
element_media = content.iri_id
element_color = element_node.attrib[u"color"]
+ element_ldt_src = element_node.attrib.get(u"src", "")
element_title = reduce_text_node(element_node, "title/text()")
element_description = reduce_text_node(element_node, "abstract/text()")
@@ -211,11 +212,15 @@
"end": int(element_begin) + int(element_duration),
"id": element_id,
"media": element_media,
+ "color": element_color,
"content": {
"mimetype": "application/x-ldt-structured",
"title": element_title,
"description": element_description,
"color": element_color,
+ "img": {
+ "src": element_ldt_src,
+ },
"audio": {
"src" : element_audio_src,
"mimetype": "audio/mp3",
diff -r 2e2989c3072c -r 125fc6df230d src/ldt/ldt/ldt_utils/templates/ldt/ldt_utils/partial/projectslist.html
--- a/src/ldt/ldt/ldt_utils/templates/ldt/ldt_utils/partial/projectslist.html Wed Mar 23 17:34:36 2011 +0100
+++ b/src/ldt/ldt/ldt_utils/templates/ldt/ldt_utils/partial/projectslist.html Fri Apr 08 16:33:01 2011 +0200
@@ -18,9 +18,9 @@
|
{% ifequal project.state 2 %}
-
+
{% else %}
-
+
{% endifequal %}
|
diff -r 2e2989c3072c -r 125fc6df230d src/ldt/ldt/media/swf/ldt/LignesDeTempsFlex.swf
Binary file src/ldt/ldt/media/swf/ldt/LignesDeTempsFlex.swf has changed
diff -r 2e2989c3072c -r 125fc6df230d src/ldt/ldt/settings.py
--- a/src/ldt/ldt/settings.py Wed Mar 23 17:34:36 2011 +0100
+++ b/src/ldt/ldt/settings.py Fri Apr 08 16:33:01 2011 +0200
@@ -28,6 +28,7 @@
'ldt.user',
'ldt.management',
'oauth_provider',
+ 'social_auth',
)
MIDDLEWARE_CLASSES = (
@@ -41,18 +42,6 @@
'jogging.middleware.LoggingMiddleware',
)
-TEMPLATE_CONTEXT_PROCESSORS = (
- "django.core.context_processors.request",
- "django.core.context_processors.auth",
- "django.core.context_processors.debug",
- "django.core.context_processors.i18n",
- "django.core.context_processors.media",
- "ldt.utils.context_processors.ldt",
- "ldt.utils.context_processors.base",
- "ldt.utils.context_processors.web",
- "ldt.utils.context_processors.version",
-)
-
WEB_URL = getattr(settings, 'WEB_URL', '')
diff -r 2e2989c3072c -r 125fc6df230d src/ldt/ldt/utils/context_processors.py
--- a/src/ldt/ldt/utils/context_processors.py Wed Mar 23 17:34:36 2011 +0100
+++ b/src/ldt/ldt/utils/context_processors.py Fri Apr 08 16:33:01 2011 +0200
@@ -2,15 +2,8 @@
import ldt
-def ldt(request):
- return {'LDT_MEDIA_PREFIX': settings.LDT_MEDIA_PREFIX }
-
-def base(request):
- return {'BASE_URL': settings.BASE_URL, 'MEDIA_URL': settings.MEDIA_URL }
+def ldtcontext(request):
+ return {'BASE_URL': settings.BASE_URL, 'MEDIA_URL': settings.MEDIA_URL, 'TC1': 'TC1', \
+ 'LDT_MEDIA_PREFIX': settings.LDT_MEDIA_PREFIX, 'WEB_URL': settings.WEB_URL, \
+ 'ADMIN_MEDIA_PREFIX': settings.ADMIN_MEDIA_PREFIX, 'VERSION': ldt.get_version() }
-def web(request):
- return {'WEB_URL': settings.WEB_URL }
-
-def version(request):
- return {'VERSION': ldt.get_version() }
-
diff -r 2e2989c3072c -r 125fc6df230d src/ldt/setup.py
--- a/src/ldt/setup.py Wed Mar 23 17:34:36 2011 +0100
+++ b/src/ldt/setup.py Fri Apr 08 16:33:01 2011 +0200
@@ -1,5 +1,9 @@
import os
-from setuptools import setup, find_packages
+
+from distribute_setup import use_setuptools
+use_setuptools()
+
+from setuptools import setup
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR, 'ldt')
@@ -43,13 +47,22 @@
new_data_files.extend([os.path.join(ldirpath[len(base_path):], f) for f in lfilenames])
data_files.setdefault(key,[]).extend(new_data_files)
+#write MANIFEST.in
+
+m = open("MANIFEST.in", "w")
+
+m.write("exclude MANIFEST.in\n")
+for key,file_list in data_files.iteritems():
+ for filename in file_list:
+ m.write("include %s/%s\n" % (key.replace(".","/"), filename))
+m.close()
setup(
name='ldt',
version=version,
author='Yves-Marie Haussonne (IRI)',
author_email='contact@iri.centrepompidou.fr',
- packages = find_packages(),
+ packages = packages,
package_data = data_files,
scripts=[],
url='https://www.iri.centrepompidou.fr/dev/hg/platform',
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/lib/lib_create_env.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/virtualenv/res/lib/lib_create_env.py Fri Apr 08 16:33:01 2011 +0200
@@ -0,0 +1,292 @@
+import sys
+import os
+import os.path
+import shutil
+import tarfile
+import zipfile
+import urllib
+import platform
+import patch
+
+join = os.path.join
+system_str = platform.system()
+
+
+URLS = {
+ 'DISTRIBUTE': {'setup': 'distribute', 'url':'http://pypi.python.org/packages/source/d/distribute/distribute-0.6.14.tar.gz', 'local':"distribute-0.6.14.tar.gz"},
+ 'DJANGO': {'setup': 'django', 'url': 'http://www.djangoproject.com/download/1.2.4/tarball/', 'local':"Django-1.2.4.tar.gz"},
+ 'JOGGING': {'setup': 'jogging', 'url': 'http://github.com/zain/jogging/tarball/v0.2.2', 'local':"jogging-0.2.2.tar.gz"},
+ 'DJANGO-EXTENSIONS': { 'setup': 'django-extensions', 'url':'https://github.com/django-extensions/django-extensions/tarball/0.6', 'local':"django-extensions-0.6.tar.gz"},
+ 'DJANGO-REGISTRATION': { 'setup': 'django-registration', 'url':'http://bitbucket.org/ubernostrum/django-registration/get/tip.tar.gz', 'local':"django-registration.tar.gz"},
+ 'DJANGO-TAGGING': { 'setup': 'django-tagging', 'url':'http://django-tagging.googlecode.com/files/django-tagging-0.3.1.tar.gz', 'local':"django-tagging-0.3.1.tar.gz"},
+ 'DJANGO-PISTON': { 'setup': 'django-piston', 'url':"django-piston-0.2.2-modified.tar.gz", 'local':"django-piston-0.2.2-modified.tar.gz"},
+ 'OAUTH2': { 'setup': 'python-oauth2', 'url':"python-oauth2-1.2.1-modified.tar.gz", 'local':"python-oauth2-1.2.1-modified.tar.gz"},
+ 'HTTPLIB2': { 'setup': 'python-oauth2', 'url':'http://httplib2.googlecode.com/files/httplib2-0.6.0.tar.gz', 'local':"httplib2-0.6.0.tar.gz"},
+ 'DJANGO-OAUTH-PLUS': { 'setup': 'django-oauth-plus', 'url':'http://bitbucket.org/david/django-oauth-plus/get/f314f018e473.gz', 'local':"django-oauth-plus.tar.gz"},
+ 'MYSQL': { 'setup': 'mysql-python', 'url': 'http://sourceforge.net/projects/mysql-python/files/mysql-python/1.2.3/MySQL-python-1.2.3.tar.gz/download', 'local':"MySQL-python-1.2.3.tar.gz"},
+ 'SETUPTOOLS-HG': { 'setup':'mercurial_hg', 'url':'http://pypi.python.org/packages/source/s/setuptools_hg/setuptools_hg-0.2.tar.gz', 'local':"setuptools_hg-0.2.tar.gz"},
+ 'MERCURIAL': {'setup':'mercurial', 'url':'http://pypi.python.org/packages/source/d/mercurial/mercurial-1.7.5.tar.gz', 'local':"mercurial-1.7.5.tar.gz"}
+}
+
+if system_str == 'Windows':
+ URLS.update({
+ 'PSYCOPG2': {'setup': 'psycopg2','url': 'psycopg2-2.0.10.win32-py2.6-pg8.3.7-release.zip', 'local':"psycopg2-2.0.10.win32-py2.6-pg8.3.7-release.zip"},
+ 'JCC': {'setup': 'http://pylucene-win32-binary.googlecode.com/files/JCC-2.6-py2.6-win32.egg', 'local':"JCC-2.6-py2.6-win32.egg"},
+ 'PYLUCENE': {'setup': 'http://pylucene-win32-binary.googlecode.com/files/lucene-3.0.2-py2.6-win32.egg', 'local':"lucene-3.0.2-py2.6-win32.egg"},
+ 'PIL': {'setup': 'pil', 'url': 'http://effbot.org/media/downloads/PIL-1.1.7.win32-py2.6.exe', 'local':"PIL-1.1.7.win32-py2.6.exe"},
+ 'LXML': {'setup': 'lxml', 'url': 'http://pypi.python.org/packages/2.6/l/lxml/lxml-2.2.8-py2.6-win32.egg', 'local':"lxml-2.2.8-py2.6-win32.egg"}
+ })
+else:
+ URLS.update({
+ 'PSYCOPG2': {'setup': 'psycopg2','url': 'http://initd.org/psycopg/tarballs/PSYCOPG-2-3/psycopg2-2.3.2.tar.gz', 'local':"psycopg2-2.3.2.tar.gz"},
+ 'PYLUCENE': {'setup': 'http://apache.crihan.fr/dist/lucene/pylucene/pylucene-3.0.3-1-src.tar.gz', 'url': 'http://apache.crihan.fr/dist/lucene/pylucene/pylucene-3.0.3-1-src.tar.gz', 'local':"pylucene-3.0.3-1-src.tar.gz"},
+ 'PIL': {'setup': 'pil', 'url': 'http://effbot.org/downloads/Imaging-1.1.7.tar.gz', 'local':"Imaging-1.1.7.tar.gz"},
+ 'LXML': {'setup': 'lxml', 'url':"lxml_2.2.8.tar.gz", 'local':"lxml-2.2.8.tar.gz"}
+ })
+
+
+
+class ResourcesEnv(object):
+
+ def __init__(self, src_base, urls, normal_installs):
+ self.src_base = src_base
+ self.URLS = {}
+ self.__init_url(urls)
+ self.NORMAL_INSTALL = normal_installs
+
+ def get_src_base_path(self, fpath):
+ return os.path.abspath(os.path.join(self.src_base, fpath)).replace("\\","/")
+
+ def __add_package_def(self, key, setup, url, local):
+ self.URLS[key] = {'setup':setup, 'url':url, 'local':self.get_src_base_path(local)}
+
+ def __init_url(self, urls):
+ for key, url_dict in urls.items():
+ url = url_dict['url']
+ if not url.startswith("http://"):
+ url = self.get_src_base_path(url)
+ self.__add_package_def(key, url_dict["setup"], url, url_dict["local"])
+
+def ensure_dir(dir, logger):
+ if not os.path.exists(dir):
+ logger.notify('Creating directory %s' % dir)
+ os.makedirs(dir)
+
+def extend_parser(parser):
+ parser.add_option(
+ '--index-url',
+ metavar='INDEX_URL',
+ dest='index_url',
+ default='http://pypi.python.org/simple/',
+ help='base URL of Python Package Index')
+ parser.add_option(
+ '--type-install',
+ metavar='type_install',
+ dest='type_install',
+ default='local',
+ help='type install : local, url, setup')
+ parser.add_option(
+ '--ignore-packages',
+ metavar='ignore_packages',
+ dest='ignore_packages',
+ default=None,
+ help='list of comma separated keys for package to ignore')
+
+def adjust_options(options, args):
+ pass
+
+
+def install_pylucene(option_str, extra_env, res_source_key, home_dir, tmp_dir, src_dir, res_env, logger, call_subprocess, filter_python_develop):
+
+ logger.notify("Get Pylucene from %s " % res_env.URLS['PYLUCENE'][res_source_key])
+ pylucene_src = os.path.join(src_dir,"pylucene.tar.gz")
+ if res_source_key == 'local':
+ shutil.copy(res_env.URLS['PYLUCENE'][res_source_key], pylucene_src)
+ else:
+ urllib.urlretrieve(res_env.URLS['PYLUCENE'][res_source_key], pylucene_src)
+ tf = tarfile.open(pylucene_src,'r:gz')
+ pylucene_base_path = os.path.join(src_dir,"pylucene")
+ logger.notify("Extract Pylucene to %s " % pylucene_base_path)
+ tf.extractall(pylucene_base_path)
+ tf.close()
+
+ pylucene_src_path = os.path.join(pylucene_base_path, os.listdir(pylucene_base_path)[0])
+ jcc_src_path = os.path.abspath(os.path.join(pylucene_src_path,"jcc"))
+
+ #install jcc
+
+ #patch for linux
+ if system_str == 'Linux' :
+ olddir = os.getcwd()
+ patch_dest_path = os.path.join(lib_dir,'site-packages','setuptools-0.6c11-py'+'%s.%s' % (sys.version_info[0], sys.version_info[1])+'.egg')
+ if os.path.isfile(patch_dest_path):
+ # must unzip egg
+            # rename file and extract all
+ shutil.move(patch_dest_path, patch_dest_path + ".zip")
+ zf = zipfile.ZipFile(patch_dest_path + ".zip",'r')
+ zf.extractall(patch_dest_path)
+ os.remove(patch_dest_path + ".zip")
+ logger.notify("Patch jcc : %s " % (patch_dest_path))
+ os.chdir(patch_dest_path)
+ p = patch.fromfile(os.path.join(jcc_src_path,"jcc","patches","patch.43.0.6c11"))
+ p.apply()
+ os.chdir(olddir)
+
+ logger.notify("Install jcc")
+ call_subprocess([os.path.abspath(os.path.join(home_dir, 'bin', 'python')), 'setup.py', 'install'],
+ cwd=jcc_src_path,
+ filter_stdout=filter_python_develop,
+ show_stdout=True)
+ #install pylucene
+
+ logger.notify("Install pylucene")
+ #modify makefile
+ makefile_path = os.path.join(pylucene_src_path,"Makefile")
+ logger.notify("Modify makefile %s " % makefile_path)
+ shutil.move( makefile_path, makefile_path+"~" )
+
+ destination= open( makefile_path, "w" )
+ source= open( makefile_path+"~", "r" )
+ destination.write("PREFIX_PYTHON="+os.path.abspath(home_dir)+"\n")
+ destination.write("ANT=ant\n")
+ destination.write("PYTHON=$(PREFIX_PYTHON)/bin/python\n")
+
+ if system_str == "Darwin":
+ if sys.version_info >= (2,6):
+ destination.write("JCC=$(PYTHON) -m jcc.__main__ --shared --arch x86_64 --arch i386\n")
+ else:
+ destination.write("JCC=$(PYTHON) -m jcc --shared --arch x86_64 --arch i386\n")
+ destination.write("NUM_FILES=2\n")
+ elif system_str == "Windows":
+ destination.write("JCC=$(PYTHON) -m jcc.__main__ --shared --arch x86_64 --arch i386\n")
+ destination.write("NUM_FILES=2\n")
+ else:
+ if sys.version_info >= (2,6) and sys.version_info <= (2,7):
+ destination.write("JCC=$(PYTHON) -m jcc.__main__ --shared\n")
+ else:
+ destination.write("JCC=$(PYTHON) -m jcc --shared\n")
+ destination.write("NUM_FILES=2\n")
+ for line in source:
+ destination.write( line )
+ source.close()
+ destination.close()
+ os.remove(makefile_path+"~" )
+
+ logger.notify("pylucene make")
+ call_subprocess(['make'],
+ cwd=os.path.abspath(pylucene_src_path),
+ filter_stdout=filter_python_develop,
+ show_stdout=True)
+
+ logger.notify("pylucene make install")
+ call_subprocess(['make', 'install'],
+ cwd=os.path.abspath(pylucene_src_path),
+ filter_stdout=filter_python_develop,
+ show_stdout=True)
+
+
+def install_psycopg2(option_str, extra_env, res_source_key, home_dir, tmp_dir, src_dir, res_env, logger, call_subprocess, filter_python_develop):
+ psycopg2_src = os.path.join(src_dir,"psycopg2.zip")
+ shutil.copy(res_env.URLS['PSYCOPG2'][res_source_key], psycopg2_src)
+ #extract psycopg2
+ zf = zipfile.ZipFile(psycopg2_src)
+ psycopg2_base_path = os.path.join(src_dir,"psycopg2")
+ zf.extractall(psycopg2_base_path)
+ zf.close()
+
+ psycopg2_src_path = os.path.join(psycopg2_base_path, os.listdir(psycopg2_base_path)[0])
+ shutil.copytree(os.path.join(psycopg2_src_path, 'psycopg2'), os.path.abspath(os.path.join(home_dir, 'Lib', 'psycopg2')))
+ shutil.copy(os.path.join(psycopg2_src_path, 'psycopg2-2.0.10-py2.6.egg-info'), os.path.abspath(os.path.join(home_dir, 'Lib', 'site-packages')))
+
+
+
+def lib_generate_install_methods(path_locations, src_base, Logger, call_subprocess, normal_installs, urls=None):
+
+ all_urls = URLS.copy()
+ if urls is not None:
+ all_urls.update(urls)
+
+ res_env = ResourcesEnv(src_base, all_urls, normal_installs)
+
+ def filter_python_develop(line):
+ if not line.strip():
+ return Logger.DEBUG
+ for prefix in ['Searching for', 'Reading ', 'Best match: ', 'Processing ',
+ 'Moving ', 'Adding ', 'running ', 'writing ', 'Creating ',
+ 'creating ', 'Copying ']:
+ if line.startswith(prefix):
+ return Logger.DEBUG
+ return Logger.NOTIFY
+
+
+ def normal_install(key, method, option_str, extra_env, res_source_key, home_dir, tmp_dir, res_env, logger, call_subprocess):
+ logger.notify("Install %s from %s with %s" % (key,res_env.URLS[key][res_source_key],method))
+ if method == 'pip':
+ if sys.platform == 'win32':
+ args = [os.path.abspath(os.path.join(home_dir, 'Scripts', 'pip')), 'install', '-E', os.path.abspath(home_dir), res_env.URLS[key][res_source_key]]
+ else:
+ args = [os.path.abspath(os.path.join(home_dir, 'bin', 'pip')), 'install', '-E', os.path.abspath(home_dir), res_env.URLS[key][res_source_key]]
+ if option_str :
+ args.insert(4,option_str)
+ call_subprocess(args,
+ cwd=os.path.abspath(tmp_dir),
+ filter_stdout=filter_python_develop,
+ show_stdout=True,
+ extra_env=extra_env)
+ else:
+ if sys.platform == 'win32':
+ args = [os.path.abspath(os.path.join(home_dir, 'Scripts', 'easy_install')), res_env.URLS[key][res_source_key]]
+ else:
+ args = [os.path.abspath(os.path.join(home_dir, 'bin', 'easy_install')), res_env.URLS[key][res_source_key]]
+ if option_str :
+ args.insert(1,option_str)
+ call_subprocess(args,
+ cwd=os.path.abspath(tmp_dir),
+ filter_stdout=filter_python_develop,
+ show_stdout=True,
+ extra_env=extra_env)
+
+
+ def after_install(options, home_dir):
+
+ global logger
+
+ verbosity = options.verbose - options.quiet
+ logger = Logger([(Logger.level_for_integer(2-verbosity), sys.stdout)])
+
+
+ home_dir, lib_dir, inc_dir, bin_dir = path_locations(home_dir)
+ base_dir = os.path.dirname(home_dir)
+ src_dir = os.path.join(home_dir, 'src')
+ tmp_dir = os.path.join(home_dir, 'tmp')
+ ensure_dir(src_dir, logger)
+ ensure_dir(tmp_dir, logger)
+ system_str = platform.system()
+
+ res_source_key = options.type_install
+
+ ignore_packages = []
+
+ if options.ignore_packages :
+ ignore_packages = options.ignore_packages.split(",")
+
+ logger.indent += 2
+ try:
+ for key, method, option_str, extra_env in res_env.NORMAL_INSTALL:
+ if key not in ignore_packages:
+ if callable(method):
+ method(option_str, extra_env, res_source_key, home_dir, tmp_dir, src_dir, res_env, logger, call_subprocess, filter_python_develop)
+ else:
+ normal_install(key, method, option_str, extra_env, res_source_key, home_dir, tmp_dir, res_env, logger, call_subprocess)
+
+ logger.notify("Clear source dir")
+ shutil.rmtree(src_dir)
+
+ finally:
+ logger.indent -= 2
+ script_dir = join(base_dir, bin_dir)
+ logger.notify('Run "%s Package" to install new packages that provide builds'
+ % join(script_dir, 'easy_install'))
+
+
+ return adjust_options, extend_parser, after_install
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/lib/patch.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/virtualenv/res/lib/patch.py Fri Apr 08 16:33:01 2011 +0200
@@ -0,0 +1,589 @@
+""" Patch utility to apply unified diffs
+
+ Brute-force line-by-line non-recursive parsing
+
+ Copyright (c) 2008-2010 anatoly techtonik
+ Available under the terms of MIT license
+
+ Project home: http://code.google.com/p/python-patch/
+
+
+ $Id: patch.py 76 2010-04-08 19:10:21Z techtonik $
+ $HeadURL: https://python-patch.googlecode.com/svn/trunk/patch.py $
+"""
+
+__author__ = "techtonik.rainforce.org"
+__version__ = "10.04"
+
+import copy
+import logging
+import re
+# cStringIO doesn't support unicode in 2.5
+from StringIO import StringIO
+from logging import debug, info, warning
+
+from os.path import exists, isfile, abspath
+from os import unlink
+
+
+#------------------------------------------------
+# Logging is controlled by "python_patch" logger
+
+debugmode = False
+
+logger = logging.getLogger("python_patch")
+loghandler = logging.StreamHandler()
+logger.addHandler(loghandler)
+
+debug = logger.debug
+info = logger.info
+warning = logger.warning
+
+#: disable library logging by default
+logger.setLevel(logging.CRITICAL)
+
+#------------------------------------------------
+
+
+def fromfile(filename):
+ """ Parse patch file and return Patch() object
+ """
+
+ info("reading patch from file %s" % filename)
+ fp = open(filename, "rb")
+ patch = Patch(fp)
+ fp.close()
+ return patch
+
+
+def fromstring(s):
+ """ Parse text string and return Patch() object
+ """
+
+ return Patch(
+ StringIO.StringIO(s)
+ )
+
+
+
+class HunkInfo(object):
+ """ Parsed hunk data container (hunk starts with @@ -R +R @@) """
+
+ def __init__(self):
+ self.startsrc=None #: line count starts with 1
+ self.linessrc=None
+ self.starttgt=None
+ self.linestgt=None
+ self.invalid=False
+ self.text=[]
+
+ def copy(self):
+ return copy.copy(self)
+
+# def apply(self, estream):
+# """ write hunk data into enumerable stream
+# return strings one by one until hunk is
+# over
+#
+# enumerable stream are tuples (lineno, line)
+# where lineno starts with 0
+# """
+# pass
+
+
+
+class Patch(object):
+
+ def __init__(self, stream=None):
+
+ # define Patch data members
+ # table with a row for every source file
+
+ #: list of source filenames
+ self.source=None
+ self.target=None
+ #: list of lists of hunks
+ self.hunks=None
+ #: file endings statistics for every hunk
+ self.hunkends=None
+
+ if stream:
+ self.parse(stream)
+
+ def copy(self):
+ return copy.copy(self)
+
+ def parse(self, stream):
+ """ parse unified diff """
+ self.source = []
+ self.target = []
+ self.hunks = []
+ self.hunkends = []
+
+ # define possible file regions that will direct the parser flow
+ header = False # comments before the patch body
+ filenames = False # lines starting with --- and +++
+
+ hunkhead = False # @@ -R +R @@ sequence
+ hunkbody = False #
+ hunkskip = False # skipping invalid hunk mode
+
+ header = True
+ lineends = dict(lf=0, crlf=0, cr=0)
+ nextfileno = 0
+ nexthunkno = 0 #: even if index starts with 0 user messages number hunks from 1
+
+ # hunkinfo holds parsed values, hunkactual - calculated
+ hunkinfo = HunkInfo()
+ hunkactual = dict(linessrc=None, linestgt=None)
+
+ fe = enumerate(stream)
+ for lineno, line in fe:
+
+ # analyze state
+ if header and line.startswith("--- "):
+ header = False
+ # switch to filenames state
+ filenames = True
+ #: skip hunkskip and hunkbody code until you read definition of hunkhead
+ if hunkbody:
+ # process line first
+ if re.match(r"^[- \+\\]", line):
+ # gather stats about line endings
+ if line.endswith("\r\n"):
+ self.hunkends[nextfileno-1]["crlf"] += 1
+ elif line.endswith("\n"):
+ self.hunkends[nextfileno-1]["lf"] += 1
+ elif line.endswith("\r"):
+ self.hunkends[nextfileno-1]["cr"] += 1
+
+ if line.startswith("-"):
+ hunkactual["linessrc"] += 1
+ elif line.startswith("+"):
+ hunkactual["linestgt"] += 1
+ elif not line.startswith("\\"):
+ hunkactual["linessrc"] += 1
+ hunkactual["linestgt"] += 1
+ hunkinfo.text.append(line)
+ # todo: handle \ No newline cases
+ else:
+ warning("invalid hunk no.%d at %d for target file %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
+ # add hunk status node
+ self.hunks[nextfileno-1].append(hunkinfo.copy())
+ self.hunks[nextfileno-1][nexthunkno-1]["invalid"] = True
+ # switch to hunkskip state
+ hunkbody = False
+ hunkskip = True
+
+ # check exit conditions
+ if hunkactual["linessrc"] > hunkinfo.linessrc or hunkactual["linestgt"] > hunkinfo.linestgt:
+ warning("extra hunk no.%d lines at %d for target %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
+ # add hunk status node
+ self.hunks[nextfileno-1].append(hunkinfo.copy())
+ self.hunks[nextfileno-1][nexthunkno-1]["invalid"] = True
+ # switch to hunkskip state
+ hunkbody = False
+ hunkskip = True
+ elif hunkinfo.linessrc == hunkactual["linessrc"] and hunkinfo.linestgt == hunkactual["linestgt"]:
+ self.hunks[nextfileno-1].append(hunkinfo.copy())
+ # switch to hunkskip state
+ hunkbody = False
+ hunkskip = True
+
+ # detect mixed window/unix line ends
+ ends = self.hunkends[nextfileno-1]
+ if ((ends["cr"]!=0) + (ends["crlf"]!=0) + (ends["lf"]!=0)) > 1:
+ warning("inconsistent line ends in patch hunks for %s" % self.source[nextfileno-1])
+ if debugmode:
+ debuglines = dict(ends)
+ debuglines.update(file=self.target[nextfileno-1], hunk=nexthunkno)
+ debug("crlf: %(crlf)d lf: %(lf)d cr: %(cr)d\t - file: %(file)s hunk: %(hunk)d" % debuglines)
+
+ if hunkskip:
+ match = re.match("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?", line)
+ if match:
+ # switch to hunkhead state
+ hunkskip = False
+ hunkhead = True
+ elif line.startswith("--- "):
+ # switch to filenames state
+ hunkskip = False
+ filenames = True
+ if debugmode and len(self.source) > 0:
+ debug("- %2d hunks for %s" % (len(self.hunks[nextfileno-1]), self.source[nextfileno-1]))
+
+ if filenames:
+ if line.startswith("--- "):
+ if nextfileno in self.source:
+ warning("skipping invalid patch for %s" % self.source[nextfileno])
+ del self.source[nextfileno]
+ # double source filename line is encountered
+ # attempt to restart from this second line
+ re_filename = "^--- ([^\t]+)"
+ match = re.match(re_filename, line)
+ # todo: support spaces in filenames
+ if match:
+ self.source.append(match.group(1).strip())
+ else:
+ warning("skipping invalid filename at line %d" % lineno)
+ # switch back to header state
+ filenames = False
+ header = True
+ elif not line.startswith("+++ "):
+ if nextfileno in self.source:
+ warning("skipping invalid patch with no target for %s" % self.source[nextfileno])
+ del self.source[nextfileno]
+ else:
+ # this should be unreachable
+ warning("skipping invalid target patch")
+ filenames = False
+ header = True
+ else:
+ if nextfileno in self.target:
+ warning("skipping invalid patch - double target at line %d" % lineno)
+ del self.source[nextfileno]
+ del self.target[nextfileno]
+ nextfileno -= 1
+ # double target filename line is encountered
+ # switch back to header state
+ filenames = False
+ header = True
+ else:
+ re_filename = "^\+\+\+ ([^\t]+)"
+ match = re.match(re_filename, line)
+ if not match:
+ warning("skipping invalid patch - no target filename at line %d" % lineno)
+ # switch back to header state
+ filenames = False
+ header = True
+ else:
+ self.target.append(match.group(1).strip())
+ nextfileno += 1
+ # switch to hunkhead state
+ filenames = False
+ hunkhead = True
+ nexthunkno = 0
+ self.hunks.append([])
+ self.hunkends.append(lineends.copy())
+ continue
+
+ if hunkhead:
+ match = re.match("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?", line)
+ if not match:
+ if nextfileno-1 not in self.hunks:
+ warning("skipping invalid patch with no hunks for file %s" % self.target[nextfileno-1])
+ # switch to header state
+ hunkhead = False
+ header = True
+ continue
+ else:
+ # switch to header state
+ hunkhead = False
+ header = True
+ else:
+ hunkinfo.startsrc = int(match.group(1))
+ hunkinfo.linessrc = 1
+ if match.group(3): hunkinfo.linessrc = int(match.group(3))
+ hunkinfo.starttgt = int(match.group(4))
+ hunkinfo.linestgt = 1
+ if match.group(6): hunkinfo.linestgt = int(match.group(6))
+ hunkinfo.invalid = False
+ hunkinfo.text = []
+
+ hunkactual["linessrc"] = hunkactual["linestgt"] = 0
+
+ # switch to hunkbody state
+ hunkhead = False
+ hunkbody = True
+ nexthunkno += 1
+ continue
+ else:
+ if not hunkskip:
+ warning("patch file incomplete - %s" % filename)
+ # sys.exit(?)
+ else:
+ # duplicated message when an eof is reached
+ if debugmode and len(self.source) > 0:
+ debug("- %2d hunks for %s" % (len(self.hunks[nextfileno-1]), self.source[nextfileno-1]))
+
+ info("total files: %d total hunks: %d" % (len(self.source), sum(len(hset) for hset in self.hunks)))
+
+
+ def apply(self):
+ """ apply parsed patch """
+
+ total = len(self.source)
+ for fileno, filename in enumerate(self.source):
+
+ f2patch = filename
+ if not exists(f2patch):
+ f2patch = self.target[fileno]
+ if not exists(f2patch):
+ warning("source/target file does not exist\n--- %s\n+++ %s" % (filename, f2patch))
+ continue
+ if not isfile(f2patch):
+ warning("not a file - %s" % f2patch)
+ continue
+ filename = f2patch
+
+ info("processing %d/%d:\t %s" % (fileno+1, total, filename))
+
+ # validate before patching
+ f2fp = open(filename)
+ hunkno = 0
+ hunk = self.hunks[fileno][hunkno]
+ hunkfind = []
+ hunkreplace = []
+ validhunks = 0
+ canpatch = False
+ for lineno, line in enumerate(f2fp):
+ if lineno+1 < hunk.startsrc:
+ continue
+ elif lineno+1 == hunk.startsrc:
+ hunkfind = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " -"]
+ hunkreplace = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " +"]
+ #pprint(hunkreplace)
+ hunklineno = 0
+
+ # todo \ No newline at end of file
+
+ # check hunks in source file
+ if lineno+1 < hunk.startsrc+len(hunkfind)-1:
+ if line.rstrip("\r\n") == hunkfind[hunklineno]:
+ hunklineno+=1
+ else:
+ debug("hunk no.%d doesn't match source file %s" % (hunkno+1, filename))
+ # file may be already patched, but we will check other hunks anyway
+ hunkno += 1
+ if hunkno < len(self.hunks[fileno]):
+ hunk = self.hunks[fileno][hunkno]
+ continue
+ else:
+ break
+
+ # check if processed line is the last line
+ if lineno+1 == hunk.startsrc+len(hunkfind)-1:
+ debug("file %s hunk no.%d -- is ready to be patched" % (filename, hunkno+1))
+ hunkno+=1
+ validhunks+=1
+ if hunkno < len(self.hunks[fileno]):
+ hunk = self.hunks[fileno][hunkno]
+ else:
+ if validhunks == len(self.hunks[fileno]):
+ # patch file
+ canpatch = True
+ break
+ else:
+ if hunkno < len(self.hunks[fileno]):
+ warning("premature end of source file %s at hunk %d" % (filename, hunkno+1))
+
+ f2fp.close()
+
+ if validhunks < len(self.hunks[fileno]):
+ if self._match_file_hunks(filename, self.hunks[fileno]):
+ warning("already patched %s" % filename)
+ else:
+ warning("source file is different - %s" % filename)
+ if canpatch:
+ backupname = filename+".orig"
+ if exists(backupname):
+ warning("can't backup original file to %s - aborting" % backupname)
+ else:
+ import shutil
+ shutil.move(filename, backupname)
+ if self.write_hunks(backupname, filename, self.hunks[fileno]):
+ warning("successfully patched %s" % filename)
+ unlink(backupname)
+ else:
+ warning("error patching file %s" % filename)
+ shutil.copy(filename, filename+".invalid")
+ warning("invalid version is saved to %s" % filename+".invalid")
+ # todo: proper rejects
+ shutil.move(backupname, filename)
+
+ # todo: check for premature eof
+
+
+ def can_patch(self, filename):
+ """ Check if specified filename can be patched. Returns None if file can
+ not be found among source filenames. False if patch can not be applied
+ clearly. True otherwise.
+
+ :returns: True, False or None
+ """
+ idx = self._get_file_idx(filename, source=True)
+ if idx == None:
+ return None
+ return self._match_file_hunks(filename, self.hunks[idx])
+
+
+ def _match_file_hunks(self, filepath, hunks):
+ matched = True
+ fp = open(abspath(filepath))
+
+ class NoMatch(Exception):
+ pass
+
+ lineno = 1
+ line = fp.readline()
+ hno = None
+ try:
+ for hno, h in enumerate(hunks):
+ # skip to first line of the hunk
+ while lineno < h.starttgt:
+ if not len(line): # eof
+ debug("check failed - premature eof before hunk: %d" % (hno+1))
+ raise NoMatch
+ line = fp.readline()
+ lineno += 1
+ for hline in h.text:
+ if hline.startswith("-"):
+ continue
+ if not len(line):
+ debug("check failed - premature eof on hunk: %d" % (hno+1))
+ # todo: \ No newline at the end of file
+ raise NoMatch
+ if line.rstrip("\r\n") != hline[1:].rstrip("\r\n"):
+ debug("file is not patched - failed hunk: %d" % (hno+1))
+ raise NoMatch
+ line = fp.readline()
+ lineno += 1
+
+ except NoMatch:
+ matched = False
+ # todo: display failed hunk, i.e. expected/found
+
+ fp.close()
+ return matched
+
+
+ def patch_stream(self, instream, hunks):
+ """ Generator that yields stream patched with hunks iterable
+
+ Converts lineends in hunk lines to the best suitable format
+ autodetected from input
+ """
+
+ # todo: At the moment substituted lineends may not be the same
+ # at the start and at the end of patching. Also issue a
+ # warning/throw about mixed lineends (is it really needed?)
+
+ hunks = iter(hunks)
+
+ srclineno = 1
+
+ lineends = {'\n':0, '\r\n':0, '\r':0}
+ def get_line():
+ """
+ local utility function - return line from source stream
+ collecting line end statistics on the way
+ """
+ line = instream.readline()
+ # 'U' mode works only with text files
+ if line.endswith("\r\n"):
+ lineends["\r\n"] += 1
+ elif line.endswith("\n"):
+ lineends["\n"] += 1
+ elif line.endswith("\r"):
+ lineends["\r"] += 1
+ return line
+
+ for hno, h in enumerate(hunks):
+ debug("hunk %d" % (hno+1))
+ # skip to line just before hunk starts
+ while srclineno < h.startsrc:
+ yield get_line()
+ srclineno += 1
+
+ for hline in h.text:
+ # todo: check \ No newline at the end of file
+ if hline.startswith("-") or hline.startswith("\\"):
+ get_line()
+ srclineno += 1
+ continue
+ else:
+ if not hline.startswith("+"):
+ get_line()
+ srclineno += 1
+ line2write = hline[1:]
+ # detect if line ends are consistent in source file
+ if sum([bool(lineends[x]) for x in lineends]) == 1:
+ newline = [x for x in lineends if lineends[x] != 0][0]
+ yield line2write.rstrip("\r\n")+newline
+ else: # newlines are mixed
+ yield line2write
+
+ for line in instream:
+ yield line
+
+
+ def write_hunks(self, srcname, tgtname, hunks):
+ src = open(srcname, "rb")
+ tgt = open(tgtname, "wb")
+
+ debug("processing target file %s" % tgtname)
+
+ tgt.writelines(self.patch_stream(src, hunks))
+
+ tgt.close()
+ src.close()
+ return True
+
+
+ def _get_file_idx(self, filename, source=None):
+ """ Detect index of given filename within patch.
+
+ :param filename:
+ :param source: search filename among sources (True),
+ targets (False), or both (None)
+ :returns: int or None
+ """
+ filename = abspath(filename)
+ if source == True or source == None:
+ for i,fnm in enumerate(self.source):
+ if filename == abspath(fnm):
+ return i
+ if source == False or source == None:
+ for i,fnm in enumerate(self.target):
+ if filename == abspath(fnm):
+ return i
+
+
+
+
+from optparse import OptionParser
+from os.path import exists
+import sys
+
+if __name__ == "__main__":
+ opt = OptionParser(usage="%prog [options] unipatch-file", version="python-patch %s" % __version__)
+ opt.add_option("--debug", action="store_true", dest="debugmode", help="debug mode")
+ (options, args) = opt.parse_args()
+
+ if not args:
+ opt.print_version()
+ opt.print_help()
+ sys.exit()
+ debugmode = options.debugmode
+ patchfile = args[0]
+ if not exists(patchfile) or not isfile(patchfile):
+ sys.exit("patch file does not exist - %s" % patchfile)
+
+
+ if debugmode:
+ loglevel = logging.DEBUG
+ logformat = "%(levelname)8s %(message)s"
+ else:
+ loglevel = logging.INFO
+ logformat = "%(message)s"
+ logger.setLevel(loglevel)
+ loghandler.setFormatter(logging.Formatter(logformat))
+
+
+
+ patch = fromfile(patchfile)
+ #pprint(patch)
+ patch.apply()
+
+ # todo: document and test line ends handling logic - patch.py detects proper line-endings
+ # for inserted hunks and issues a warning if patched file has incosistent line ends
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/Django-1.2.4.tar.gz
Binary file virtualenv/res/src/Django-1.2.4.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/Imaging-1.1.7.tar.gz
Binary file virtualenv/res/src/Imaging-1.1.7.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/JCC-2.6-py2.6-win32.egg
Binary file virtualenv/res/src/JCC-2.6-py2.6-win32.egg has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/MySQL-python-1.2.3.tar.gz
Binary file virtualenv/res/src/MySQL-python-1.2.3.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/PIL-1.1.7.win32-py2.6.exe
Binary file virtualenv/res/src/PIL-1.1.7.win32-py2.6.exe has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/distribute-0.6.14.tar.gz
Binary file virtualenv/res/src/distribute-0.6.14.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/django-extensions-0.6.tar.gz
Binary file virtualenv/res/src/django-extensions-0.6.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/django-oauth-plus.tar.gz
Binary file virtualenv/res/src/django-oauth-plus.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/django-piston-0.2.2-modified.tar.gz
Binary file virtualenv/res/src/django-piston-0.2.2-modified.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/django-registration.tar.gz
Binary file virtualenv/res/src/django-registration.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/django-tagging-0.3.1.tar.gz
Binary file virtualenv/res/src/django-tagging-0.3.1.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/facebook-python-sdk-322930c.tar.gz
Binary file virtualenv/res/src/facebook-python-sdk-322930c.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/httplib2-0.6.0.tar.gz
Binary file virtualenv/res/src/httplib2-0.6.0.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/jogging-0.2.2.tar.gz
Binary file virtualenv/res/src/jogging-0.2.2.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/lucene-3.0.2-py2.6-win32.egg
Binary file virtualenv/res/src/lucene-3.0.2-py2.6-win32.egg has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/lxml-2.2.8-py2.6-win32.egg
Binary file virtualenv/res/src/lxml-2.2.8-py2.6-win32.egg has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/lxml-2.2.8.tar.gz
Binary file virtualenv/res/src/lxml-2.2.8.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/mercurial-1.7.5.tar.gz
Binary file virtualenv/res/src/mercurial-1.7.5.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/oauth-1.0.1.tar.gz
Binary file virtualenv/res/src/oauth-1.0.1.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/psycopg2-2.0.10.win32-py2.6-pg8.3.7-release.zip
Binary file virtualenv/res/src/psycopg2-2.0.10.win32-py2.6-pg8.3.7-release.zip has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/psycopg2-2.3.2.tar.gz
Binary file virtualenv/res/src/psycopg2-2.3.2.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/pylinkedin-0.3.tar.bz2
Binary file virtualenv/res/src/pylinkedin-0.3.tar.bz2 has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/pylucene-3.0.3-1-src.tar.gz
Binary file virtualenv/res/src/pylucene-3.0.3-1-src.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/python-oauth2-1.2.1-modified.tar.gz
Binary file virtualenv/res/src/python-oauth2-1.2.1-modified.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/python-openid-2.2.5.tar.gz
Binary file virtualenv/res/src/python-openid-2.2.5.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/setuptools_hg-0.2.tar.gz
Binary file virtualenv/res/src/setuptools_hg-0.2.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/res/src/uswaretech-Django-Socialauth-1e22872-modified.tar.gz
Binary file virtualenv/res/src/uswaretech-Django-Socialauth-1e22872-modified.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/setup/create_python_env.py
--- a/virtualenv/setup/create_python_env.py Wed Mar 23 17:34:36 2011 +0100
+++ b/virtualenv/setup/create_python_env.py Fri Apr 08 16:33:01 2011 +0200
@@ -29,137 +29,17 @@
# - 4Suite-xml - easy_install ftp://ftp.4suite.org/pub/4Suite/4Suite-XML-1.0.2.tar.bz2
# - pylucene - script
-src_base = os.path.join(here,"res","src").replace("\\","/")
-lib_path = os.path.abspath(os.path.join(here,"res","lib")).replace("\\","/")
-
-EXTRA_TEXT = "URLS = { \n"
-
-EXTRA_TEXT += " 'DISTRIBUTE' : { 'setup': 'distribute', 'url': 'http://pypi.python.org/packages/source/d/distribute/distribute-0.6.14.tar.gz', 'local': '"+ os.path.abspath(os.path.join(src_base,"distribute-0.6.14.tar.gz")).replace("\\","/")+"'},\n"
-EXTRA_TEXT += " 'MERCURIAL' : { 'setup': 'distribute', 'url': 'http://pypi.python.org/packages/source/d/mercurial/mercurial-1.7.5.tar.gz', 'local': '"+ os.path.abspath(os.path.join(src_base,"mercurial-1.7.5.tar.gz")).replace("\\","/")+"'},\n"
-EXTRA_TEXT += "}\n"
-
-EXTRA_TEXT += "import sys\n"
-EXTRA_TEXT += "sys.path.append('"+lib_path+"')\n"
-
-EXTRA_TEXT += """
-
-import shutil
-import tarfile
-import zipfile
-import urllib
-import platform
-
-
-INDEX_URL = 'http://pypi.python.org/simple/'
-
-
-def extend_parser(parser):
- parser.add_option(
- '--index-url',
- metavar='INDEX_URL',
- dest='index_url',
- default='',
- help='base URL of Python Package Index')
- parser.add_option(
- '--type-install',
- metavar='type_install',
- dest='type_install',
- default='local',
- help='type install : local, url, setup')
- parser.add_option(
- '--ignore-packages',
- metavar='ignore_packages',
- dest='ignore_packages',
- default=None,
- help='list of comma separated keys for package to ignore')
-
-
-
-def adjust_options(options, args):
- pass
+src_base = os.path.abspath(os.path.join(here,"..","res","src")).replace("\\","/")
+lib_path = os.path.abspath(os.path.join(here,"..","res","lib")).replace("\\","/")
+patch_path = os.path.abspath(os.path.join(here,"res","patch")).replace("\\","/")
-def after_install(options, home_dir):
- home_dir, lib_dir, inc_dir, bin_dir = path_locations(home_dir)
- base_dir = os.path.dirname(home_dir)
- src_dir = join(home_dir, 'src')
- tmp_dir = join(home_dir, 'tmp')
- ensure_dir(src_dir)
- ensure_dir(tmp_dir)
- system_str = platform.system()
-
- res_source_key = options.type_install
-
- ignore_packages = []
-
- if options.ignore_packages :
- ignore_packages = options.ignore_packages.split(",")
-
- logger.indent += 2
- try:
- NORMAL_INSTALL = [ #(key,method, option_str, extra_env)
- ('DISTRIBUTE', 'pip', None, None),
- ('MERCURIAL', 'pip', None, None),
- ]
-
-
- for key, method, option_str, extra_env in NORMAL_INSTALL:
- if key not in ignore_packages:
- normal_install(key, method, option_str, extra_env, res_source_key, home_dir, tmp_dir)
-
- logger.notify("Clear source dir")
- shutil.rmtree(src_dir)
-
- finally:
- logger.indent -= 2
- script_dir = join(base_dir, bin_dir)
- logger.notify('Run "%s Package" to install new packages that provide builds'
- % join(script_dir, 'easy_install'))
-
+EXTRA_TEXT = "import sys\n"
+EXTRA_TEXT += "sys.path.append('%s')\n" % (lib_path)
+EXTRA_TEXT += "sys.path.append('%s')\n" % (os.path.abspath(os.path.join(here,"res")).replace("\\","/"))
+EXTRA_TEXT += "from res_create_env import generate_install_methods\n"
+EXTRA_TEXT += "adjust_options, extend_parser, after_install = generate_install_methods(path_locations, '%s', Logger, call_subprocess)\n" % (src_base)
-def normal_install(key, method, option_str, extra_env, res_source_key, home_dir, tmp_dir):
- logger.notify("Install %s from %s with %s" % (key,URLS[key][res_source_key],method))
- if method == 'pip':
- if sys.platform == 'win32':
- args = [os.path.abspath(os.path.join(home_dir, 'Scripts', 'pip')), 'install', '-E', os.path.abspath(home_dir), URLS[key][res_source_key]]
- else:
- args = [os.path.abspath(os.path.join(home_dir, 'bin', 'pip')), 'install', '-E', os.path.abspath(home_dir), URLS[key][res_source_key]]
- if option_str :
- args.insert(4,option_str)
- call_subprocess(args,
- cwd=os.path.abspath(tmp_dir),
- filter_stdout=filter_python_develop,
- show_stdout=True,
- extra_env=extra_env)
- else:
- if sys.platform == 'win32':
- args = [os.path.abspath(os.path.join(home_dir, 'Scripts', 'easy_install')), URLS[key][res_source_key]]
- else:
- args = [os.path.abspath(os.path.join(home_dir, 'bin', 'easy_install')), URLS[key][res_source_key]]
- if option_str :
- args.insert(1,option_str)
- call_subprocess(args,
- cwd=os.path.abspath(tmp_dir),
- filter_stdout=filter_python_develop,
- show_stdout=True,
- extra_env=extra_env)
-
-
-def ensure_dir(dir):
- if not os.path.exists(dir):
- logger.notify('Creating directory %s' % dir)
- os.makedirs(dir)
-
-def filter_python_develop(line):
- if not line.strip():
- return Logger.DEBUG
- for prefix in ['Searching for', 'Reading ', 'Best match: ', 'Processing ',
- 'Moving ', 'Adding ', 'running ', 'writing ', 'Creating ',
- 'creating ', 'Copying ']:
- if line.startswith(prefix):
- return Logger.DEBUG
- return Logger.NOTIFY
-"""
def main():
python_version = ".".join(map(str,sys.version_info[0:2]))
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/setup/res/res_create_env.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/virtualenv/setup/res/res_create_env.py Fri Apr 08 16:33:01 2011 +0200
@@ -0,0 +1,15 @@
+import platform
+
+from lib_create_env import lib_generate_install_methods, install_pylucene, install_psycopg2
+
+system_str = platform.system()
+
+
+INSTALLS = [#(key,method, option_str, dict_extra_env)
+ ('DISTRIBUTE', 'pip', None, None),
+ ('MERCURIAL', 'pip', None, None),
+ ('SETUPTOOLS-HG', 'pip', None, None),
+]
+
+def generate_install_methods(path_locations, src_base, Logger, call_subprocess):
+ return lib_generate_install_methods(path_locations, src_base, Logger, call_subprocess, INSTALLS)
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/setup/res/src/distribute-0.6.14.tar.gz
Binary file virtualenv/setup/res/src/distribute-0.6.14.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/setup/res/src/mercurial-1.7.5.tar.gz
Binary file virtualenv/setup/res/src/mercurial-1.7.5.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/create_python_env.py
--- a/virtualenv/web/create_python_env.py Wed Mar 23 17:34:36 2011 +0100
+++ b/virtualenv/web/create_python_env.py Fri Apr 08 16:33:01 2011 +0200
@@ -8,7 +8,8 @@
- distribute
- psycopg2 requires the PostgreSQL libpq libraries and the pg_config utility
-- python project-boot.py --distribute --no-site-packages --index-url=http://pypi.websushi.org/ --clear --type-install=local
+- python project-boot.py --distribute --no-site-packages --index-url=http://pypi.websushi.org/ --clear --type-install=local --ignore-packages=MYSQL
+- python project-boot.py --no-site-packages --clear --ignore-packages=MYSQL --type-install=local
- For Linux :
python project-boot.py --unzip-setuptools --no-site-packages --index-url=http://pypi.websushi.org/ --clear --type-install=local
@@ -33,293 +34,23 @@
# - 4Suite-xml - easy_install ftp://ftp.4suite.org/pub/4Suite/4Suite-XML-1.0.2.tar.bz2
# - pylucene - script
-src_base = os.path.join(here,"res","src").replace("\\","/")
-lib_path = os.path.abspath(os.path.join(here,"res","lib")).replace("\\","/")
+src_base = os.path.abspath(os.path.join(here,"..","res","src")).replace("\\","/")
+lib_path = os.path.abspath(os.path.join(here,"..","res","lib")).replace("\\","/")
patch_path = os.path.abspath(os.path.join(here,"res","patch")).replace("\\","/")
-EXTRA_TEXT = "URLS = { \n"
-EXTRA_TEXT += " 'DISTRIBUTE' : { 'setup': 'distribute', 'url': 'http://pypi.python.org/packages/source/d/distribute/distribute-0.6.14.tar.gz', 'local': '"+ os.path.abspath(os.path.join(src_base,"distribute-0.6.14.tar.gz")).replace("\\","/")+"'},\n"
-if sys.platform == 'win32':
- EXTRA_TEXT += " 'PSYCOPG2' : { 'setup': 'psycopg2','url': 'D:/wakimd/dev/hg/platform/virtualenv/web/res/src/psycopg2-2.0.10.win32-py2.6-pg8.3.7-release.zip', 'local': '"+ os.path.abspath(os.path.join(src_base,"psycopg2-2.0.10.win32-py2.6-pg8.3.7-release.zip")).replace("\\","/")+"'},\n"
-else:
- EXTRA_TEXT += " 'PSYCOPG2' : { 'setup': 'psycopg2','url': 'http://initd.org/psycopg/tarballs/PSYCOPG-2-3/psycopg2-2.3.2.tar.gz', 'local': '"+ os.path.abspath(os.path.join(src_base,"psycopg2-2.3.2.tar.gz")).replace("\\","/")+"'},\n"
-EXTRA_TEXT += " 'MYSQL' : { 'setup': 'mysql-python', 'url': 'http://sourceforge.net/projects/mysql-python/files/mysql-python/1.2.3/MySQL-python-1.2.3.tar.gz/download', 'local' : '"+ os.path.abspath(os.path.join(src_base,"MySQL-python-1.2.3.tar.gz")).replace("\\","/")+"'},\n"
-if sys.platform == 'win32':
- EXTRA_TEXT += " 'PYLUCENE' : { 'setup': 'http://pylucene-win32-binary.googlecode.com/files/lucene-3.0.2-py2.6-win32.egg', 'local': '"+ os.path.abspath(os.path.join(src_base,"lucene-3.0.2-py2.6-win32.egg")).replace("\\","/")+"'},\n"
- EXTRA_TEXT += " 'JCC' : { 'setup': 'http://pylucene-win32-binary.googlecode.com/files/JCC-2.6-py2.6-win32.egg', 'local': '"+ os.path.abspath(os.path.join(src_base,"JCC-2.6-py2.6-win32.egg")).replace("\\","/")+"'},\n"
-else:
- EXTRA_TEXT += " 'PYLUCENE' : { 'setup': 'http://apache.crihan.fr/dist/lucene/pylucene/pylucene-3.0.3-1-src.tar.gz', 'url': 'http://apache.crihan.fr/dist/lucene/pylucene/pylucene-3.0.3-1-src.tar.gz', 'local': '"+ os.path.abspath(os.path.join(src_base,"pylucene-3.0.3-1-src.tar.gz")).replace("\\","/")+"'},\n"
-if sys.platform == 'win32':
- EXTRA_TEXT += " 'PIL' : { 'setup': 'pil', 'url': 'http://effbot.org/media/downloads/PIL-1.1.7.win32-py2.6.exe', 'local': '"+ os.path.abspath(os.path.join(src_base,"PIL-1.1.7.win32-py2.6.exe")).replace("\\","/")+"'},\n"
-else:
- EXTRA_TEXT += " 'PIL' : { 'setup': 'pil', 'url': 'http://effbot.org/downloads/Imaging-1.1.7.tar.gz', 'local': '"+ os.path.abspath(os.path.join(src_base,"Imaging-1.1.7.tar.gz")).replace("\\","/")+"'},\n"
-EXTRA_TEXT += " 'DJANGO' : { 'setup': 'django', 'url': 'http://www.djangoproject.com/download/1.2.4/tarball/', 'local': '"+ os.path.abspath(os.path.join(src_base,"Django-1.2.4.tar.gz")).replace("\\","/")+"'},\n"
-EXTRA_TEXT += " 'JOGGING' : { 'setup': 'jogging', 'url': 'http://github.com/zain/jogging/tarball/v0.2.2', 'local': '"+ os.path.abspath(os.path.join(src_base,"jogging-0.2.2.tar.gz")).replace("\\","/")+"'},\n"
-EXTRA_TEXT += " 'DJANGO-EXTENSIONS' : { 'setup': 'django-extensions', 'url':'https://github.com/django-extensions/django-extensions/tarball/0.6', 'local':'"+ os.path.abspath(os.path.join(src_base,"django-extensions-0.6.tar.gz")).replace("\\","/")+"' },\n"
-EXTRA_TEXT += " 'DJANGO-REGISTRATION' : { 'setup': 'django-registration', 'url':'http://bitbucket.org/ubernostrum/django-registration/get/tip.tar.gz', 'local':'"+ os.path.abspath(os.path.join(src_base,"django-registration.tar.gz")).replace("\\","/")+"' },\n"
-EXTRA_TEXT += " 'DJANGO-TAGGING' : { 'setup': 'django-tagging', 'url':'http://django-tagging.googlecode.com/files/django-tagging-0.3.1.tar.gz', 'local':'"+ os.path.abspath(os.path.join(src_base,"django-tagging-0.3.1.tar.gz")).replace("\\","/")+"' },\n"
-EXTRA_TEXT += " 'DJANGO-PISTON' : { 'setup': 'django-piston', 'url':'"+ os.path.abspath(os.path.join(src_base,"django-piston-0.2.2-modified.tar.gz")).replace("\\","/")+"', 'local':'"+ os.path.abspath(os.path.join(src_base,"django-piston-0.2.2-modified.tar.gz")).replace("\\","/")+"' },\n"
-if sys.platform == 'win32':
- EXTRA_TEXT += " 'LXML' : { 'setup': 'lxml', 'url': 'http://pypi.python.org/packages/2.6/l/lxml/lxml-2.2.8-py2.6-win32.egg', 'local': '"+ os.path.abspath(os.path.join(src_base,"lxml-2.2.8-py2.6-win32.egg")).replace("\\","/")+"'},\n"
-else:
- EXTRA_TEXT += " 'LXML' : { 'setup': 'lxml', 'url': '"+ os.path.abspath(os.path.join(src_base,"lxml_2.2.8.tar.gz"))+"', 'local': '"+ os.path.abspath(os.path.join(src_base,"lxml-2.2.8.tar.gz")).replace("\\","/")+"'},\n"
-EXTRA_TEXT += " 'SETUPTOOLS-HG' : { 'setup': 'setuptools-hg', 'url':'http://bitbucket.org/jezdez/setuptools_hg/downloads/setuptools_hg-0.2.tar.gz', 'local':'"+ os.path.abspath(os.path.join(src_base,"setuptools_hg-0.2.tar.gz")).replace("\\","/")+"' },\n"
-EXTRA_TEXT += " 'OAUTH2' : { 'setup': 'python-oauth2', 'url':'"+ os.path.abspath(os.path.join(src_base,"python-oauth2-1.2.1-modified.tar.gz")).replace("\\","/")+"', 'local':'"+ os.path.abspath(os.path.join(src_base,"python-oauth2-1.2.1-modified.tar.gz")).replace("\\","/")+"' },\n"
-EXTRA_TEXT += " 'HTTPLIB2' : { 'setup': 'python-oauth2', 'url':'http://httplib2.googlecode.com/files/httplib2-0.6.0.tar.gz', 'local':'"+ os.path.abspath(os.path.join(src_base,"httplib2-0.6.0.tar.gz")).replace("\\","/")+"' },\n"
-EXTRA_TEXT += " 'DJANGO-OAUTH-PLUS' : { 'setup': 'django-oauth-plus', 'url':'http://bitbucket.org/david/django-oauth-plus/get/f314f018e473.gz', 'local':'"+ os.path.abspath(os.path.join(src_base,"django-oauth-plus.tar.gz")).replace("\\","/")+"' },\n"
-EXTRA_TEXT += "}\n"
-
-EXTRA_TEXT += "import sys\n"
-EXTRA_TEXT += "sys.path.append('"+lib_path+"')\n"
-
-EXTRA_TEXT += """
-
-import shutil
-import tarfile
-import zipfile
-import urllib
-import platform
-import patch
-
-
-INDEX_URL = 'http://pypi.python.org/simple/'
-
-
-def extend_parser(parser):
- parser.add_option(
- '--index-url',
- metavar='INDEX_URL',
- dest='index_url',
- default='',
- help='base URL of Python Package Index')
- parser.add_option(
- '--type-install',
- metavar='type_install',
- dest='type_install',
- default='local',
- help='type install : local, url, setup')
- parser.add_option(
- '--ignore-packages',
- metavar='ignore_packages',
- dest='ignore_packages',
- default=None,
- help='list of comma separated keys for package to ignore')
+EXTRA_TEXT = "import sys\n"
+EXTRA_TEXT += "sys.path.append('%s')\n" % (lib_path)
+EXTRA_TEXT += "sys.path.append('%s')\n" % (os.path.abspath(os.path.join(here,"res")).replace("\\","/"))
+EXTRA_TEXT += "from res_create_env import generate_install_methods\n"
+EXTRA_TEXT += "adjust_options, extend_parser, after_install = generate_install_methods(path_locations, '%s', Logger, call_subprocess)\n" % (src_base)
-def adjust_options(options, args):
- pass
-
-
-def after_install(options, home_dir):
- home_dir, lib_dir, inc_dir, bin_dir = path_locations(home_dir)
- base_dir = os.path.dirname(home_dir)
- src_dir = join(home_dir, 'src')
- tmp_dir = join(home_dir, 'tmp')
- ensure_dir(src_dir)
- ensure_dir(tmp_dir)
- system_str = platform.system()
-
- res_source_key = options.type_install
-
- ignore_packages = []
-
- if options.ignore_packages :
- ignore_packages = options.ignore_packages.split(",")
-
- logger.indent += 2
- try:
-
- if 'PYLUCENE' not in ignore_packages and system_str != "Windows":
- #get pylucene
- logger.notify("Get Pylucene from %s " % URLS['PYLUCENE'][res_source_key])
- pylucene_src = os.path.join(src_dir,"pylucene.tar.gz")
- if res_source_key == 'local':
- shutil.copy(URLS['PYLUCENE'][res_source_key], pylucene_src)
- else:
- urllib.urlretrieve(URLS['PYLUCENE'][res_source_key], pylucene_src)
- tf = tarfile.open(pylucene_src,'r:gz')
- pylucene_base_path = os.path.join(src_dir,"pylucene")
- logger.notify("Extract Pylucene to %s " % pylucene_base_path)
- tf.extractall(pylucene_base_path)
- tf.close()
-
- pylucene_src_path = os.path.join(pylucene_base_path, os.listdir(pylucene_base_path)[0])
- jcc_src_path = os.path.abspath(os.path.join(pylucene_src_path,"jcc"))
-
- #install jcc
-
- #patch for linux
- if system_str == 'Linux' :
- olddir = os.getcwd()
- patch_dest_path = os.path.join(lib_dir,'site-packages','setuptools-0.6c11-py'+'%s.%s' % (sys.version_info[0], sys.version_info[1])+'.egg')
- if os.path.isfile(patch_dest_path):
- # must unzip egg
- # rename file and etract all
- shutil.move(patch_dest_path, patch_dest_path + ".zip")
- zf = zipfile.ZipFile(patch_dest_path + ".zip",'r')
- zf.extractall(patch_dest_path)
- os.remove(patch_dest_path + ".zip")
- logger.notify("Patch jcc : %s " % (patch_dest_path))
- os.chdir(patch_dest_path)
- p = patch.fromfile(os.path.join(jcc_src_path,"jcc","patches","patch.43.0.6c11"))
- p.apply()
- os.chdir(olddir)
-
- logger.notify("Install jcc")
- call_subprocess([os.path.abspath(os.path.join(home_dir, 'bin', 'python')), 'setup.py', 'install'],
- cwd=jcc_src_path,
- filter_stdout=filter_python_develop,
- show_stdout=True)
- #install pylucene
-
- logger.notify("Install pylucene")
- #modify makefile
- makefile_path = os.path.join(pylucene_src_path,"Makefile")
- logger.notify("Modify makefile %s " % makefile_path)
- shutil.move( makefile_path, makefile_path+"~" )
-
- destination= open( makefile_path, "w" )
- source= open( makefile_path+"~", "r" )
- destination.write("PREFIX_PYTHON="+os.path.abspath(home_dir)+"\\n")
- destination.write("ANT=ant\\n")
- destination.write("PYTHON=$(PREFIX_PYTHON)/bin/python\\n")
-
- if system_str == "Darwin":
- if sys.version_info >= (2,6):
- destination.write("JCC=$(PYTHON) -m jcc.__main__ --shared --arch x86_64 --arch i386\\n")
- else:
- destination.write("JCC=$(PYTHON) -m jcc --shared --arch x86_64 --arch i386\\n")
- destination.write("NUM_FILES=2\\n")
- elif system_str == "Windows":
- destination.write("JCC=$(PYTHON) -m jcc.__main__ --shared --arch x86_64 --arch i386\\n")
- destination.write("NUM_FILES=2\\n")
- else:
- if sys.version_info >= (2,6) and sys.version_info < (2,7):
- destination.write("JCC=$(PYTHON) -m jcc.__main__ --shared\\n")
- else:
- destination.write("JCC=$(PYTHON) -m jcc --shared\\n")
- destination.write("NUM_FILES=2\\n")
- for line in source:
- destination.write( line )
- source.close()
- destination.close()
- os.remove(makefile_path+"~" )
-
- logger.notify("pylucene make")
- call_subprocess(['make'],
- cwd=os.path.abspath(pylucene_src_path),
- filter_stdout=filter_python_develop,
- show_stdout=True)
-
- logger.notify("pylucene make install")
- call_subprocess(['make', 'install'],
- cwd=os.path.abspath(pylucene_src_path),
- filter_stdout=filter_python_develop,
- show_stdout=True)
-
- if system_str == 'Linux' and 'DISTRIBUTE' not in ignore_packages:
- normal_install('DISTRIBUTE', 'pip', None, None, res_source_key, home_dir, tmp_dir)
-
- NORMAL_INSTALL = [ #(key,method, option_str, extra_env)
- ('MYSQL', 'pip', None, None),
- ('PIL', 'easy_install', None, None),
- ('DJANGO','pip', None, None),
- ('JOGGING','pip', None, None),
- ('DJANGO-EXTENSIONS', 'pip', None, None),
- ('DJANGO-REGISTRATION', 'easy_install', '-Z', None),
- ('DJANGO-TAGGING', 'pip', None, None),
- ('DJANGO-PISTON', 'pip', None, None),
- ('SETUPTOOLS-HG', 'pip', None, None),
- ('HTTPLIB2', 'pip', None, None),
- ('OAUTH2', 'pip', None, None),
- ('DJANGO-OAUTH-PLUS', 'pip', None, None),
- ]
-
- if 'PYLUCENE' not in ignore_packages and system_str == "Windows":
- NORMAL_INSTALL.append(('JCC','easy_install',None,None))
- NORMAL_INSTALL.append(('PYLUCENE','easy_install',None,None))
-
- if system_str == "Darwin":
- NORMAL_INSTALL.append(('LXML', 'easy_install', None, {'STATIC_DEPS': 'true'}))
- else:
- NORMAL_INSTALL.append(('LXML', 'easy_install', None, None))
-
- if system_str == "Windows":
- #get psycopg2
- psycopg2_src = os.path.join(src_dir,"psycopg2.zip")
- shutil.copy(URLS['PSYCOPG2'][res_source_key], psycopg2_src)
- #extract psycopg2
- zf = zipfile.ZipFile(psycopg2_src)
- psycopg2_base_path = os.path.join(src_dir,"psycopg2")
- zf.extractall(psycopg2_base_path)
- zf.close()
-
- psycopg2_src_path = os.path.join(psycopg2_base_path, os.listdir(psycopg2_base_path)[0])
- shutil.copytree(os.path.join(psycopg2_src_path, 'psycopg2'), os.path.abspath(os.path.join(home_dir, 'Lib', 'psycopg2')))
- shutil.copy(os.path.join(psycopg2_src_path, 'psycopg2-2.0.10-py2.6.egg-info'), os.path.abspath(os.path.join(home_dir, 'Lib', 'site-packages')))
- else:
- NORMAL_INSTALL.append(('PSYCOPG2', 'pip', None, None))
-
-
- for key, method, option_str, extra_env in NORMAL_INSTALL:
- if key not in ignore_packages:
- normal_install(key, method, option_str, extra_env, res_source_key, home_dir, tmp_dir)
-
- logger.notify("Clear source dir")
- shutil.rmtree(src_dir)
-
- finally:
- logger.indent -= 2
- script_dir = join(base_dir, bin_dir)
- logger.notify('Run "%s Package" to install new packages that provide builds'
- % join(script_dir, 'easy_install'))
-
-
-def normal_install(key, method, option_str, extra_env, res_source_key, home_dir, tmp_dir):
- logger.notify("Install %s from %s with %s" % (key,URLS[key][res_source_key],method))
- if method == 'pip':
- if sys.platform == 'win32':
- args = [os.path.abspath(os.path.join(home_dir, 'Scripts', 'pip')), 'install', '-E', os.path.abspath(home_dir), URLS[key][res_source_key]]
- else:
- args = [os.path.abspath(os.path.join(home_dir, 'bin', 'pip')), 'install', '-E', os.path.abspath(home_dir), URLS[key][res_source_key]]
- if option_str :
- args.insert(4,option_str)
- call_subprocess(args,
- cwd=os.path.abspath(tmp_dir),
- filter_stdout=filter_python_develop,
- show_stdout=True,
- extra_env=extra_env)
- else:
- if sys.platform == 'win32':
- args = [os.path.abspath(os.path.join(home_dir, 'Scripts', 'easy_install')), URLS[key][res_source_key]]
- else:
- args = [os.path.abspath(os.path.join(home_dir, 'bin', 'easy_install')), URLS[key][res_source_key]]
- if option_str :
- args.insert(1,option_str)
- call_subprocess(args,
- cwd=os.path.abspath(tmp_dir),
- filter_stdout=filter_python_develop,
- show_stdout=True,
- extra_env=extra_env)
-
-
-def ensure_dir(dir):
- if not os.path.exists(dir):
- logger.notify('Creating directory %s' % dir)
- os.makedirs(dir)
-
-def filter_python_develop(line):
- if not line.strip():
- return Logger.DEBUG
- for prefix in ['Searching for', 'Reading ', 'Best match: ', 'Processing ',
- 'Moving ', 'Adding ', 'running ', 'writing ', 'Creating ',
- 'creating ', 'Copying ']:
- if line.startswith(prefix):
- return Logger.DEBUG
- return Logger.NOTIFY
-"""
+#f = open(os.path.join(os.path.join(os.path.dirname(os.path.abspath(__file__)),"res"),'res_create_env.py'), 'r')
+#EXTRA_TEXT += f.read()
+#EXTRA_TEXT += "\n"
+#EXTRA_TEXT += "RES_ENV = ResourcesEnv('%s')\n" % (src_base)
def main():
python_version = ".".join(map(str,sys.version_info[0:2]))
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/lib/patch.py
--- a/virtualenv/web/res/lib/patch.py Wed Mar 23 17:34:36 2011 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,589 +0,0 @@
-""" Patch utility to apply unified diffs
-
- Brute-force line-by-line non-recursive parsing
-
- Copyright (c) 2008-2010 anatoly techtonik
- Available under the terms of MIT license
-
- Project home: http://code.google.com/p/python-patch/
-
-
- $Id: patch.py 76 2010-04-08 19:10:21Z techtonik $
- $HeadURL: https://python-patch.googlecode.com/svn/trunk/patch.py $
-"""
-
-__author__ = "techtonik.rainforce.org"
-__version__ = "10.04"
-
-import copy
-import logging
-import re
-# cStringIO doesn't support unicode in 2.5
-from StringIO import StringIO
-from logging import debug, info, warning
-
-from os.path import exists, isfile, abspath
-from os import unlink
-
-
-#------------------------------------------------
-# Logging is controlled by "python_patch" logger
-
-debugmode = False
-
-logger = logging.getLogger("python_patch")
-loghandler = logging.StreamHandler()
-logger.addHandler(loghandler)
-
-debug = logger.debug
-info = logger.info
-warning = logger.warning
-
-#: disable library logging by default
-logger.setLevel(logging.CRITICAL)
-
-#------------------------------------------------
-
-
-def fromfile(filename):
- """ Parse patch file and return Patch() object
- """
-
- info("reading patch from file %s" % filename)
- fp = open(filename, "rb")
- patch = Patch(fp)
- fp.close()
- return patch
-
-
-def fromstring(s):
- """ Parse text string and return Patch() object
- """
-
- return Patch(
- StringIO.StringIO(s)
- )
-
-
-
-class HunkInfo(object):
- """ Parsed hunk data container (hunk starts with @@ -R +R @@) """
-
- def __init__(self):
- self.startsrc=None #: line count starts with 1
- self.linessrc=None
- self.starttgt=None
- self.linestgt=None
- self.invalid=False
- self.text=[]
-
- def copy(self):
- return copy.copy(self)
-
-# def apply(self, estream):
-# """ write hunk data into enumerable stream
-# return strings one by one until hunk is
-# over
-#
-# enumerable stream are tuples (lineno, line)
-# where lineno starts with 0
-# """
-# pass
-
-
-
-class Patch(object):
-
- def __init__(self, stream=None):
-
- # define Patch data members
- # table with a row for every source file
-
- #: list of source filenames
- self.source=None
- self.target=None
- #: list of lists of hunks
- self.hunks=None
- #: file endings statistics for every hunk
- self.hunkends=None
-
- if stream:
- self.parse(stream)
-
- def copy(self):
- return copy.copy(self)
-
- def parse(self, stream):
- """ parse unified diff """
- self.source = []
- self.target = []
- self.hunks = []
- self.hunkends = []
-
- # define possible file regions that will direct the parser flow
- header = False # comments before the patch body
- filenames = False # lines starting with --- and +++
-
- hunkhead = False # @@ -R +R @@ sequence
- hunkbody = False #
- hunkskip = False # skipping invalid hunk mode
-
- header = True
- lineends = dict(lf=0, crlf=0, cr=0)
- nextfileno = 0
- nexthunkno = 0 #: even if index starts with 0 user messages number hunks from 1
-
- # hunkinfo holds parsed values, hunkactual - calculated
- hunkinfo = HunkInfo()
- hunkactual = dict(linessrc=None, linestgt=None)
-
- fe = enumerate(stream)
- for lineno, line in fe:
-
- # analyze state
- if header and line.startswith("--- "):
- header = False
- # switch to filenames state
- filenames = True
- #: skip hunkskip and hunkbody code until you read definition of hunkhead
- if hunkbody:
- # process line first
- if re.match(r"^[- \+\\]", line):
- # gather stats about line endings
- if line.endswith("\r\n"):
- self.hunkends[nextfileno-1]["crlf"] += 1
- elif line.endswith("\n"):
- self.hunkends[nextfileno-1]["lf"] += 1
- elif line.endswith("\r"):
- self.hunkends[nextfileno-1]["cr"] += 1
-
- if line.startswith("-"):
- hunkactual["linessrc"] += 1
- elif line.startswith("+"):
- hunkactual["linestgt"] += 1
- elif not line.startswith("\\"):
- hunkactual["linessrc"] += 1
- hunkactual["linestgt"] += 1
- hunkinfo.text.append(line)
- # todo: handle \ No newline cases
- else:
- warning("invalid hunk no.%d at %d for target file %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
- # add hunk status node
- self.hunks[nextfileno-1].append(hunkinfo.copy())
- self.hunks[nextfileno-1][nexthunkno-1]["invalid"] = True
- # switch to hunkskip state
- hunkbody = False
- hunkskip = True
-
- # check exit conditions
- if hunkactual["linessrc"] > hunkinfo.linessrc or hunkactual["linestgt"] > hunkinfo.linestgt:
- warning("extra hunk no.%d lines at %d for target %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
- # add hunk status node
- self.hunks[nextfileno-1].append(hunkinfo.copy())
- self.hunks[nextfileno-1][nexthunkno-1]["invalid"] = True
- # switch to hunkskip state
- hunkbody = False
- hunkskip = True
- elif hunkinfo.linessrc == hunkactual["linessrc"] and hunkinfo.linestgt == hunkactual["linestgt"]:
- self.hunks[nextfileno-1].append(hunkinfo.copy())
- # switch to hunkskip state
- hunkbody = False
- hunkskip = True
-
- # detect mixed window/unix line ends
- ends = self.hunkends[nextfileno-1]
- if ((ends["cr"]!=0) + (ends["crlf"]!=0) + (ends["lf"]!=0)) > 1:
- warning("inconsistent line ends in patch hunks for %s" % self.source[nextfileno-1])
- if debugmode:
- debuglines = dict(ends)
- debuglines.update(file=self.target[nextfileno-1], hunk=nexthunkno)
- debug("crlf: %(crlf)d lf: %(lf)d cr: %(cr)d\t - file: %(file)s hunk: %(hunk)d" % debuglines)
-
- if hunkskip:
- match = re.match("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?", line)
- if match:
- # switch to hunkhead state
- hunkskip = False
- hunkhead = True
- elif line.startswith("--- "):
- # switch to filenames state
- hunkskip = False
- filenames = True
- if debugmode and len(self.source) > 0:
- debug("- %2d hunks for %s" % (len(self.hunks[nextfileno-1]), self.source[nextfileno-1]))
-
- if filenames:
- if line.startswith("--- "):
- if nextfileno in self.source:
- warning("skipping invalid patch for %s" % self.source[nextfileno])
- del self.source[nextfileno]
- # double source filename line is encountered
- # attempt to restart from this second line
- re_filename = "^--- ([^\t]+)"
- match = re.match(re_filename, line)
- # todo: support spaces in filenames
- if match:
- self.source.append(match.group(1).strip())
- else:
- warning("skipping invalid filename at line %d" % lineno)
- # switch back to header state
- filenames = False
- header = True
- elif not line.startswith("+++ "):
- if nextfileno in self.source:
- warning("skipping invalid patch with no target for %s" % self.source[nextfileno])
- del self.source[nextfileno]
- else:
- # this should be unreachable
- warning("skipping invalid target patch")
- filenames = False
- header = True
- else:
- if nextfileno in self.target:
- warning("skipping invalid patch - double target at line %d" % lineno)
- del self.source[nextfileno]
- del self.target[nextfileno]
- nextfileno -= 1
- # double target filename line is encountered
- # switch back to header state
- filenames = False
- header = True
- else:
- re_filename = "^\+\+\+ ([^\t]+)"
- match = re.match(re_filename, line)
- if not match:
- warning("skipping invalid patch - no target filename at line %d" % lineno)
- # switch back to header state
- filenames = False
- header = True
- else:
- self.target.append(match.group(1).strip())
- nextfileno += 1
- # switch to hunkhead state
- filenames = False
- hunkhead = True
- nexthunkno = 0
- self.hunks.append([])
- self.hunkends.append(lineends.copy())
- continue
-
- if hunkhead:
- match = re.match("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?", line)
- if not match:
- if nextfileno-1 not in self.hunks:
- warning("skipping invalid patch with no hunks for file %s" % self.target[nextfileno-1])
- # switch to header state
- hunkhead = False
- header = True
- continue
- else:
- # switch to header state
- hunkhead = False
- header = True
- else:
- hunkinfo.startsrc = int(match.group(1))
- hunkinfo.linessrc = 1
- if match.group(3): hunkinfo.linessrc = int(match.group(3))
- hunkinfo.starttgt = int(match.group(4))
- hunkinfo.linestgt = 1
- if match.group(6): hunkinfo.linestgt = int(match.group(6))
- hunkinfo.invalid = False
- hunkinfo.text = []
-
- hunkactual["linessrc"] = hunkactual["linestgt"] = 0
-
- # switch to hunkbody state
- hunkhead = False
- hunkbody = True
- nexthunkno += 1
- continue
- else:
- if not hunkskip:
- warning("patch file incomplete - %s" % filename)
- # sys.exit(?)
- else:
- # duplicated message when an eof is reached
- if debugmode and len(self.source) > 0:
- debug("- %2d hunks for %s" % (len(self.hunks[nextfileno-1]), self.source[nextfileno-1]))
-
- info("total files: %d total hunks: %d" % (len(self.source), sum(len(hset) for hset in self.hunks)))
-
-
- def apply(self):
- """ apply parsed patch """
-
- total = len(self.source)
- for fileno, filename in enumerate(self.source):
-
- f2patch = filename
- if not exists(f2patch):
- f2patch = self.target[fileno]
- if not exists(f2patch):
- warning("source/target file does not exist\n--- %s\n+++ %s" % (filename, f2patch))
- continue
- if not isfile(f2patch):
- warning("not a file - %s" % f2patch)
- continue
- filename = f2patch
-
- info("processing %d/%d:\t %s" % (fileno+1, total, filename))
-
- # validate before patching
- f2fp = open(filename)
- hunkno = 0
- hunk = self.hunks[fileno][hunkno]
- hunkfind = []
- hunkreplace = []
- validhunks = 0
- canpatch = False
- for lineno, line in enumerate(f2fp):
- if lineno+1 < hunk.startsrc:
- continue
- elif lineno+1 == hunk.startsrc:
- hunkfind = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " -"]
- hunkreplace = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " +"]
- #pprint(hunkreplace)
- hunklineno = 0
-
- # todo \ No newline at end of file
-
- # check hunks in source file
- if lineno+1 < hunk.startsrc+len(hunkfind)-1:
- if line.rstrip("\r\n") == hunkfind[hunklineno]:
- hunklineno+=1
- else:
- debug("hunk no.%d doesn't match source file %s" % (hunkno+1, filename))
- # file may be already patched, but we will check other hunks anyway
- hunkno += 1
- if hunkno < len(self.hunks[fileno]):
- hunk = self.hunks[fileno][hunkno]
- continue
- else:
- break
-
- # check if processed line is the last line
- if lineno+1 == hunk.startsrc+len(hunkfind)-1:
- debug("file %s hunk no.%d -- is ready to be patched" % (filename, hunkno+1))
- hunkno+=1
- validhunks+=1
- if hunkno < len(self.hunks[fileno]):
- hunk = self.hunks[fileno][hunkno]
- else:
- if validhunks == len(self.hunks[fileno]):
- # patch file
- canpatch = True
- break
- else:
- if hunkno < len(self.hunks[fileno]):
- warning("premature end of source file %s at hunk %d" % (filename, hunkno+1))
-
- f2fp.close()
-
- if validhunks < len(self.hunks[fileno]):
- if self._match_file_hunks(filename, self.hunks[fileno]):
- warning("already patched %s" % filename)
- else:
- warning("source file is different - %s" % filename)
- if canpatch:
- backupname = filename+".orig"
- if exists(backupname):
- warning("can't backup original file to %s - aborting" % backupname)
- else:
- import shutil
- shutil.move(filename, backupname)
- if self.write_hunks(backupname, filename, self.hunks[fileno]):
- warning("successfully patched %s" % filename)
- unlink(backupname)
- else:
- warning("error patching file %s" % filename)
- shutil.copy(filename, filename+".invalid")
- warning("invalid version is saved to %s" % filename+".invalid")
- # todo: proper rejects
- shutil.move(backupname, filename)
-
- # todo: check for premature eof
-
-
- def can_patch(self, filename):
- """ Check if specified filename can be patched. Returns None if file can
- not be found among source filenames. False if patch can not be applied
- clearly. True otherwise.
-
- :returns: True, False or None
- """
- idx = self._get_file_idx(filename, source=True)
- if idx == None:
- return None
- return self._match_file_hunks(filename, self.hunks[idx])
-
-
- def _match_file_hunks(self, filepath, hunks):
- matched = True
- fp = open(abspath(filepath))
-
- class NoMatch(Exception):
- pass
-
- lineno = 1
- line = fp.readline()
- hno = None
- try:
- for hno, h in enumerate(hunks):
- # skip to first line of the hunk
- while lineno < h.starttgt:
- if not len(line): # eof
- debug("check failed - premature eof before hunk: %d" % (hno+1))
- raise NoMatch
- line = fp.readline()
- lineno += 1
- for hline in h.text:
- if hline.startswith("-"):
- continue
- if not len(line):
- debug("check failed - premature eof on hunk: %d" % (hno+1))
- # todo: \ No newline at the end of file
- raise NoMatch
- if line.rstrip("\r\n") != hline[1:].rstrip("\r\n"):
- debug("file is not patched - failed hunk: %d" % (hno+1))
- raise NoMatch
- line = fp.readline()
- lineno += 1
-
- except NoMatch:
- matched = False
- # todo: display failed hunk, i.e. expected/found
-
- fp.close()
- return matched
-
-
- def patch_stream(self, instream, hunks):
- """ Generator that yields stream patched with hunks iterable
-
- Converts lineends in hunk lines to the best suitable format
- autodetected from input
- """
-
- # todo: At the moment substituted lineends may not be the same
- # at the start and at the end of patching. Also issue a
- # warning/throw about mixed lineends (is it really needed?)
-
- hunks = iter(hunks)
-
- srclineno = 1
-
- lineends = {'\n':0, '\r\n':0, '\r':0}
- def get_line():
- """
- local utility function - return line from source stream
- collecting line end statistics on the way
- """
- line = instream.readline()
- # 'U' mode works only with text files
- if line.endswith("\r\n"):
- lineends["\r\n"] += 1
- elif line.endswith("\n"):
- lineends["\n"] += 1
- elif line.endswith("\r"):
- lineends["\r"] += 1
- return line
-
- for hno, h in enumerate(hunks):
- debug("hunk %d" % (hno+1))
- # skip to line just before hunk starts
- while srclineno < h.startsrc:
- yield get_line()
- srclineno += 1
-
- for hline in h.text:
- # todo: check \ No newline at the end of file
- if hline.startswith("-") or hline.startswith("\\"):
- get_line()
- srclineno += 1
- continue
- else:
- if not hline.startswith("+"):
- get_line()
- srclineno += 1
- line2write = hline[1:]
- # detect if line ends are consistent in source file
- if sum([bool(lineends[x]) for x in lineends]) == 1:
- newline = [x for x in lineends if lineends[x] != 0][0]
- yield line2write.rstrip("\r\n")+newline
- else: # newlines are mixed
- yield line2write
-
- for line in instream:
- yield line
-
-
- def write_hunks(self, srcname, tgtname, hunks):
- src = open(srcname, "rb")
- tgt = open(tgtname, "wb")
-
- debug("processing target file %s" % tgtname)
-
- tgt.writelines(self.patch_stream(src, hunks))
-
- tgt.close()
- src.close()
- return True
-
-
- def _get_file_idx(self, filename, source=None):
- """ Detect index of given filename within patch.
-
- :param filename:
- :param source: search filename among sources (True),
- targets (False), or both (None)
- :returns: int or None
- """
- filename = abspath(filename)
- if source == True or source == None:
- for i,fnm in enumerate(self.source):
- if filename == abspath(fnm):
- return i
- if source == False or source == None:
- for i,fnm in enumerate(self.target):
- if filename == abspath(fnm):
- return i
-
-
-
-
-from optparse import OptionParser
-from os.path import exists
-import sys
-
-if __name__ == "__main__":
- opt = OptionParser(usage="%prog [options] unipatch-file", version="python-patch %s" % __version__)
- opt.add_option("--debug", action="store_true", dest="debugmode", help="debug mode")
- (options, args) = opt.parse_args()
-
- if not args:
- opt.print_version()
- opt.print_help()
- sys.exit()
- debugmode = options.debugmode
- patchfile = args[0]
- if not exists(patchfile) or not isfile(patchfile):
- sys.exit("patch file does not exist - %s" % patchfile)
-
-
- if debugmode:
- loglevel = logging.DEBUG
- logformat = "%(levelname)8s %(message)s"
- else:
- loglevel = logging.INFO
- logformat = "%(message)s"
- logger.setLevel(loglevel)
- loghandler.setFormatter(logging.Formatter(logformat))
-
-
-
- patch = fromfile(patchfile)
- #pprint(patch)
- patch.apply()
-
- # todo: document and test line ends handling logic - patch.py detects proper line-endings
- # for inserted hunks and issues a warning if patched file has incosistent line ends
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/patch/piston.diff
--- a/virtualenv/web/res/patch/piston.diff Wed Mar 23 17:34:36 2011 +0100
+++ b/virtualenv/web/res/patch/piston.diff Fri Apr 08 16:33:01 2011 +0200
@@ -3,6 +3,6 @@
timestamp = models.IntegerField()
is_approved = models.BooleanField(default=False)
-- user = models.ForeignKey(User, null=True, blank=True, related_name='piston_tokens')
+- user = models.ForeignKey(User, null=True, blank=True, related_name='tokens')
+ user = models.ForeignKey(User, null=True, blank=True, related_name='piston_tokens')
consumer = models.ForeignKey(Consumer)
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/res_create_env.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/virtualenv/web/res/res_create_env.py Fri Apr 08 16:33:01 2011 +0200
@@ -0,0 +1,54 @@
+import platform
+
+from lib_create_env import lib_generate_install_methods, install_pylucene, install_psycopg2
+
+system_str = platform.system()
+
+
+if system_str == 'Linux':
+ INSTALLS = [
+ ('DISTRIBUTE', 'pip', None, None),
+ ]
+else:
+ INSTALLS = []
+
+
+INSTALLS.extend([ #(key,method, option_str, dict_extra_env)
+ ('SETUPTOOLS-HG', 'pip', None, None),
+ ('MYSQL', 'pip', None, None),
+ ('PIL', 'easy_install', None, None),
+ ('DJANGO','pip', None, None),
+ ('JOGGING','pip', None, None),
+ ('DJANGO-EXTENSIONS', 'pip', None, None),
+ ('DJANGO-REGISTRATION', 'easy_install', '-Z', None),
+ ('DJANGO-TAGGING', 'pip', None, None),
+ ('DJANGO-PISTON', 'pip', None, None),
+ ('HTTPLIB2', 'pip', None, None),
+ ('OAUTH2', 'easy_install', None, None),
+ ('DJANGO-OAUTH-PLUS', 'pip', None, None),
+])
+
+if system_str == 'Windows':
+ INSTALLS.extend([
+ ('JCC','easy_install',None,None),
+ ('PYLUCENE','easy_install',None,None),
+ ('PSYCOPG2',install_psycopg2,None,None),
+ ])
+else:
+ INSTALLS.extend([
+ ('PYLUCENE',install_pylucene,None,None),
+ ('PSYCOPG2', 'pip', None, None),
+ ])
+
+if system_str == "Darwin":
+ INSTALLS.extend([
+ ('LXML', 'pip', None, {'STATIC_DEPS': 'true', 'LIBXML2_VERSION': '2.7.8', 'LIBXSLT_VERSION': '1.1.26'}),
+ ])
+else:
+ INSTALLS.extend([
+ ('LXML', 'pip', None, None),
+ ])
+
+
+def generate_install_methods(path_locations, src_base, Logger, call_subprocess):
+ return lib_generate_install_methods(path_locations, src_base, Logger, call_subprocess, INSTALLS)
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/Django-1.2.4.tar.gz
Binary file virtualenv/web/res/src/Django-1.2.4.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/Imaging-1.1.7.tar.gz
Binary file virtualenv/web/res/src/Imaging-1.1.7.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/JCC-2.6-py2.6-win32.egg
Binary file virtualenv/web/res/src/JCC-2.6-py2.6-win32.egg has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/MySQL-python-1.2.3.tar.gz
Binary file virtualenv/web/res/src/MySQL-python-1.2.3.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/PIL-1.1.7.win32-py2.6.exe
Binary file virtualenv/web/res/src/PIL-1.1.7.win32-py2.6.exe has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/distribute-0.6.14.tar.gz
Binary file virtualenv/web/res/src/distribute-0.6.14.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/django-extensions-0.6.tar.gz
Binary file virtualenv/web/res/src/django-extensions-0.6.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/django-oauth-plus.tar.gz
Binary file virtualenv/web/res/src/django-oauth-plus.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/django-piston-0.2.2-modified.tar.gz
Binary file virtualenv/web/res/src/django-piston-0.2.2-modified.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/django-registration.tar.gz
Binary file virtualenv/web/res/src/django-registration.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/django-tagging-0.3.1.tar.gz
Binary file virtualenv/web/res/src/django-tagging-0.3.1.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/facebook-python-sdk-322930c.tar.gz
Binary file virtualenv/web/res/src/facebook-python-sdk-322930c.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/httplib2-0.6.0.tar.gz
Binary file virtualenv/web/res/src/httplib2-0.6.0.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/jogging-0.2.2.tar.gz
Binary file virtualenv/web/res/src/jogging-0.2.2.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/lucene-3.0.2-py2.6-win32.egg
Binary file virtualenv/web/res/src/lucene-3.0.2-py2.6-win32.egg has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/lxml-2.2.8-py2.6-win32.egg
Binary file virtualenv/web/res/src/lxml-2.2.8-py2.6-win32.egg has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/lxml-2.2.8.tar.gz
Binary file virtualenv/web/res/src/lxml-2.2.8.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/oauth-1.0.1.tar.gz
Binary file virtualenv/web/res/src/oauth-1.0.1.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/psycopg2-2.0.10.win32-py2.6-pg8.3.7-release.zip
Binary file virtualenv/web/res/src/psycopg2-2.0.10.win32-py2.6-pg8.3.7-release.zip has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/psycopg2-2.3.2.tar.gz
Binary file virtualenv/web/res/src/psycopg2-2.3.2.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/pylinkedin-0.3.tar.bz2
Binary file virtualenv/web/res/src/pylinkedin-0.3.tar.bz2 has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/pylucene-3.0.3-1-src.tar.gz
Binary file virtualenv/web/res/src/pylucene-3.0.3-1-src.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/python-oauth2-1.2.1-modified.tar.gz
Binary file virtualenv/web/res/src/python-oauth2-1.2.1-modified.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/python-openid-2.2.5.tar.gz
Binary file virtualenv/web/res/src/python-openid-2.2.5.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/setuptools_hg-0.2.tar.gz
Binary file virtualenv/web/res/src/setuptools_hg-0.2.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d virtualenv/web/res/src/uswaretech-Django-Socialauth-1e22872-modified.tar.gz
Binary file virtualenv/web/res/src/uswaretech-Django-Socialauth-1e22872-modified.tar.gz has changed
diff -r 2e2989c3072c -r 125fc6df230d web/ldtplatform/settings.py
--- a/web/ldtplatform/settings.py Wed Mar 23 17:34:36 2011 +0100
+++ b/web/ldtplatform/settings.py Fri Apr 08 16:33:01 2011 +0200
@@ -1,5 +1,6 @@
#@PydevCodeAnalysisIgnore
import os.path
+from social_auth_settings import *
# Django settings for project.
DEBUG = True
@@ -89,13 +90,15 @@
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
- "ldt.utils.context_processors.ldt",
- "ldt.utils.context_processors.base",
- "ldt.utils.context_processors.web",
- "ldtplatform.utils.context_processors.version",
+# "ldt.utils.context_processors.ldt",
+# "ldt.utils.context_processors.base",
+# "ldt.utils.context_processors.web",
+ #"ldtplatform.utils.context_processors.version",
+ "ldt.utils.context_processors.ldtcontext",
)
+
ROOT_URLCONF = 'ldtplatform.urls'
TEMPLATE_DIRS = (
@@ -130,7 +133,8 @@
'oauth_provider',
'openid_consumer',
'piston',
- 'socialauth',
+ #'socialauth',
+ 'social_auth',
)
DECOUPAGE_BLACKLIST = (
@@ -141,6 +145,23 @@
"__MACOSX",
)
+AUTHENTICATION_BACKENDS = (
+ 'social_auth.backends.twitter.TwitterBackend',
+ 'social_auth.backends.facebook.FacebookBackend',
+# 'social_auth.backends.google.GoogleOAuthBackend',
+# 'social_auth.backends.google.GoogleOAuth2Backend',
+ 'social_auth.backends.google.GoogleBackend',
+ 'social_auth.backends.yahoo.YahooBackend',
+# 'social_auth.backends.contrib.linkedin.LinkedinBackend',
+# 'social_auth.backends.contrib.LiveJournalBackend',
+# 'social_auth.backends.contrib.orkut.OrkutBackend',
+ 'social_auth.backends.OpenIDBackend',
+ 'django.contrib.auth.backends.ModelBackend',
+)
+SOCIAL_AUTH_IMPORT_BACKENDS = (
+ 'myproy.social_auth_extra_services',
+)
+
ACCOUNT_ACTIVATION_DAYS = 7
LDT_MAX_SEARCH_NUMBER = 50
@@ -152,17 +173,25 @@
OAUTH_PROVIDER_CONSUMER_KEY_SIZE = 256
OAUTH_AUTHORIZE_VIEW = 'oauth_provider.views.fake_authorize_view'
OAUTH_CALLBACK_VIEW = 'oauth_provider.views.fake_callback_view'
-TEST_WEBSERVER_ADDRPORT = "127.0.0.1:8000"
+TEST_WEBSERVER_ADDRPORT = "127.0.0.1:8888"
from config import *
-from socialauthsettings import *
+#from socialauthsettings import *
LOGIN_URL = BASE_URL + 'accounts/login/'
-LOGOUT_URL = BASE_URL + 'accounts/logout/'
+LOGOUT_URL = BASE_URL + 'accounts/disconnect/'
#LOGIN_REDIRECT_URL = BASE_URL + 'ldtplatform'
LOGIN_REDIRECT_URL = BASE_URL + 'ldt/'
LOGOUT_REDIRECT_URL = BASE_URL + 'accounts/login'
-PROFILE_REDIRECT_URL = BASE_URL + 'accounts/create/profile'
+PROFILE_REDIRECT_URL = BASE_URL + 'auth_accounts/create/profile'
+
+LOGIN_ERROR_URL = BASE_URL + 'accounts/login'
+
+FACEBOOK_APP_ID = '163134140411313'
+FACEBOOK_API_SECRET = 'f25e0754a44f0d90d3f4d9ea961ff012'
+
+SOCIAL_AUTH_COMPLETE_URL_NAME = 'complete'
+SOCIAL_AUTH_ASSOCIATE_URL_NAME = 'associate_complete'
GLOBAL_LOG_LEVEL = LOG_LEVEL
GLOBAL_LOG_HANDLERS = [{'handler':logging.FileHandler(LOG_FILE), 'format':"%(asctime)s - %(levelname)s : %(message)s"}]
diff -r 2e2989c3072c -r 125fc6df230d web/ldtplatform/socialauthsettings.py
--- a/web/ldtplatform/socialauthsettings.py Wed Mar 23 17:34:36 2011 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,102 +0,0 @@
-OPENID_REDIRECT_NEXT = '/accounts/openid/done/'
-
-OPENID_SREG = {"required": "nickname, email, fullname",
- "optional":"postcode, country",
- "policy_url": ""}
-
-#example should be something more like the real thing, i think
-OPENID_AX = [{"type_uri": "http://axschema.org/contact/email",
- "count": 1,
- "required": True,
- "alias": "email"},
- {"type_uri": "http://axschema.org/schema/fullname",
- "count":1 ,
- "required": False,
- "alias": "fname"}]
-
-OPENID_AX_PROVIDER_MAP = {'Google': {'email': 'http://axschema.org/contact/email',
- 'firstname': 'http://axschema.org/namePerson/first',
- 'lastname': 'http://axschema.org/namePerson/last'},
- 'Default': {'email': 'http://axschema.org/contact/email',
- 'fullname': 'http://axschema.org/namePerson',
- 'nickname': 'http://axschema.org/namePerson/friendly'}
- }
-
-TWITTER_CONSUMER_KEY = 'AeTl6TefgICQCXwDf4gOA'
-TWITTER_CONSUMER_SECRET = 'v7XqozHzQuzjyU9RkpJXrXZdTYpCMCpzZCjli62dOA'
-
-FACEBOOK_APP_ID = '168524599848954'
-FACEBOOK_API_KEY = '5fbfc9bc5171449685e54a6fd33038a4'
-FACEBOOK_SECRET_KEY = 'ffd9012b6d974180f2578c09bcb38d9f'
-
-LINKEDIN_CONSUMER_KEY = ''
-LINKEDIN_CONSUMER_SECRET = ''
-
-## if any of this information is desired for your app
-FACEBOOK_EXTENDED_PERMISSIONS = (
- #'publish_stream',
- #'create_event',
- #'rsvp_event',
- #'sms',
- #'offline_access',
- #'email',
- #'read_stream',
- #'user_about_me',
- #'user_activites',
- #'user_birthday',
- #'user_education_history',
- #'user_events',
- #'user_groups',
- #'user_hometown',
- #'user_interests',
- #'user_likes',
- #'user_location',
- #'user_notes',
- #'user_online_presence',
- #'user_photo_video_tags',
- #'user_photos',
- #'user_relationships',
- #'user_religion_politics',
- #'user_status',
- #'user_videos',
- #'user_website',
- #'user_work_history',
- #'read_friendlists',
- #'read_requests',
- #'friend_about_me',
- #'friend_activites',
- #'friend_birthday',
- #'friend_education_history',
- #'friend_events',
- #'friend_groups',
- #'friend_hometown',
- #'friend_interests',
- #'friend_likes',
- #'friend_location',
- #'friend_notes',
- #'friend_online_presence',
- #'friend_photo_video_tags',
- #'friend_photos',
- #'friend_relationships',
- #'friend_religion_politics',
- #'friend_status',
- #'friend_videos',
- #'friend_website',
- #'friend_work_history',
-)
-
-
-AUTHENTICATION_BACKENDS = (
- 'django.contrib.auth.backends.ModelBackend',
- 'socialauth.auth_backends.OpenIdBackend',
- 'socialauth.auth_backends.TwitterBackend',
- 'socialauth.auth_backends.FacebookBackend',
- 'socialauth.auth_backends.LinkedInBackend',
-)
-
-TEMPLATE_CONTEXT_PROCESSORS = (
- "socialauth.context_processors.facebook_api_key",
- 'django.core.context_processors.media',
- "django.contrib.auth.context_processors.auth",
- "django.core.context_processors.request",
-)
diff -r 2e2989c3072c -r 125fc6df230d web/ldtplatform/templates/registration/login.html
--- a/web/ldtplatform/templates/registration/login.html Wed Mar 23 17:34:36 2011 +0100
+++ b/web/ldtplatform/templates/registration/login.html Fri Apr 08 16:33:01 2011 +0200
@@ -32,6 +32,8 @@
+ {% trans "Or login with your external account" %}
+ {{social_list|safe}}
diff -r 2e2989c3072c -r 125fc6df230d web/ldtplatform/urls.py
--- a/web/ldtplatform/urls.py Wed Mar 23 17:34:36 2011 +0100
+++ b/web/ldtplatform/urls.py Fri Apr 08 16:33:01 2011 +0200
@@ -1,7 +1,8 @@
-from django.conf.urls.defaults import patterns, include, handler500, handler404
+from django.conf.urls.defaults import patterns, include, handler500, handler404, url
from django.contrib import admin
from ldt.text import VERSION_STR
from django.conf import settings
+from django.contrib.auth import views as auth_views
# Uncomment the next two lines to enable the admin:
@@ -25,9 +26,13 @@
(r'^auth_accounts/', include('registration.backends.simple.urls')),
- (r'^accounts/', include('socialauth.urls')),
+ #(r'^accounts/', include('socialauth.urls')),
+ (r'^accounts/', include('social_auth.urls')),
+ url(r'^accounts/login/$',auth_views.login,{'template_name': 'registration/login.html'},name='auth_login'),
(r'^oauth/', include('oauth_provider.urls')),
- (r'^$', 'socialauth.views.signin_complete'),
+
+ #(r'^$', 'socialauth.views.signin_complete'),
+ #(r'^$', 'social_auth.views.complete'),
(r'^/?$', 'django.views.generic.simple.redirect_to', {'url': 'ldt'}),
#(r'^static/(?P .*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT}),
diff -r 2e2989c3072c -r 125fc6df230d web/ldtplatform/utils/context_processors.py
--- a/web/ldtplatform/utils/context_processors.py Wed Mar 23 17:34:36 2011 +0100
+++ b/web/ldtplatform/utils/context_processors.py Fri Apr 08 16:33:01 2011 +0200
@@ -2,11 +2,11 @@
import ldtplatform #@UnresolvedImport
import ldt
-def version(request):
- return {'VERSION': "platform: %s - web: %s" % (ldt.get_version(),ldtplatform.get_version()) }
+#def version(request):
+# return {'VERSION': "platform: %s - web: %s" % (ldt.get_version(),ldtplatform.get_version()) }
-def base(request):
- return {'BASE_URL': settings.BASE_URL, 'MEDIA_URL': settings.MEDIA_URL }
+#def base(request):
+# return {'BASE_URL': settings.BASE_URL, 'MEDIA_URL': settings.MEDIA_URL, 'TC2': 'TC2' }
-def web(request):
- return {'WEB_URL': settings.WEB_URL }
+#def web(request):
+# return {'WEB_URL': settings.WEB_URL }
diff -r 2e2989c3072c -r 125fc6df230d web/static/ldt/swf/ldt/LignesDeTempsFlex.swf
Binary file web/static/ldt/swf/ldt/LignesDeTempsFlex.swf has changed
diff -r 2e2989c3072c -r 125fc6df230d web/static/socialauth/css/openid.css
--- a/web/static/socialauth/css/openid.css Wed Mar 23 17:34:36 2011 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,44 +0,0 @@
-#openid_form {
- width: 400px;
-}
-#openid_form legend {
- font-weight: bold;
-}
-#openid_choice {
- display: none;
-}
-#openid_input_area {
- clear: both;
- padding: 10px;
-}
-#openid_btns, #openid_btns br {
- clear: both;
-}
-#openid_highlight {
- padding: 3px;
- background-color: #FFFCC9;
- float: left;
-}
-.openid_large_btn {
- width: 100px;
- height: 60px;
- border: 1px solid #DDD;
- margin: 3px;
- float: left;
-}
-.openid_small_btn {
- width: 24px;
- height: 24px;
- border: 1px solid #DDD;
- margin: 3px;
- float: left;
-}
-a.openid_large_btn:focus {
- outline: none;
-}
-a.openid_large_btn:focus{
--moz-outline-style: none;
-}
-.openid_selected {
- border: 4px solid #DDD;
-}
\ No newline at end of file
diff -r 2e2989c3072c -r 125fc6df230d web/static/socialauth/css/socialauth.css
--- a/web/static/socialauth/css/socialauth.css Wed Mar 23 17:34:36 2011 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,73 +0,0 @@
-body {
- margin-left: auto;
- margin-right: auto;
- width: 950px;
-}
-#openid_choice{
- margin-top:10px;
- display: none;
-}
-#openid_input_area{
- padding-top:10px;
- clear: both;
-}
-#openid_username{
- margin-right:5px;
-}
-#openid_btns, #openid_btns br{
- clear: both;
-}
-#openid_btns #facebook{
- height: 60px;
- width: 110px;
- float: left;
- margin-top: 3px;
- display: table-cell;
- text-align: center;
- vertical-align: middle;
- border: 1px solid #ccc;
-}
-#openid_highlight{
- padding: 3px;
- background-color: #FFFCC9;
- float: left;
-}
-.openid_large_btn{
- width: 100px;
- height: 60px;
- border: 1px solid #DDD;
- margin: 3px;
- float: left;
-}
-.openid_small_btn{
- width: 24px;
- height: 24px;
- border: 1px solid #DDD;
- margin: 3px;
- float: left;
-}
-a.openid_large_btn:focus{
- outline: none;
-}
-a.openid_large_btn:focus{
- -moz-outline-style: none;
-}
-.openid_selected{
- border: 4px solid #DDD;
-}
-.linkedin{
- background: #FFF url(../images/linkedin.jpg) no-repeat center center;
-}
-.yahoo{
- background: #FFF url(../images/yahoo.gif) no-repeat center center;
-}
-.google{
- background: #FFF url(../images/google.png) no-repeat center center;
-}
-.openid{
- background: #FFF url(../images/openid.png) no-repeat center center;
-}
-.twitter{
- background: #FFF url(../images/twitter.png) no-repeat center center;
-}
-
diff -r 2e2989c3072c -r 125fc6df230d web/static/socialauth/images/Thumbs.db
Binary file web/static/socialauth/images/Thumbs.db has changed
diff -r 2e2989c3072c -r 125fc6df230d web/static/socialauth/images/aol.gif
Binary file web/static/socialauth/images/aol.gif has changed
diff -r 2e2989c3072c -r 125fc6df230d web/static/socialauth/images/blogger.ico
Binary file web/static/socialauth/images/blogger.ico has changed
diff -r 2e2989c3072c -r 125fc6df230d web/static/socialauth/images/claimid.ico
Binary file web/static/socialauth/images/claimid.ico has changed
diff -r 2e2989c3072c -r 125fc6df230d web/static/socialauth/images/facebook.gif
Binary file web/static/socialauth/images/facebook.gif has changed
diff -r 2e2989c3072c -r 125fc6df230d web/static/socialauth/images/flickr.ico
Binary file web/static/socialauth/images/flickr.ico has changed
diff -r 2e2989c3072c -r 125fc6df230d web/static/socialauth/images/google.gif
Binary file web/static/socialauth/images/google.gif has changed
diff -r 2e2989c3072c -r 125fc6df230d web/static/socialauth/images/linkedin.jpg
Binary file web/static/socialauth/images/linkedin.jpg has changed
diff -r 2e2989c3072c -r 125fc6df230d web/static/socialauth/images/livejournal.ico
Binary file web/static/socialauth/images/livejournal.ico has changed
diff -r 2e2989c3072c -r 125fc6df230d web/static/socialauth/images/myopenid.ico
Binary file web/static/socialauth/images/myopenid.ico has changed
diff -r 2e2989c3072c -r 125fc6df230d web/static/socialauth/images/openid-inputicon.gif
Binary file web/static/socialauth/images/openid-inputicon.gif has changed
diff -r 2e2989c3072c -r 125fc6df230d web/static/socialauth/images/openid.gif
Binary file web/static/socialauth/images/openid.gif has changed
diff -r 2e2989c3072c -r 125fc6df230d web/static/socialauth/images/technorati.ico
Binary file web/static/socialauth/images/technorati.ico has changed
diff -r 2e2989c3072c -r 125fc6df230d web/static/socialauth/images/twitter.png
Binary file web/static/socialauth/images/twitter.png has changed
diff -r 2e2989c3072c -r 125fc6df230d web/static/socialauth/images/verisign.ico
Binary file web/static/socialauth/images/verisign.ico has changed
diff -r 2e2989c3072c -r 125fc6df230d web/static/socialauth/images/vidoop.ico
Binary file web/static/socialauth/images/vidoop.ico has changed
diff -r 2e2989c3072c -r 125fc6df230d web/static/socialauth/images/wordpress.ico
Binary file web/static/socialauth/images/wordpress.ico has changed
diff -r 2e2989c3072c -r 125fc6df230d web/static/socialauth/images/yahoo.gif
Binary file web/static/socialauth/images/yahoo.gif has changed
diff -r 2e2989c3072c -r 125fc6df230d web/static/socialauth/js/jquery-1.2.6.min.js
--- a/web/static/socialauth/js/jquery-1.2.6.min.js Wed Mar 23 17:34:36 2011 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,32 +0,0 @@
-/*
- * jQuery 1.2.6 - New Wave Javascript
- *
- * Copyright (c) 2008 John Resig (jquery.com)
- * Dual licensed under the MIT (MIT-LICENSE.txt)
- * and GPL (GPL-LICENSE.txt) licenses.
- *
- * $Date: 2008-05-24 14:22:17 -0400 (Sat, 24 May 2008) $
- * $Rev: 5685 $
- */
-(function(){var _jQuery=window.jQuery,_$=window.$;var jQuery=window.jQuery=window.$=function(selector,context){return new jQuery.fn.init(selector,context);};var quickExpr=/^[^<]*(<(.|\s)+>)[^>]*$|^#(\w+)$/,isSimple=/^.[^:#\[\.]*$/,undefined;jQuery.fn=jQuery.prototype={init:function(selector,context){selector=selector||document;if(selector.nodeType){this[0]=selector;this.length=1;return this;}if(typeof selector=="string"){var match=quickExpr.exec(selector);if(match&&(match[1]||!context)){if(match[1])selector=jQuery.clean([match[1]],context);else{var elem=document.getElementById(match[3]);if(elem){if(elem.id!=match[3])return jQuery().find(selector);return jQuery(elem);}selector=[];}}else
-return jQuery(context).find(selector);}else if(jQuery.isFunction(selector))return jQuery(document)[jQuery.fn.ready?"ready":"load"](selector);return this.setArray(jQuery.makeArray(selector));},jquery:"1.2.6",size:function(){return this.length;},length:0,get:function(num){return num==undefined?jQuery.makeArray(this):this[num];},pushStack:function(elems){var ret=jQuery(elems);ret.prevObject=this;return ret;},setArray:function(elems){this.length=0;Array.prototype.push.apply(this,elems);return this;},each:function(callback,args){return jQuery.each(this,callback,args);},index:function(elem){var ret=-1;return jQuery.inArray(elem&&elem.jquery?elem[0]:elem,this);},attr:function(name,value,type){var options=name;if(name.constructor==String)if(value===undefined)return this[0]&&jQuery[type||"attr"](this[0],name);else{options={};options[name]=value;}return this.each(function(i){for(name in options)jQuery.attr(type?this.style:this,name,jQuery.prop(this,options[name],type,i,name));});},css:function(key,value){if((key=='width'||key=='height')&&parseFloat(value)<0)value=undefined;return this.attr(key,value,"curCSS");},text:function(text){if(typeof text!="object"&&text!=null)return this.empty().append((this[0]&&this[0].ownerDocument||document).createTextNode(text));var ret="";jQuery.each(text||this,function(){jQuery.each(this.childNodes,function(){if(this.nodeType!=8)ret+=this.nodeType!=1?this.nodeValue:jQuery.fn.text([this]);});});return ret;},wrapAll:function(html){if(this[0])jQuery(html,this[0].ownerDocument).clone().insertBefore(this[0]).map(function(){var elem=this;while(elem.firstChild)elem=elem.firstChild;return elem;}).append(this);return this;},wrapInner:function(html){return this.each(function(){jQuery(this).contents().wrapAll(html);});},wrap:function(html){return this.each(function(){jQuery(this).wrapAll(html);});},append:function(){return this.domManip(arguments,true,false,function(elem){if(this.nodeType==1)this.appendChild(elem);});},prepend:function(){return 
this.domManip(arguments,true,true,function(elem){if(this.nodeType==1)this.insertBefore(elem,this.firstChild);});},before:function(){return this.domManip(arguments,false,false,function(elem){this.parentNode.insertBefore(elem,this);});},after:function(){return this.domManip(arguments,false,true,function(elem){this.parentNode.insertBefore(elem,this.nextSibling);});},end:function(){return this.prevObject||jQuery([]);},find:function(selector){var elems=jQuery.map(this,function(elem){return jQuery.find(selector,elem);});return this.pushStack(/[^+>] [^+>]/.test(selector)||selector.indexOf("..")>-1?jQuery.unique(elems):elems);},clone:function(events){var ret=this.map(function(){if(jQuery.browser.msie&&!jQuery.isXMLDoc(this)){var clone=this.cloneNode(true),container=document.createElement("div");container.appendChild(clone);return jQuery.clean([container.innerHTML])[0];}else
-return this.cloneNode(true);});var clone=ret.find("*").andSelf().each(function(){if(this[expando]!=undefined)this[expando]=null;});if(events===true)this.find("*").andSelf().each(function(i){if(this.nodeType==3)return;var events=jQuery.data(this,"events");for(var type in events)for(var handler in events[type])jQuery.event.add(clone[i],type,events[type][handler],events[type][handler].data);});return ret;},filter:function(selector){return this.pushStack(jQuery.isFunction(selector)&&jQuery.grep(this,function(elem,i){return selector.call(elem,i);})||jQuery.multiFilter(selector,this));},not:function(selector){if(selector.constructor==String)if(isSimple.test(selector))return this.pushStack(jQuery.multiFilter(selector,this,true));else
-selector=jQuery.multiFilter(selector,this);var isArrayLike=selector.length&&selector[selector.length-1]!==undefined&&!selector.nodeType;return this.filter(function(){return isArrayLike?jQuery.inArray(this,selector)<0:this!=selector;});},add:function(selector){return this.pushStack(jQuery.unique(jQuery.merge(this.get(),typeof selector=='string'?jQuery(selector):jQuery.makeArray(selector))));},is:function(selector){return!!selector&&jQuery.multiFilter(selector,this).length>0;},hasClass:function(selector){return this.is("."+selector);},val:function(value){if(value==undefined){if(this.length){var elem=this[0];if(jQuery.nodeName(elem,"select")){var index=elem.selectedIndex,values=[],options=elem.options,one=elem.type=="select-one";if(index<0)return null;for(var i=one?index:0,max=one?index+1:options.length;i=0||jQuery.inArray(this.name,value)>=0);else if(jQuery.nodeName(this,"select")){var values=jQuery.makeArray(value);jQuery("option",this).each(function(){this.selected=(jQuery.inArray(this.value,values)>=0||jQuery.inArray(this.text,values)>=0);});if(!values.length)this.selectedIndex=-1;}else
-this.value=value;});},html:function(value){return value==undefined?(this[0]?this[0].innerHTML:null):this.empty().append(value);},replaceWith:function(value){return this.after(value).remove();},eq:function(i){return this.slice(i,i+1);},slice:function(){return this.pushStack(Array.prototype.slice.apply(this,arguments));},map:function(callback){return this.pushStack(jQuery.map(this,function(elem,i){return callback.call(elem,i,elem);}));},andSelf:function(){return this.add(this.prevObject);},data:function(key,value){var parts=key.split(".");parts[1]=parts[1]?"."+parts[1]:"";if(value===undefined){var data=this.triggerHandler("getData"+parts[1]+"!",[parts[0]]);if(data===undefined&&this.length)data=jQuery.data(this[0],key);return data===undefined&&parts[1]?this.data(parts[0]):data;}else
-return this.trigger("setData"+parts[1]+"!",[parts[0],value]).each(function(){jQuery.data(this,key,value);});},removeData:function(key){return this.each(function(){jQuery.removeData(this,key);});},domManip:function(args,table,reverse,callback){var clone=this.length>1,elems;return this.each(function(){if(!elems){elems=jQuery.clean(args,this.ownerDocument);if(reverse)elems.reverse();}var obj=this;if(table&&jQuery.nodeName(this,"table")&&jQuery.nodeName(elems[0],"tr"))obj=this.getElementsByTagName("tbody")[0]||this.appendChild(this.ownerDocument.createElement("tbody"));var scripts=jQuery([]);jQuery.each(elems,function(){var elem=clone?jQuery(this).clone(true)[0]:this;if(jQuery.nodeName(elem,"script"))scripts=scripts.add(elem);else{if(elem.nodeType==1)scripts=scripts.add(jQuery("script",elem).remove());callback.call(obj,elem);}});scripts.each(evalScript);});}};jQuery.fn.init.prototype=jQuery.fn;function evalScript(i,elem){if(elem.src)jQuery.ajax({url:elem.src,async:false,dataType:"script"});else
-jQuery.globalEval(elem.text||elem.textContent||elem.innerHTML||"");if(elem.parentNode)elem.parentNode.removeChild(elem);}function now(){return+new Date;}jQuery.extend=jQuery.fn.extend=function(){var target=arguments[0]||{},i=1,length=arguments.length,deep=false,options;if(target.constructor==Boolean){deep=target;target=arguments[1]||{};i=2;}if(typeof target!="object"&&typeof target!="function")target={};if(length==i){target=this;--i;}for(;i-1;}},swap:function(elem,options,callback){var old={};for(var name in options){old[name]=elem.style[name];elem.style[name]=options[name];}callback.call(elem);for(var name in options)elem.style[name]=old[name];},css:function(elem,name,force){if(name=="width"||name=="height"){var val,props={position:"absolute",visibility:"hidden",display:"block"},which=name=="width"?["Left","Right"]:["Top","Bottom"];function getWH(){val=name=="width"?elem.offsetWidth:elem.offsetHeight;var padding=0,border=0;jQuery.each(which,function(){padding+=parseFloat(jQuery.curCSS(elem,"padding"+this,true))||0;border+=parseFloat(jQuery.curCSS(elem,"border"+this+"Width",true))||0;});val-=Math.round(padding+border);}if(jQuery(elem).is(":visible"))getWH();else
-jQuery.swap(elem,props,getWH);return Math.max(0,val);}return jQuery.curCSS(elem,name,force);},curCSS:function(elem,name,force){var ret,style=elem.style;function color(elem){if(!jQuery.browser.safari)return false;var ret=defaultView.getComputedStyle(elem,null);return!ret||ret.getPropertyValue("color")=="";}if(name=="opacity"&&jQuery.browser.msie){ret=jQuery.attr(style,"opacity");return ret==""?"1":ret;}if(jQuery.browser.opera&&name=="display"){var save=style.outline;style.outline="0 solid black";style.outline=save;}if(name.match(/float/i))name=styleFloat;if(!force&&style&&style[name])ret=style[name];else if(defaultView.getComputedStyle){if(name.match(/float/i))name="float";name=name.replace(/([A-Z])/g,"-$1").toLowerCase();var computedStyle=defaultView.getComputedStyle(elem,null);if(computedStyle&&!color(elem))ret=computedStyle.getPropertyValue(name);else{var swap=[],stack=[],a=elem,i=0;for(;a&&color(a);a=a.parentNode)stack.unshift(a);for(;i]*?)\/>/g,function(all,front,tag){return tag.match(/^(abbr|br|col|img|input|link|meta|param|hr|area|embed)$/i)?all:front+">"+tag+">";});var tags=jQuery.trim(elem).toLowerCase(),div=context.createElement("div");var wrap=!tags.indexOf("",""]||!tags.indexOf("",""]||tags.match(/^<(thead|tbody|tfoot|colg|cap)/)&&[1,""]||!tags.indexOf(" ",""]||(!tags.indexOf(" | "," "]||!tags.indexOf("",""]||jQuery.browser.msie&&[1,"div"," "]||[0,"",""];div.innerHTML=wrap[1]+elem+wrap[2];while(wrap[0]--)div=div.lastChild;if(jQuery.browser.msie){var tbody=!tags.indexOf(""&&tags.indexOf("=0;--j)if(jQuery.nodeName(tbody[j],"tbody")&&!tbody[j].childNodes.length)tbody[j].parentNode.removeChild(tbody[j]);if(/^\s/.test(elem))div.insertBefore(context.createTextNode(elem.match(/^\s*/)[0]),div.firstChild);}elem=jQuery.makeArray(div.childNodes);}if(elem.length===0&&(!jQuery.nodeName(elem,"form")&&!jQuery.nodeName(elem,"select")))return;if(elem[0]==undefined||jQuery.nodeName(elem,"form")||elem.options)ret.push(elem);else
-ret=jQuery.merge(ret,elem);});return ret;},attr:function(elem,name,value){if(!elem||elem.nodeType==3||elem.nodeType==8)return undefined;var notxml=!jQuery.isXMLDoc(elem),set=value!==undefined,msie=jQuery.browser.msie;name=notxml&&jQuery.props[name]||name;if(elem.tagName){var special=/href|src|style/.test(name);if(name=="selected"&&jQuery.browser.safari)elem.parentNode.selectedIndex;if(name in elem&¬xml&&!special){if(set){if(name=="type"&&jQuery.nodeName(elem,"input")&&elem.parentNode)throw"type property can't be changed";elem[name]=value;}if(jQuery.nodeName(elem,"form")&&elem.getAttributeNode(name))return elem.getAttributeNode(name).nodeValue;return elem[name];}if(msie&¬xml&&name=="style")return jQuery.attr(elem.style,"cssText",value);if(set)elem.setAttribute(name,""+value);var attr=msie&¬xml&&special?elem.getAttribute(name,2):elem.getAttribute(name);return attr===null?undefined:attr;}if(msie&&name=="opacity"){if(set){elem.zoom=1;elem.filter=(elem.filter||"").replace(/alpha\([^)]*\)/,"")+(parseInt(value)+''=="NaN"?"":"alpha(opacity="+value*100+")");}return elem.filter&&elem.filter.indexOf("opacity=")>=0?(parseFloat(elem.filter.match(/opacity=([^)]*)/)[1])/100)+'':"";}name=name.replace(/-([a-z])/ig,function(all,letter){return letter.toUpperCase();});if(set)elem[name]=value;return elem[name];},trim:function(text){return(text||"").replace(/^\s+|\s+$/g,"");},makeArray:function(array){var ret=[];if(array!=null){var i=array.length;if(i==null||array.split||array.setInterval||array.call)ret[0]=array;else
-while(i)ret[--i]=array[i];}return ret;},inArray:function(elem,array){for(var i=0,length=array.length;i*",this).remove();while(this.firstChild)this.removeChild(this.firstChild);}},function(name,fn){jQuery.fn[name]=function(){return this.each(fn,arguments);};});jQuery.each(["Height","Width"],function(i,name){var type=name.toLowerCase();jQuery.fn[type]=function(size){return this[0]==window?jQuery.browser.opera&&document.body["client"+name]||jQuery.browser.safari&&window["inner"+name]||document.compatMode=="CSS1Compat"&&document.documentElement["client"+name]||document.body["client"+name]:this[0]==document?Math.max(Math.max(document.body["scroll"+name],document.documentElement["scroll"+name]),Math.max(document.body["offset"+name],document.documentElement["offset"+name])):size==undefined?(this.length?jQuery.css(this[0],type):null):this.css(type,size.constructor==String?size:size+"px");};});function num(elem,prop){return elem[0]&&parseInt(jQuery.curCSS(elem[0],prop,true),10)||0;}var chars=jQuery.browser.safari&&parseInt(jQuery.browser.version)<417?"(?:[\\w*_-]|\\\\.)":"(?:[\\w\u0128-\uFFFF*_-]|\\\\.)",quickChild=new RegExp("^>\\s*("+chars+"+)"),quickID=new RegExp("^("+chars+"+)(#)("+chars+"+)"),quickClass=new RegExp("^([#.]?)("+chars+"*)");jQuery.extend({expr:{"":function(a,i,m){return m[2]=="*"||jQuery.nodeName(a,m[2]);},"#":function(a,i,m){return a.getAttribute("id")==m[2];},":":{lt:function(a,i,m){return im[3]-0;},nth:function(a,i,m){return m[3]-0==i;},eq:function(a,i,m){return m[3]-0==i;},first:function(a,i){return i==0;},last:function(a,i,m,r){return i==r.length-1;},even:function(a,i){return i%2==0;},odd:function(a,i){return i%2;},"first-child":function(a){return a.parentNode.getElementsByTagName("*")[0]==a;},"last-child":function(a){return jQuery.nth(a.parentNode.lastChild,1,"previousSibling")==a;},"only-child":function(a){return!jQuery.nth(a.parentNode.lastChild,2,"previousSibling");},parent:function(a){return 
a.firstChild;},empty:function(a){return!a.firstChild;},contains:function(a,i,m){return(a.textContent||a.innerText||jQuery(a).text()||"").indexOf(m[3])>=0;},visible:function(a){return"hidden"!=a.type&&jQuery.css(a,"display")!="none"&&jQuery.css(a,"visibility")!="hidden";},hidden:function(a){return"hidden"==a.type||jQuery.css(a,"display")=="none"||jQuery.css(a,"visibility")=="hidden";},enabled:function(a){return!a.disabled;},disabled:function(a){return a.disabled;},checked:function(a){return a.checked;},selected:function(a){return a.selected||jQuery.attr(a,"selected");},text:function(a){return"text"==a.type;},radio:function(a){return"radio"==a.type;},checkbox:function(a){return"checkbox"==a.type;},file:function(a){return"file"==a.type;},password:function(a){return"password"==a.type;},submit:function(a){return"submit"==a.type;},image:function(a){return"image"==a.type;},reset:function(a){return"reset"==a.type;},button:function(a){return"button"==a.type||jQuery.nodeName(a,"button");},input:function(a){return/input|select|textarea|button/i.test(a.nodeName);},has:function(a,i,m){return jQuery.find(m[3],a).length;},header:function(a){return/h\d/i.test(a.nodeName);},animated:function(a){return jQuery.grep(jQuery.timers,function(fn){return a==fn.elem;}).length;}}},parse:[/^(\[) *@?([\w-]+) *([!*$^~=]*) *('?"?)(.*?)\4 *\]/,/^(:)([\w-]+)\("?'?(.*?(\(.*?\))?[^(]*?)"?'?\)/,new RegExp("^([:.#]*)("+chars+"+)")],multiFilter:function(expr,elems,not){var old,cur=[];while(expr&&expr!=old){old=expr;var f=jQuery.filter(expr,elems,not);expr=f.t.replace(/^\s*,\s*/,"");cur=not?elems=f.r:jQuery.merge(cur,f.r);}return cur;},find:function(t,context){if(typeof t!="string")return[t];if(context&&context.nodeType!=1&&context.nodeType!=9)return[];context=context||document;var ret=[context],done=[],last,nodeName;while(t&&last!=t){var r=[];last=t;t=jQuery.trim(t);var foundToken=false,re=quickChild,m=re.exec(t);if(m){nodeName=m[1].toUpperCase();for(var i=0;ret[i];i++)for(var 
c=ret[i].firstChild;c;c=c.nextSibling)if(c.nodeType==1&&(nodeName=="*"||c.nodeName.toUpperCase()==nodeName))r.push(c);ret=r;t=t.replace(re,"");if(t.indexOf(" ")==0)continue;foundToken=true;}else{re=/^([>+~])\s*(\w*)/i;if((m=re.exec(t))!=null){r=[];var merge={};nodeName=m[2].toUpperCase();m=m[1];for(var j=0,rl=ret.length;j=0;if(!not&&pass||not&&!pass)tmp.push(r[i]);}return tmp;},filter:function(t,r,not){var last;while(t&&t!=last){last=t;var p=jQuery.parse,m;for(var i=0;p[i];i++){m=p[i].exec(t);if(m){t=t.substring(m[0].length);m[2]=m[2].replace(/\\/g,"");break;}}if(!m)break;if(m[1]==":"&&m[2]=="not")r=isSimple.test(m[3])?jQuery.filter(m[3],r,true).r:jQuery(r).not(m[3]);else if(m[1]==".")r=jQuery.classFilter(r,m[2],not);else if(m[1]=="["){var tmp=[],type=m[3];for(var i=0,rl=r.length;i=0)^not)tmp.push(a);}r=tmp;}else if(m[1]==":"&&m[2]=="nth-child"){var merge={},tmp=[],test=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(m[3]=="even"&&"2n"||m[3]=="odd"&&"2n+1"||!/\D/.test(m[3])&&"0n+"+m[3]||m[3]),first=(test[1]+(test[2]||1))-0,last=test[3]-0;for(var i=0,rl=r.length;i=0)add=true;if(add^not)tmp.push(node);}r=tmp;}else{var fn=jQuery.expr[m[1]];if(typeof fn=="object")fn=fn[m[2]];if(typeof fn=="string")fn=eval("false||function(a,i){return "+fn+";}");r=jQuery.grep(r,function(elem,i){return fn(elem,i,m,r);},not);}}return{r:r,t:t};},dir:function(elem,dir){var matched=[],cur=elem[dir];while(cur&&cur!=document){if(cur.nodeType==1)matched.push(cur);cur=cur[dir];}return matched;},nth:function(cur,result,dir,elem){result=result||1;var num=0;for(;cur;cur=cur[dir])if(cur.nodeType==1&&++num==result)break;return cur;},sibling:function(n,elem){var r=[];for(;n;n=n.nextSibling){if(n.nodeType==1&&n!=elem)r.push(n);}return r;}});jQuery.event={add:function(elem,types,handler,data){if(elem.nodeType==3||elem.nodeType==8)return;if(jQuery.browser.msie&&elem.setInterval)elem=window;if(!handler.guid)handler.guid=this.guid++;if(data!=undefined){var fn=handler;handler=this.proxy(fn,function(){return 
fn.apply(this,arguments);});handler.data=data;}var events=jQuery.data(elem,"events")||jQuery.data(elem,"events",{}),handle=jQuery.data(elem,"handle")||jQuery.data(elem,"handle",function(){if(typeof jQuery!="undefined"&&!jQuery.event.triggered)return jQuery.event.handle.apply(arguments.callee.elem,arguments);});handle.elem=elem;jQuery.each(types.split(/\s+/),function(index,type){var parts=type.split(".");type=parts[0];handler.type=parts[1];var handlers=events[type];if(!handlers){handlers=events[type]={};if(!jQuery.event.special[type]||jQuery.event.special[type].setup.call(elem)===false){if(elem.addEventListener)elem.addEventListener(type,handle,false);else if(elem.attachEvent)elem.attachEvent("on"+type,handle);}}handlers[handler.guid]=handler;jQuery.event.global[type]=true;});elem=null;},guid:1,global:{},remove:function(elem,types,handler){if(elem.nodeType==3||elem.nodeType==8)return;var events=jQuery.data(elem,"events"),ret,index;if(events){if(types==undefined||(typeof types=="string"&&types.charAt(0)=="."))for(var type in events)this.remove(elem,type+(types||""));else{if(types.type){handler=types.handler;types=types.type;}jQuery.each(types.split(/\s+/),function(index,type){var parts=type.split(".");type=parts[0];if(events[type]){if(handler)delete events[type][handler.guid];else
-for(handler in events[type])if(!parts[1]||events[type][handler].type==parts[1])delete events[type][handler];for(ret in events[type])break;if(!ret){if(!jQuery.event.special[type]||jQuery.event.special[type].teardown.call(elem)===false){if(elem.removeEventListener)elem.removeEventListener(type,jQuery.data(elem,"handle"),false);else if(elem.detachEvent)elem.detachEvent("on"+type,jQuery.data(elem,"handle"));}ret=null;delete events[type];}}});}for(ret in events)break;if(!ret){var handle=jQuery.data(elem,"handle");if(handle)handle.elem=null;jQuery.removeData(elem,"events");jQuery.removeData(elem,"handle");}}},trigger:function(type,data,elem,donative,extra){data=jQuery.makeArray(data);if(type.indexOf("!")>=0){type=type.slice(0,-1);var exclusive=true;}if(!elem){if(this.global[type])jQuery("*").add([window,document]).trigger(type,data);}else{if(elem.nodeType==3||elem.nodeType==8)return undefined;var val,ret,fn=jQuery.isFunction(elem[type]||null),event=!data[0]||!data[0].preventDefault;if(event){data.unshift({type:type,target:elem,preventDefault:function(){},stopPropagation:function(){},timeStamp:now()});data[0][expando]=true;}data[0].type=type;if(exclusive)data[0].exclusive=true;var handle=jQuery.data(elem,"handle");if(handle)val=handle.apply(elem,data);if((!fn||(jQuery.nodeName(elem,'a')&&type=="click"))&&elem["on"+type]&&elem["on"+type].apply(elem,data)===false)val=false;if(event)data.shift();if(extra&&jQuery.isFunction(extra)){ret=extra.apply(elem,val==null?data:data.concat(val));if(ret!==undefined)val=ret;}if(fn&&donative!==false&&val!==false&&!(jQuery.nodeName(elem,'a')&&type=="click")){this.triggered=true;try{elem[type]();}catch(e){}}this.triggered=false;}return val;},handle:function(event){var val,ret,namespace,all,handlers;event=arguments[0]=jQuery.event.fix(event||window.event);namespace=event.type.split(".");event.type=namespace[0];namespace=namespace[1];all=!namespace&&!event.exclusive;handlers=(jQuery.data(this,"events")||{})[event.type];for(var j in 
handlers){var handler=handlers[j];if(all||handler.type==namespace){event.handler=handler;event.data=handler.data;ret=handler.apply(this,arguments);if(val!==false)val=ret;if(ret===false){event.preventDefault();event.stopPropagation();}}}return val;},fix:function(event){if(event[expando]==true)return event;var originalEvent=event;event={originalEvent:originalEvent};var props="altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode metaKey newValue originalTarget pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target timeStamp toElement type view wheelDelta which".split(" ");for(var i=props.length;i;i--)event[props[i]]=originalEvent[props[i]];event[expando]=true;event.preventDefault=function(){if(originalEvent.preventDefault)originalEvent.preventDefault();originalEvent.returnValue=false;};event.stopPropagation=function(){if(originalEvent.stopPropagation)originalEvent.stopPropagation();originalEvent.cancelBubble=true;};event.timeStamp=event.timeStamp||now();if(!event.target)event.target=event.srcElement||document;if(event.target.nodeType==3)event.target=event.target.parentNode;if(!event.relatedTarget&&event.fromElement)event.relatedTarget=event.fromElement==event.target?event.toElement:event.fromElement;if(event.pageX==null&&event.clientX!=null){var doc=document.documentElement,body=document.body;event.pageX=event.clientX+(doc&&doc.scrollLeft||body&&body.scrollLeft||0)-(doc.clientLeft||0);event.pageY=event.clientY+(doc&&doc.scrollTop||body&&body.scrollTop||0)-(doc.clientTop||0);}if(!event.which&&((event.charCode||event.charCode===0)?event.charCode:event.keyCode))event.which=event.charCode||event.keyCode;if(!event.metaKey&&event.ctrlKey)event.metaKey=event.ctrlKey;if(!event.which&&event.button)event.which=(event.button&1?1:(event.button&2?3:(event.button&4?2:0)));return 
event;},proxy:function(fn,proxy){proxy.guid=fn.guid=fn.guid||proxy.guid||this.guid++;return proxy;},special:{ready:{setup:function(){bindReady();return;},teardown:function(){return;}},mouseenter:{setup:function(){if(jQuery.browser.msie)return false;jQuery(this).bind("mouseover",jQuery.event.special.mouseenter.handler);return true;},teardown:function(){if(jQuery.browser.msie)return false;jQuery(this).unbind("mouseover",jQuery.event.special.mouseenter.handler);return true;},handler:function(event){if(withinElement(event,this))return true;event.type="mouseenter";return jQuery.event.handle.apply(this,arguments);}},mouseleave:{setup:function(){if(jQuery.browser.msie)return false;jQuery(this).bind("mouseout",jQuery.event.special.mouseleave.handler);return true;},teardown:function(){if(jQuery.browser.msie)return false;jQuery(this).unbind("mouseout",jQuery.event.special.mouseleave.handler);return true;},handler:function(event){if(withinElement(event,this))return true;event.type="mouseleave";return jQuery.event.handle.apply(this,arguments);}}}};jQuery.fn.extend({bind:function(type,data,fn){return type=="unload"?this.one(type,data,fn):this.each(function(){jQuery.event.add(this,type,fn||data,fn&&data);});},one:function(type,data,fn){var one=jQuery.event.proxy(fn||data,function(event){jQuery(this).unbind(event,one);return(fn||data).apply(this,arguments);});return this.each(function(){jQuery.event.add(this,type,one,fn&&data);});},unbind:function(type,fn){return this.each(function(){jQuery.event.remove(this,type,fn);});},trigger:function(type,data,fn){return this.each(function(){jQuery.event.trigger(type,data,this,true,fn);});},triggerHandler:function(type,data,fn){return this[0]&&jQuery.event.trigger(type,data,this[0],false,fn);},toggle:function(fn){var args=arguments,i=1;while(i=0){var selector=url.slice(off,url.length);url=url.slice(0,off);}callback=callback||function(){};var 
type="GET";if(params)if(jQuery.isFunction(params)){callback=params;params=null;}else{params=jQuery.param(params);type="POST";}var self=this;jQuery.ajax({url:url,type:type,dataType:"html",data:params,complete:function(res,status){if(status=="success"||status=="notmodified")self.html(selector?jQuery("").append(res.responseText.replace(/ |