--- a/.hgignore Tue Mar 22 12:35:29 2011 +0100
+++ b/.hgignore Tue Mar 29 13:57:28 2011 +0200
@@ -32,4 +32,10 @@
syntax: regexp
^src/ldt/build$
syntax: regexp
-^src/ldt/ldt\.egg-info$
\ No newline at end of file
+^src/ldt/ldt\.egg-info$
+syntax: regexp
+^src/ldt/distribute-0\.6\.14\.tar\.gz$
+syntax: regexp
+^src/ldt/distribute-0\.6\.14-py2\.6\.egg$
+syntax: regexp
+^src/ldt/MANIFEST\.in$
\ No newline at end of file
--- a/.pydevproject Tue Mar 22 12:35:29 2011 +0100
+++ b/.pydevproject Tue Mar 29 13:57:28 2011 +0200
@@ -1,11 +1,11 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<?eclipse-pydev version="1.0"?>
-
-<pydev_project>
-<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">python_platform</pydev_property>
-<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.6</pydev_property>
-<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
-<path>/platform/src/ldt</path>
-<path>/platform/web</path>
-</pydev_pathproperty>
-</pydev_project>
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<?eclipse-pydev version="1.0"?>
+
+<pydev_project>
+<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">python_live</pydev_property>
+<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.6</pydev_property>
+<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
+<path>/platform/src/ldt</path>
+<path>/platform/web</path>
+</pydev_pathproperty>
+</pydev_project>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/ldt/distribute_setup.py Tue Mar 29 13:57:28 2011 +0200
@@ -0,0 +1,485 @@
+#!python
+"""Bootstrap distribute installation
+
+If you want to use setuptools in your package's setup.py, just include this
+file in the same directory with it, and add this to the top of your setup.py::
+
+ from distribute_setup import use_setuptools
+ use_setuptools()
+
+If you want to require a specific version of setuptools, set a download
+mirror, or use an alternate download directory, you can do so by supplying
+the appropriate options to ``use_setuptools()``.
+
+This file can also be run as a script to install or upgrade setuptools.
+"""
+import os
+import sys
+import time
+import fnmatch
+import tempfile
+import tarfile
+from distutils import log
+
+try:
+ from site import USER_SITE
+except ImportError:
+ USER_SITE = None
+
+try:
+ import subprocess
+
+ def _python_cmd(*args):
+ args = (sys.executable,) + args
+ return subprocess.call(args) == 0
+
+except ImportError:
+ # will be used for python 2.3
+ def _python_cmd(*args):
+ args = (sys.executable,) + args
+ # quoting arguments if windows
+ if sys.platform == 'win32':
+ def quote(arg):
+ if ' ' in arg:
+ return '"%s"' % arg
+ return arg
+ args = [quote(arg) for arg in args]
+ return os.spawnl(os.P_WAIT, sys.executable, *args) == 0
+
+DEFAULT_VERSION = "0.6.14"
+DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/"
+SETUPTOOLS_FAKED_VERSION = "0.6c11"
+
+SETUPTOOLS_PKG_INFO = """\
+Metadata-Version: 1.0
+Name: setuptools
+Version: %s
+Summary: xxxx
+Home-page: xxx
+Author: xxx
+Author-email: xxx
+License: xxx
+Description: xxx
+""" % SETUPTOOLS_FAKED_VERSION
+
+
+def _install(tarball):
+ # extracting the tarball
+ tmpdir = tempfile.mkdtemp()
+ log.warn('Extracting in %s', tmpdir)
+ old_wd = os.getcwd()
+ try:
+ os.chdir(tmpdir)
+ tar = tarfile.open(tarball)
+ _extractall(tar)
+ tar.close()
+
+ # going in the directory
+ subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
+ os.chdir(subdir)
+ log.warn('Now working in %s', subdir)
+
+ # installing
+ log.warn('Installing Distribute')
+ if not _python_cmd('setup.py', 'install'):
+ log.warn('Something went wrong during the installation.')
+ log.warn('See the error message above.')
+ finally:
+ os.chdir(old_wd)
+
+
+def _build_egg(egg, tarball, to_dir):
+ # extracting the tarball
+ tmpdir = tempfile.mkdtemp()
+ log.warn('Extracting in %s', tmpdir)
+ old_wd = os.getcwd()
+ try:
+ os.chdir(tmpdir)
+ tar = tarfile.open(tarball)
+ _extractall(tar)
+ tar.close()
+
+ # going in the directory
+ subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
+ os.chdir(subdir)
+ log.warn('Now working in %s', subdir)
+
+ # building an egg
+ log.warn('Building a Distribute egg in %s', to_dir)
+ _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
+
+ finally:
+ os.chdir(old_wd)
+ # returning the result
+ log.warn(egg)
+ if not os.path.exists(egg):
+ raise IOError('Could not build the egg.')
+
+
+def _do_download(version, download_base, to_dir, download_delay):
+ egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg'
+ % (version, sys.version_info[0], sys.version_info[1]))
+ if not os.path.exists(egg):
+ tarball = download_setuptools(version, download_base,
+ to_dir, download_delay)
+ _build_egg(egg, tarball, to_dir)
+ sys.path.insert(0, egg)
+ import setuptools
+ setuptools.bootstrap_install_from = egg
+
+
+def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
+ to_dir=os.curdir, download_delay=15, no_fake=True):
+ # making sure we use the absolute path
+ to_dir = os.path.abspath(to_dir)
+ was_imported = 'pkg_resources' in sys.modules or \
+ 'setuptools' in sys.modules
+ try:
+ try:
+ import pkg_resources
+ if not hasattr(pkg_resources, '_distribute'):
+ if not no_fake:
+ _fake_setuptools()
+ raise ImportError
+ except ImportError:
+ return _do_download(version, download_base, to_dir, download_delay)
+ try:
+ pkg_resources.require("distribute>="+version)
+ return
+ except pkg_resources.VersionConflict:
+ e = sys.exc_info()[1]
+ if was_imported:
+ sys.stderr.write(
+ "The required version of distribute (>=%s) is not available,\n"
+ "and can't be installed while this script is running. Please\n"
+ "install a more recent version first, using\n"
+ "'easy_install -U distribute'."
+ "\n\n(Currently using %r)\n" % (version, e.args[0]))
+ sys.exit(2)
+ else:
+ del pkg_resources, sys.modules['pkg_resources'] # reload ok
+ return _do_download(version, download_base, to_dir,
+ download_delay)
+ except pkg_resources.DistributionNotFound:
+ return _do_download(version, download_base, to_dir,
+ download_delay)
+ finally:
+ if not no_fake:
+ _create_fake_setuptools_pkg_info(to_dir)
+
+def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
+ to_dir=os.curdir, delay=15):
+ """Download distribute from a specified location and return its filename
+
+ `version` should be a valid distribute version number that is available
+ as an egg for download under the `download_base` URL (which should end
+ with a '/'). `to_dir` is the directory where the egg will be downloaded.
+ `delay` is the number of seconds to pause before an actual download
+ attempt.
+ """
+ # making sure we use the absolute path
+ to_dir = os.path.abspath(to_dir)
+ try:
+ from urllib.request import urlopen
+ except ImportError:
+ from urllib2 import urlopen
+ tgz_name = "distribute-%s.tar.gz" % version
+ url = download_base + tgz_name
+ saveto = os.path.join(to_dir, tgz_name)
+ src = dst = None
+ if not os.path.exists(saveto): # Avoid repeated downloads
+ try:
+ log.warn("Downloading %s", url)
+ src = urlopen(url)
+ # Read/write all in one block, so we don't create a corrupt file
+ # if the download is interrupted.
+ data = src.read()
+ dst = open(saveto, "wb")
+ dst.write(data)
+ finally:
+ if src:
+ src.close()
+ if dst:
+ dst.close()
+ return os.path.realpath(saveto)
+
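+# Decorator: temporarily disables setuptools' DirectorySandbox violation check
+# so the fake setuptools metadata files can be written even when this code runs
+# inside an easy_install sandbox; the original check is restored afterwards.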
+def _no_sandbox(function):
+ def __no_sandbox(*args, **kw):
+ try:
+ from setuptools.sandbox import DirectorySandbox
+ if not hasattr(DirectorySandbox, '_old'):
+ def violation(*args):
+ pass
+ DirectorySandbox._old = DirectorySandbox._violation
+ DirectorySandbox._violation = violation
+ patched = True
+ else:
+ patched = False
+ except ImportError:
+ patched = False
+
+ try:
+ return function(*args, **kw)
+ finally:
+ if patched:
+ DirectorySandbox._violation = DirectorySandbox._old
+ del DirectorySandbox._old
+
+ return __no_sandbox
+
+def _patch_file(path, content):
+ """Will backup the file then patch it"""
+ existing_content = open(path).read()
+ if existing_content == content:
+ # already patched
+ log.warn('Already patched.')
+ return False
+ log.warn('Patching...')
+ _rename_path(path)
+ f = open(path, 'w')
+ try:
+ f.write(content)
+ finally:
+ f.close()
+ return True
+
+_patch_file = _no_sandbox(_patch_file)
+
+def _same_content(path, content):
+ return open(path).read() == content
+
+def _rename_path(path):
+ new_name = path + '.OLD.%s' % time.time()
+ log.warn('Renaming %s into %s', path, new_name)
+ os.rename(path, new_name)
+ return new_name
+
+def _remove_flat_installation(placeholder):
+ if not os.path.isdir(placeholder):
+        log.warn('Unknown installation at %s', placeholder)
+ return False
+ found = False
+ for file in os.listdir(placeholder):
+ if fnmatch.fnmatch(file, 'setuptools*.egg-info'):
+ found = True
+ break
+ if not found:
+ log.warn('Could not locate setuptools*.egg-info')
+ return
+
+ log.warn('Removing elements out of the way...')
+ pkg_info = os.path.join(placeholder, file)
+ if os.path.isdir(pkg_info):
+ patched = _patch_egg_dir(pkg_info)
+ else:
+ patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO)
+
+ if not patched:
+ log.warn('%s already patched.', pkg_info)
+ return False
+ # now let's move the files out of the way
+ for element in ('setuptools', 'pkg_resources.py', 'site.py'):
+ element = os.path.join(placeholder, element)
+ if os.path.exists(element):
+ _rename_path(element)
+ else:
+ log.warn('Could not find the %s element of the '
+ 'Setuptools distribution', element)
+ return True
+
+_remove_flat_installation = _no_sandbox(_remove_flat_installation)
+
+def _after_install(dist):
+ log.warn('After install bootstrap.')
+ placeholder = dist.get_command_obj('install').install_purelib
+ _create_fake_setuptools_pkg_info(placeholder)
+
+def _create_fake_setuptools_pkg_info(placeholder):
+ if not placeholder or not os.path.exists(placeholder):
+ log.warn('Could not find the install location')
+ return
+ pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1])
+ setuptools_file = 'setuptools-%s-py%s.egg-info' % \
+ (SETUPTOOLS_FAKED_VERSION, pyver)
+ pkg_info = os.path.join(placeholder, setuptools_file)
+ if os.path.exists(pkg_info):
+ log.warn('%s already exists', pkg_info)
+ return
+
+ log.warn('Creating %s', pkg_info)
+ f = open(pkg_info, 'w')
+ try:
+ f.write(SETUPTOOLS_PKG_INFO)
+ finally:
+ f.close()
+
+ pth_file = os.path.join(placeholder, 'setuptools.pth')
+ log.warn('Creating %s', pth_file)
+ f = open(pth_file, 'w')
+ try:
+ f.write(os.path.join(os.curdir, setuptools_file))
+ finally:
+ f.close()
+
+_create_fake_setuptools_pkg_info = _no_sandbox(_create_fake_setuptools_pkg_info)
+
+def _patch_egg_dir(path):
+ # let's check if it's already patched
+ pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
+ if os.path.exists(pkg_info):
+ if _same_content(pkg_info, SETUPTOOLS_PKG_INFO):
+ log.warn('%s already patched.', pkg_info)
+ return False
+ _rename_path(path)
+ os.mkdir(path)
+ os.mkdir(os.path.join(path, 'EGG-INFO'))
+ pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
+ f = open(pkg_info, 'w')
+ try:
+ f.write(SETUPTOOLS_PKG_INFO)
+ finally:
+ f.close()
+ return True
+
+_patch_egg_dir = _no_sandbox(_patch_egg_dir)
+
+def _before_install():
+ log.warn('Before install bootstrap.')
+ _fake_setuptools()
+
+
+def _under_prefix(location):
+ if 'install' not in sys.argv:
+ return True
+ args = sys.argv[sys.argv.index('install')+1:]
+ for index, arg in enumerate(args):
+ for option in ('--root', '--prefix'):
+ if arg.startswith('%s=' % option):
+                top_dir = arg.split('%s=' % option)[-1]
+ return location.startswith(top_dir)
+ elif arg == option:
+ if len(args) > index:
+ top_dir = args[index+1]
+ return location.startswith(top_dir)
+ if arg == '--user' and USER_SITE is not None:
+ return location.startswith(USER_SITE)
+ return True
+
+
+def _fake_setuptools():
+ log.warn('Scanning installed packages')
+ try:
+ import pkg_resources
+ except ImportError:
+ # we're cool
+ log.warn('Setuptools or Distribute does not seem to be installed.')
+ return
+ ws = pkg_resources.working_set
+ try:
+ setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools',
+ replacement=False))
+ except TypeError:
+ # old distribute API
+ setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools'))
+
+ if setuptools_dist is None:
+ log.warn('No setuptools distribution found')
+ return
+ # detecting if it was already faked
+ setuptools_location = setuptools_dist.location
+ log.warn('Setuptools installation detected at %s', setuptools_location)
+
+    # if --root or --prefix was provided, and if
+    # setuptools is not located in them, we don't patch it
+ if not _under_prefix(setuptools_location):
+ log.warn('Not patching, --root or --prefix is installing Distribute'
+ ' in another location')
+ return
+
+    # let's see if it's an egg
+ if not setuptools_location.endswith('.egg'):
+ log.warn('Non-egg installation')
+ res = _remove_flat_installation(setuptools_location)
+ if not res:
+ return
+ else:
+ log.warn('Egg installation')
+ pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO')
+ if (os.path.exists(pkg_info) and
+ _same_content(pkg_info, SETUPTOOLS_PKG_INFO)):
+ log.warn('Already patched.')
+ return
+ log.warn('Patching...')
+        # let's create a fake egg replacing the setuptools one
+ res = _patch_egg_dir(setuptools_location)
+ if not res:
+ return
+    log.warn('Patching done.')
+ _relaunch()
+
+
+def _relaunch():
+ log.warn('Relaunching...')
+ # we have to relaunch the process
+ # pip marker to avoid a relaunch bug
+ if sys.argv[:3] == ['-c', 'install', '--single-version-externally-managed']:
+ sys.argv[0] = 'setup.py'
+ args = [sys.executable] + sys.argv
+ sys.exit(subprocess.call(args))
+
+
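+# Local copy of TarFile.extractall, so the bootstrap also works on Python
+# versions whose tarfile module does not provide it.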
+def _extractall(self, path=".", members=None):
+ """Extract all members from the archive to the current working
+ directory and set owner, modification time and permissions on
+ directories afterwards. `path' specifies a different directory
+ to extract to. `members' is optional and must be a subset of the
+ list returned by getmembers().
+ """
+ import copy
+ import operator
+ from tarfile import ExtractError
+ directories = []
+
+ if members is None:
+ members = self
+
+ for tarinfo in members:
+ if tarinfo.isdir():
+ # Extract directories with a safe mode.
+ directories.append(tarinfo)
+ tarinfo = copy.copy(tarinfo)
+ tarinfo.mode = 448 # decimal for oct 0700
+ self.extract(tarinfo, path)
+
+ # Reverse sort directories.
+ if sys.version_info < (2, 4):
+ def sorter(dir1, dir2):
+ return cmp(dir1.name, dir2.name)
+ directories.sort(sorter)
+ directories.reverse()
+ else:
+ directories.sort(key=operator.attrgetter('name'), reverse=True)
+
+ # Set correct owner, mtime and filemode on directories.
+ for tarinfo in directories:
+ dirpath = os.path.join(path, tarinfo.name)
+ try:
+ self.chown(tarinfo, dirpath)
+ self.utime(tarinfo, dirpath)
+ self.chmod(tarinfo, dirpath)
+ except ExtractError:
+ e = sys.exc_info()[1]
+ if self.errorlevel > 1:
+ raise
+ else:
+ self._dbg(1, "tarfile: %s" % e)
+
+
+def main(argv, version=DEFAULT_VERSION):
+ """Install or upgrade setuptools and EasyInstall"""
+ tarball = download_setuptools()
+ _install(tarball)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
--- a/src/ldt/setup.py Tue Mar 22 12:35:29 2011 +0100
+++ b/src/ldt/setup.py Tue Mar 29 13:57:28 2011 +0200
@@ -1,5 +1,9 @@
import os
-from setuptools import setup, find_packages
+
+from distribute_setup import use_setuptools
+use_setuptools()
+
+from setuptools import setup
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR, 'ldt')
@@ -43,13 +47,22 @@
new_data_files.extend([os.path.join(ldirpath[len(base_path):], f) for f in lfilenames])
data_files.setdefault(key,[]).extend(new_data_files)
+# write MANIFEST.in so the collected package data files are included in source distributions
+
+m = open("MANIFEST.in", "w")
+
+m.write("exclude MANIFEST.in\n")
+for key,file_list in data_files.iteritems():
+ for filename in file_list:
+ m.write("include %s/%s\n" % (key.replace(".","/"), filename))
+m.close()
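+# For illustration (the package key and file name are hypothetical), an entry
+# data_files["ldt.web"] = ["static/css/main.css"] produces the line:
+#     include ldt/web/static/css/main.css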
setup(
name='ldt',
version=version,
author='Yves-Marie Haussonne (IRI)',
author_email='contact@iri.centrepompidou.fr',
- packages = find_packages(),
+ packages = packages,
package_data = data_files,
scripts=[],
url='https://www.iri.centrepompidou.fr/dev/hg/platform',
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/virtualenv/res/lib/lib_create_env.py Tue Mar 29 13:57:28 2011 +0200
@@ -0,0 +1,292 @@
+import sys
+import os
+import os.path
+import shutil
+import tarfile
+import zipfile
+import urllib
+import platform
+import patch
+
+join = os.path.join
+system_str = platform.system()
+
+
+URLS = {
+ 'DISTRIBUTE': {'setup': 'distribute', 'url':'http://pypi.python.org/packages/source/d/distribute/distribute-0.6.14.tar.gz', 'local':"distribute-0.6.14.tar.gz"},
+ 'DJANGO': {'setup': 'django', 'url': 'http://www.djangoproject.com/download/1.2.4/tarball/', 'local':"Django-1.2.4.tar.gz"},
+ 'JOGGING': {'setup': 'jogging', 'url': 'http://github.com/zain/jogging/tarball/v0.2.2', 'local':"jogging-0.2.2.tar.gz"},
+ 'DJANGO-EXTENSIONS': { 'setup': 'django-extensions', 'url':'https://github.com/django-extensions/django-extensions/tarball/0.6', 'local':"django-extensions-0.6.tar.gz"},
+ 'DJANGO-REGISTRATION': { 'setup': 'django-registration', 'url':'http://bitbucket.org/ubernostrum/django-registration/get/tip.tar.gz', 'local':"django-registration.tar.gz"},
+ 'DJANGO-TAGGING': { 'setup': 'django-tagging', 'url':'http://django-tagging.googlecode.com/files/django-tagging-0.3.1.tar.gz', 'local':"django-tagging-0.3.1.tar.gz"},
+ 'DJANGO-PISTON': { 'setup': 'django-piston', 'url':"django-piston-0.2.2-modified.tar.gz", 'local':"django-piston-0.2.2-modified.tar.gz"},
+ 'OAUTH2': { 'setup': 'python-oauth2', 'url':"python-oauth2-1.2.1-modified.tar.gz", 'local':"python-oauth2-1.2.1-modified.tar.gz"},
+    'HTTPLIB2': { 'setup': 'httplib2', 'url':'http://httplib2.googlecode.com/files/httplib2-0.6.0.tar.gz', 'local':"httplib2-0.6.0.tar.gz"},
+ 'DJANGO-OAUTH-PLUS': { 'setup': 'django-oauth-plus', 'url':'http://bitbucket.org/david/django-oauth-plus/get/f314f018e473.gz', 'local':"django-oauth-plus.tar.gz"},
+ 'MYSQL': { 'setup': 'mysql-python', 'url': 'http://sourceforge.net/projects/mysql-python/files/mysql-python/1.2.3/MySQL-python-1.2.3.tar.gz/download', 'local':"MySQL-python-1.2.3.tar.gz"},
+    'SETUPTOOLS-HG': { 'setup':'setuptools_hg', 'url':'http://pypi.python.org/packages/source/s/setuptools_hg/setuptools_hg-0.2.tar.gz', 'local':"setuptools_hg-0.2.tar.gz"},
+    'MERCURIAL': {'setup':'mercurial', 'url':'http://pypi.python.org/packages/source/m/mercurial/mercurial-1.7.5.tar.gz', 'local':"mercurial-1.7.5.tar.gz"}
+}
+
+if system_str == 'Windows':
+ URLS.update({
+ 'PSYCOPG2': {'setup': 'psycopg2','url': 'psycopg2-2.0.10.win32-py2.6-pg8.3.7-release.zip', 'local':"psycopg2-2.0.10.win32-py2.6-pg8.3.7-release.zip"},
+ 'JCC': {'setup': 'http://pylucene-win32-binary.googlecode.com/files/JCC-2.6-py2.6-win32.egg', 'local':"JCC-2.6-py2.6-win32.egg"},
+ 'PYLUCENE': {'setup': 'http://pylucene-win32-binary.googlecode.com/files/lucene-3.0.2-py2.6-win32.egg', 'local':"lucene-3.0.2-py2.6-win32.egg"},
+ 'PIL': {'setup': 'pil', 'url': 'http://effbot.org/media/downloads/PIL-1.1.7.win32-py2.6.exe', 'local':"PIL-1.1.7.win32-py2.6.exe"},
+ 'LXML': {'setup': 'lxml', 'url': 'http://pypi.python.org/packages/2.6/l/lxml/lxml-2.2.8-py2.6-win32.egg', 'local':"lxml-2.2.8-py2.6-win32.egg"}
+ })
+else:
+ URLS.update({
+ 'PSYCOPG2': {'setup': 'psycopg2','url': 'http://initd.org/psycopg/tarballs/PSYCOPG-2-3/psycopg2-2.3.2.tar.gz', 'local':"psycopg2-2.3.2.tar.gz"},
+ 'PYLUCENE': {'setup': 'http://apache.crihan.fr/dist/lucene/pylucene/pylucene-3.0.3-1-src.tar.gz', 'url': 'http://apache.crihan.fr/dist/lucene/pylucene/pylucene-3.0.3-1-src.tar.gz', 'local':"pylucene-3.0.3-1-src.tar.gz"},
+ 'PIL': {'setup': 'pil', 'url': 'http://effbot.org/downloads/Imaging-1.1.7.tar.gz', 'local':"Imaging-1.1.7.tar.gz"},
+ 'LXML': {'setup': 'lxml', 'url':"lxml_2.2.8.tar.gz", 'local':"lxml-2.2.8.tar.gz"}
+ })
+
+
+class ResourcesEnv(object):
+
+ def __init__(self, src_base, urls, normal_installs):
+ self.src_base = src_base
+ self.URLS = {}
+ self.__init_url(urls)
+ self.NORMAL_INSTALL = normal_installs
+
+ def get_src_base_path(self, fpath):
+ return os.path.abspath(os.path.join(self.src_base, fpath)).replace("\\","/")
+
+ def __add_package_def(self, key, setup, url, local):
+ self.URLS[key] = {'setup':setup, 'url':url, 'local':self.get_src_base_path(local)}
+
+ def __init_url(self, urls):
+ for key, url_dict in urls.items():
+ url = url_dict['url']
+ if not url.startswith("http://"):
+ url = self.get_src_base_path(url)
+ self.__add_package_def(key, url_dict["setup"], url, url_dict["local"])
+
+def ensure_dir(dir, logger):
+ if not os.path.exists(dir):
+ logger.notify('Creating directory %s' % dir)
+ os.makedirs(dir)
+
+def extend_parser(parser):
+ parser.add_option(
+ '--index-url',
+ metavar='INDEX_URL',
+ dest='index_url',
+ default='http://pypi.python.org/simple/',
+ help='base URL of Python Package Index')
+ parser.add_option(
+ '--type-install',
+ metavar='type_install',
+ dest='type_install',
+ default='local',
+        help='install type: local, url or setup')
+ parser.add_option(
+ '--ignore-packages',
+ metavar='ignore_packages',
+ dest='ignore_packages',
+ default=None,
+ help='list of comma separated keys for package to ignore')
+
+def adjust_options(options, args):
+ pass
+
+
+def install_pylucene(option_str, extra_env, res_source_key, home_dir, tmp_dir, src_dir, res_env, logger, call_subprocess, filter_python_develop):
+
+ logger.notify("Get Pylucene from %s " % res_env.URLS['PYLUCENE'][res_source_key])
+ pylucene_src = os.path.join(src_dir,"pylucene.tar.gz")
+ if res_source_key == 'local':
+ shutil.copy(res_env.URLS['PYLUCENE'][res_source_key], pylucene_src)
+ else:
+ urllib.urlretrieve(res_env.URLS['PYLUCENE'][res_source_key], pylucene_src)
+ tf = tarfile.open(pylucene_src,'r:gz')
+ pylucene_base_path = os.path.join(src_dir,"pylucene")
+ logger.notify("Extract Pylucene to %s " % pylucene_base_path)
+ tf.extractall(pylucene_base_path)
+ tf.close()
+
+ pylucene_src_path = os.path.join(pylucene_base_path, os.listdir(pylucene_base_path)[0])
+ jcc_src_path = os.path.abspath(os.path.join(pylucene_src_path,"jcc"))
+
+ #install jcc
+
+ #patch for linux
+ if system_str == 'Linux' :
+        olddir = os.getcwd()
+        # lib_dir is not passed to this function; assume the standard virtualenv layout on Linux
+        lib_dir = os.path.join(home_dir, 'lib', 'python%s.%s' % (sys.version_info[0], sys.version_info[1]))
+        patch_dest_path = os.path.join(lib_dir,'site-packages','setuptools-0.6c11-py'+'%s.%s' % (sys.version_info[0], sys.version_info[1])+'.egg')
+ if os.path.isfile(patch_dest_path):
+ # must unzip egg
+                # rename file and extract all
+ shutil.move(patch_dest_path, patch_dest_path + ".zip")
+ zf = zipfile.ZipFile(patch_dest_path + ".zip",'r')
+ zf.extractall(patch_dest_path)
+ os.remove(patch_dest_path + ".zip")
+ logger.notify("Patch jcc : %s " % (patch_dest_path))
+ os.chdir(patch_dest_path)
+ p = patch.fromfile(os.path.join(jcc_src_path,"jcc","patches","patch.43.0.6c11"))
+ p.apply()
+ os.chdir(olddir)
+
+ logger.notify("Install jcc")
+ call_subprocess([os.path.abspath(os.path.join(home_dir, 'bin', 'python')), 'setup.py', 'install'],
+ cwd=jcc_src_path,
+ filter_stdout=filter_python_develop,
+ show_stdout=True)
+ #install pylucene
+
+ logger.notify("Install pylucene")
+ #modify makefile
+ makefile_path = os.path.join(pylucene_src_path,"Makefile")
+ logger.notify("Modify makefile %s " % makefile_path)
+ shutil.move( makefile_path, makefile_path+"~" )
+
+ destination= open( makefile_path, "w" )
+ source= open( makefile_path+"~", "r" )
+ destination.write("PREFIX_PYTHON="+os.path.abspath(home_dir)+"\n")
+ destination.write("ANT=ant\n")
+ destination.write("PYTHON=$(PREFIX_PYTHON)/bin/python\n")
+
+ if system_str == "Darwin":
+ if sys.version_info >= (2,6):
+ destination.write("JCC=$(PYTHON) -m jcc.__main__ --shared --arch x86_64 --arch i386\n")
+ else:
+ destination.write("JCC=$(PYTHON) -m jcc --shared --arch x86_64 --arch i386\n")
+ destination.write("NUM_FILES=2\n")
+ elif system_str == "Windows":
+ destination.write("JCC=$(PYTHON) -m jcc.__main__ --shared --arch x86_64 --arch i386\n")
+ destination.write("NUM_FILES=2\n")
+ else:
+ if sys.version_info >= (2,6) and sys.version_info <= (2,7):
+ destination.write("JCC=$(PYTHON) -m jcc.__main__ --shared\n")
+ else:
+ destination.write("JCC=$(PYTHON) -m jcc --shared\n")
+ destination.write("NUM_FILES=2\n")
+ for line in source:
+ destination.write( line )
+ source.close()
+ destination.close()
+ os.remove(makefile_path+"~" )
+
+ logger.notify("pylucene make")
+ call_subprocess(['make'],
+ cwd=os.path.abspath(pylucene_src_path),
+ filter_stdout=filter_python_develop,
+ show_stdout=True)
+
+ logger.notify("pylucene make install")
+ call_subprocess(['make', 'install'],
+ cwd=os.path.abspath(pylucene_src_path),
+ filter_stdout=filter_python_develop,
+ show_stdout=True)
+
+
+def install_psycopg2(option_str, extra_env, res_source_key, home_dir, tmp_dir, src_dir, res_env, logger, call_subprocess, filter_python_develop):
+ psycopg2_src = os.path.join(src_dir,"psycopg2.zip")
+ shutil.copy(res_env.URLS['PSYCOPG2'][res_source_key], psycopg2_src)
+ #extract psycopg2
+ zf = zipfile.ZipFile(psycopg2_src)
+ psycopg2_base_path = os.path.join(src_dir,"psycopg2")
+ zf.extractall(psycopg2_base_path)
+ zf.close()
+
+ psycopg2_src_path = os.path.join(psycopg2_base_path, os.listdir(psycopg2_base_path)[0])
+ shutil.copytree(os.path.join(psycopg2_src_path, 'psycopg2'), os.path.abspath(os.path.join(home_dir, 'Lib', 'psycopg2')))
+ shutil.copy(os.path.join(psycopg2_src_path, 'psycopg2-2.0.10-py2.6.egg-info'), os.path.abspath(os.path.join(home_dir, 'Lib', 'site-packages')))
+
+
+
+def lib_generate_install_methods(path_locations, src_base, Logger, call_subprocess, normal_installs, urls=None):
+
+ all_urls = URLS.copy()
+ if urls is not None:
+ all_urls.update(urls)
+
+ res_env = ResourcesEnv(src_base, all_urls, normal_installs)
+
+ def filter_python_develop(line):
+ if not line.strip():
+ return Logger.DEBUG
+ for prefix in ['Searching for', 'Reading ', 'Best match: ', 'Processing ',
+ 'Moving ', 'Adding ', 'running ', 'writing ', 'Creating ',
+ 'creating ', 'Copying ']:
+ if line.startswith(prefix):
+ return Logger.DEBUG
+ return Logger.NOTIFY
+
+
+ def normal_install(key, method, option_str, extra_env, res_source_key, home_dir, tmp_dir, res_env, logger, call_subprocess):
+ logger.notify("Install %s from %s with %s" % (key,res_env.URLS[key][res_source_key],method))
+ if method == 'pip':
+ if sys.platform == 'win32':
+ args = [os.path.abspath(os.path.join(home_dir, 'Scripts', 'pip')), 'install', '-E', os.path.abspath(home_dir), res_env.URLS[key][res_source_key]]
+ else:
+ args = [os.path.abspath(os.path.join(home_dir, 'bin', 'pip')), 'install', '-E', os.path.abspath(home_dir), res_env.URLS[key][res_source_key]]
+ if option_str :
+ args.insert(4,option_str)
+ call_subprocess(args,
+ cwd=os.path.abspath(tmp_dir),
+ filter_stdout=filter_python_develop,
+ show_stdout=True,
+ extra_env=extra_env)
+ else:
+ if sys.platform == 'win32':
+ args = [os.path.abspath(os.path.join(home_dir, 'Scripts', 'easy_install')), res_env.URLS[key][res_source_key]]
+ else:
+ args = [os.path.abspath(os.path.join(home_dir, 'bin', 'easy_install')), res_env.URLS[key][res_source_key]]
+ if option_str :
+ args.insert(1,option_str)
+ call_subprocess(args,
+ cwd=os.path.abspath(tmp_dir),
+ filter_stdout=filter_python_develop,
+ show_stdout=True,
+ extra_env=extra_env)
+
+
+ def after_install(options, home_dir):
+
+ global logger
+
+ verbosity = options.verbose - options.quiet
+ logger = Logger([(Logger.level_for_integer(2-verbosity), sys.stdout)])
+
+
+ home_dir, lib_dir, inc_dir, bin_dir = path_locations(home_dir)
+ base_dir = os.path.dirname(home_dir)
+ src_dir = os.path.join(home_dir, 'src')
+ tmp_dir = os.path.join(home_dir, 'tmp')
+ ensure_dir(src_dir, logger)
+ ensure_dir(tmp_dir, logger)
+ system_str = platform.system()
+
+ res_source_key = options.type_install
+
+ ignore_packages = []
+
+ if options.ignore_packages :
+ ignore_packages = options.ignore_packages.split(",")
+
+ logger.indent += 2
+ try:
+ for key, method, option_str, extra_env in res_env.NORMAL_INSTALL:
+ if key not in ignore_packages:
+ if callable(method):
+ method(option_str, extra_env, res_source_key, home_dir, tmp_dir, src_dir, res_env, logger, call_subprocess, filter_python_develop)
+ else:
+ normal_install(key, method, option_str, extra_env, res_source_key, home_dir, tmp_dir, res_env, logger, call_subprocess)
+
+ logger.notify("Clear source dir")
+ shutil.rmtree(src_dir)
+
+ finally:
+ logger.indent -= 2
+ script_dir = join(base_dir, bin_dir)
+ logger.notify('Run "%s Package" to install new packages that provide builds'
+ % join(script_dir, 'easy_install'))
+
+
+ return adjust_options, extend_parser, after_install
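+
+# Illustrative usage from a bootstrap extension module (the install list and
+# source path below are hypothetical; path_locations, Logger and
+# call_subprocess are provided by the generated virtualenv bootstrap script):
+#
+#     INSTALLS = [('DISTRIBUTE', 'pip', None, None),
+#                 ('PYLUCENE', install_pylucene, None, None)]
+#     adjust_options, extend_parser, after_install = lib_generate_install_methods(
+#         path_locations, '/path/to/res/src', Logger, call_subprocess, INSTALLS)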
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/virtualenv/res/lib/patch.py Tue Mar 29 13:57:28 2011 +0200
@@ -0,0 +1,589 @@
+""" Patch utility to apply unified diffs
+
+ Brute-force line-by-line non-recursive parsing
+
+ Copyright (c) 2008-2010 anatoly techtonik
+ Available under the terms of MIT license
+
+ Project home: http://code.google.com/p/python-patch/
+
+
+ $Id: patch.py 76 2010-04-08 19:10:21Z techtonik $
+ $HeadURL: https://python-patch.googlecode.com/svn/trunk/patch.py $
+"""
+
+__author__ = "techtonik.rainforce.org"
+__version__ = "10.04"
+
+import copy
+import logging
+import re
+# cStringIO doesn't support unicode in 2.5
+from StringIO import StringIO
+from logging import debug, info, warning
+
+from os.path import exists, isfile, abspath
+from os import unlink
+
+
+#------------------------------------------------
+# Logging is controlled by "python_patch" logger
+
+debugmode = False
+
+logger = logging.getLogger("python_patch")
+loghandler = logging.StreamHandler()
+logger.addHandler(loghandler)
+
+debug = logger.debug
+info = logger.info
+warning = logger.warning
+
+#: disable library logging by default
+logger.setLevel(logging.CRITICAL)
+
+#------------------------------------------------
+
+
+def fromfile(filename):
+ """ Parse patch file and return Patch() object
+ """
+
+ info("reading patch from file %s" % filename)
+ fp = open(filename, "rb")
+ patch = Patch(fp)
+ fp.close()
+ return patch
+
+
+def fromstring(s):
+ """ Parse text string and return Patch() object
+ """
+
+ return Patch(
+      StringIO(s)
+ )
+
+
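+# Illustrative usage (the patch and target file names are made up):
+#
+#     p = fromfile("fix-setup.patch")
+#     if p.can_patch("setup.py"):
+#         p.apply()
+#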
+
+class HunkInfo(object):
+ """ Parsed hunk data container (hunk starts with @@ -R +R @@) """
+
+ def __init__(self):
+ self.startsrc=None #: line count starts with 1
+ self.linessrc=None
+ self.starttgt=None
+ self.linestgt=None
+ self.invalid=False
+ self.text=[]
+
+ def copy(self):
+ return copy.copy(self)
+
+# def apply(self, estream):
+# """ write hunk data into enumerable stream
+# return strings one by one until hunk is
+# over
+#
+# enumerable stream are tuples (lineno, line)
+# where lineno starts with 0
+# """
+# pass
+
+
+
+class Patch(object):
+
+ def __init__(self, stream=None):
+
+ # define Patch data members
+ # table with a row for every source file
+
+ #: list of source filenames
+ self.source=None
+ self.target=None
+ #: list of lists of hunks
+ self.hunks=None
+ #: file endings statistics for every hunk
+ self.hunkends=None
+
+ if stream:
+ self.parse(stream)
+
+ def copy(self):
+ return copy.copy(self)
+
+ def parse(self, stream):
+ """ parse unified diff """
+ self.source = []
+ self.target = []
+ self.hunks = []
+ self.hunkends = []
+
+ # define possible file regions that will direct the parser flow
+ header = False # comments before the patch body
+ filenames = False # lines starting with --- and +++
+
+ hunkhead = False # @@ -R +R @@ sequence
+ hunkbody = False #
+ hunkskip = False # skipping invalid hunk mode
+
+ header = True
+ lineends = dict(lf=0, crlf=0, cr=0)
+ nextfileno = 0
+ nexthunkno = 0 #: even if index starts with 0 user messages number hunks from 1
+
+ # hunkinfo holds parsed values, hunkactual - calculated
+ hunkinfo = HunkInfo()
+ hunkactual = dict(linessrc=None, linestgt=None)
+
+ fe = enumerate(stream)
+ for lineno, line in fe:
+
+ # analyze state
+ if header and line.startswith("--- "):
+ header = False
+ # switch to filenames state
+ filenames = True
+ #: skip hunkskip and hunkbody code until you read definition of hunkhead
+ if hunkbody:
+ # process line first
+ if re.match(r"^[- \+\\]", line):
+ # gather stats about line endings
+ if line.endswith("\r\n"):
+ self.hunkends[nextfileno-1]["crlf"] += 1
+ elif line.endswith("\n"):
+ self.hunkends[nextfileno-1]["lf"] += 1
+ elif line.endswith("\r"):
+ self.hunkends[nextfileno-1]["cr"] += 1
+
+ if line.startswith("-"):
+ hunkactual["linessrc"] += 1
+ elif line.startswith("+"):
+ hunkactual["linestgt"] += 1
+ elif not line.startswith("\\"):
+ hunkactual["linessrc"] += 1
+ hunkactual["linestgt"] += 1
+ hunkinfo.text.append(line)
+ # todo: handle \ No newline cases
+ else:
+ warning("invalid hunk no.%d at %d for target file %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
+ # add hunk status node
+ self.hunks[nextfileno-1].append(hunkinfo.copy())
+ self.hunks[nextfileno-1][nexthunkno-1]["invalid"] = True
+ # switch to hunkskip state
+ hunkbody = False
+ hunkskip = True
+
+ # check exit conditions
+ if hunkactual["linessrc"] > hunkinfo.linessrc or hunkactual["linestgt"] > hunkinfo.linestgt:
+ warning("extra hunk no.%d lines at %d for target %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
+ # add hunk status node
+ self.hunks[nextfileno-1].append(hunkinfo.copy())
+ self.hunks[nextfileno-1][nexthunkno-1]["invalid"] = True
+ # switch to hunkskip state
+ hunkbody = False
+ hunkskip = True
+ elif hunkinfo.linessrc == hunkactual["linessrc"] and hunkinfo.linestgt == hunkactual["linestgt"]:
+ self.hunks[nextfileno-1].append(hunkinfo.copy())
+ # switch to hunkskip state
+ hunkbody = False
+ hunkskip = True
+
+      # detect mixed windows/unix line ends
+ ends = self.hunkends[nextfileno-1]
+ if ((ends["cr"]!=0) + (ends["crlf"]!=0) + (ends["lf"]!=0)) > 1:
+ warning("inconsistent line ends in patch hunks for %s" % self.source[nextfileno-1])
+ if debugmode:
+ debuglines = dict(ends)
+ debuglines.update(file=self.target[nextfileno-1], hunk=nexthunkno)
+ debug("crlf: %(crlf)d lf: %(lf)d cr: %(cr)d\t - file: %(file)s hunk: %(hunk)d" % debuglines)
+
+ if hunkskip:
+ match = re.match("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?", line)
+ if match:
+ # switch to hunkhead state
+ hunkskip = False
+ hunkhead = True
+ elif line.startswith("--- "):
+ # switch to filenames state
+ hunkskip = False
+ filenames = True
+ if debugmode and len(self.source) > 0:
+ debug("- %2d hunks for %s" % (len(self.hunks[nextfileno-1]), self.source[nextfileno-1]))
+
+ if filenames:
+ if line.startswith("--- "):
+ if nextfileno in self.source:
+ warning("skipping invalid patch for %s" % self.source[nextfileno])
+ del self.source[nextfileno]
+ # double source filename line is encountered
+ # attempt to restart from this second line
+ re_filename = "^--- ([^\t]+)"
+ match = re.match(re_filename, line)
+ # todo: support spaces in filenames
+ if match:
+ self.source.append(match.group(1).strip())
+ else:
+ warning("skipping invalid filename at line %d" % lineno)
+ # switch back to header state
+ filenames = False
+ header = True
+ elif not line.startswith("+++ "):
+ if nextfileno in self.source:
+ warning("skipping invalid patch with no target for %s" % self.source[nextfileno])
+ del self.source[nextfileno]
+ else:
+ # this should be unreachable
+ warning("skipping invalid target patch")
+ filenames = False
+ header = True
+ else:
+ if nextfileno in self.target:
+ warning("skipping invalid patch - double target at line %d" % lineno)
+ del self.source[nextfileno]
+ del self.target[nextfileno]
+ nextfileno -= 1
+ # double target filename line is encountered
+ # switch back to header state
+ filenames = False
+ header = True
+ else:
+ re_filename = "^\+\+\+ ([^\t]+)"
+ match = re.match(re_filename, line)
+ if not match:
+ warning("skipping invalid patch - no target filename at line %d" % lineno)
+ # switch back to header state
+ filenames = False
+ header = True
+ else:
+ self.target.append(match.group(1).strip())
+ nextfileno += 1
+ # switch to hunkhead state
+ filenames = False
+ hunkhead = True
+ nexthunkno = 0
+ self.hunks.append([])
+ self.hunkends.append(lineends.copy())
+ continue
+
+ if hunkhead:
+ match = re.match("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?", line)
+ if not match:
+ if nextfileno-1 not in self.hunks:
+ warning("skipping invalid patch with no hunks for file %s" % self.target[nextfileno-1])
+ # switch to header state
+ hunkhead = False
+ header = True
+ continue
+ else:
+ # switch to header state
+ hunkhead = False
+ header = True
+ else:
+ hunkinfo.startsrc = int(match.group(1))
+ hunkinfo.linessrc = 1
+ if match.group(3): hunkinfo.linessrc = int(match.group(3))
+ hunkinfo.starttgt = int(match.group(4))
+ hunkinfo.linestgt = 1
+ if match.group(6): hunkinfo.linestgt = int(match.group(6))
+ hunkinfo.invalid = False
+ hunkinfo.text = []
+
+ hunkactual["linessrc"] = hunkactual["linestgt"] = 0
+
+ # switch to hunkbody state
+ hunkhead = False
+ hunkbody = True
+ nexthunkno += 1
+ continue
+ else:
+ if not hunkskip:
+ warning("patch file incomplete - %s" % filename)
+ # sys.exit(?)
+ else:
+ # duplicated message when an eof is reached
+ if debugmode and len(self.source) > 0:
+ debug("- %2d hunks for %s" % (len(self.hunks[nextfileno-1]), self.source[nextfileno-1]))
+
+ info("total files: %d total hunks: %d" % (len(self.source), sum(len(hset) for hset in self.hunks)))
+
+
+ def apply(self):
+ """ apply parsed patch """
+
+ total = len(self.source)
+ for fileno, filename in enumerate(self.source):
+
+ f2patch = filename
+ if not exists(f2patch):
+ f2patch = self.target[fileno]
+ if not exists(f2patch):
+ warning("source/target file does not exist\n--- %s\n+++ %s" % (filename, f2patch))
+ continue
+ if not isfile(f2patch):
+ warning("not a file - %s" % f2patch)
+ continue
+ filename = f2patch
+
+ info("processing %d/%d:\t %s" % (fileno+1, total, filename))
+
+ # validate before patching
+ f2fp = open(filename)
+ hunkno = 0
+ hunk = self.hunks[fileno][hunkno]
+ hunkfind = []
+ hunkreplace = []
+ validhunks = 0
+ canpatch = False
+ for lineno, line in enumerate(f2fp):
+ if lineno+1 < hunk.startsrc:
+ continue
+ elif lineno+1 == hunk.startsrc:
+ hunkfind = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " -"]
+ hunkreplace = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " +"]
+ #pprint(hunkreplace)
+ hunklineno = 0
+
+ # todo \ No newline at end of file
+
+ # check hunks in source file
+ if lineno+1 < hunk.startsrc+len(hunkfind)-1:
+ if line.rstrip("\r\n") == hunkfind[hunklineno]:
+ hunklineno+=1
+ else:
+ debug("hunk no.%d doesn't match source file %s" % (hunkno+1, filename))
+ # file may be already patched, but we will check other hunks anyway
+ hunkno += 1
+ if hunkno < len(self.hunks[fileno]):
+ hunk = self.hunks[fileno][hunkno]
+ continue
+ else:
+ break
+
+ # check if processed line is the last line
+ if lineno+1 == hunk.startsrc+len(hunkfind)-1:
+ debug("file %s hunk no.%d -- is ready to be patched" % (filename, hunkno+1))
+ hunkno+=1
+ validhunks+=1
+ if hunkno < len(self.hunks[fileno]):
+ hunk = self.hunks[fileno][hunkno]
+ else:
+ if validhunks == len(self.hunks[fileno]):
+ # patch file
+ canpatch = True
+ break
+ else:
+ if hunkno < len(self.hunks[fileno]):
+ warning("premature end of source file %s at hunk %d" % (filename, hunkno+1))
+
+ f2fp.close()
+
+ if validhunks < len(self.hunks[fileno]):
+ if self._match_file_hunks(filename, self.hunks[fileno]):
+ warning("already patched %s" % filename)
+ else:
+ warning("source file is different - %s" % filename)
+ if canpatch:
+ backupname = filename+".orig"
+ if exists(backupname):
+ warning("can't backup original file to %s - aborting" % backupname)
+ else:
+ import shutil
+ shutil.move(filename, backupname)
+ if self.write_hunks(backupname, filename, self.hunks[fileno]):
+ warning("successfully patched %s" % filename)
+ unlink(backupname)
+ else:
+ warning("error patching file %s" % filename)
+ shutil.copy(filename, filename+".invalid")
+ warning("invalid version is saved to %s" % filename+".invalid")
+ # todo: proper rejects
+ shutil.move(backupname, filename)
+
+ # todo: check for premature eof
+
+
+ def can_patch(self, filename):
+ """ Check if specified filename can be patched. Returns None if file can
+ not be found among source filenames. False if patch can not be applied
+        cleanly. True otherwise.
+
+ :returns: True, False or None
+ """
+ idx = self._get_file_idx(filename, source=True)
+ if idx == None:
+ return None
+ return self._match_file_hunks(filename, self.hunks[idx])
+
+
+ def _match_file_hunks(self, filepath, hunks):
+ matched = True
+ fp = open(abspath(filepath))
+
+ class NoMatch(Exception):
+ pass
+
+ lineno = 1
+ line = fp.readline()
+ hno = None
+ try:
+ for hno, h in enumerate(hunks):
+ # skip to first line of the hunk
+ while lineno < h.starttgt:
+ if not len(line): # eof
+ debug("check failed - premature eof before hunk: %d" % (hno+1))
+ raise NoMatch
+ line = fp.readline()
+ lineno += 1
+ for hline in h.text:
+ if hline.startswith("-"):
+ continue
+ if not len(line):
+ debug("check failed - premature eof on hunk: %d" % (hno+1))
+ # todo: \ No newline at the end of file
+ raise NoMatch
+ if line.rstrip("\r\n") != hline[1:].rstrip("\r\n"):
+ debug("file is not patched - failed hunk: %d" % (hno+1))
+ raise NoMatch
+ line = fp.readline()
+ lineno += 1
+
+ except NoMatch:
+ matched = False
+ # todo: display failed hunk, i.e. expected/found
+
+ fp.close()
+ return matched
+
+
+ def patch_stream(self, instream, hunks):
+ """ Generator that yields stream patched with hunks iterable
+
+ Converts lineends in hunk lines to the best suitable format
+ autodetected from input
+ """
+
+ # todo: At the moment substituted lineends may not be the same
+ # at the start and at the end of patching. Also issue a
+ # warning/throw about mixed lineends (is it really needed?)
+
+ hunks = iter(hunks)
+
+ srclineno = 1
+
+ lineends = {'\n':0, '\r\n':0, '\r':0}
+ def get_line():
+ """
+ local utility function - return line from source stream
+ collecting line end statistics on the way
+ """
+ line = instream.readline()
+ # 'U' mode works only with text files
+ if line.endswith("\r\n"):
+ lineends["\r\n"] += 1
+ elif line.endswith("\n"):
+ lineends["\n"] += 1
+ elif line.endswith("\r"):
+ lineends["\r"] += 1
+ return line
+
+ for hno, h in enumerate(hunks):
+ debug("hunk %d" % (hno+1))
+ # skip to line just before hunk starts
+ while srclineno < h.startsrc:
+ yield get_line()
+ srclineno += 1
+
+ for hline in h.text:
+ # todo: check \ No newline at the end of file
+ if hline.startswith("-") or hline.startswith("\\"):
+ get_line()
+ srclineno += 1
+ continue
+ else:
+ if not hline.startswith("+"):
+ get_line()
+ srclineno += 1
+ line2write = hline[1:]
+ # detect if line ends are consistent in source file
+ if sum([bool(lineends[x]) for x in lineends]) == 1:
+ newline = [x for x in lineends if lineends[x] != 0][0]
+ yield line2write.rstrip("\r\n")+newline
+ else: # newlines are mixed
+ yield line2write
+
+ for line in instream:
+ yield line
+
+
+ def write_hunks(self, srcname, tgtname, hunks):
+ src = open(srcname, "rb")
+ tgt = open(tgtname, "wb")
+
+ debug("processing target file %s" % tgtname)
+
+ tgt.writelines(self.patch_stream(src, hunks))
+
+ tgt.close()
+ src.close()
+ return True
+
+
+ def _get_file_idx(self, filename, source=None):
+ """ Detect index of given filename within patch.
+
+ :param filename:
+ :param source: search filename among sources (True),
+ targets (False), or both (None)
+ :returns: int or None
+ """
+ filename = abspath(filename)
+ if source == True or source == None:
+ for i,fnm in enumerate(self.source):
+ if filename == abspath(fnm):
+ return i
+ if source == False or source == None:
+ for i,fnm in enumerate(self.target):
+ if filename == abspath(fnm):
+ return i
+
+
+
+
+from optparse import OptionParser
+from os.path import exists
+import sys
+
+if __name__ == "__main__":
+ opt = OptionParser(usage="%prog [options] unipatch-file", version="python-patch %s" % __version__)
+ opt.add_option("--debug", action="store_true", dest="debugmode", help="debug mode")
+ (options, args) = opt.parse_args()
+
+ if not args:
+ opt.print_version()
+ opt.print_help()
+ sys.exit()
+ debugmode = options.debugmode
+ patchfile = args[0]
+ if not exists(patchfile) or not isfile(patchfile):
+ sys.exit("patch file does not exist - %s" % patchfile)
+
+
+ if debugmode:
+ loglevel = logging.DEBUG
+ logformat = "%(levelname)8s %(message)s"
+ else:
+ loglevel = logging.INFO
+ logformat = "%(message)s"
+ logger.setLevel(loglevel)
+ loghandler.setFormatter(logging.Formatter(logformat))
+
+
+
+ patch = fromfile(patchfile)
+ #pprint(patch)
+ patch.apply()
+
+ # todo: document and test line ends handling logic - patch.py detects proper line-endings
+    # for inserted hunks and issues a warning if patched file has inconsistent line ends
Binary file virtualenv/res/src/Django-1.2.4.tar.gz has changed
Binary file virtualenv/res/src/Imaging-1.1.7.tar.gz has changed
Binary file virtualenv/res/src/JCC-2.6-py2.6-win32.egg has changed
Binary file virtualenv/res/src/MySQL-python-1.2.3.tar.gz has changed
Binary file virtualenv/res/src/PIL-1.1.7.win32-py2.6.exe has changed
Binary file virtualenv/res/src/distribute-0.6.14.tar.gz has changed
Binary file virtualenv/res/src/django-extensions-0.6.tar.gz has changed
Binary file virtualenv/res/src/django-oauth-plus.tar.gz has changed
Binary file virtualenv/res/src/django-piston-0.2.2-modified.tar.gz has changed
Binary file virtualenv/res/src/django-registration.tar.gz has changed
Binary file virtualenv/res/src/django-tagging-0.3.1.tar.gz has changed
Binary file virtualenv/res/src/httplib2-0.6.0.tar.gz has changed
Binary file virtualenv/res/src/jogging-0.2.2.tar.gz has changed
Binary file virtualenv/res/src/lucene-3.0.2-py2.6-win32.egg has changed
Binary file virtualenv/res/src/lxml-2.2.8-py2.6-win32.egg has changed
Binary file virtualenv/res/src/lxml-2.2.8.tar.gz has changed
Binary file virtualenv/res/src/mercurial-1.7.5.tar.gz has changed
Binary file virtualenv/res/src/psycopg2-2.0.10.win32-py2.6-pg8.3.7-release.zip has changed
Binary file virtualenv/res/src/psycopg2-2.3.2.tar.gz has changed
Binary file virtualenv/res/src/pylucene-3.0.3-1-src.tar.gz has changed
Binary file virtualenv/res/src/python-oauth2-1.2.1-modified.tar.gz has changed
Binary file virtualenv/res/src/setuptools_hg-0.2.tar.gz has changed
--- a/virtualenv/setup/create_python_env.py Tue Mar 22 12:35:29 2011 +0100
+++ b/virtualenv/setup/create_python_env.py Tue Mar 29 13:57:28 2011 +0200
@@ -29,137 +29,17 @@
# - 4Suite-xml - easy_install ftp://ftp.4suite.org/pub/4Suite/4Suite-XML-1.0.2.tar.bz2
# - pylucene - script
-src_base = os.path.join(here,"res","src").replace("\\","/")
-lib_path = os.path.abspath(os.path.join(here,"res","lib")).replace("\\","/")
-
-EXTRA_TEXT = "URLS = { \n"
-
-EXTRA_TEXT += " 'DISTRIBUTE' : { 'setup': 'distribute', 'url': 'http://pypi.python.org/packages/source/d/distribute/distribute-0.6.14.tar.gz', 'local': '"+ os.path.abspath(os.path.join(src_base,"distribute-0.6.14.tar.gz")).replace("\\","/")+"'},\n"
-EXTRA_TEXT += " 'MERCURIAL' : { 'setup': 'distribute', 'url': 'http://pypi.python.org/packages/source/d/mercurial/mercurial-1.7.5.tar.gz', 'local': '"+ os.path.abspath(os.path.join(src_base,"mercurial-1.7.5.tar.gz")).replace("\\","/")+"'},\n"
-EXTRA_TEXT += "}\n"
-
-EXTRA_TEXT += "import sys\n"
-EXTRA_TEXT += "sys.path.append('"+lib_path+"')\n"
-
-EXTRA_TEXT += """
-
-import shutil
-import tarfile
-import zipfile
-import urllib
-import platform
-
-
-INDEX_URL = 'http://pypi.python.org/simple/'
-
-
-def extend_parser(parser):
- parser.add_option(
- '--index-url',
- metavar='INDEX_URL',
- dest='index_url',
- default='',
- help='base URL of Python Package Index')
- parser.add_option(
- '--type-install',
- metavar='type_install',
- dest='type_install',
- default='local',
- help='type install : local, url, setup')
- parser.add_option(
- '--ignore-packages',
- metavar='ignore_packages',
- dest='ignore_packages',
- default=None,
- help='list of comma separated keys for package to ignore')
-
-
-
-def adjust_options(options, args):
- pass
+src_base = os.path.abspath(os.path.join(here,"..","res","src")).replace("\\","/")
+lib_path = os.path.abspath(os.path.join(here,"..","res","lib")).replace("\\","/")
+patch_path = os.path.abspath(os.path.join(here,"res","patch")).replace("\\","/")
-def after_install(options, home_dir):
- home_dir, lib_dir, inc_dir, bin_dir = path_locations(home_dir)
- base_dir = os.path.dirname(home_dir)
- src_dir = join(home_dir, 'src')
- tmp_dir = join(home_dir, 'tmp')
- ensure_dir(src_dir)
- ensure_dir(tmp_dir)
- system_str = platform.system()
-
- res_source_key = options.type_install
-
- ignore_packages = []
-
- if options.ignore_packages :
- ignore_packages = options.ignore_packages.split(",")
-
- logger.indent += 2
- try:
- NORMAL_INSTALL = [ #(key,method, option_str, extra_env)
- ('DISTRIBUTE', 'pip', None, None),
- ('MERCURIAL', 'pip', None, None),
- ]
-
-
- for key, method, option_str, extra_env in NORMAL_INSTALL:
- if key not in ignore_packages:
- normal_install(key, method, option_str, extra_env, res_source_key, home_dir, tmp_dir)
-
- logger.notify("Clear source dir")
- shutil.rmtree(src_dir)
-
- finally:
- logger.indent -= 2
- script_dir = join(base_dir, bin_dir)
- logger.notify('Run "%s Package" to install new packages that provide builds'
- % join(script_dir, 'easy_install'))
-
+EXTRA_TEXT = "import sys\n"
+EXTRA_TEXT += "sys.path.append('%s')\n" % (lib_path)
+EXTRA_TEXT += "sys.path.append('%s')\n" % (os.path.abspath(os.path.join(here,"res")).replace("\\","/"))
+EXTRA_TEXT += "from res_create_env import generate_install_methods\n"
+EXTRA_TEXT += "adjust_options, extend_parser, after_install = generate_install_methods(path_locations, '%s', Logger, call_subprocess)\n" % (src_base)
-def normal_install(key, method, option_str, extra_env, res_source_key, home_dir, tmp_dir):
- logger.notify("Install %s from %s with %s" % (key,URLS[key][res_source_key],method))
- if method == 'pip':
- if sys.platform == 'win32':
- args = [os.path.abspath(os.path.join(home_dir, 'Scripts', 'pip')), 'install', '-E', os.path.abspath(home_dir), URLS[key][res_source_key]]
- else:
- args = [os.path.abspath(os.path.join(home_dir, 'bin', 'pip')), 'install', '-E', os.path.abspath(home_dir), URLS[key][res_source_key]]
- if option_str :
- args.insert(4,option_str)
- call_subprocess(args,
- cwd=os.path.abspath(tmp_dir),
- filter_stdout=filter_python_develop,
- show_stdout=True,
- extra_env=extra_env)
- else:
- if sys.platform == 'win32':
- args = [os.path.abspath(os.path.join(home_dir, 'Scripts', 'easy_install')), URLS[key][res_source_key]]
- else:
- args = [os.path.abspath(os.path.join(home_dir, 'bin', 'easy_install')), URLS[key][res_source_key]]
- if option_str :
- args.insert(1,option_str)
- call_subprocess(args,
- cwd=os.path.abspath(tmp_dir),
- filter_stdout=filter_python_develop,
- show_stdout=True,
- extra_env=extra_env)
-
-
-def ensure_dir(dir):
- if not os.path.exists(dir):
- logger.notify('Creating directory %s' % dir)
- os.makedirs(dir)
-
-def filter_python_develop(line):
- if not line.strip():
- return Logger.DEBUG
- for prefix in ['Searching for', 'Reading ', 'Best match: ', 'Processing ',
- 'Moving ', 'Adding ', 'running ', 'writing ', 'Creating ',
- 'creating ', 'Copying ']:
- if line.startswith(prefix):
- return Logger.DEBUG
- return Logger.NOTIFY
-"""
def main():
python_version = ".".join(map(str,sys.version_info[0:2]))
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/virtualenv/setup/res/res_create_env.py Tue Mar 29 13:57:28 2011 +0200
@@ -0,0 +1,15 @@
+import platform
+
+from lib_create_env import lib_generate_install_methods, install_pylucene, install_psycopg2
+
+system_str = platform.system()
+
+
+INSTALLS = [#(key,method, option_str, dict_extra_env)
+ ('DISTRIBUTE', 'pip', None, None),
+ ('MERCURIAL', 'pip', None, None),
+ ('SETUPTOOLS-HG', 'pip', None, None),
+]
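+# The method field may also be a callable such as the imported install_psycopg2,
+# e.g. (illustrative only): ('PSYCOPG2', install_psycopg2, None, None)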
+
+def generate_install_methods(path_locations, src_base, Logger, call_subprocess):
+ return lib_generate_install_methods(path_locations, src_base, Logger, call_subprocess, INSTALLS)
Binary file virtualenv/setup/res/src/distribute-0.6.14.tar.gz has changed
Binary file virtualenv/setup/res/src/mercurial-1.7.5.tar.gz has changed
--- a/virtualenv/web/create_python_env.py Tue Mar 22 12:35:29 2011 +0100
+++ b/virtualenv/web/create_python_env.py Tue Mar 29 13:57:28 2011 +0200
@@ -8,7 +8,8 @@
- distribute
- psycopg2 requires the PostgreSQL libpq libraries and the pg_config utility
-- python project-boot.py --distribute --no-site-packages --index-url=http://pypi.websushi.org/ --clear --type-install=local <path_to_venv>
+- python project-boot.py --distribute --no-site-packages --index-url=http://pypi.websushi.org/ --clear --type-install=local --ignore-packages=MYSQL <path_to_venv>
+- python project-boot.py --no-site-packages --clear --ignore-packages=MYSQL --type-install=local <path_to_venv>
- For Linux :
python project-boot.py --unzip-setuptools --no-site-packages --index-url=http://pypi.websushi.org/ --clear --type-install=local <path_to_venv>
@@ -33,293 +34,23 @@
# - 4Suite-xml - easy_install ftp://ftp.4suite.org/pub/4Suite/4Suite-XML-1.0.2.tar.bz2
# - pylucene - script
-src_base = os.path.join(here,"res","src").replace("\\","/")
-lib_path = os.path.abspath(os.path.join(here,"res","lib")).replace("\\","/")
+src_base = os.path.abspath(os.path.join(here,"..","res","src")).replace("\\","/")
+lib_path = os.path.abspath(os.path.join(here,"..","res","lib")).replace("\\","/")
patch_path = os.path.abspath(os.path.join(here,"res","patch")).replace("\\","/")
-EXTRA_TEXT = "URLS = { \n"
-EXTRA_TEXT += " 'DISTRIBUTE' : { 'setup': 'distribute', 'url': 'http://pypi.python.org/packages/source/d/distribute/distribute-0.6.14.tar.gz', 'local': '"+ os.path.abspath(os.path.join(src_base,"distribute-0.6.14.tar.gz")).replace("\\","/")+"'},\n"
-if sys.platform == 'win32':
- EXTRA_TEXT += " 'PSYCOPG2' : { 'setup': 'psycopg2','url': 'D:/wakimd/dev/hg/platform/virtualenv/web/res/src/psycopg2-2.0.10.win32-py2.6-pg8.3.7-release.zip', 'local': '"+ os.path.abspath(os.path.join(src_base,"psycopg2-2.0.10.win32-py2.6-pg8.3.7-release.zip")).replace("\\","/")+"'},\n"
-else:
- EXTRA_TEXT += " 'PSYCOPG2' : { 'setup': 'psycopg2','url': 'http://initd.org/psycopg/tarballs/PSYCOPG-2-3/psycopg2-2.3.2.tar.gz', 'local': '"+ os.path.abspath(os.path.join(src_base,"psycopg2-2.3.2.tar.gz")).replace("\\","/")+"'},\n"
-EXTRA_TEXT += " 'MYSQL' : { 'setup': 'mysql-python', 'url': 'http://sourceforge.net/projects/mysql-python/files/mysql-python/1.2.3/MySQL-python-1.2.3.tar.gz/download', 'local' : '"+ os.path.abspath(os.path.join(src_base,"MySQL-python-1.2.3.tar.gz")).replace("\\","/")+"'},\n"
-if sys.platform == 'win32':
- EXTRA_TEXT += " 'PYLUCENE' : { 'setup': 'http://pylucene-win32-binary.googlecode.com/files/lucene-3.0.2-py2.6-win32.egg', 'local': '"+ os.path.abspath(os.path.join(src_base,"lucene-3.0.2-py2.6-win32.egg")).replace("\\","/")+"'},\n"
- EXTRA_TEXT += " 'JCC' : { 'setup': 'http://pylucene-win32-binary.googlecode.com/files/JCC-2.6-py2.6-win32.egg', 'local': '"+ os.path.abspath(os.path.join(src_base,"JCC-2.6-py2.6-win32.egg")).replace("\\","/")+"'},\n"
-else:
- EXTRA_TEXT += " 'PYLUCENE' : { 'setup': 'http://apache.crihan.fr/dist/lucene/pylucene/pylucene-3.0.3-1-src.tar.gz', 'url': 'http://apache.crihan.fr/dist/lucene/pylucene/pylucene-3.0.3-1-src.tar.gz', 'local': '"+ os.path.abspath(os.path.join(src_base,"pylucene-3.0.3-1-src.tar.gz")).replace("\\","/")+"'},\n"
-if sys.platform == 'win32':
- EXTRA_TEXT += " 'PIL' : { 'setup': 'pil', 'url': 'http://effbot.org/media/downloads/PIL-1.1.7.win32-py2.6.exe', 'local': '"+ os.path.abspath(os.path.join(src_base,"PIL-1.1.7.win32-py2.6.exe")).replace("\\","/")+"'},\n"
-else:
- EXTRA_TEXT += " 'PIL' : { 'setup': 'pil', 'url': 'http://effbot.org/downloads/Imaging-1.1.7.tar.gz', 'local': '"+ os.path.abspath(os.path.join(src_base,"Imaging-1.1.7.tar.gz")).replace("\\","/")+"'},\n"
-EXTRA_TEXT += " 'DJANGO' : { 'setup': 'django', 'url': 'http://www.djangoproject.com/download/1.2.4/tarball/', 'local': '"+ os.path.abspath(os.path.join(src_base,"Django-1.2.4.tar.gz")).replace("\\","/")+"'},\n"
-EXTRA_TEXT += " 'JOGGING' : { 'setup': 'jogging', 'url': 'http://github.com/zain/jogging/tarball/v0.2.2', 'local': '"+ os.path.abspath(os.path.join(src_base,"jogging-0.2.2.tar.gz")).replace("\\","/")+"'},\n"
-EXTRA_TEXT += " 'DJANGO-EXTENSIONS' : { 'setup': 'django-extensions', 'url':'https://github.com/django-extensions/django-extensions/tarball/0.6', 'local':'"+ os.path.abspath(os.path.join(src_base,"django-extensions-0.6.tar.gz")).replace("\\","/")+"' },\n"
-EXTRA_TEXT += " 'DJANGO-REGISTRATION' : { 'setup': 'django-registration', 'url':'http://bitbucket.org/ubernostrum/django-registration/get/tip.tar.gz', 'local':'"+ os.path.abspath(os.path.join(src_base,"django-registration.tar.gz")).replace("\\","/")+"' },\n"
-EXTRA_TEXT += " 'DJANGO-TAGGING' : { 'setup': 'django-tagging', 'url':'http://django-tagging.googlecode.com/files/django-tagging-0.3.1.tar.gz', 'local':'"+ os.path.abspath(os.path.join(src_base,"django-tagging-0.3.1.tar.gz")).replace("\\","/")+"' },\n"
-EXTRA_TEXT += " 'DJANGO-PISTON' : { 'setup': 'django-piston', 'url':'"+ os.path.abspath(os.path.join(src_base,"django-piston-0.2.2-modified.tar.gz")).replace("\\","/")+"', 'local':'"+ os.path.abspath(os.path.join(src_base,"django-piston-0.2.2-modified.tar.gz")).replace("\\","/")+"' },\n"
-if sys.platform == 'win32':
- EXTRA_TEXT += " 'LXML' : { 'setup': 'lxml', 'url': 'http://pypi.python.org/packages/2.6/l/lxml/lxml-2.2.8-py2.6-win32.egg', 'local': '"+ os.path.abspath(os.path.join(src_base,"lxml-2.2.8-py2.6-win32.egg")).replace("\\","/")+"'},\n"
-else:
- EXTRA_TEXT += " 'LXML' : { 'setup': 'lxml', 'url': '"+ os.path.abspath(os.path.join(src_base,"lxml_2.2.8.tar.gz"))+"', 'local': '"+ os.path.abspath(os.path.join(src_base,"lxml-2.2.8.tar.gz")).replace("\\","/")+"'},\n"
-EXTRA_TEXT += " 'SETUPTOOLS-HG' : { 'setup': 'setuptools-hg', 'url':'http://bitbucket.org/jezdez/setuptools_hg/downloads/setuptools_hg-0.2.tar.gz', 'local':'"+ os.path.abspath(os.path.join(src_base,"setuptools_hg-0.2.tar.gz")).replace("\\","/")+"' },\n"
-EXTRA_TEXT += " 'OAUTH2' : { 'setup': 'python-oauth2', 'url':'"+ os.path.abspath(os.path.join(src_base,"python-oauth2-1.2.1-modified.tar.gz")).replace("\\","/")+"', 'local':'"+ os.path.abspath(os.path.join(src_base,"python-oauth2-1.2.1-modified.tar.gz")).replace("\\","/")+"' },\n"
-EXTRA_TEXT += " 'HTTPLIB2' : { 'setup': 'python-oauth2', 'url':'http://httplib2.googlecode.com/files/httplib2-0.6.0.tar.gz', 'local':'"+ os.path.abspath(os.path.join(src_base,"httplib2-0.6.0.tar.gz")).replace("\\","/")+"' },\n"
-EXTRA_TEXT += " 'DJANGO-OAUTH-PLUS' : { 'setup': 'django-oauth-plus', 'url':'http://bitbucket.org/david/django-oauth-plus/get/f314f018e473.gz', 'local':'"+ os.path.abspath(os.path.join(src_base,"django-oauth-plus.tar.gz")).replace("\\","/")+"' },\n"
-EXTRA_TEXT += "}\n"
-
-EXTRA_TEXT += "import sys\n"
-EXTRA_TEXT += "sys.path.append('"+lib_path+"')\n"
-
-EXTRA_TEXT += """
-
-import shutil
-import tarfile
-import zipfile
-import urllib
-import platform
-import patch
-
-
-INDEX_URL = 'http://pypi.python.org/simple/'
-
-
-def extend_parser(parser):
- parser.add_option(
- '--index-url',
- metavar='INDEX_URL',
- dest='index_url',
- default='',
- help='base URL of Python Package Index')
- parser.add_option(
- '--type-install',
- metavar='type_install',
- dest='type_install',
- default='local',
-        help='install type: local, url, or setup')
- parser.add_option(
- '--ignore-packages',
- metavar='ignore_packages',
- dest='ignore_packages',
- default=None,
-        help='comma-separated list of package keys to ignore')
+EXTRA_TEXT = "import sys\n"
+EXTRA_TEXT += "sys.path.append('%s')\n" % (lib_path)
+EXTRA_TEXT += "sys.path.append('%s')\n" % (os.path.abspath(os.path.join(here,"res")).replace("\\","/"))
+EXTRA_TEXT += "from res_create_env import generate_install_methods\n"
+EXTRA_TEXT += "adjust_options, extend_parser, after_install = generate_install_methods(path_locations, '%s', Logger, call_subprocess)\n" % (src_base)
-def adjust_options(options, args):
- pass
-
-
-def after_install(options, home_dir):
- home_dir, lib_dir, inc_dir, bin_dir = path_locations(home_dir)
- base_dir = os.path.dirname(home_dir)
- src_dir = join(home_dir, 'src')
- tmp_dir = join(home_dir, 'tmp')
- ensure_dir(src_dir)
- ensure_dir(tmp_dir)
- system_str = platform.system()
-
- res_source_key = options.type_install
-
- ignore_packages = []
-
- if options.ignore_packages :
- ignore_packages = options.ignore_packages.split(",")
-
- logger.indent += 2
- try:
-
- if 'PYLUCENE' not in ignore_packages and system_str != "Windows":
- #get pylucene
- logger.notify("Get Pylucene from %s " % URLS['PYLUCENE'][res_source_key])
- pylucene_src = os.path.join(src_dir,"pylucene.tar.gz")
- if res_source_key == 'local':
- shutil.copy(URLS['PYLUCENE'][res_source_key], pylucene_src)
- else:
- urllib.urlretrieve(URLS['PYLUCENE'][res_source_key], pylucene_src)
- tf = tarfile.open(pylucene_src,'r:gz')
- pylucene_base_path = os.path.join(src_dir,"pylucene")
- logger.notify("Extract Pylucene to %s " % pylucene_base_path)
- tf.extractall(pylucene_base_path)
- tf.close()
-
- pylucene_src_path = os.path.join(pylucene_base_path, os.listdir(pylucene_base_path)[0])
- jcc_src_path = os.path.abspath(os.path.join(pylucene_src_path,"jcc"))
-
- #install jcc
-
- #patch for linux
- if system_str == 'Linux' :
- olddir = os.getcwd()
- patch_dest_path = os.path.join(lib_dir,'site-packages','setuptools-0.6c11-py'+'%s.%s' % (sys.version_info[0], sys.version_info[1])+'.egg')
- if os.path.isfile(patch_dest_path):
- # must unzip egg
-                    # rename file and extract all
- shutil.move(patch_dest_path, patch_dest_path + ".zip")
- zf = zipfile.ZipFile(patch_dest_path + ".zip",'r')
- zf.extractall(patch_dest_path)
- os.remove(patch_dest_path + ".zip")
- logger.notify("Patch jcc : %s " % (patch_dest_path))
- os.chdir(patch_dest_path)
- p = patch.fromfile(os.path.join(jcc_src_path,"jcc","patches","patch.43.0.6c11"))
- p.apply()
- os.chdir(olddir)
-
- logger.notify("Install jcc")
- call_subprocess([os.path.abspath(os.path.join(home_dir, 'bin', 'python')), 'setup.py', 'install'],
- cwd=jcc_src_path,
- filter_stdout=filter_python_develop,
- show_stdout=True)
- #install pylucene
-
- logger.notify("Install pylucene")
- #modify makefile
- makefile_path = os.path.join(pylucene_src_path,"Makefile")
- logger.notify("Modify makefile %s " % makefile_path)
- shutil.move( makefile_path, makefile_path+"~" )
-
- destination= open( makefile_path, "w" )
- source= open( makefile_path+"~", "r" )
- destination.write("PREFIX_PYTHON="+os.path.abspath(home_dir)+"\\n")
- destination.write("ANT=ant\\n")
- destination.write("PYTHON=$(PREFIX_PYTHON)/bin/python\\n")
-
- if system_str == "Darwin":
- if sys.version_info >= (2,6):
- destination.write("JCC=$(PYTHON) -m jcc.__main__ --shared --arch x86_64 --arch i386\\n")
- else:
- destination.write("JCC=$(PYTHON) -m jcc --shared --arch x86_64 --arch i386\\n")
- destination.write("NUM_FILES=2\\n")
- elif system_str == "Windows":
- destination.write("JCC=$(PYTHON) -m jcc.__main__ --shared --arch x86_64 --arch i386\\n")
- destination.write("NUM_FILES=2\\n")
- else:
- if sys.version_info >= (2,6) and sys.version_info < (2,7):
- destination.write("JCC=$(PYTHON) -m jcc.__main__ --shared\\n")
- else:
- destination.write("JCC=$(PYTHON) -m jcc --shared\\n")
- destination.write("NUM_FILES=2\\n")
- for line in source:
- destination.write( line )
- source.close()
- destination.close()
- os.remove(makefile_path+"~" )
-
- logger.notify("pylucene make")
- call_subprocess(['make'],
- cwd=os.path.abspath(pylucene_src_path),
- filter_stdout=filter_python_develop,
- show_stdout=True)
-
- logger.notify("pylucene make install")
- call_subprocess(['make', 'install'],
- cwd=os.path.abspath(pylucene_src_path),
- filter_stdout=filter_python_develop,
- show_stdout=True)
-
- if system_str == 'Linux' and 'DISTRIBUTE' not in ignore_packages:
- normal_install('DISTRIBUTE', 'pip', None, None, res_source_key, home_dir, tmp_dir)
-
- NORMAL_INSTALL = [ #(key,method, option_str, extra_env)
- ('MYSQL', 'pip', None, None),
- ('PIL', 'easy_install', None, None),
- ('DJANGO','pip', None, None),
- ('JOGGING','pip', None, None),
- ('DJANGO-EXTENSIONS', 'pip', None, None),
- ('DJANGO-REGISTRATION', 'easy_install', '-Z', None),
- ('DJANGO-TAGGING', 'pip', None, None),
- ('DJANGO-PISTON', 'pip', None, None),
- ('SETUPTOOLS-HG', 'pip', None, None),
- ('HTTPLIB2', 'pip', None, None),
- ('OAUTH2', 'pip', None, None),
- ('DJANGO-OAUTH-PLUS', 'pip', None, None),
- ]
-
- if 'PYLUCENE' not in ignore_packages and system_str == "Windows":
- NORMAL_INSTALL.append(('JCC','easy_install',None,None))
- NORMAL_INSTALL.append(('PYLUCENE','easy_install',None,None))
-
- if system_str == "Darwin":
- NORMAL_INSTALL.append(('LXML', 'easy_install', None, {'STATIC_DEPS': 'true'}))
- else:
- NORMAL_INSTALL.append(('LXML', 'easy_install', None, None))
-
- if system_str == "Windows":
- #get psycopg2
- psycopg2_src = os.path.join(src_dir,"psycopg2.zip")
- shutil.copy(URLS['PSYCOPG2'][res_source_key], psycopg2_src)
- #extract psycopg2
- zf = zipfile.ZipFile(psycopg2_src)
- psycopg2_base_path = os.path.join(src_dir,"psycopg2")
- zf.extractall(psycopg2_base_path)
- zf.close()
-
- psycopg2_src_path = os.path.join(psycopg2_base_path, os.listdir(psycopg2_base_path)[0])
- shutil.copytree(os.path.join(psycopg2_src_path, 'psycopg2'), os.path.abspath(os.path.join(home_dir, 'Lib', 'psycopg2')))
- shutil.copy(os.path.join(psycopg2_src_path, 'psycopg2-2.0.10-py2.6.egg-info'), os.path.abspath(os.path.join(home_dir, 'Lib', 'site-packages')))
- else:
- NORMAL_INSTALL.append(('PSYCOPG2', 'pip', None, None))
-
-
- for key, method, option_str, extra_env in NORMAL_INSTALL:
- if key not in ignore_packages:
- normal_install(key, method, option_str, extra_env, res_source_key, home_dir, tmp_dir)
-
- logger.notify("Clear source dir")
- shutil.rmtree(src_dir)
-
- finally:
- logger.indent -= 2
- script_dir = join(base_dir, bin_dir)
- logger.notify('Run "%s Package" to install new packages that provide builds'
- % join(script_dir, 'easy_install'))
-
-
-def normal_install(key, method, option_str, extra_env, res_source_key, home_dir, tmp_dir):
- logger.notify("Install %s from %s with %s" % (key,URLS[key][res_source_key],method))
- if method == 'pip':
- if sys.platform == 'win32':
- args = [os.path.abspath(os.path.join(home_dir, 'Scripts', 'pip')), 'install', '-E', os.path.abspath(home_dir), URLS[key][res_source_key]]
- else:
- args = [os.path.abspath(os.path.join(home_dir, 'bin', 'pip')), 'install', '-E', os.path.abspath(home_dir), URLS[key][res_source_key]]
- if option_str :
- args.insert(4,option_str)
- call_subprocess(args,
- cwd=os.path.abspath(tmp_dir),
- filter_stdout=filter_python_develop,
- show_stdout=True,
- extra_env=extra_env)
- else:
- if sys.platform == 'win32':
- args = [os.path.abspath(os.path.join(home_dir, 'Scripts', 'easy_install')), URLS[key][res_source_key]]
- else:
- args = [os.path.abspath(os.path.join(home_dir, 'bin', 'easy_install')), URLS[key][res_source_key]]
- if option_str :
- args.insert(1,option_str)
- call_subprocess(args,
- cwd=os.path.abspath(tmp_dir),
- filter_stdout=filter_python_develop,
- show_stdout=True,
- extra_env=extra_env)
-
-
-def ensure_dir(dir):
- if not os.path.exists(dir):
- logger.notify('Creating directory %s' % dir)
- os.makedirs(dir)
-
-def filter_python_develop(line):
- if not line.strip():
- return Logger.DEBUG
- for prefix in ['Searching for', 'Reading ', 'Best match: ', 'Processing ',
- 'Moving ', 'Adding ', 'running ', 'writing ', 'Creating ',
- 'creating ', 'Copying ']:
- if line.startswith(prefix):
- return Logger.DEBUG
- return Logger.NOTIFY
-"""
+#f = open(os.path.join(os.path.join(os.path.dirname(os.path.abspath(__file__)),"res"),'res_create_env.py'), 'r')
+#EXTRA_TEXT += f.read()
+#EXTRA_TEXT += "\n"
+#EXTRA_TEXT += "RES_ENV = ResourcesEnv('%s')\n" % (src_base)
def main():
python_version = ".".join(map(str,sys.version_info[0:2]))
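
The inline URL table and installer hooks removed above are no longer generated here: EXTRA_TEXT now only puts the bundled res/lib and res directories on sys.path and delegates to res_create_env. A minimal sketch of the prologue that create_env.py now emits into the generated bootstrap script; the absolute paths are hypothetical expansions of lib_path, the res directory and src_base computed above:

    # Sketch of the generated prologue (paths are placeholders filled in at build time).
    import sys
    sys.path.append('/platform/virtualenv/web/res/lib')   # lib_path
    sys.path.append('/platform/virtualenv/web/res')       # res directory
    from res_create_env import generate_install_methods
    # path_locations, Logger and call_subprocess are already defined earlier in the
    # generated virtualenv bootstrap.
    adjust_options, extend_parser, after_install = generate_install_methods(
        path_locations, '/platform/virtualenv/web/res/src', Logger, call_subprocess)
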
--- a/virtualenv/web/res/lib/patch.py Tue Mar 22 12:35:29 2011 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,589 +0,0 @@
-""" Patch utility to apply unified diffs
-
- Brute-force line-by-line non-recursive parsing
-
- Copyright (c) 2008-2010 anatoly techtonik
- Available under the terms of MIT license
-
- Project home: http://code.google.com/p/python-patch/
-
-
- $Id: patch.py 76 2010-04-08 19:10:21Z techtonik $
- $HeadURL: https://python-patch.googlecode.com/svn/trunk/patch.py $
-"""
-
-__author__ = "techtonik.rainforce.org"
-__version__ = "10.04"
-
-import copy
-import logging
-import re
-# cStringIO doesn't support unicode in 2.5
-from StringIO import StringIO
-from logging import debug, info, warning
-
-from os.path import exists, isfile, abspath
-from os import unlink
-
-
-#------------------------------------------------
-# Logging is controlled by "python_patch" logger
-
-debugmode = False
-
-logger = logging.getLogger("python_patch")
-loghandler = logging.StreamHandler()
-logger.addHandler(loghandler)
-
-debug = logger.debug
-info = logger.info
-warning = logger.warning
-
-#: disable library logging by default
-logger.setLevel(logging.CRITICAL)
-
-#------------------------------------------------
-
-
-def fromfile(filename):
- """ Parse patch file and return Patch() object
- """
-
- info("reading patch from file %s" % filename)
- fp = open(filename, "rb")
- patch = Patch(fp)
- fp.close()
- return patch
-
-
-def fromstring(s):
- """ Parse text string and return Patch() object
- """
-
- return Patch(
-    StringIO(s)
- )
-
-
-
-class HunkInfo(object):
- """ Parsed hunk data container (hunk starts with @@ -R +R @@) """
-
- def __init__(self):
- self.startsrc=None #: line count starts with 1
- self.linessrc=None
- self.starttgt=None
- self.linestgt=None
- self.invalid=False
- self.text=[]
-
- def copy(self):
- return copy.copy(self)
-
-# def apply(self, estream):
-# """ write hunk data into enumerable stream
-# return strings one by one until hunk is
-# over
-#
-# enumerable stream are tuples (lineno, line)
-# where lineno starts with 0
-# """
-# pass
-
-
-
-class Patch(object):
-
- def __init__(self, stream=None):
-
- # define Patch data members
- # table with a row for every source file
-
- #: list of source filenames
- self.source=None
- self.target=None
- #: list of lists of hunks
- self.hunks=None
- #: file endings statistics for every hunk
- self.hunkends=None
-
- if stream:
- self.parse(stream)
-
- def copy(self):
- return copy.copy(self)
-
- def parse(self, stream):
- """ parse unified diff """
- self.source = []
- self.target = []
- self.hunks = []
- self.hunkends = []
-
- # define possible file regions that will direct the parser flow
- header = False # comments before the patch body
- filenames = False # lines starting with --- and +++
-
- hunkhead = False # @@ -R +R @@ sequence
- hunkbody = False #
- hunkskip = False # skipping invalid hunk mode
-
- header = True
- lineends = dict(lf=0, crlf=0, cr=0)
- nextfileno = 0
- nexthunkno = 0 #: even if index starts with 0 user messages number hunks from 1
-
- # hunkinfo holds parsed values, hunkactual - calculated
- hunkinfo = HunkInfo()
- hunkactual = dict(linessrc=None, linestgt=None)
-
- fe = enumerate(stream)
- for lineno, line in fe:
-
- # analyze state
- if header and line.startswith("--- "):
- header = False
- # switch to filenames state
- filenames = True
- #: skip hunkskip and hunkbody code until you read definition of hunkhead
- if hunkbody:
- # process line first
- if re.match(r"^[- \+\\]", line):
- # gather stats about line endings
- if line.endswith("\r\n"):
- self.hunkends[nextfileno-1]["crlf"] += 1
- elif line.endswith("\n"):
- self.hunkends[nextfileno-1]["lf"] += 1
- elif line.endswith("\r"):
- self.hunkends[nextfileno-1]["cr"] += 1
-
- if line.startswith("-"):
- hunkactual["linessrc"] += 1
- elif line.startswith("+"):
- hunkactual["linestgt"] += 1
- elif not line.startswith("\\"):
- hunkactual["linessrc"] += 1
- hunkactual["linestgt"] += 1
- hunkinfo.text.append(line)
- # todo: handle \ No newline cases
- else:
- warning("invalid hunk no.%d at %d for target file %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
- # add hunk status node
- self.hunks[nextfileno-1].append(hunkinfo.copy())
- self.hunks[nextfileno-1][nexthunkno-1]["invalid"] = True
- # switch to hunkskip state
- hunkbody = False
- hunkskip = True
-
- # check exit conditions
- if hunkactual["linessrc"] > hunkinfo.linessrc or hunkactual["linestgt"] > hunkinfo.linestgt:
- warning("extra hunk no.%d lines at %d for target %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
- # add hunk status node
- self.hunks[nextfileno-1].append(hunkinfo.copy())
- self.hunks[nextfileno-1][nexthunkno-1]["invalid"] = True
- # switch to hunkskip state
- hunkbody = False
- hunkskip = True
- elif hunkinfo.linessrc == hunkactual["linessrc"] and hunkinfo.linestgt == hunkactual["linestgt"]:
- self.hunks[nextfileno-1].append(hunkinfo.copy())
- # switch to hunkskip state
- hunkbody = False
- hunkskip = True
-
- # detect mixed window/unix line ends
- ends = self.hunkends[nextfileno-1]
- if ((ends["cr"]!=0) + (ends["crlf"]!=0) + (ends["lf"]!=0)) > 1:
- warning("inconsistent line ends in patch hunks for %s" % self.source[nextfileno-1])
- if debugmode:
- debuglines = dict(ends)
- debuglines.update(file=self.target[nextfileno-1], hunk=nexthunkno)
- debug("crlf: %(crlf)d lf: %(lf)d cr: %(cr)d\t - file: %(file)s hunk: %(hunk)d" % debuglines)
-
- if hunkskip:
- match = re.match("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?", line)
- if match:
- # switch to hunkhead state
- hunkskip = False
- hunkhead = True
- elif line.startswith("--- "):
- # switch to filenames state
- hunkskip = False
- filenames = True
- if debugmode and len(self.source) > 0:
- debug("- %2d hunks for %s" % (len(self.hunks[nextfileno-1]), self.source[nextfileno-1]))
-
- if filenames:
- if line.startswith("--- "):
- if nextfileno in self.source:
- warning("skipping invalid patch for %s" % self.source[nextfileno])
- del self.source[nextfileno]
- # double source filename line is encountered
- # attempt to restart from this second line
- re_filename = "^--- ([^\t]+)"
- match = re.match(re_filename, line)
- # todo: support spaces in filenames
- if match:
- self.source.append(match.group(1).strip())
- else:
- warning("skipping invalid filename at line %d" % lineno)
- # switch back to header state
- filenames = False
- header = True
- elif not line.startswith("+++ "):
- if nextfileno in self.source:
- warning("skipping invalid patch with no target for %s" % self.source[nextfileno])
- del self.source[nextfileno]
- else:
- # this should be unreachable
- warning("skipping invalid target patch")
- filenames = False
- header = True
- else:
- if nextfileno in self.target:
- warning("skipping invalid patch - double target at line %d" % lineno)
- del self.source[nextfileno]
- del self.target[nextfileno]
- nextfileno -= 1
- # double target filename line is encountered
- # switch back to header state
- filenames = False
- header = True
- else:
- re_filename = "^\+\+\+ ([^\t]+)"
- match = re.match(re_filename, line)
- if not match:
- warning("skipping invalid patch - no target filename at line %d" % lineno)
- # switch back to header state
- filenames = False
- header = True
- else:
- self.target.append(match.group(1).strip())
- nextfileno += 1
- # switch to hunkhead state
- filenames = False
- hunkhead = True
- nexthunkno = 0
- self.hunks.append([])
- self.hunkends.append(lineends.copy())
- continue
-
- if hunkhead:
- match = re.match("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?", line)
- if not match:
- if nextfileno-1 not in self.hunks:
- warning("skipping invalid patch with no hunks for file %s" % self.target[nextfileno-1])
- # switch to header state
- hunkhead = False
- header = True
- continue
- else:
- # switch to header state
- hunkhead = False
- header = True
- else:
- hunkinfo.startsrc = int(match.group(1))
- hunkinfo.linessrc = 1
- if match.group(3): hunkinfo.linessrc = int(match.group(3))
- hunkinfo.starttgt = int(match.group(4))
- hunkinfo.linestgt = 1
- if match.group(6): hunkinfo.linestgt = int(match.group(6))
- hunkinfo.invalid = False
- hunkinfo.text = []
-
- hunkactual["linessrc"] = hunkactual["linestgt"] = 0
-
- # switch to hunkbody state
- hunkhead = False
- hunkbody = True
- nexthunkno += 1
- continue
- else:
- if not hunkskip:
- warning("patch file incomplete - %s" % filename)
- # sys.exit(?)
- else:
- # duplicated message when an eof is reached
- if debugmode and len(self.source) > 0:
- debug("- %2d hunks for %s" % (len(self.hunks[nextfileno-1]), self.source[nextfileno-1]))
-
- info("total files: %d total hunks: %d" % (len(self.source), sum(len(hset) for hset in self.hunks)))
-
-
- def apply(self):
- """ apply parsed patch """
-
- total = len(self.source)
- for fileno, filename in enumerate(self.source):
-
- f2patch = filename
- if not exists(f2patch):
- f2patch = self.target[fileno]
- if not exists(f2patch):
- warning("source/target file does not exist\n--- %s\n+++ %s" % (filename, f2patch))
- continue
- if not isfile(f2patch):
- warning("not a file - %s" % f2patch)
- continue
- filename = f2patch
-
- info("processing %d/%d:\t %s" % (fileno+1, total, filename))
-
- # validate before patching
- f2fp = open(filename)
- hunkno = 0
- hunk = self.hunks[fileno][hunkno]
- hunkfind = []
- hunkreplace = []
- validhunks = 0
- canpatch = False
- for lineno, line in enumerate(f2fp):
- if lineno+1 < hunk.startsrc:
- continue
- elif lineno+1 == hunk.startsrc:
- hunkfind = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " -"]
- hunkreplace = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " +"]
- #pprint(hunkreplace)
- hunklineno = 0
-
- # todo \ No newline at end of file
-
- # check hunks in source file
- if lineno+1 < hunk.startsrc+len(hunkfind)-1:
- if line.rstrip("\r\n") == hunkfind[hunklineno]:
- hunklineno+=1
- else:
- debug("hunk no.%d doesn't match source file %s" % (hunkno+1, filename))
- # file may be already patched, but we will check other hunks anyway
- hunkno += 1
- if hunkno < len(self.hunks[fileno]):
- hunk = self.hunks[fileno][hunkno]
- continue
- else:
- break
-
- # check if processed line is the last line
- if lineno+1 == hunk.startsrc+len(hunkfind)-1:
- debug("file %s hunk no.%d -- is ready to be patched" % (filename, hunkno+1))
- hunkno+=1
- validhunks+=1
- if hunkno < len(self.hunks[fileno]):
- hunk = self.hunks[fileno][hunkno]
- else:
- if validhunks == len(self.hunks[fileno]):
- # patch file
- canpatch = True
- break
- else:
- if hunkno < len(self.hunks[fileno]):
- warning("premature end of source file %s at hunk %d" % (filename, hunkno+1))
-
- f2fp.close()
-
- if validhunks < len(self.hunks[fileno]):
- if self._match_file_hunks(filename, self.hunks[fileno]):
- warning("already patched %s" % filename)
- else:
- warning("source file is different - %s" % filename)
- if canpatch:
- backupname = filename+".orig"
- if exists(backupname):
- warning("can't backup original file to %s - aborting" % backupname)
- else:
- import shutil
- shutil.move(filename, backupname)
- if self.write_hunks(backupname, filename, self.hunks[fileno]):
- warning("successfully patched %s" % filename)
- unlink(backupname)
- else:
- warning("error patching file %s" % filename)
- shutil.copy(filename, filename+".invalid")
- warning("invalid version is saved to %s" % filename+".invalid")
- # todo: proper rejects
- shutil.move(backupname, filename)
-
- # todo: check for premature eof
-
-
- def can_patch(self, filename):
- """ Check if specified filename can be patched. Returns None if file can
- not be found among source filenames. False if patch can not be applied
-        cleanly. True otherwise.
-
- :returns: True, False or None
- """
- idx = self._get_file_idx(filename, source=True)
- if idx == None:
- return None
- return self._match_file_hunks(filename, self.hunks[idx])
-
-
- def _match_file_hunks(self, filepath, hunks):
- matched = True
- fp = open(abspath(filepath))
-
- class NoMatch(Exception):
- pass
-
- lineno = 1
- line = fp.readline()
- hno = None
- try:
- for hno, h in enumerate(hunks):
- # skip to first line of the hunk
- while lineno < h.starttgt:
- if not len(line): # eof
- debug("check failed - premature eof before hunk: %d" % (hno+1))
- raise NoMatch
- line = fp.readline()
- lineno += 1
- for hline in h.text:
- if hline.startswith("-"):
- continue
- if not len(line):
- debug("check failed - premature eof on hunk: %d" % (hno+1))
- # todo: \ No newline at the end of file
- raise NoMatch
- if line.rstrip("\r\n") != hline[1:].rstrip("\r\n"):
- debug("file is not patched - failed hunk: %d" % (hno+1))
- raise NoMatch
- line = fp.readline()
- lineno += 1
-
- except NoMatch:
- matched = False
- # todo: display failed hunk, i.e. expected/found
-
- fp.close()
- return matched
-
-
- def patch_stream(self, instream, hunks):
- """ Generator that yields stream patched with hunks iterable
-
- Converts lineends in hunk lines to the best suitable format
- autodetected from input
- """
-
- # todo: At the moment substituted lineends may not be the same
- # at the start and at the end of patching. Also issue a
- # warning/throw about mixed lineends (is it really needed?)
-
- hunks = iter(hunks)
-
- srclineno = 1
-
- lineends = {'\n':0, '\r\n':0, '\r':0}
- def get_line():
- """
- local utility function - return line from source stream
- collecting line end statistics on the way
- """
- line = instream.readline()
- # 'U' mode works only with text files
- if line.endswith("\r\n"):
- lineends["\r\n"] += 1
- elif line.endswith("\n"):
- lineends["\n"] += 1
- elif line.endswith("\r"):
- lineends["\r"] += 1
- return line
-
- for hno, h in enumerate(hunks):
- debug("hunk %d" % (hno+1))
- # skip to line just before hunk starts
- while srclineno < h.startsrc:
- yield get_line()
- srclineno += 1
-
- for hline in h.text:
- # todo: check \ No newline at the end of file
- if hline.startswith("-") or hline.startswith("\\"):
- get_line()
- srclineno += 1
- continue
- else:
- if not hline.startswith("+"):
- get_line()
- srclineno += 1
- line2write = hline[1:]
- # detect if line ends are consistent in source file
- if sum([bool(lineends[x]) for x in lineends]) == 1:
- newline = [x for x in lineends if lineends[x] != 0][0]
- yield line2write.rstrip("\r\n")+newline
- else: # newlines are mixed
- yield line2write
-
- for line in instream:
- yield line
-
-
- def write_hunks(self, srcname, tgtname, hunks):
- src = open(srcname, "rb")
- tgt = open(tgtname, "wb")
-
- debug("processing target file %s" % tgtname)
-
- tgt.writelines(self.patch_stream(src, hunks))
-
- tgt.close()
- src.close()
- return True
-
-
- def _get_file_idx(self, filename, source=None):
- """ Detect index of given filename within patch.
-
- :param filename:
- :param source: search filename among sources (True),
- targets (False), or both (None)
- :returns: int or None
- """
- filename = abspath(filename)
- if source == True or source == None:
- for i,fnm in enumerate(self.source):
- if filename == abspath(fnm):
- return i
- if source == False or source == None:
- for i,fnm in enumerate(self.target):
- if filename == abspath(fnm):
- return i
-
-
-
-
-from optparse import OptionParser
-from os.path import exists
-import sys
-
-if __name__ == "__main__":
- opt = OptionParser(usage="%prog [options] unipatch-file", version="python-patch %s" % __version__)
- opt.add_option("--debug", action="store_true", dest="debugmode", help="debug mode")
- (options, args) = opt.parse_args()
-
- if not args:
- opt.print_version()
- opt.print_help()
- sys.exit()
- debugmode = options.debugmode
- patchfile = args[0]
- if not exists(patchfile) or not isfile(patchfile):
- sys.exit("patch file does not exist - %s" % patchfile)
-
-
- if debugmode:
- loglevel = logging.DEBUG
- logformat = "%(levelname)8s %(message)s"
- else:
- loglevel = logging.INFO
- logformat = "%(message)s"
- logger.setLevel(loglevel)
- loghandler.setFormatter(logging.Formatter(logformat))
-
-
-
- patch = fromfile(patchfile)
- #pprint(patch)
- patch.apply()
-
- # todo: document and test line ends handling logic - patch.py detects proper line-endings
-  # for inserted hunks and issues a warning if patched file has inconsistent line ends
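
The module deleted here is the python-patch library that the old after_install hook (removed above) used to apply the JCC setuptools patch on Linux; PYLUCENE installation now goes through install_pylucene from lib_create_env, which presumably absorbs that patch step. For reference, a minimal sketch of the library's entry points as the old hook used them (the patch file path is hypothetical):

    # Usage of the removed python-patch module, as the old after_install did it.
    import patch
    p = patch.fromfile('patches/patch.43.0.6c11')  # parse a unified diff from disk
    p.apply()                                      # apply hunks relative to the cwd
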
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/virtualenv/web/res/res_create_env.py Tue Mar 29 13:57:28 2011 +0200
@@ -0,0 +1,54 @@
+import platform
+
+from lib_create_env import lib_generate_install_methods, install_pylucene, install_psycopg2
+
+system_str = platform.system()
+
+
+if system_str == 'Linux':
+ INSTALLS = [
+ ('DISTRIBUTE', 'pip', None, None),
+ ]
+else:
+ INSTALLS = []
+
+
+INSTALLS.extend([ #(key,method, option_str, dict_extra_env)
+ ('SETUPTOOLS-HG', 'pip', None, None),
+ ('MYSQL', 'pip', None, None),
+ ('PIL', 'easy_install', None, None),
+ ('DJANGO','pip', None, None),
+ ('JOGGING','pip', None, None),
+ ('DJANGO-EXTENSIONS', 'pip', None, None),
+ ('DJANGO-REGISTRATION', 'easy_install', '-Z', None),
+ ('DJANGO-TAGGING', 'pip', None, None),
+ ('DJANGO-PISTON', 'pip', None, None),
+ ('HTTPLIB2', 'pip', None, None),
+ ('OAUTH2', 'easy_install', None, None),
+ ('DJANGO-OAUTH-PLUS', 'pip', None, None),
+])
+
+if system_str == 'Windows':
+ INSTALLS.extend([
+ ('JCC','easy_install',None,None),
+ ('PYLUCENE','easy_install',None,None),
+ ('PSYCOPG2',install_psycopg2,None,None),
+ ])
+else:
+ INSTALLS.extend([
+ ('PYLUCENE',install_pylucene,None,None),
+ ('PSYCOPG2', 'pip', None, None),
+ ])
+
+if system_str == "Darwin":
+ INSTALLS.extend([
+ ('LXML', 'pip', None, {'STATIC_DEPS': 'true', 'LIBXML2_VERSION': '2.7.8', 'LIBXSLT_VERSION': '1.1.26'}),
+ ])
+else:
+ INSTALLS.extend([
+ ('LXML', 'pip', None, None),
+ ])
+
+
+def generate_install_methods(path_locations, src_base, Logger, call_subprocess):
+ return lib_generate_install_methods(path_locations, src_base, Logger, call_subprocess, INSTALLS)
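
Each INSTALLS entry is a (key, method, option_str, extra_env) tuple: key presumably indexes the source/URL table maintained by lib_create_env (the successor of the URLS dict removed above), method is 'pip', 'easy_install' or a callable such as install_pylucene, option_str is an optional installer flag (e.g. '-Z'), and extra_env is an optional dict of environment overrides as in the Darwin LXML case. A hedged sketch of registering one more package; 'SOUTH' is hypothetical and would also need a matching table entry:

    # Hypothetical extension: one more package for after_install to set up.
    # 'SOUTH' is not part of this changeset and needs an entry in lib_create_env's table.
    INSTALLS.append(('SOUTH', 'pip', None, None))
    # Entries may also carry an installer flag and environment overrides, as the
    # ('DJANGO-REGISTRATION', 'easy_install', '-Z', None) and Darwin LXML lines above do.
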
Binary file virtualenv/web/res/src/Django-1.2.4.tar.gz has changed
Binary file virtualenv/web/res/src/Imaging-1.1.7.tar.gz has changed
Binary file virtualenv/web/res/src/JCC-2.6-py2.6-win32.egg has changed
Binary file virtualenv/web/res/src/MySQL-python-1.2.3.tar.gz has changed
Binary file virtualenv/web/res/src/PIL-1.1.7.win32-py2.6.exe has changed
Binary file virtualenv/web/res/src/distribute-0.6.14.tar.gz has changed
Binary file virtualenv/web/res/src/django-extensions-0.6.tar.gz has changed
Binary file virtualenv/web/res/src/django-oauth-plus.tar.gz has changed
Binary file virtualenv/web/res/src/django-piston-0.2.2-modified.tar.gz has changed
Binary file virtualenv/web/res/src/django-registration.tar.gz has changed
Binary file virtualenv/web/res/src/django-tagging-0.3.1.tar.gz has changed
Binary file virtualenv/web/res/src/httplib2-0.6.0.tar.gz has changed
Binary file virtualenv/web/res/src/jogging-0.2.2.tar.gz has changed
Binary file virtualenv/web/res/src/lucene-3.0.2-py2.6-win32.egg has changed
Binary file virtualenv/web/res/src/lxml-2.2.8-py2.6-win32.egg has changed
Binary file virtualenv/web/res/src/lxml-2.2.8.tar.gz has changed
Binary file virtualenv/web/res/src/psycopg2-2.0.10.win32-py2.6-pg8.3.7-release.zip has changed
Binary file virtualenv/web/res/src/psycopg2-2.3.2.tar.gz has changed
Binary file virtualenv/web/res/src/pylucene-3.0.3-1-src.tar.gz has changed
Binary file virtualenv/web/res/src/python-oauth2-1.2.1-modified.tar.gz has changed
Binary file virtualenv/web/res/src/setuptools_hg-0.2.tar.gz has changed