upgrade distribution script and define version 0.1.30
author ymh <ymh.work@gmail.com>
date Fri, 22 Jun 2018 17:34:15 +0200
changeset 12 8895d41be7e2
parent 11 93228a694ce7
child 13 56608f42821f
upgrade distribution script and define version
.hgignore
sbin/sync/core.py
sbin/sync/fabfile.py
sbin/sync/requirements.txt
src/iconolab_mcc/__init__.py
--- a/.hgignore	Fri Jun 22 11:34:42 2018 +0200
+++ b/.hgignore	Fri Jun 22 17:34:15 2018 +0200
@@ -46,3 +46,7 @@
 
 sbin/sync/.vagrant
 sbin/sync/Vagrantfile
+
+^sbin/sync/.envrc
+^sbin/sync/.vscode
+^sbin/sync/fabric.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sbin/sync/core.py	Fri Jun 22 17:34:15 2018 +0200
@@ -0,0 +1,299 @@
+# -*- coding: utf-8 -*-
+'''
+Created on Feb 20, 2013
+
+@author: ymh
+'''
+# from fabric.api import run, local, env, cd, put, prefix, sudo, lcd
+# from fabric.colors import green
+# from fabric.context_managers import settings
+# from fabric.contrib.files import exists, upload_template
+# from fabric.contrib.project import rsync_project
+# from fabric.tasks import Task
+from fabric import Connection
+import imp
+import os.path
+import re
+import shutil
+import sys
+import urllib.parse
+import requirements
+
+
+# __all__ = ["check_folder_access", "migrate", "collectstatic", "do_relaunch_server",
+#            "export_version", "do_sync_web", "create_config", "clean_export_folder",
+#            "sync_install_build", "do_create_virtualenv", "clean_rsync_folder", "rsync_export",
+#            "do_sync_comp", "get_comp_versions_dict", "SyncComp", "get_src_version", "sync_build",
+#            "install_build", "do_create_virtualenv_requirement", "build_src"]
+
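+# Build the local export directory path for a given version string:
+# <base_export_path>/<export_prefix>_<version>, with "~" expanded.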
+def get_export_path(env, version):
+    base_path = os.path.join(env.base_export_path,env.export_prefix).rstrip("/")
+    return os.path.expanduser(base_path) + "_%s" % (str(version))
+
+def clean_export_folder(path):
+    print("Removing %s" % path)
+    if os.path.isdir(path):
+        shutil.rmtree(path, ignore_errors=True)
+
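+# Export each requested component at a given revision into <path>/<key>.
+# Remote repositories are cloned first (git is tried via ls-remote, with hg as
+# the fallback); the revision is then extracted with `git archive` or `hg archive`.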
+def do_export_version(c, path, **export_keys):
+    print("Export version %s : %s" % (path,repr(export_keys)))
+
+    env = c.env
+
+    for export_key, version in export_keys.items():
+        export_path = os.path.join(path,export_key)
+
+        repo_url = env.repos[export_key]['repo']
+        url_part = urllib.parse.urlparse(repo_url)
+        if url_part.scheme or url_part.netloc:
+            # this is a remote repo. Let's clone first
+            clone_path = os.path.join(path,'clone',export_keys[export_key])
+            os.makedirs(clone_path)
+
+            output = c.run('git ls-remote \"%s\"' % repo_url, warn=True)
+            print("OUTPUT %r" % output)
+            scm = "hg" if output.failed else "git"
+            if scm == "hg":
+                output = c.run("hg clone \"%s\" \"%s\"" % (repo_url,clone_path))
+            else:
+                c.run("git clone \"%s\" \"%s\"" % (repo_url,clone_path))
+        else:
+            clone_path = repo_url
+
+        with c.cd(clone_path):
+            # detect .git or .hg subfolder
+            if os.path.exists(os.path.join(clone_path,".git")):
+                os.makedirs(export_path)
+                cmd_str = "git archive \'%s\' | tar -x -C \"%s\""
+            else:
+                cmd_str = "hg archive -r \'%s\' \"%s\""
+            c.run(cmd_str % (str(version),export_path))
+
+    print("Export version %s done"%repr(export_keys))
+
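+# Load the project's setup.py from <path> with imp and invoke its launch_setup()
+# hook with the given command list, temporarily switching cwd to <path>.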
+def launch_setup_command(c, command_array, path):
+    f = None
+    sys.path.append(path)
+    current_path = os.getcwd()
+    try:
+        os.chdir(path)
+        try:
+            f, pathname, description = imp.find_module("setup", [path])
+            print("launch_setup_command at %s : found setup" % path)
+            setup_mod = imp.load_module("setup", f, pathname, description)
+            print("launch_setup_command at %s : setup loaded" % path)
+        except:
+            e = sys.exc_info()[0]
+            print("Error launching commands %s : %s" % (path, str(e)))
+            raise
+        finally:
+            if f:
+                f.close()
+
+        return setup_mod.launch_setup("setup.py", command_array)
+    finally:
+        os.chdir(current_path)
+
+
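+# Run `egg_info` and parse the generated requires.txt, keeping only dependencies
+# that are themselves declared in env['repos']; returns (name, version or 'tip') pairs.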
+def get_src_dependencies(c, pkg_name, path):
+    print("Get source dependencies at %s" % path)
+    launch_setup_command(c, ['egg_info'], path)
+    egg_requirement_file = os.path.join(path, "%s.egg-info" % pkg_name, "requires.txt")
+    res = []
+    with open(egg_requirement_file) as f:
+        for req in requirements.parse(f):
+            if req.name in c.env['repos']:
+                r_version = req.specs[0][1] if req.specs else 'tip'
+                res.append((req.name, r_version))
+    print("Build source dist at %s done : %r" % (path, res))
+    return res
+
+
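+# Read a Django settings value (e.g. STATIC_ROOT) on the remote host by running
+# python inside the target virtualenv with DJANGO_SETTINGS_MODULE set.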
+def get_remote_env(c, remotepath, remotevirtualenvpath, application_module, settings_key, settings_module=''):
+    if not settings_module:
+        settings_module = '%s.%s' % (application_module, 'settings')
+    activate_path = os.path.join(remotevirtualenvpath, "bin/activate")
+
+    env = c.env
+    with Connection(env['hosts'][0]) as rconnection:
+        with rconnection.prefix("echo $SHELL && . \"%s\"" % os.path.join(remotevirtualenvpath, "bin/activate")), rconnection.prefix("export PYTHONPATH=\"%s\"" % remotepath):
+            return rconnection.run("DJANGO_SETTINGS_MODULE=%s python -c 'import django.conf;print(django.conf.settings.%s)'" % (settings_module, settings_key)).stdout
+
+
+# def rsync_export(path, remotepath, filters):
+#     print("Rsync %s to %s",(path,remotepath))
+
+#     filter_option_str = "--progress --stats"
+#     if filters:
+#         filter_option_str += " " + " ".join(["--filter \"%s\"" % (f) for f in filters])
+
+#     run("mkdir -p \"%s\"" % remotepath)
+#     rsync_project(remotepath, local_dir=path, extra_opts=filter_option_str, delete=True)
+#     print("Rsync %s to %s done",(path,remotepath))
+
+# def clean_rsync_folder(remotepath):
+#     print("clean rsync folder %s" % remotepath)
+#     run("rm -fr \"%s\"" % remotepath)
+
+def build_src(c, path):
+    print("Build source dist at %s" % path)
+    launch_setup_command(c, ['sdist'], path)
+    print("Build source dist at %s done" % path)
+
+
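+# Resolve a component's source version: import its module and read VERSION or
+# __version__, falling back to regex-parsing __init__.py; returns (version, version_str).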
+def get_src_version(c, key, path):
+
+    print("get src version for %s at %s" % (key,path))
+
+    env = c.env
+
+    mod_name = env.repos[key].get('module', key) or key
+
+    f = None
+    sys.path.append(path)
+    current_path = os.getcwd()
+    os.chdir(path)
+    try:
+        f, pathname, description = imp.find_module(mod_name, [path])
+        src_mod = imp.load_module(mod_name, f, pathname, description)
+    except:
+        src_mod = None
+        print("Could not import module, trying to parse")
+    finally:
+        os.chdir(current_path)
+        if f:
+            f.close()
+    version = None
+    if src_mod is None:
+        with open(os.path.join(path,mod_name,"__init__.py"),'r') as init_file:
+            for line in init_file:
+                m = re.search(r'VERSION\s+=\s+\((.+)\)', line, re.I)
+                if m:
+                    version = tuple([re.sub(r'[\s\"\']', '', item) for item in m.group(1).split(',')])
+                    break
+    elif hasattr(src_mod, "VERSION"):
+        version = src_mod.VERSION
+    elif hasattr(src_mod, "__version__"):
+        version = src_mod.__version__
+
+    print("VERSION : %s" % repr(version))
+
+    if version is None:
+        version = ""
+
+    if not isinstance(version, str):
+        if src_mod and hasattr(src_mod, "get_version"):
+            version_str = src_mod.get_version()
+        elif isinstance(version, tuple):
+            # convert numeric parts; note: get_version is not defined in this module (likely django.utils.version.get_version)
+            version_str = get_version([int(s) if s.isdigit() else s for s in version])
+        else:
+            version_str = str(version)
+    else:
+        version_str = version
+
+    print("VERSION str : %s" % repr(version_str))
+    return (version, version_str)
+
+
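+# Upload a built sdist tarball to the remote build_export directory and return the transfer result.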
+def sync_build(c, path):
+    print("Sync build %s" % path)
+    env = c.env
+    with Connection(env['hosts'][0]) as host_connection:
+        with host_connection.cd(env.remote_path['build_export']):
+            filename = os.path.basename(path)
+            res_trans = host_connection.put(path, os.path.join(env.remote_path['build_export'], filename))
+            print("Sync build %s to %s" % (path,res_trans.remote))
+            return res_trans
+
+
+def collectstatic(c, remotepath, remotevirtualenvpath, platform_web_module, module_settings="", admin_cmd="python manage.py"):
+    print("Collect static in %s with %s" % (remotepath, remotevirtualenvpath))
+    remotestaticsitepath = get_remote_env(c, remotepath, remotevirtualenvpath, platform_web_module, "STATIC_ROOT", c.env.settings)
+    activate_path = os.path.join(remotevirtualenvpath, "bin/activate")
+    with Connection(c.env['hosts'][0]) as rconnection:
+        with rconnection.prefix("source \"%s\"" % activate_path), rconnection.prefix("export PYTHONPATH=\"%s\"" % remotepath), rconnection.cd(remotepath):
+            # remove old files; the -c option of collectstatic fails
+            rconnection.run("rm -fr \"%s\"/*" % (remotestaticsitepath))
+            rconnection.run("%s collectstatic --noinput %s" % (admin_cmd, "--settings="+module_settings if module_settings else ""))
+
+
+def migrate(c, remotepath, remotevirtualenvpath, module_settings="", admin_cmd="python manage.py"):
+    activate_path = os.path.join(remotevirtualenvpath, "bin/activate")
+    with Connection(c.env['hosts'][0]) as rconnection:
+        with rconnection.prefix("source \"%s\"" % activate_path), rconnection.prefix("export PYTHONPATH=\"%s\"" % remotepath), rconnection.cd(remotepath):
+            rconnection.run("%s migrate --noinput %s" % (admin_cmd, "--settings="+module_settings if module_settings else ""))
+
+
+def export_version(c, **kwargs):
+    print("export version %s" % (repr(kwargs)))
+
+    export_path = kwargs.get('path', None)
+
+    if not export_path:
+        export_path = get_export_path(c.env, "_".join(["%s_%s" % (k,v) for k,v in kwargs.items()]))
+
+    clean_export_folder(export_path)
+
+    do_export_version(c, export_path,**kwargs)
+
+    return export_path
+
+def do_create_virtualenv(c, remote_venv_export_path, remotevirtualenvpath):
+    print("Create virtualenv export_path : %s - remote venvpath : %s" % (remote_venv_export_path, remotevirtualenvpath))
+    env = c.env
+    activate_path = os.path.join(remotevirtualenvpath, "bin/activate")
+    if env.get('remote_baseline_venv'):
+        prefix_str = "source \"%s\"" % os.path.join(env.get('remote_baseline_venv'), "bin/activate")
+    else:
+        prefix_str = "echo"
+    with Connection(env['hosts'][0]) as rconnection:
+        rconnection.run("rm -fr \"%s\"" % remotevirtualenvpath, warn=True)
+        run("mkdir -p \"%s\"" % remotevirtualenvpath)
+        with rconnection.prefix(prefix_str), rconnection.cd(os.path.join(remote_venv_export_path,"virtualenv","web")):
+            rconnection.run("python create_python_env.py")
+            rconnection.run("python project-boot.py \"%s\"" % remotevirtualenvpath)
+        with rconnection.prefix("source \"%s\"" % activate_path):
+            rconnection.run("pip install --no-cache-dir -r \"%s\"" % os.path.join(remote_venv_export_path,"virtualenv","web","res","srvr_requirements.txt"))
+
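+# Recreate the remote virtualenv from scratch with `python<N> -m venv` and
+# install the given requirements file into it.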
+def do_create_virtualenv_requirement(c, remote_venv_requirement_path, remotevirtualenvpath, python_version = "2"):
+    print("Create virtualenv export_path : %s - remote venvpath : %s" % (remote_venv_requirement_path, remotevirtualenvpath))
+    env = c.env
+    with Connection(env['hosts'][0]) as rconnection:
+        rconnection.run("rm -fr \"%s\"" % remotevirtualenvpath, warn=True)
+        rconnection.run("mkdir -p \"%s\"" % remotevirtualenvpath)
+        # rconnection.run("virtualenv -p `which python%s` %s" % (python_version, remotevirtualenvpath))
+        rconnection.run("python%s -m venv %s" % (python_version, remotevirtualenvpath))
+        with rconnection.prefix("echo $SHELL && . \"%s\"" % os.path.join(remotevirtualenvpath, "bin/activate")):
+            rconnection.run("pip install -r \"%s\"" % remote_venv_requirement_path)
+
+
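+# Optionally run migrations and collectstatic on the remote host, then restart
+# the web service with env.web_relaunch_cmd (via sudo).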
+def do_relaunch_server(c, do_collectstatic, do_migrate):
+    env = c.env
+
+    if do_migrate:
+        migrate(c, env.remote_path['src'], env.remote_path['virtualenv'], env.get('settings', ''), env.get('admin_cmd', 'python manage.py'))
+    if do_collectstatic:
+        collectstatic(c, env.remote_path['src'], env.remote_path['virtualenv'], env.platform_web_module, env.get('settings', ''), env.get('admin_cmd', 'python manage.py'))
+
+    with Connection(env['hosts'][0]) as rconnection:
+        rconnection.sudo(env.web_relaunch_cmd, shell=False)
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sbin/sync/fabfile.py	Fri Jun 22 17:34:15 2018 +0200
@@ -0,0 +1,99 @@
+import imp
+import os.path
+import io
+
+# import config  # @UnusedImport
+# from fablib import (export_version, do_sync_web, create_config,
+#                     clean_export_folder, do_sync_comp, sync_install_build, do_create_virtualenv,
+#                     clean_rsync_folder, rsync_export, get_src_version, sync_build,
+#                     do_relaunch_server, install_build, do_create_virtualenv_requirement, build_src)
+from core import (export_version, build_src, get_src_version, sync_build,
+                  do_create_virtualenv_requirement, get_src_dependencies,
+                  do_relaunch_server, clean_export_folder)
+# from fabric import task, env, run, cd, put
+from fabric import Connection
+from invoke import task
+from blessings import Terminal
+# from fabric.colors import green
+
+# env.use_ssh_config = True
+
+t = Terminal()
+
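+# Export <key> at <version>, build its sdist locally, and return the tarball path
+# together with the source dependencies declared in env['repos'].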
+def build_source(c, key, version):
+    print(t.green("build source with version %s" % version))
+    export_path = export_version(c, **{ key: version })
+    export_path_full = os.path.join(export_path, key, c.env.repos[key]['src_root'])
+    build_src(c, export_path_full)
+    (_,version_str) = get_src_version(c, key, export_path_full)
+    src_dep = get_src_dependencies(c, key, export_path_full)
+    return (os.path.join(export_path_full,"dist","%s-%s.tar.gz" % (key,version_str)), src_dep)
+
+
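+# Create the remote virtualenv from the exported requirements file, extend its
+# sys.path through a _virtualenv_path_extensions.pth file, then pip-install the
+# dependency tarballs followed by the main build.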
+def do_create_virtualenv(c, remote_build_path, dep_remote_build_path_list):
+    env = c.env
+    requirements_path = os.path.join(remote_build_path, env['repos'][env.key]['requirements'])
+    remotevirtualenvpath = env['remote_path']['virtualenv']
+    do_create_virtualenv_requirement(c, requirements_path, remotevirtualenvpath, env['repos'][env.key]['python_version'])
+    # add setting path to virtualenv
+    ext_path = "import sys; sys.__plen = len(sys.path)\n"
+    for l in env['remote_path'].get('pythonpath', []):
+        ext_path += l + "\n"
+    ext_path += "import sys; new=sys.path[sys.__plen:]; del sys.path[sys.__plen:]; p=getattr(sys,'__egginsert',0); sys.path[p:p]=new; sys.__egginsert = p+len(new)"
+    with Connection(env['hosts'][0]) as rconnection:
+        rconnection.put(io.StringIO(ext_path), os.path.join(env['remote_path']['virtualenv'], 'lib/python%s/site-packages/_virtualenv_path_extensions.pth'%env['repos'][env.key]['python_version']))
+        for dep_remote_build_path in dep_remote_build_path_list:
+            with rconnection.prefix("echo $SHELL && . \"%s\"" % os.path.join(remotevirtualenvpath, "bin/activate")):
+                rconnection.run("pip install \"%s\"" % dep_remote_build_path)
+        with rconnection.prefix("echo $SHELL && . \"%s\"" % os.path.join(remotevirtualenvpath, "bin/activate")):
+            rconnection.run("pip install \"%s.tar.gz\"" % remote_build_path)
+
+
+@task
+def relaunch_server(c, collectstatic=True, migrate=True):
+    print("Relaunch server")
+    do_relaunch_server(c, collectstatic, migrate)
+
+
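+# Build the main package and its source dependencies locally, upload the
+# tarballs, untar the main build remotely, rebuild the virtualenv from it,
+# then clean up the build_export folder.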
+@task
+def create_virtualenv(c, version):
+
+    print(t.green("create virtualenv for version ") + version)
+    build_path, source_dep_list = build_source(c, c.env.key, version)
+    print(t.green("BUILD PATH: ") + build_path + " - %r" % source_dep_list)
+
+    source_dep_build_path_list = []
+    print("Build dependencies : %r" %  source_dep_list)
+    for source_dep_key, source_dep_version in source_dep_list:
+        source_dep_build_path, _ = build_source(c, source_dep_key, source_dep_version)
+        source_dep_build_path_list.append(source_dep_build_path)
+
+    host_connection = Connection(c.env['hosts'][0])
+    host_connection.run('mkdir -p "%s"' % c.env['remote_path']['build_export'])
+
+    res_trans = sync_build(c, build_path)
+    res_trans_dep = [ sync_build(c, source_dep_build_path).remote for source_dep_build_path in source_dep_build_path_list]
+    # untar build
+    print("Untar %s on remote host"%res_trans.remote)
+    with host_connection.cd(c.env['remote_path']['build_export']):
+        host_connection.run('tar zxf %s' % res_trans.remote)
+
+    do_create_virtualenv(c, res_trans.remote[0:-7], res_trans_dep)
+
+    host_connection.run('rm -fr "%s/*"' % (c.env['remote_path']['build_export']))
+    clean_export_folder(c.env.remote_path['build_export'])
+
+
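+# Convenience task: create the virtualenv for <version>, then relaunch the server.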
+@task
+def publish_version(c, version):
+    create_virtualenv(c, version)
+    relaunch_server(c, True, True)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/sbin/sync/requirements.txt	Fri Jun 22 17:34:15 2018 +0200
@@ -0,0 +1,2 @@
+fabric
+requirements-parser
--- a/src/iconolab_mcc/__init__.py	Fri Jun 22 11:34:42 2018 +0200
+++ b/src/iconolab_mcc/__init__.py	Fri Jun 22 17:34:15 2018 +0200
@@ -1,4 +1,4 @@
-VERSION = (0, 0, 30, "final", 0)
+VERSION = (0, 1, 30, "final", 0)
 
 VERSION_STR = ".".join(map(lambda i:"%02d" % (i,), VERSION[:2]))