--- a/README.md Tue Oct 28 15:31:55 2014 +0100
+++ b/README.md Tue Oct 28 18:11:16 2014 +0100
@@ -1,14 +1,19 @@
# Mons software suite
-To create the virtualenv:
+- To create the virtualenv:
```
STATIC_DEPS=true pip install -r requirements.txt
```
-Usage for export_annotations.py:
+- Usage for export_annotations.py:
```
python export_annotations.py -a http://localhost:8080/p/api/annotation -b http://localhost/~ymh/platform/ldtplatform/ -p <project_guid> -E test -H ANNOT -v -v -s "2014-06-19T12:14:48+02" -R -P "{\"username\": \"<username>\",\"api_key\":\"<username api key>\"}"
```
+
+- Alembic usage
+ - upgrade: `alembic upgrade head`
+ - migrate an existing database: `alembic stamp 3c78152eb874` then `alembic upgrade head`
+ - create an empty database: launch the application, then `alembic stamp head`
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/alembic.ini Tue Oct 28 18:11:16 2014 +0100
@@ -0,0 +1,59 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts
+script_location = migrations
+
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s
+
+# max length of characters to apply to the
+# "slug" field
+#truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+sqlalchemy.url = driver://user:pass@localhost/dbname
+
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
--- a/annot-server/models.py Tue Oct 28 15:31:55 2014 +0100
+++ b/annot-server/models.py Tue Oct 28 18:11:16 2014 +0100
@@ -23,15 +23,11 @@
id = Column(Integer, primary_key=True, nullable=False)
uuid = Column(UUID, unique=True, nullable=False)
created = Column(DateTime, nullable=False, server_default=text("(now() at time zone 'utc')") )
- ts = Column(DateTime(timezone=True), nullable=False)
- event_code = Column(String(255), ForeignKey('event.code'), nullable=False)
- channel = Column(String(255), nullable=False)
+ ts = Column(DateTime(timezone=True), nullable=False, index=True)
+ event_code = Column(String(255), ForeignKey('event.code'), nullable=False, index=True)
+ channel = Column(String(255), nullable=False, index=True)
content = Column(JSON)
-Index('idx_annotation_event', Annotation.event_code)
-Index('idx_annotation_channel', Annotation.channel)
-Index('idx_annotation_ts', Annotation.ts)
-
def insert_annot_async(params, conn):
@@ -56,32 +52,26 @@
__tablename__ = 'event'
id = Column(Integer, primary_key=True, nullable=False)
- code = Column(String(255), unique=True, nullable=False)
+ code = Column(String(255), unique=True, nullable=False, index=True)
label = Column(String(2048), nullable=False)
description = Column(Text(), nullable=True)
- start_date = Column(DateTime(), nullable=True)
- active = Column(Boolean(), nullable=False, default=True, server_default='1')
+ start_date = Column(DateTime(), nullable=True, index=True)
+ active = Column(Boolean(), nullable=False, default=True, server_default='1', index=True)
sessions = relationship("EventSession", order_by="EventSession.order", backref="event")
def __unicode__(self):
return self.code
-Index('idx_event_code', Event.code)
-Index('idx_event_active', Event.active)
-Index('idx_event_start_date', Event.start_date)
-
class EventSession(Base):
__tablename__ = 'event_session'
id = Column(Integer, primary_key=True, nullable=False)
+ uuid = Column(UUID, unique=True, nullable=False, default=uuid.uuid4)
+ label = Column(String(2048), nullable=True)
event_id = Column(Integer, ForeignKey(Event.id), nullable=False)
project_id = Column(String(2048), nullable=True)
- order = Column(Integer, nullable=False, default=0)
- start_ts = Column(DateTime(timezone=True), nullable=True)
+ order = Column(Integer, nullable=False, default=0, index=True)
+ start_ts = Column(DateTime(timezone=True), nullable=True, index=True)
duration = Column(Integer, nullable=True)
categories_json = Column(JSON, nullable=True)
-
-
-Index('idx_event_session_order', EventSession.order)
-Index('idx_event_session_start_ts', EventSession.start_ts)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/migrations/README Tue Oct 28 18:11:16 2014 +0100
@@ -0,0 +1,1 @@
+Generic single-database configuration.
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/migrations/env.py Tue Oct 28 18:11:16 2014 +0100
@@ -0,0 +1,78 @@
+from __future__ import with_statement
+from alembic import context
+from sqlalchemy import engine_from_config, pool
+from logging.config import fileConfig
+
+import config as app_config
+import models
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This sets up loggers from the .ini file's logging sections.
+fileConfig(config.config_file_name)
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+target_metadata = models.Base.metadata
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline():
+ """Run migrations in 'offline' mode.
+
+ This configures the context with just a URL
+ and not an Engine, though an Engine is acceptable
+ here as well. By skipping the Engine creation
+ we don't even need a DBAPI to be available.
+
+ Calls to context.execute() here emit the given string to the
+ script output.
+
+ """
+ url = config.get_main_option("sqlalchemy.url")
+ context.configure(url=url, target_metadata=target_metadata)
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+def run_migrations_online():
+ """Run migrations in 'online' mode.
+
+ In this scenario we need to create an Engine
+ and associate a connection with the context.
+
+ """
+ alembic_config = config.get_section(config.config_ini_section)
+ alembic_config['sqlalchemy.url'] = app_config.CONN_STR
+
+ engine = engine_from_config(
+ alembic_config,
+ prefix='sqlalchemy.',
+ poolclass=pool.NullPool)
+
+ connection = engine.connect()
+ context.configure(
+ connection=connection,
+ target_metadata=target_metadata
+ )
+
+ try:
+ with context.begin_transaction():
+ context.run_migrations()
+ finally:
+ connection.close()
+
+if context.is_offline_mode():
+ run_migrations_offline()
+else:
+ run_migrations_online()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/migrations/script.py.mako Tue Oct 28 18:11:16 2014 +0100
@@ -0,0 +1,22 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision}
+Create Date: ${create_date}
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+def upgrade():
+ ${upgrades if upgrades else "pass"}
+
+
+def downgrade():
+ ${downgrades if downgrades else "pass"}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/migrations/versions/37eaf74e46b_use_index_shortcuts.py Tue Oct 28 18:11:16 2014 +0100
@@ -0,0 +1,52 @@
+"""use index shortcuts
+
+Revision ID: 37eaf74e46b
+Revises: 3c78152eb874
+Create Date: 2014-10-28 16:41:05.519404
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '37eaf74e46b'
+down_revision = '3c78152eb874'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade():
+ op.create_index(op.f('ix_annotation_channel'), 'annotation', ['channel'], unique=False)
+ op.create_index(op.f('ix_annotation_event_code'), 'annotation', ['event_code'], unique=False)
+ op.create_index(op.f('ix_annotation_ts'), 'annotation', ['ts'], unique=False)
+ op.drop_index('idx_annotation_channel', table_name='annotation')
+ op.drop_index('idx_annotation_event', table_name='annotation')
+ op.drop_index('idx_annotation_ts', table_name='annotation')
+ op.create_index(op.f('ix_event_active'), 'event', ['active'], unique=False)
+ op.create_index(op.f('ix_event_code'), 'event', ['code'], unique=True)
+ op.create_index(op.f('ix_event_start_date'), 'event', ['start_date'], unique=False)
+ op.drop_index('idx_event_active', table_name='event')
+ op.drop_index('idx_event_code', table_name='event')
+ op.drop_index('idx_event_start_date', table_name='event')
+ op.create_index(op.f('ix_event_session_order'), 'event_session', ['order'], unique=False)
+ op.create_index(op.f('ix_event_session_start_ts'), 'event_session', ['start_ts'], unique=False)
+ op.drop_index('idx_event_session_order', table_name='event_session')
+ op.drop_index('idx_event_session_start_ts', table_name='event_session')
+
+
+def downgrade():
+ op.create_index('idx_event_session_start_ts', 'event_session', ['start_ts'], unique=False)
+ op.create_index('idx_event_session_order', 'event_session', ['order'], unique=False)
+ op.drop_index(op.f('ix_event_session_start_ts'), table_name='event_session')
+ op.drop_index(op.f('ix_event_session_order'), table_name='event_session')
+ op.create_index('idx_event_start_date', 'event', ['start_date'], unique=False)
+ op.create_index('idx_event_code', 'event', ['code'], unique=False)
+ op.create_index('idx_event_active', 'event', ['active'], unique=False)
+ op.drop_index(op.f('ix_event_start_date'), table_name='event')
+ op.drop_index(op.f('ix_event_code'), table_name='event')
+ op.drop_index(op.f('ix_event_active'), table_name='event')
+ op.create_index('idx_annotation_ts', 'annotation', ['ts'], unique=False)
+ op.create_index('idx_annotation_event', 'annotation', ['event_code'], unique=False)
+ op.create_index('idx_annotation_channel', 'annotation', ['channel'], unique=False)
+ op.drop_index(op.f('ix_annotation_ts'), table_name='annotation')
+ op.drop_index(op.f('ix_annotation_event_code'), table_name='annotation')
+ op.drop_index(op.f('ix_annotation_channel'), table_name='annotation')
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/migrations/versions/3aec56269d7e_add_label_and_uuid.py Tue Oct 28 18:11:16 2014 +0100
@@ -0,0 +1,41 @@
+"""add label and uuid
+
+Revision ID: 3aec56269d7e
+Revises: 37eaf74e46b
+Create Date: 2014-10-28 17:17:19.017259
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '3aec56269d7e'
+down_revision = '37eaf74e46b'
+
+import uuid
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+def upgrade():
+ op.add_column('event_session', sa.Column('label', sa.String(length=2048), nullable=True))
+ op.add_column('event_session', sa.Column('uuid', postgresql.UUID(), nullable=True))
+ op.create_unique_constraint(None, 'event_session', ['uuid'])
+ event_session = sa.sql.table('event_session',
+ sa.sql.column('id', sa.Integer),
+ sa.sql.column('uuid', postgresql.UUID)
+ )
+ connection = op.get_bind()
+ for r in connection.execute(event_session.select()):
+ op.execute(event_session.update().values({'uuid':str(uuid.uuid4())}).where(event_session.c.id == r['id']))
+ op.alter_column('event_session', 'uuid',
+ existing_type=postgresql.UUID(),
+ nullable=False)
+
+
+
+def downgrade():
+ ### commands auto generated by Alembic - please adjust! ###
+ op.drop_constraint("event_session_uuid_key", 'event_session')
+ op.drop_column('event_session', 'uuid')
+ op.drop_column('event_session', 'label')
+ ### end Alembic commands ###
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/migrations/versions/3c78152eb874_first_migration.py Tue Oct 28 18:11:16 2014 +0100
@@ -0,0 +1,71 @@
+"""First migration
+
+Revision ID: 3c78152eb874
+Revises: None
+Create Date: 2014-10-28 15:24:25.401385
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '3c78152eb874'
+down_revision = None
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+def upgrade():
+ op.create_table('event',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('code', sa.String(length=255), nullable=False),
+ sa.Column('label', sa.String(length=2048), nullable=False),
+ sa.Column('description', sa.Text(), nullable=True),
+ sa.Column('start_date', sa.DateTime(), nullable=True),
+ sa.Column('active', sa.Boolean(), server_default='1', nullable=False),
+ sa.PrimaryKeyConstraint('id')
+ )
+ op.create_index(op.f('ix_event_active'), 'event', ['active'], unique=False)
+ op.create_index(op.f('ix_event_code'), 'event', ['code'], unique=True)
+ op.create_index(op.f('ix_event_start_date'), 'event', ['start_date'], unique=False)
+ op.create_table('event_session',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('event_id', sa.Integer(), nullable=False),
+ sa.Column('project_id', sa.String(length=2048), nullable=True),
+ sa.Column('order', sa.Integer(), nullable=False),
+ sa.Column('start_ts', sa.DateTime(timezone=True), nullable=True),
+ sa.Column('duration', sa.Integer(), nullable=True),
+ sa.Column('categories_json', postgresql.JSON(), nullable=True),
+ sa.ForeignKeyConstraint(['event_id'], [u'event.id'], ),
+ sa.PrimaryKeyConstraint('id')
+ )
+ op.create_index(op.f('ix_event_session_order'), 'event_session', ['order'], unique=False)
+ op.create_index(op.f('ix_event_session_start_ts'), 'event_session', ['start_ts'], unique=False)
+ op.create_table('annotation',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('uuid', postgresql.UUID(), nullable=False),
+ sa.Column('created', sa.DateTime(), server_default=sa.text("(now() at time zone 'utc')"), nullable=False),
+ sa.Column('ts', sa.DateTime(timezone=True), nullable=False),
+ sa.Column('event_code', sa.String(length=255), nullable=False),
+ sa.Column('channel', sa.String(length=255), nullable=False),
+ sa.Column('content', postgresql.JSON(), nullable=True),
+ sa.ForeignKeyConstraint(['event_code'], ['event.code'], ),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('uuid')
+ )
+ op.create_index(op.f('ix_annotation_channel'), 'annotation', ['channel'], unique=False)
+ op.create_index(op.f('ix_annotation_event_code'), 'annotation', ['event_code'], unique=False)
+ op.create_index(op.f('ix_annotation_ts'), 'annotation', ['ts'], unique=False)
+
+
+def downgrade():
+ op.drop_index(op.f('ix_annotation_ts'), table_name='annotation')
+ op.drop_index(op.f('ix_annotation_event_code'), table_name='annotation')
+ op.drop_index(op.f('ix_annotation_channel'), table_name='annotation')
+ op.drop_table('annotation')
+ op.drop_index(op.f('ix_event_session_start_ts'), table_name='event_session')
+ op.drop_index(op.f('ix_event_session_order'), table_name='event_session')
+ op.drop_table('event_session')
+ op.drop_index(op.f('ix_event_start_date'), table_name='event')
+ op.drop_index(op.f('ix_event_code'), table_name='event')
+ op.drop_index(op.f('ix_event_active'), table_name='event')
+ op.drop_table('event')
--- a/requirements.txt Tue Oct 28 15:31:55 2014 +0100
+++ b/requirements.txt Tue Oct 28 18:11:16 2014 +0100
@@ -2,11 +2,13 @@
Flask-Admin==1.0.8
Flask-Restless==0.14.2
Jinja2==2.7.3
+Mako==1.0.0
MarkupSafe==0.23
SQLAlchemy==0.9.8
Twisted==14.0.2
Werkzeug==0.9.6
WTForms==1.0.5
+alembic==0.6.7
aniso8601==0.83
autobahn==0.9.1
itsdangerous==0.24