# TODO: end of admin — change the index page and add a redirect to it on the landing page
#!/usr/bin/env python
# coding=utf-8
from lxml import etree
import argparse
import json
import datetime
import requests
import os.path
import re
import sys
import time
import uuid #@UnresolvedImport
from dateutil.parser import parse as parse_date_raw
from dateutil.tz import tzutc
import bisect
import logging
#class TweetExclude(object):
# def __init__(self, id):
# self.id = id
#
# def __repr__(self):
# return "<TweetExclude(id=%d)>" % (self.id)
LDT_CONTENT_REST_API_PATH = "api/ldt/1.0/contents/"
LDT_PROJECT_REST_API_PATH = "api/ldt/1.0/projects/"
DEFAULT_ANNOTATION_CHANNEL = 'ANNOT'
def parse_date(datestr):
    """Parse an iso8601 date string; dates without a timezone are assumed UTC."""
    parsed = parse_date_raw(datestr)
    if parsed.tzinfo is not None:
        return parsed
    return parsed.replace(tzinfo=tzutc())
def get_logger():
    """Return the logger for this module."""
    return logging.getLogger(__name__)
def get_filter(start_date, end_date, events, channels, user_whitelist):
    """Build the list of filter dicts for the annotation-server query.

    Each truthy criterion contributes one {'name', 'op', 'val'} entry;
    falsy criteria are omitted.
    """
    # TODO: check timezone...
    candidates = (
        ('ts', ">=", start_date and start_date.isoformat()),
        ('ts', "<=", end_date and end_date.isoformat()),
        ('event', "in", events),
        ('channel', "in", channels),
        ('user', "in", user_whitelist),
    )
    return [{'name': name, 'op': op, 'val': val}
            for name, op, val in candidates if val]
def set_logging(options, plogger=None, queue=None):
    """Configure and return a logger from the parsed CLI options.

    The threshold is WARNING lowered by 10 per -v and raised by 10 per -q,
    clamped to [NOTSET, CRITICAL]. Also sets options.debug (True when
    verbose outweighs quiet) as a side effect.
    """
    logging_config = {
        "format" : '%(asctime)s %(levelname)s:%(name)s:%(message)s',
        "level" : max(logging.NOTSET, min(logging.CRITICAL, logging.WARNING - 10 * options.verbose + 10 * options.quiet)), #@UndefinedVariable
    }
    # "stdout"/"stderr" select a stream handler; anything else is a file path
    if options.logfile == "stdout":
        logging_config["stream"] = sys.stdout
    elif options.logfile == "stderr":
        logging_config["stream"] = sys.stderr
    else:
        logging_config["filename"] = options.logfile
    logger = plogger
    if logger is None:
        logger = get_logger() #@UndefinedVariable
    # only attach a handler the first time this logger is configured
    if len(logger.handlers) == 0:
        filename = logging_config.get("filename")
        if queue is not None:
            # NOTE(review): QueueHandler is never imported in this file, so
            # this branch raises NameError as written. The two-argument call
            # does not match stdlib logging.handlers.QueueHandler either —
            # presumably a project-local helper was intended; TODO confirm.
            hdlr = QueueHandler(queue, True)
        elif filename:
            mode = logging_config.get("filemode", 'a')
            hdlr = logging.FileHandler(filename, mode) #@UndefinedVariable
        else:
            stream = logging_config.get("stream")
            hdlr = logging.StreamHandler(stream) #@UndefinedVariable
        fs = logging_config.get("format", logging.BASIC_FORMAT) #@UndefinedVariable
        dfs = logging_config.get("datefmt", None)
        fmt = logging.Formatter(fs, dfs) #@UndefinedVariable
        hdlr.setFormatter(fmt)
        logger.addHandler(hdlr)
    level = logging_config.get("level")
    if level is not None:
        logger.setLevel(level)
    # side effect consumed elsewhere in the script
    options.debug = (options.verbose-options.quiet > 0)
    return logger
def set_logging_options(parser):
    """Register the shared logging CLI flags (-l/--log, -v, -q) on parser."""
    parser.add_argument("-l", "--log", dest="logfile", metavar="LOG",
                        default="stderr", help="log to file")
    parser.add_argument("-v", dest="verbose", action="count", default=0,
                        help="verbose")
    parser.add_argument("-q", dest="quiet", action="count", default=0,
                        help="quiet")
def get_options():
    """Parse command-line arguments and return (options, parser).

    All dates should be given in iso8601 format; dates without a timezone
    are treated as UTC.
    """
    parser = argparse.ArgumentParser(description="All date should be given using iso8601 format. If no timezone is used, the date is considered as UTC")
    # output / server endpoints
    parser.add_argument("-f", "--file", dest="filename",
        help="write export to file", metavar="FILE", default="project.ldt")
    parser.add_argument("-a", "--annot-url", dest="annot_url",
        help="annotation server url", metavar="ANNOT-URL", required=True)
    # time window selection
    parser.add_argument("-s", "--start-date", dest="start_date",
        help="start date", metavar="START_DATE", default=None)
    parser.add_argument("-e", "--end-date", dest="end_date",
        help="end date", metavar="END_DATE", default=None)
    # content selection (file, url, or id)
    parser.add_argument("-I", "--content-file", dest="content_file",
        help="Content file", metavar="CONTENT_FILE")
    parser.add_argument("-c", "--content", dest="content",
        help="Content url", metavar="CONTENT")
    parser.add_argument("-V", "--video-url", dest="video",
        help="video url", metavar="VIDEO")
    parser.add_argument("-i", "--content-id", dest="content_id",
        help="Content id", metavar="CONTENT_ID")
    parser.add_argument("-x", "--exclude", dest="exclude",
        help="file containing the id to exclude", metavar="EXCLUDE")
    parser.add_argument("-C", "--color", dest="color",
        help="Color code", metavar="COLOR", default="16763904")
    # NOTE(review): with action="append" and a non-empty default list,
    # argparse appends user-supplied channels to the default instead of
    # replacing it, so DEFAULT_ANNOTATION_CHANNEL is always present and
    # options.channels is always truthy — confirm this is intended.
    parser.add_argument("-H", "--channel", dest="channels",
        help="Channel", metavar="CHANNEL", default=[DEFAULT_ANNOTATION_CHANNEL], action="append")
    parser.add_argument("-E", "--event", dest="events",
        help="Event", metavar="EVENT", default=[], action="append")
    parser.add_argument("-D", "--duration", dest="duration", type=int,
        help="Duration", metavar="DURATION", default=None)
    parser.add_argument("-n", "--name", dest="name",
        help="Cutting name", metavar="NAME", default=u"annotations")
    # ensemble handling strategy
    parser.add_argument("-R", "--replace", dest="replace", action="store_true",
        help="Replace annotation ensemble", default=False)
    parser.add_argument("-m", "--merge", dest="merge", action="store_true",
        help="merge annotation ensemble, choose the first ensemble", default=False)
    parser.add_argument("-L", "--list-conf", dest="listconf",
        help="list of file to process", metavar="LIST_CONF", default=None)
    parser.add_argument("-b", "--base-url", dest="base_url",
        help="base URL of the platform", metavar="BASE_URL", default="http://ldt.iri.centrepompidou.fr/ldtplatform/")
    parser.add_argument("-p", "--project", dest="project_id",
        help="Project id", metavar="PROJECT_ID", default=None)
    parser.add_argument("-P", "--post-param", dest="post_param",
        help="Post param", metavar="POST_PARAM", default=None)
    parser.add_argument("-B", "--batch-size", dest="batch_size", type=int,
        help="Batch size for annotation request", metavar="BATCH_SIZE", default=500)
    parser.add_argument("--user-whitelist", dest="user_whitelist", action="store",
        help="A list of user screen name", metavar="USER_WHITELIST",default=None)
    # each --cut is "<ts in ms>::<duration>" (durations accepted by parse_duration)
    parser.add_argument("--cut", dest="cuts", action="append",
        help="A cut with the forma <ts in ms>::<duration>", metavar="CUT", default=[])
    set_logging_options(parser)
    return (parser.parse_args(), parser)
def find_delta(deltas, ts):
    """Return the rightmost (cut_ts, offset) pair in deltas with cut_ts <= ts.

    deltas must be sorted ascending; returns (0, 0) when no entry applies.
    """
    pos = bisect.bisect_right(deltas, (ts + 1, 0))
    return deltas[pos - 1] if pos else (0, 0)
def parse_duration(s):
    """Parse a duration given as plain milliseconds or "H:M[:S]".

    Returns the duration in milliseconds; raises ValueError on a colon
    string with fewer than two fields.
    """
    try:
        return int(s)
    except ValueError:
        fields = s.split(":")
        if len(fields) < 2:
            raise ValueError("Bad duration format")
        hours, minutes = int(fields[0]), int(fields[1])
        seconds = int(fields[2]) if len(fields) > 2 else 0
        delta = datetime.timedelta(hours=hours, minutes=minutes, seconds=seconds)
        return int(round(delta.total_seconds() * 1000))
def build_annotation_iterator(url, params, headers):
    """Yield annotation objects from the paginated REST endpoint at url.

    Mutates params['page'] while walking the pages; stops silently on any
    non-OK HTTP response.
    """
    current_page, total_pages = 0, 1
    while current_page < total_pages:
        current_page += 1
        params['page'] = current_page
        response = requests.get(url, params=params, headers=headers)
        if response.status_code != requests.codes.ok:
            return
        payload = response.json()
        total_pages = payload.get('total_pages', 1)
        # TODO: add progress log
        for obj in payload.get('objects', []):
            yield obj
if __name__ == "__main__" :
    (options, parser) = get_options()
    set_logging(options)
    get_logger().debug("OPTIONS : " + repr(options)) #@UndefinedVariable

    # Cuts: build a sorted list of (timestamp_ms, cumulative_offset_ms) pairs.
    # An offset of -1 marks the start of a removed span; annotations whose
    # timestamp falls inside such a span are dropped, the rest are shifted
    # left by the cumulative offset (see find_delta usage below).
    deltas = [(0, 0)]
    total_delta = 0
    if options.cuts:
        cuts_raw = sorted([tuple([parse_duration(s) for s in c.split("::")]) for c in options.cuts])
        for c, d in cuts_raw:
            deltas.append((c + total_delta, -1))
            total_delta += d
            deltas.append((c + total_delta, total_delta))

    if len(sys.argv) == 1 or options.annot_url is None:
        parser.print_help()
        sys.exit(1)

    user_whitelist_file = options.user_whitelist
    user_whitelist = None
    annotation_url = options.annot_url

    # Build the list of export jobs: one per <file> node of the -L list
    # configuration, or a single job assembled from the CLI options.
    if options.listconf:
        parameters = []
        confdoc = etree.parse(options.listconf)
        for node in confdoc.xpath("/annotation_export/file"):
            params = {}
            for snode in node:
                if snode.tag == "path":
                    params['content_file'] = snode.text
                    params['content_file_write'] = snode.text
                elif snode.tag == "project_id":
                    params['content_file'] = options.base_url + LDT_PROJECT_REST_API_PATH + snode.text + "/?format=json"
                    params['content_file_write'] = options.base_url + LDT_PROJECT_REST_API_PATH + snode.text + "/?format=json"
                    params['project_id'] = snode.text
                elif snode.tag == "start_date":
                    params['start_date'] = snode.text
                elif snode.tag == "end_date":
                    params['end_date'] = snode.text
                elif snode.tag == "duration":
                    params['duration'] = int(snode.text)
                elif snode.tag == "events":
                    params['events'] = [snode.text]
                elif snode.tag == "channels":
                    params['channels'] = [snode.text]
            # CLI events/channels take precedence over the conf-file values.
            # NOTE(review): options.channels always contains the default
            # channel (argparse appends to a non-empty default), so the
            # conf-file channels are in practice always overridden here —
            # confirm intended.
            if options.events or 'events' not in params :
                params['events'] = options.events
            if options.channels or 'channels' not in params :
                params['channels'] = options.channels
            parameters.append(params)
    else:
        if options.project_id:
            content_file = options.base_url + LDT_PROJECT_REST_API_PATH + options.project_id + "/?format=json"
        else:
            content_file = options.content_file
        parameters = [{
            'start_date' : options.start_date,
            'end_date' : options.end_date,
            'duration' : options.duration,
            'events' : options.events,
            'channels' : options.channels,
            'content_file' : content_file,
            'content_file_write' : content_file,
            'project_id' : options.project_id
        }]

    post_param = {}
    if options.post_param:
        post_param = json.loads(options.post_param)

    for params in parameters:
        get_logger().debug("PARAMETERS " + repr(params)) #@UndefinedVariable
        start_date_str = params.get("start_date",None)
        end_date_str = params.get("end_date", None)
        duration = params.get("duration", None)
        content_file = params.get("content_file", None)
        content_file_write = params.get("content_file_write", None)
        channels = list(set(params.get('channels', [DEFAULT_ANNOTATION_CHANNEL])))
        events = list(set(params.get('events', [])))

        if user_whitelist_file:
            with open(user_whitelist_file, 'r+') as f:
                user_whitelist = list(set([s.strip() for s in f]))

        start_date = None
        if start_date_str:
            start_date = parse_date(start_date_str)

        root = None
        ensemble_parent = None
        # FIX: initialize up front so the display-node handling below does
        # not raise NameError on the brand-new-document path, where neither
        # the "ldt" nor the "iri" branch (which used to define it) runs.
        display_content_node = None

        # to do : analyse situation ldt or iri ? filename set or not ?
        if content_file and content_file.find("http") == 0:
            # remote project: fetch it and parse the embedded "ldt" XML payload
            get_logger().debug("url : " + content_file) #@UndefinedVariable
            r = requests.get(content_file, params=post_param)
            get_logger().debug("url response " + repr(r) + " content " + repr(r.text)) #@UndefinedVariable
            project = r.json()
            # strip any leading <?xml ...?> declaration before parsing
            text_match = re.match(r"\<\?\s*xml.*?\?\>(.*)", project['ldt'], re.I|re.S)
            root = etree.fromstring(text_match.group(1) if text_match else project['ldt'])
        elif content_file and os.path.exists(content_file):
            doc = etree.parse(content_file)
            root = doc.getroot()

        content_id = None
        if root is None:
            # no source document: build a minimal "iri" document from scratch
            root = etree.Element(u"iri")
            project = etree.SubElement(root, u"project", {u"abstract":u"Annotations",u"title":u"Annotations", u"user":u"IRI Web", u"id":unicode(uuid.uuid4())})
            medias = etree.SubElement(root, u"medias")
            media = etree.SubElement(medias, u"media", {u"pict":u"", u"src":unicode(options.content), u"video":unicode(options.video), u"id":unicode(options.content_id), u"extra":u""})
            annotations = etree.SubElement(root, u"annotations")
            content = etree.SubElement(annotations, u"content", {u"id":unicode(options.content_id)})
            ensemble_parent = content
            content_id = options.content_id

        if ensemble_parent is None:
            # detect the document flavour from its first structural child
            file_type = None
            for node in root:
                if node.tag == "project":
                    file_type = "ldt"
                    break
                elif node.tag == "head":
                    file_type = "iri"
                    break

            if file_type == "ldt":
                media_nodes = root.xpath("//media")
                if len(media_nodes) > 0:
                    media = media_nodes[0]
                annotations_node = root.find(u"annotations")
                if annotations_node is None:
                    annotations_node = etree.SubElement(root, u"annotations")
                content_node = annotations_node.find(u"content")
                if content_node is None:
                    content_node = etree.SubElement(annotations_node,u"content", id=media.get(u"id"))
                ensemble_parent = content_node
                content_id = content_node.get(u"id")
                display_nodes = root.xpath("//displays/display/content[@id='%s']" % content_id)
                if len(display_nodes) == 0:
                    get_logger().info("No display node found. Will not update display")
                    display_content_node = None
                else:
                    display_content_node = display_nodes[0]
            elif file_type == "iri":
                body_node = root.find(u"body")
                if body_node is None:
                    body_node = etree.SubElement(root, u"body")
                ensembles_node = body_node.find(u"ensembles")
                if ensembles_node is None:
                    ensembles_node = etree.SubElement(body_node, u"ensembles")
                ensemble_parent = ensembles_node
                content_id = root.xpath("head/meta[@name='id']/@content")[0]
                display_content_node = None

        if ensemble_parent is None:
            get_logger().error("Can not process file") #@UndefinedVariable
            sys.exit()

        if options.replace:
            # drop every previously exported ensemble (ids prefixed "annot_")
            # together with its display references
            for ens in ensemble_parent.iterchildren(tag=u"ensemble"):
                ens_id = ens.get("id","")
                if ens_id.startswith("annot_"):
                    ensemble_parent.remove(ens)
                    # remove in display nodes
                    if display_content_node is not None:
                        for cut_display in display_content_node.iterchildren():
                            if cut_display.get('idens','') == ens_id:
                                display_content_node.remove(cut_display)

        ensemble = None
        elements = None
        if options.merge:
            # reuse the first previously exported ensemble when merging
            for ens in ensemble_parent.findall(u"ensemble"):
                if ens.get('id',"").startswith("annot_"):
                    ensemble = ens
                    break
            if ensemble is not None:
                elements = ensemble.find(u".//elements")
                decoupage = ensemble.find(u"decoupage")

        if ensemble is None or elements is None:
            ensemble = etree.SubElement(ensemble_parent, u"ensemble", {u"id":u"annot_" + unicode(uuid.uuid4()), u"title":u"Ensemble Annotation", u"author":u"IRI Web", u"abstract":u"Ensemble Annotation"})
            decoupage = etree.SubElement(ensemble, u"decoupage", {u"id": unicode(uuid.uuid4()), u"author": u"IRI Web"})
            etree.SubElement(decoupage, u"title").text = unicode(options.name)
            etree.SubElement(decoupage, u"abstract").text = unicode(options.name)
            elements = etree.SubElement(decoupage, u"elements")

        ensemble_id = ensemble.get('id', '')
        decoupage_id = decoupage.get('id', '') if decoupage is not None else None

        # Resolve the end of the export window: explicit end date, explicit
        # duration, or the content duration fetched from the platform API.
        end_date = None
        if end_date_str:
            end_date = parse_date(end_date_str)
        elif start_date and duration:
            end_date = start_date + datetime.timedelta(seconds=duration)
        elif start_date and options.base_url:
            # get duration from api (value is in milliseconds)
            content_url = options.base_url + LDT_CONTENT_REST_API_PATH + content_id + "/?format=json"
            get_logger().debug("get duration " + content_url) #@UndefinedVariable
            r = requests.get(content_url, params=post_param)
            get_logger().debug("get duration resp " + repr(r)) #@UndefinedVariable
            duration = int(r.json()['duration'])
            get_logger().debug("get duration " + repr(duration)) #@UndefinedVariable
            end_date = start_date + datetime.timedelta(seconds=int(duration/1000))

        if end_date and deltas:
            # widen the window by the total removed duration so annotations
            # shifted left by cuts still fall inside it
            end_date = end_date + datetime.timedelta(milliseconds=deltas[-1][1])

        filters = get_filter(start_date, end_date, events, channels, user_whitelist)
        headers = {'Content-Type': 'application/json'}
        params = { 'q':json.dumps({'filters':filters}), 'results_per_page': options.batch_size}

        for annot in build_annotation_iterator(annotation_url, params, headers):
            annot_ts = parse_date(annot['ts'])
            if start_date is None:
                # the first annotation defines the timeline origin
                # FIX: was assigning to a misspelled "star_date", which left
                # start_date None and crashed on the subtraction below
                start_date = annot_ts
            annot_ts_rel = annot_ts-start_date
            annot_ts_rel_milli = int(round(annot_ts_rel.total_seconds()*1000))
            if deltas:
                d = find_delta(deltas, annot_ts_rel_milli)
                if d[1] < 0:
                    # annotation falls inside a removed (cut) span: skip it
                    continue
                else :
                    annot_ts_rel_milli -= d[1]
            annot_content = annot.get('content',{'category':'', 'user':None})
            username = annot_content.get('user', 'anon.') or 'anon.'
            category = annot_content.get('category', None)
            if category is None:
                continue
            # FIX: coerce the id to unicode — uuid.uuid4() returns a UUID
            # object, which lxml rejects as an attribute value
            element = etree.SubElement(elements, u"element" , {u"id":unicode(annot.get('uuid', uuid.uuid4())), u"color":unicode(options.color), u"author":unicode(username), u"date":unicode(annot_ts.strftime("%Y/%m/%d")), u"begin": unicode(annot_ts_rel_milli), u"dur":u"0"})
            etree.SubElement(element, u"title").text = unicode(username) + u": " + unicode(category.get('label', category.get('code', '')))
            etree.SubElement(element, u"abstract").text = unicode(category.get('label', category.get('code', '')))
            tags_node = etree.SubElement(element, u"tags")
            etree.SubElement(tags_node,u"tag").text = category.get('code', '')
            meta_element = etree.SubElement(element, u'meta')
            polemics_element = etree.Element(u'polemics')
            etree.SubElement(polemics_element, u'polemic').text = category.get('code', '')
            meta_element.append(polemics_element)
            # keep the raw annotation JSON as CDATA for traceability
            etree.SubElement(meta_element, u"source", attrib={"url":annotation_url + "/" + annot['uuid'], "mimetype":u"application/json"}).text = etree.CDATA(json.dumps(annot))

        # sort by tc in
        if options.merge :
            # remove all elements and put them in a array
            # sort them with tc
            # put them back
            elements[:] = sorted(elements,key=lambda n: int(n.get('begin')))

        # add to display node (if the document has one for this content)
        if display_content_node is not None:
            display_dec = None
            for dec in display_content_node.iterchildren(tag=u"decoupage"):
                if dec.get('idens','') == ensemble_id and dec.get('id', '') == decoupage_id:
                    display_dec = dec
                    break
            if display_dec is None and ensemble_id and decoupage_id:
                etree.SubElement(display_content_node, u"decoupage", attrib={'idens': ensemble_id, 'id': decoupage_id, 'tagsSelect':''})

        output_data = etree.tostring(root, encoding="utf-8", method="xml", pretty_print=False, xml_declaration=True)

        if content_file_write and content_file_write.find("http") == 0:
            # write back to the platform REST API
            project["ldt"] = output_data
            # the platform rejects percent-encoded tildes in these urls
            project['owner'] = project['owner'].replace('%7E','~')
            project['contents'] = [c_url.replace('%7E','~') for c_url in project['contents']]
            post_param = {}
            if options.post_param:
                post_param = json.loads(options.post_param)
            get_logger().debug("write http " + content_file_write) #@UndefinedVariable
            get_logger().debug("write http " + repr(post_param)) #@UndefinedVariable
            get_logger().debug("write http " + repr(project)) #@UndefinedVariable
            r = requests.put(content_file_write, data=json.dumps(project), headers={'content-type':'application/json'}, params=post_param)
            get_logger().debug("write http " + repr(r) + " content " + r.text) #@UndefinedVariable
            if r.status_code != requests.codes.ok: # @UndefinedVariable
                r.raise_for_status()
        else:
            # overwrite the source file when it exists, otherwise use -f
            if content_file_write and os.path.exists(content_file_write):
                dest_file_name = content_file_write
            else:
                dest_file_name = options.filename
            get_logger().debug("WRITE : " + dest_file_name) #@UndefinedVariable
            with open(dest_file_name, "w") as output:
                output.write(output_data)