--- a/alcatel/controller/DocumentaryFile.py Mon Jul 22 14:56:35 2013 +0200
+++ b/alcatel/controller/DocumentaryFile.py Wed Aug 14 16:36:41 2013 +0200
@@ -5,7 +5,7 @@
'''
import datetime
import logging
-
+import simplejson
from time import mktime
from django.core.cache import cache
@@ -30,28 +30,32 @@
def create(self):
logger.info('create DocumentaryFile')
attr = ClientDocumentaryFileAttributes(self.request)
-        logger.info('user = ' + str(attr.get_user()))
-        logger.info('query_id = ' + str(attr.get_query_id()))
-        logger.info('public = ' + str(attr.get_visibility()))
-        query_id = attr.get_query_id()
-        print query_id
-        key1 = cache.get(query_id)
-        if key1:
+        # logger.info('user = ' + str(attr.get_user()))
+        # logger.info('query_id = ' + str(attr.get_query_id()))
+        # logger.info('public = ' + str(attr.get_visibility()))
+
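+        # Resolve the cached query context: prefer the query_id sent with the request, otherwise fall back to the key stored in the session.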
+        context = None
+        query_id = attr.get_query_id()
+        key1 = cache.get(query_id) if query_id else None
+        if key1:
context = cache.get(key1['weblab_data_key'])
- if context == None:
- print "cache empty"
- json = '{"Error": "Invalid query id"}'
- logger.info(json)
- else:
- print "cache not empty"
- list_concepts,concepts_with_detailed_documents_list = context
- logger.info('list_concepts' + str(list_concepts))
- logger.info('concepts_with_detailed_documents_list' + str(concepts_with_detailed_documents_list))
- #parse to get the value to save the documentary file
- json = self.parseAndSaveValue(list_concepts,concepts_with_detailed_documents_list, attr )
+        elif self.request.session.get('key1'):
+            context = cache.get(self.request.session['key1'])
else:
json = '{"Error": "Invalid query id"}'
logger.info(json)
+
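+        # If no usable context was found in the cache, report it; otherwise unpack the context and persist the documentary file.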
+        if context is None:
+ logger.info("cache empty")
+ json = '{"Error": "cache empty"}'
+ logger.info(json)
+ else:
+ logger.info("cache not empty")
+ list_concepts,concepts_with_detailed_documents_list = context
+            logger.info('list_concepts = ' + str(list_concepts))
+            logger.info('concepts_with_detailed_documents_list = ' + str(concepts_with_detailed_documents_list))
+            # parse the context values and save the documentary file
+            json = self.parseAndSaveValue(list_concepts, concepts_with_detailed_documents_list, attr)
+
return json
def visibilityChange(self):
@@ -94,26 +98,27 @@
return json
- def delete(self):
+    def delete(self, docId):
logger.info('delete DocumentaryFile')
-        attr = ClientDocumentaryFileDeleteAttributes(self.request)
-        logger.info('get_user = ' + str(attr.get_user()))
-        logger.info('get_documentary_file_id = ' + str(attr.get_documentary_file_id()))
-        docId = attr.get_documentary_file_id()
+        # attr = ClientDocumentaryFileDeleteAttributes(self.request)
+        # logger.info('get_user = ' + str(attr.get_user()))
+        # logger.info('get_documentary_file_id = ' + str(attr.get_documentary_file_id()))
+        # docId = attr.get_documentary_file_id()
if docId == '':
json= '{"Error": "No documentary_file_id attribute in the http post request"}'
logger.info(json)
return json
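+        # Fetch the documentary file by primary key and make sure the requesting user owns it before deleting.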
try:
- documentaryfile = Documentaryfile.objects.get(pk=attr.get_documentary_file_id())
+ documentaryfile = Documentaryfile.objects.get(pk=docId)
except Documentaryfile.DoesNotExist:
json= '{"Error": "Invalid documentary_file_id"}'
logger.info(json)
return json
logger.info('documentaryfile.user.username = ' + str(documentaryfile.user.username))
- if documentaryfile.user.username == attr.get_user():
+ logger.info('self.request.user = ' + str(self.request.user))
+ if str(documentaryfile.user.username) == str(self.request.user):
#verify if the associated documents are associated to another documentaryfile. if not delete the documents
for thecluster in documentaryfile.cluster_set.all():
for thedocument in thecluster.document.all():
@@ -128,19 +133,22 @@
else:
json= '{"Error": "User does not match"}'
logger.info(json)
-
return json
def parseAndSaveValue(self, list_concepts,concepts_with_detailed_documents_list, attr):
#parse the context
-
+        logger.info('session items = ' + str(self.request.session.items()))
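+        # Look up the Django User that will own the documentary file.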
try:
- user_attr = User.objects.get(username=attr.get_user)
+            username = str(attr.get_user())
+            logger.info('attr.get_user = ' + username)
+            user_attr = User.objects.get(username=username)
except User.DoesNotExist, err:
logger.info(' Error: '+ str(err))
json = '{"Error": "User does not existed"}'
return json
-
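+        # Convert the client's visibility string into the boolean stored on the model.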
visibility_attr = attr.get_visibility()
if visibility_attr == 'public':
visibility_bool = True
@@ -151,23 +159,52 @@
description_attr = attr.get_description()
#query_id_attr = attr.get_query_id()
#TODO url image
- image1 = Image(url='url')
- image1.save()
-
+
now = datetime.datetime.now()
mktime(now.timetuple())
- print mktime(now.timetuple())
-
+ logger.info(mktime(now.timetuple()))
+
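+        # Use the first concept's image as the documentary file's thumbnail image.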
+        image1 = None
+        if list_concepts:
+            logger.info('url image first cluster = ' + list_concepts[0]['url_image'])
+            image1 = Image(url=list_concepts[0]['url_image'])
+            image1.save()
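+        # The streamgraph and treemap JSON may arrive with the request or already sit in the session; prefer the value sent with the request.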
+        if attr.get_json_streamgraph():
+            logger.info('json_streamgraph from request = ' + str(attr.get_json_streamgraph()))
+            jsonstreamgraph = attr.get_json_streamgraph()
+        else:
+            logger.info('json_streamgraph from session = ' + str(self.request.session['jsonStreamgraph']))
+            jsonstreamgraph = self.request.session['jsonStreamgraph']
+
+        if attr.get_json_treemap():
+            logger.info('json_treemap from request = ' + str(attr.get_json_treemap()))
+            jsontreemap = attr.get_json_treemap()
+        else:
+            logger.info('json_treemap from session = ' + str(self.request.session['jsonTreemap']))
+            jsontreemap = self.request.session['jsonTreemap']
+
# create the documentary file
- dossierDoc1 = Documentaryfile(title=title_attr, date=now , description=description_attr, visibility=visibility_bool, list_concepts=list_concepts, concepts_with_detailed_documents_list = concepts_with_detailed_documents_list, image=image1, user=user_attr)
+        dossierDoc1 = Documentaryfile(title=title_attr, date=now, description=description_attr,
+                                      visibility=visibility_bool, jsonstreamgraph=jsonstreamgraph,
+                                      jsontreemap=jsontreemap, image=image1, user=user_attr,
+                                      list_concepts=list_concepts,
+                                      concepts_with_detailed_documents_list=concepts_with_detailed_documents_list)
dossierDoc1.save()
- nb_concept = len(concepts_with_detailed_documents_list)
- logger.info('nb_concept = ' + str(nb_concept))
+
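+        # Tag every cluster in the treemap JSON with the new documentary file's id and owner; this needs the primary key, hence the second save below.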
+        data = simplejson.loads(jsontreemap)
+        logger.info('treemap data before annotation = ' + str(data))
+        for cluster in data['clusters']:
+            cluster['doc_id'] = int(dossierDoc1.id)
+            cluster['user'] = dossierDoc1.user.username
+        logger.info('treemap data after annotation = ' + str(data))
+ jsontreemap = simplejson.dumps(data)
+ dossierDoc1.jsontreemap = jsontreemap
+ dossierDoc1.save()
+
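+        # Create one Cluster (with its own Image) per concept, then attach that concept's documents to it.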
for concept_index, concept_with_detailed_documents_list in enumerate(concepts_with_detailed_documents_list) :
- cluster1 = Cluster.objects.create(title=list_concepts[concept_index]['title'], description=list_concepts[concept_index]['abstract'], weight=list_concepts[concept_index]['score'], documentaryfile=dossierDoc1, image=image1)
+            imagecluster = Image(url=list_concepts[concept_index]['url_image'])
+            imagecluster.save()
+            cluster1 = Cluster.objects.create(title=list_concepts[concept_index]['title'],
+                                              description=list_concepts[concept_index]['abstract'],
+                                              weight=list_concepts[concept_index]['score'],
+                                              documentaryfile=dossierDoc1, image=imagecluster)
+            logger.info('cluster id = ' + str(cluster1.id))
for detailed_document in concept_with_detailed_documents_list:
- logger.info(detailed_document)
- logger.info('cluster_id and doc_id = ' + str(list_concepts[concept_index]['title']+' and '+detailed_document['id']))
#Verify if the document exist already in database
try:
doc1 = Document.objects.get(pk=detailed_document['id'])