# -*- coding: utf-8 -*-
'''
Created on Jul 01, 2014

@author: tc
'''
from datetime import datetime

from django.db.models import Q
from django.http.response import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import View

from hdabo.models import Tag
from hdalab.utils import LineNodePlacer
from hdalab.views.ajax import filter_generic

import json
import logging
import uuid

logger = logging.getLogger(__name__)


class RenkanGetPut(View):
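    '''
    Build and serve a Renkan project as JSON.

    GET assembles a new project from the "label" (comma-separated French tag
    labels), "country" (comma-separated DBpedia URIs) and optional "lang"
    query parameters, and returns the resulting Renkan document.
    PUT is currently a stub that only acknowledges the request.

    CSRF protection is disabled on dispatch, presumably so that the Renkan
    client can send requests without a CSRF token.
    '''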

    @csrf_exempt
    def dispatch(self, *args, **kwargs):
        return super(RenkanGetPut, self).dispatch(*args, **kwargs)

    def get(self, request):
        # file_path = settings.JSON_TEST_PATH
        # content = open(file_path, "r")

        now = datetime.now().strftime("%Y-%m-%d %H:%M")
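
        # Skeleton of an empty Renkan project; the nodes and edges are filled in below.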
        content = {
            "id": unicode(uuid.uuid1()),
            "title": "Renkan généré " + now,
            "description": "(empty description)",
            "created": now,
            "updated": now,
            "nodes": [],
            "edges": [],
            "views": [],
            "users": [],
        }

        # Get the requested tags and countries from the query string.
        labels = request.GET.get("label", "").split(",")
        countries = request.GET.get("country", "").split(",")
        # Tags are identified by their French label, countries by their DBpedia URI.
        label_list = [t for t in labels if t != ""]
        country_list = [c for c in countries if c != ""]
        all_tags = Tag.objects.filter(Q(label__in=label_list) | Q(dbpedia_uri__in=country_list)).select_related("dbpedia_fields")

        # Get datasheets from the ajax filter search.
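        # filter_generic is expected to return a JSON string with (at least)
        # "contents" (the matching datasheets) and "tags" (tag metadata,
        # including thumbnails).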
        filter_output = filter_generic(request.GET.get('lang', request.LANGUAGE_CODE), None, ",".join(label_list), ",".join(country_list))
        filter_output = json.loads(filter_output)
        # logger.debug("COUCOU")
        # logger.debug(json.dumps(filter_output, indent=2))
        # return HttpResponse(json.dumps(filter_output, indent=2), content_type="application/json")

        # Prepare the related tags.
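        # A related tag is one of the first few tags (order < 6) attached to a
        # matching datasheet that was not explicitly requested; each one gets its
        # own node below, linked back to its datasheet by an edge.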
        related_tags = []
        all_labels = [t.label for t in all_tags]
        related_tags_dict = {}
        for c in filter_output["contents"]:
            c["id"] = unicode(uuid.uuid1())
            related_tags_dict[c["id"]] = []
            for t in c["tags"]:
                if t["label"] not in all_labels and t["order"] < 6:
                    thumbnail_url = ""
                    for tt in filter_output["tags"]:
                        if tt["label"] == t["label"]:
                            thumbnail_url = tt["thumbnail"]
                    related_tags.append({"label": t["label"], "thumbnail": thumbnail_url, "id": t["id"]})
                    all_labels.append(t["label"])
                    related_tags_dict[c["id"]].append(t["id"])
        # return HttpResponse(json.dumps({"t": related_tags_label}, indent=2), content_type="application/json")

        # Prepare the node placer:
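        # LineNodePlacer presumably lays the nodes out on numbered lines: init()
        # is given, for each node group, a (line index, node count) pair, and
        # get_place(group) returns a position for the next node of that group.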
        np = LineNodePlacer()
        np.init({"tags": (1, len(all_tags)), "datasheet": (2, len(filter_output["contents"])), "related": (3, len(related_tags))})

        project_id = unicode(uuid.uuid1())
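
        # One node per explicitly requested tag, with its DBpedia thumbnail when available.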
        for t in all_tags:
            content["nodes"].append({
                "id": unicode(uuid.uuid1()),
                "title": t.label,
                "description": t.dbpedia_uri,
                "uri": t.dbpedia_uri,
                "position": np.get_place("tags"),
                "image": t.dbpedia_fields.thumbnail if hasattr(t, 'dbpedia_fields') and t.dbpedia_fields and t.dbpedia_fields.thumbnail else None,
                "size": 0,
                "project_id": project_id,
                "color": None,
                # "created_by": "roster_user-84fe909f-ba37-48e6-a25f-9d2f129a95b7"
            })
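
        # One node per datasheet returned by the filter, shown in red.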
        for c in filter_output["contents"]:
            content["nodes"].append({
                "id": c["id"],
                "title": c["title"],
                "description": c["description"],
                "uri": c["url"],
                "position": np.get_place("datasheet"),
                "image": None,
                "size": 0,
                "project_id": project_id,
                "color": "#FF0033",
                # "created_by": "roster_user-84fe909f-ba37-48e6-a25f-9d2f129a95b7"
            })
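
        # One node per related tag, shown in green.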
        for t in related_tags:
            content["nodes"].append({
                "id": t["id"],
                "title": t["label"],
                "description": "",
                "uri": "",
                "position": np.get_place("related"),
                "image": t["thumbnail"],
                "size": 0,
                "project_id": project_id,
                "color": "#00FF33",
                # "created_by": "roster_user-84fe909f-ba37-48e6-a25f-9d2f129a95b7"
            })
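
        # Link each datasheet node to its related tag nodes.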
        for c_id in related_tags_dict:
            for tag_id in related_tags_dict[c_id]:
                content["edges"].append({
                    "id": unicode(uuid.uuid1()),
                    "title": "",
                    "description": "",
                    "uri": "",
                    "color": None,
                    "from": c_id,
                    "to": tag_id,
                    "project_id": project_id,
                    # "created_by": "de68xf75y6hs5rgjhgghxbm217xk"
                })

        return HttpResponse(json.dumps(content), content_type="application/json")

    def put(self, request):
        # PUT is not implemented yet; simply acknowledge the request.
        return HttpResponse("OK")