# HG changeset patch
# User cavaliet
# Date 1405001820 -7200
# Node ID 44af3e5e4114e2a127245aad05a4587d6881096a
# Parent fb86765b4c547a8bb9610756336c66c9efe2087e
enhance line placer

diff -r fb86765b4c54 -r 44af3e5e4114 src/hdalab/utils.py
--- a/src/hdalab/utils.py	Wed Jul 09 12:26:11 2014 +0200
+++ b/src/hdalab/utils.py	Thu Jul 10 16:17:00 2014 +0200
@@ -43,12 +43,33 @@
     def init(self, cat_nb_nodes_initial):
         for c in cat_nb_nodes_initial:
+            nb = cat_nb_nodes_initial[c]
+            if isinstance(cat_nb_nodes_initial[c], tuple):
+                _, nb = nb
+            self.max_length = nb if nb > self.max_length else self.max_length
+        for i_cat,c in enumerate(cat_nb_nodes_initial):
+            self.cat_nb_nodes[c] = []
+            order = i_cat
+            nb = cat_nb_nodes_initial[c]
+            if isinstance(cat_nb_nodes_initial[c], tuple):
+                order, nb = nb
+            offset = float(self.max_length - nb) / 2
+            for i in xrange(nb):
+                self.cat_nb_nodes[c].append({ "x": order*300, "y": 100*(i+offset) })
+        #logger.debug(self.cat_nb_nodes)
+
+
+
+class CircleNodePlacer(NodePlacer):
+
+    def init(self, cat_nb_nodes_initial):
+        for c in cat_nb_nodes_initial:
             self.max_length = cat_nb_nodes_initial[c] if cat_nb_nodes_initial[c] > self.max_length else self.max_length
         for i_cat,c in enumerate(cat_nb_nodes_initial):
             self.cat_nb_nodes[c] = []
             offset = float(self.max_length - cat_nb_nodes_initial[c]) / 2
             for i in xrange(cat_nb_nodes_initial[c]):
-                self.cat_nb_nodes[c].append({ "x": i_cat*200, "y": 100*(i+offset) })
+                self.cat_nb_nodes[c].append({ "x": i_cat*400, "y": 200*(i+offset) })
         #logger.debug(self.cat_nb_nodes)
 
 
 
diff -r fb86765b4c54 -r 44af3e5e4114 src/hdalab/views/ajax.py
--- a/src/hdalab/views/ajax.py	Wed Jul 09 12:26:11 2014 +0200
+++ b/src/hdalab/views/ajax.py	Thu Jul 10 16:17:00 2014 +0200
@@ -398,7 +398,7 @@
     transqs = DbpediaFieldsTranslation.objects.filter(master__in = dbpediafields.values(), language_code = lang)
     translations = dict([(trans.master_id,trans.label) for trans in transqs])
 
-    tags = [{'id': tag.id, 'label': tag.label, 'score': tag.nb, 'translated_label': translations.get(dbpediafields[tag.id].id, tag.label) if tag.id in dbpediafields else tag.label} for tag in tagqslist]
+    tags = [{'id': tag.id, 'label': tag.label, 'score': tag.nb, 'thumbnail': dbpediafields[tag.id].thumbnail, 'translated_label': translations.get(dbpediafields[tag.id].id, tag.label) if tag.id in dbpediafields else tag.label} for tag in tagqslist]
 
     countryqs = countryqs.annotate(nb=Count('includes__tag__taggedsheet'))
     countries = dict([(country.dbpedia_uri, country.nb) for country in countryqs])
diff -r fb86765b4c54 -r 44af3e5e4114 src/hdalab/views/renkan.py
--- a/src/hdalab/views/renkan.py	Wed Jul 09 12:26:11 2014 +0200
+++ b/src/hdalab/views/renkan.py	Thu Jul 10 16:17:00 2014 +0200
@@ -49,15 +49,36 @@
     # Tags arrive with french label, countries with dbpedia uri
     label_list = [t for t in labels if t!=""]
    country_list = [c for c in countries if c!=""]
-    all_tags = Tag.objects.filter( Q(label__in=label_list) | Q(dbpedia_uri__in=country_list) )
+    all_tags = Tag.objects.filter( Q(label__in=label_list) | Q(dbpedia_uri__in=country_list) ).select_related("dbpedia_fields")
 
     # Get datasheets from ajax filter search
     filter_output = filter_generic(request.GET.get('lang',request.LANGUAGE_CODE), None, ",".join(label_list), ",".join(country_list))
     filter_output = json.loads(filter_output)
+    #logger.debug("COUCOU")
+    #logger.debug(json.dumps(filter_output, indent=2))
+    #return HttpResponse(json.dumps(filter_output, indent=2), content_type="application/json")
+
+    # Prepare other tags
+    related_tags = []
+    all_labels = [t.label for t in all_tags]
+    related_tags_dict = {}
+    for c in filter_output["contents"]:
+        c["id"] = unicode(uuid.uuid1())
+        related_tags_dict[c["id"]] = []
+        for t in c["tags"]:
+            if t["label"] not in all_labels and t["order"]<6:
+                thumbnail_url = ""
+                for tt in filter_output["tags"]:
+                    if tt["label"]==t["label"]:
+                        thumbnail_url = tt["thumbnail"]
+                related_tags.append({"label": t["label"], "thumbnail":thumbnail_url, "id":t["id"]})
+                all_labels.append(t["label"])
+                related_tags_dict[c["id"]].append(t["id"])
+    #return HttpResponse(json.dumps({"t":related_tags_label}, indent=2), content_type="application/json")
 
     # Prepare Node placer :
     np = LineNodePlacer()
-    np.init({"tags": len(all_tags), "datasheet":len(filter_output["contents"])})
+    np.init({"tags": (1, len(all_tags)), "datasheet": (2, len(filter_output["contents"])), "related": (3, len(related_tags))})
 
     project_id = unicode(uuid.uuid1())
 
@@ -68,15 +89,16 @@
             "description": t.dbpedia_uri,
             "uri": t.dbpedia_uri,
             "position": np.get_place("tags"),
-            "image": None,
+            "image": t.dbpedia_fields.thumbnail if hasattr(t, 'dbpedia_fields') and t.dbpedia_fields and t.dbpedia_fields.thumbnail else None,
             "size": 0,
             "project_id": project_id,
+            "color": None,
             #"created_by": "roster_user-84fe909f-ba37-48e6-a25f-9d2f129a95b7"
         })
 
-    for c in filter_output["contents"] :
+    for c in filter_output["contents"]:
         content["nodes"].append({
-            "id": unicode(uuid.uuid1()),
+            "id": c["id"],
             "title": c["title"],
             "description": c["description"],
             "uri": c["url"],
@@ -84,9 +106,38 @@
             "image": None,
             "size": 0,
             "project_id": project_id,
+            "color": "#FF0033",
             #"created_by": "roster_user-84fe909f-ba37-48e6-a25f-9d2f129a95b7"
         })
 
+    for t in related_tags:
+        content["nodes"].append({
+            "id": t["id"],
+            "title": t["label"],
+            "description": "",
+            "uri": "",
+            "position": np.get_place("related"),
+            "image": t["thumbnail"],
+            "size": 0,
+            "project_id": project_id,
+            "color": "#00FF33",
+            #"created_by": "roster_user-84fe909f-ba37-48e6-a25f-9d2f129a95b7"
+        })
+
+    for c_id in related_tags_dict:
+        for tag_id in related_tags_dict[c_id]:
+            content["edges"].append({
+                "id": unicode(uuid.uuid1()),
+                "title": "",
+                "description": "",
+                "uri": "",
+                "color": None,
+                "from": c_id,
+                "to": tag_id,
+                "project_id": project_id,
+                #"created_by": "de68xf75y6hs5rgjhgghxbm217xk"
+            })
+
     return HttpResponse(json.dumps(content), content_type="application/json")
 
 
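
Below the patch, a minimal runnable sketch (not part of the changeset) of how the enhanced LineNodePlacer is expected to be driven after this change: each category now maps to an (order, count) tuple, so column order no longer depends on dict iteration order, and plain integer counts remain accepted. The NodePlacer base class and the get_place helper shown here are assumptions inferred from the attributes the diff uses (max_length, cat_nb_nodes) and may not match src/hdalab/utils.py exactly.

# Sketch only: NodePlacer and get_place are assumed, reconstructed from the diff.
class NodePlacer(object):
    def __init__(self):
        self.max_length = 0
        self.cat_nb_nodes = {}

    def get_place(self, category):
        # Assumed behaviour: hand out the next precomputed position for the category.
        return self.cat_nb_nodes[category].pop(0)


class LineNodePlacer(NodePlacer):
    def init(self, cat_nb_nodes_initial):
        # A value may be a plain node count or an (order, count) tuple;
        # the order fixes the column instead of relying on insertion order.
        for c in cat_nb_nodes_initial:
            nb = cat_nb_nodes_initial[c]
            if isinstance(nb, tuple):
                _, nb = nb
            self.max_length = max(self.max_length, nb)
        for i_cat, c in enumerate(cat_nb_nodes_initial):
            self.cat_nb_nodes[c] = []
            order, nb = i_cat, cat_nb_nodes_initial[c]
            if isinstance(cat_nb_nodes_initial[c], tuple):
                order, nb = cat_nb_nodes_initial[c]
            # Centre shorter columns vertically against the longest one.
            offset = float(self.max_length - nb) / 2
            for i in range(nb):  # the source uses xrange (Python 2)
                self.cat_nb_nodes[c].append({"x": order * 300, "y": 100 * (i + offset)})


# Usage mirroring the call added in src/hdalab/views/renkan.py:
np = LineNodePlacer()
np.init({"tags": (1, 4), "datasheet": (2, 2), "related": (3, 6)})
print(np.get_place("tags"))       # e.g. {'x': 300, 'y': 100.0}
print(np.get_place("datasheet"))  # e.g. {'x': 600, 'y': 200.0}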