clean log + session["tasks"] error
author durandn
Fri, 10 Apr 2015 16:02:55 +0200
changeset 87 e7afb5bd5a85
parent 86 1c84b37adaf4
child 88 1e15fe538d93
clean log + session["tasks"] error
src/catedit/__init__.py
src/catedit/models.py
src/catedit/persistence.py
src/catedit/resources.py
src/catedit/views/categories.py
src/catedit/views/home.py
--- a/src/catedit/__init__.py	Fri Apr 10 13:07:18 2015 +0200
+++ b/src/catedit/__init__.py	Fri Apr 10 16:02:55 2015 +0200
@@ -91,7 +91,6 @@
     """
     session["pagination_links"] = {}
     log_api_rate(r, *args, **kwargs)
-    print("link header: "+r.headers.get("link", "none"))
     if r.headers.get("link", None) is not None:
         session["pagination_links"] = r.links
         for (key, item) in session["pagination_links"].items():
--- a/src/catedit/models.py	Fri Apr 10 13:07:18 2015 +0200
+++ b/src/catedit/models.py	Fri Apr 10 16:02:55 2015 +0200
@@ -96,16 +96,6 @@
         """
             Returns category property list
         """
-        print(str(
-            [
-                predicate_object_tuple for predicate_object_tuple in self.cat_graph.predicate_objects() 
-                if (
-                    predicate_object_tuple[0]!=RDF.ID and 
-                    predicate_object_tuple[0]!=RDFS.label and
-                    predicate_object_tuple[0]!=RDF.Description
-                )
-            ]
-        ))
         return [
             predicate_object_tuple for predicate_object_tuple in self.cat_graph.predicate_objects() 
             if (
--- a/src/catedit/persistence.py	Fri Apr 10 13:07:18 2015 +0200
+++ b/src/catedit/persistence.py	Fri Apr 10 16:02:55 2015 +0200
@@ -223,7 +223,7 @@
                 + "/git/refs/heads/master",
                 hooks=dict(response=log_api_rate)
             )
-            logger.debug(str(ref_master))
+            #TODO: vérifier encoding - logger.debug(str(ref_master))
         except GitHubError as ghe:
             logger.error("GitHubError trying to get the reference "
                          + "to the master branch")
@@ -246,7 +246,7 @@
                 + ref_master["object"]["sha"],
                 hooks=dict(response=log_api_rate)
             )
-            logger.debug(str(last_commit_master))
+            #TODO: vérifier encoding - logger.debug(str(last_commit_master))
         except GitHubError as ghe:
             logger.error("GitHubError trying to get the commit associated "
                          + "to the latest reference to the master branch")
@@ -263,7 +263,7 @@
                 + "?recursive=1",
                 hooks=dict(response=log_api_rate)
             )
-            logger.debug(str(last_commit_tree))
+            #TODO: vérifier encoding - logger.debug(str(last_commit_tree))
         except GitHubError as ghe:
             logger.error("GitHubError trying to get the tree from the commit "
                          + "associated to the latest reference to the master "
@@ -276,7 +276,6 @@
         # First we loop over the existing elements to spot which are modified
         # and which are untouched and create new blobs when needed
         for element in last_commit_tree["tree"]:
-            # logger.debug(element)
             if element["type"] == "blob":
                 # test if element is in deleted categories, if it is,
                 # no point doing anything, the file won't be in the new tree
@@ -343,13 +342,13 @@
                                                   "mode": element["mode"],
                                                   "type": "blob",
                                                   "sha": blob_sha})
-        logger.debug(str(new_tree_data["tree"]))
+        #TODO: vérifier encoding - logger.debug(str(new_tree_data["tree"]))
         # Now we loop over modified categories to find the ones that don't
         # exist yet in the last commit tree in order to create blobs for them
         for (cat_name, cat_content) in modification_dict.items():
             logger.debug(app.config["PERSISTENCE_CONFIG"]
                                    ["CATEGORIES_PATH"]
-                         + cat_content
+                         + cat_name
                          + " should not be in "
                          + str([elt["path"] for
                                 elt in last_commit_tree["tree"]]))
@@ -385,7 +384,7 @@
                     "type": "blob",
                     "sha": new_blob["sha"]
                 })
-        logger.debug(str(new_tree_data))
+        #TODO: vérifier encoding - logger.debug(str(new_tree_data))
 
         # Finally, we post the new tree
         try:
@@ -403,13 +402,13 @@
                 + str(new_tree_data)
             )
             logger.error(ghe.response.text)
-        logger.debug(str(new_tree_response[0]["sha"]))
+        #TODO: vérifier encoding - logger.debug(str(new_tree_response[0]["sha"]))
         
         # Point 5
         new_commit_data = {"message": kwargs["message"],
                            "parents": [last_commit_master["sha"]],
                            "tree": new_tree_response[0]["sha"]}
-        logger.debug(str(new_commit_data))
+        #TODO: vérifier encoding - logger.debug(str(new_commit_data))
         try:
             new_commit = github.post(
                 "repos/"
@@ -419,7 +418,7 @@
                 data=new_commit_data
             ),
             hooks=dict(response=log_api_rate)
-            logger.debug(str(new_commit))
+            #TODO: vérifier encoding - logger.debug(str(new_commit))
         except GitHubError as ghe:
             logger.error(
                 "GitHubError trying to post a new commit with following data: "
@@ -429,7 +428,7 @@
 
         # Point 6
         new_head_data = {"sha": new_commit[0]["sha"], "force": "true"}
-        logger.debug(str(new_head_data))
+        #TODO: vérifier encoding - logger.debug(str(new_head_data))
         try:
             new_head = github.patch(
                 "repos/"
@@ -439,7 +438,7 @@
                 data=json.dumps(new_head_data),
                 hooks=dict(response=log_api_rate)
             )
-            logger.debug(str(new_head))
+            #TODO: vérifier encoding - logger.debug(str(new_head))
         except GitHubError as ghe:
             logger.error(
                 "GitHubError trying to edit the head to the master branch"
--- a/src/catedit/resources.py	Fri Apr 10 13:07:18 2015 +0200
+++ b/src/catedit/resources.py	Fri Apr 10 16:02:55 2015 +0200
@@ -114,7 +114,7 @@
                     routing_key="task_for_"+repository
                 )
                             
-                session["tasks"][repository].append(task.id)
+                session.setdefault("tasks", {}).setdefault(repository, []).append(task.id)
                 session["deleted_categories"][repository] = {}
                 session["modified_categories"][repository] = {}
                 cache.clear()
@@ -184,7 +184,7 @@
         """
         property_list = []
         for (predicate, obj) in cat_data["properties"]:
-            if session["properties"][repository][predicate]["object_type"] == "uriref-category":
+            if session["properties"][repository][predicate]["object_type"] == "uriref-category": # faire des get
                 property_list.append(
                     (URIRef(session["properties"][repository][predicate]["rdflib_class"]), app.config["CATEGORY_NAMESPACE"][obj])
                 )
--- a/src/catedit/views/categories.py	Fri Apr 10 13:07:18 2015 +0200
+++ b/src/catedit/views/categories.py	Fri Apr 10 16:02:55 2015 +0200
@@ -303,7 +303,7 @@
                 abort(404)
             else:
                 specific_serialized_cat = category_api_response[0]
-        logger.debug(specific_serialized_cat)
+        #TODO: vérifier encoding - logger.debug(specific_serialized_cat)
 
         cat_rdf_graph = Graph()
         cat_rdf_graph.parse(source=StringIO(specific_serialized_cat),
@@ -370,11 +370,6 @@
         #  "cat_label": cat.label,
         #  "cat_description": cat.description,
         #  "cat_properties": cat.properties}
-        logger.debug("category list that can be linked :" + str(cat_list))
-        logger.debug(
-            "deleted categories list that can't be linked :"
-            + str(list(deleted_cat_dict.keys()))
-        )
         return render_template('categories/editor.html',
                                cat_id=current_cat_id,
                                cat_properties=current_cat_properties,
@@ -399,14 +394,14 @@
             ]
 
             if cat_id is not None:
-                logger.debug(str(cat_data))
+                #TODO: vérifier encoding - logger.debug(str(cat_data))
                 cat_api_instance.put(
                     repository=repository,
                     cat_id=cat_id,
                     cat_data=cat_data
                 )
             else:
-                logger.debug(str(cat_data))
+                #TODO: vérifier encoding - logger.debug(str(cat_data))
                 cat_api_instance.post(
                     repository=repository,
                     cat_data=cat_data
--- a/src/catedit/views/home.py	Fri Apr 10 13:07:18 2015 +0200
+++ b/src/catedit/views/home.py	Fri Apr 10 16:02:55 2015 +0200
@@ -97,7 +97,7 @@
                     + repo + "/contents/properties/properties.json",
                     hooks=dict(response=log_api_rate)
                 )
-                logger.debug("repo: "+repo+" - properties: "+str(json.loads(str(b64decode(json_file["content"]), "utf-8"))))
+                #TODO: vérifier encoding - logger.debug("repo: "+repo+" - properties: "+str(json.loads(str(b64decode(json_file["content"]), "utf-8")))) #wat
                 session["properties"][repo] = json.loads(str(b64decode(json_file["content"]), "utf-8"))["property_list"]
             except GitHubError as ghe:
                 logger.debug(