# -*- coding: utf-8 -*-
'''
Created on Mar 22, 2013
@author: tc
'''
from django.contrib.auth.models import User, Group
from django.core.management import call_command
from django.core.management.base import BaseCommand, CommandError
from ldt.ldt_utils.models import Content, Project
from ldt.security.cache import cached_assign
from optparse import make_option
import json
import os


class Command(BaseCommand):
    '''
    Load users, media, contents, projects and guardian permissions from a JSON
    file generated by dumpdata.
    '''
    args = 'json_file'
    help = 'Load users, media, contents, projects and guardian permissions from a JSON file generated by dumpdata'
    option_list = BaseCommand.option_list + (
        make_option('-u', '--ignore-users',
                    dest='ignore_users',
                    default=None,
                    help='comma-separated list of usernames to ignore'),
        make_option('-g', '--ignore-groups',
                    dest='ignore_groups',
                    default=None,
                    help='comma-separated list of group names to ignore'),
        make_option('-c', '--ignore-contents',
                    dest='ignore_contents',
                    default=None,
                    help='comma-separated list of content iri_id to ignore'),
        make_option('-n', '--new-group',
                    dest='new_group',
                    default=None,
                    help='create a group with this name, add every imported user to it and give it view permissions on every imported media/content'),
    )
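
    # Example invocation (a sketch: the actual command name is the name of this
    # file under management/commands/, and "export.json", the usernames, group
    # names and iri_id values below are placeholders):
    #
    #   python manage.py <command_name> /path/to/export.json \
    #       -u alice,bob -g "Old group" -c 1234abcd,5678efgh -n "Imported users"
    #
    # The JSON file is the output of "python manage.py dumpdata" run on the
    # source instance (users, groups, media, contents, projects and guardian
    # permissions).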
    def __safe_get(self, dict_arg, key, conv=lambda x: x, default=None):
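        '''
        Helper (not referenced elsewhere in this file): fetch dict_arg[key]
        with default as fallback, pass truthy values through conv and return
        default for falsy ones. A hypothetical call
        self.__safe_get({"pk": 3}, "pk", conv=str, default="") returns "3".
        '''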
        val = dict_arg.get(key, default)
        return conv(val) if val else default

    def __safe_decode(self, s):
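        '''
        Best-effort conversion of a byte string to unicode: try utf8 first,
        then latin1, then utf8 with replacement characters. Values that are
        not strings are returned unchanged.
        '''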
        if not isinstance(s, basestring):
            return s
        try:
            return s.decode('utf8')
        except UnicodeError:
            try:
                return s.decode('latin1')
            except UnicodeError:
                return s.decode('utf8', 'replace')

    def handle(self, *args, **options):
        # Check the arguments
        if len(args) != 1:
            raise CommandError("This command takes exactly one argument: the path of the JSON file generated by dumpdata.")
        # Check if the temporary files already exist
        path = os.path.abspath(args[0])
        dir_path = os.path.dirname(path)
        path_file1 = os.path.join(dir_path, 'temp_data1.json')
        path_file2 = os.path.join(dir_path, 'temp_data2.json')
        do_import = True
        if os.path.exists(path_file1) or os.path.exists(path_file2):
            confirm = raw_input(("""
The folder %s already contains temp_data1.json or temp_data2.json. These files will be overwritten.
Do you want to continue?
Type 'y' to continue, or 'n' to quit: """) % dir_path)
            do_import = (confirm == "y")
        # Continue only if confirmed (or if no temporary files were present)
        if do_import:
            # Initialise the ignore lists
            user_ignore_list = ["admin", "AnonymousUser"]
            group_ignore_list = ["everyone", "Hashcut IRI", "Hashcut BPI"]
            content_ignore_list = []
            # Update the ignore lists from the command options
            ignore_users = options.get('ignore_users', None)
            ignore_groups = options.get('ignore_groups', None)
            ignore_contents = options.get('ignore_contents', None)
            if ignore_users:
                for u in ignore_users.split(","):
                    user_ignore_list.append(u)
            if ignore_groups:
                for g in ignore_groups.split(","):
                    group_ignore_list.append(g)
            if ignore_contents:
                for c in ignore_contents.split(","):
                    content_ignore_list.append(c)
            # Begin work...
            print("Opening file...")
            json_file = open(path, 'rb')
            print("Loading data...")
            data = json.load(json_file)
            print("%d objects found..." % len(data))
            content_pk_id = {}
            project_pk_id = {}
            # data for file 1: users, media, contents, projects
            data_file1 = []
            # data for file 2: guardian permissions
            data_file2 = []
            # usernames found in the dump
            usernames = []
            for obj in data:
                if "model" in obj:
                    m = obj["model"]
                    if m != "guardian.userobjectpermission" and m != "guardian.groupobjectpermission":
                        # Skip the admin user, the AnonymousUser, the "everyone" group and the
                        # users, groups and contents in the ignore lists (the test on the media
                        # src is a bit fuzzy, but it is good enough here)
                        if not ((m == "auth.user" and "username" in obj["fields"] and obj["fields"]["username"] in user_ignore_list) or
                                (m == "auth.group" and "name" in obj["fields"] and obj["fields"]["name"] in group_ignore_list) or
                                (m == "ldt_utils.media" and "src" in obj["fields"] and any((s + ".") in obj["fields"]["src"] for s in content_ignore_list)) or
                                (m == "ldt_utils.content" and "iri_id" in obj["fields"] and obj["fields"]["iri_id"] in content_ignore_list)):
                            data_file1.append(obj)
                        #else:
                        #    print("I don't keep from datas %s, pk = %s" % (m, obj["pk"]))
                        if "pk" in obj:
                            # For contents and projects, map the old pk to the natural id ([pk] = id).
                            # After loaddata we will build the reverse mapping with the new pks,
                            # which makes it easy to rewrite the pks stored in the permission data.
                            if m == "ldt_utils.project":
                                pk = str(obj["pk"])
                                id = obj["fields"]["ldt_id"]
                                project_pk_id[pk] = id
                            elif m == "ldt_utils.content":
                                pk = str(obj["pk"])
                                id = obj["fields"]["iri_id"]
                                content_pk_id[pk] = id
                            obj["pk"] = None
                    else:
                        obj["pk"] = None
                        data_file2.append(obj)
                    # Save the usernames, except AnonymousUser
                    if m == "auth.user" and "username" in obj["fields"] and obj["fields"]["username"] != "AnonymousUser":
                        usernames.append(obj["fields"]["username"])
            json_file.close()
            #data_file1.append(project_pk_id)
            #data_file1.append(project_id_pk)
            #data_file1.append(content_pk_id)
            #data_file1.append(content_id_pk)
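            # At this point project_pk_id and content_pk_id map the old primary keys
            # (as strings) to the natural ids, e.g. (hypothetical values):
            #   project_pk_id == {"12": "a1b2c3d4..."}   # old pk -> ldt_id
            #   content_pk_id == {"34": "e5f6a7b8..."}   # old pk -> iri_id
            # The reverse maps (id -> new pk) are built after loaddata, once the
            # new pks are known.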
            # Check whether the import would fail because of duplicate usernames
            existing_usernames = User.objects.all().values_list("username", flat=True)
            for un in usernames:
                if un in existing_usernames and un not in user_ignore_list:
                    print("The import will fail with username: %s" % str(un))
                    do_import = False
            # Check whether the import would fail because of duplicate content iri_id
            existing_iri_ids = Content.objects.all().values_list("iri_id", flat=True)
            new_iri_ids = list(content_pk_id.values())
            for iri_id in new_iri_ids:
                if iri_id in existing_iri_ids and iri_id not in content_ignore_list:
                    print("The import will fail with iri_id: %s" % str(iri_id))
                    do_import = False
            if not do_import:
                print("Add the conflicting usernames and iri_id to the ignore options -u and -c")
                return ""
            # Save the data to a file so that we can simply call loaddata
            print("Writing %s..." % path_file1)
            file1 = open(path_file1, 'w')
            json.dump(data_file1, file1, indent=2)
            file1.close()
            print("Updating permission ids...")
            # Replace the old pks with the natural keys in the permission data
            ignored_project_pks = []
            ignored_content_pks = []
            perm_data = []
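            # Each permission object in data_file2 comes from dumpdata with natural
            # foreign keys, roughly of this shape (illustrative values):
            #   {"model": "guardian.userobjectpermission", "pk": None,
            #    "fields": {"permission": [...],
            #               "content_type": ["ldt_utils", "project"],
            #               "object_pk": "12", "user": ["some_user"]}}
            # so fields["content_type"][1] is the model name and fields["object_pk"]
            # is the old primary key that must be rewritten.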
            for obj in data_file2:
                obj_type = obj["fields"]["content_type"][1]
                old_pk = obj["fields"]["object_pk"]
                if obj_type == "project":
                    try:
                        obj["fields"]["object_pk"] = project_pk_id[old_pk]
                    except KeyError:
                        # The dumpdata can contain permissions for removed projects
                        ignored_project_pks.append(old_pk)
                        continue
                    # Keeping only valid objects avoids errors when we look up the new pks
                    perm_data.append(obj)
                elif obj_type == "content":
                    try:
                        obj["fields"]["object_pk"] = content_pk_id[old_pk]
                    except KeyError:
                        # The dumpdata can contain permissions for removed contents
                        ignored_content_pks.append(old_pk)
                        continue
                    # Keeping only valid objects avoids errors when we look up the new pks
                    obj_id = obj["fields"]["object_pk"]
                    model = obj["model"]  # "guardian.groupobjectpermission" or "guardian.userobjectpermission"
                    if obj_id in content_ignore_list:
                        if model == "guardian.groupobjectpermission":
                            if obj["fields"]["group"][0] in group_ignore_list:
                                #print("permissions: ignoring %s for group %s ..." % (obj_id, obj["fields"]["group"][0]))
                                continue
                        elif model == "guardian.userobjectpermission":
                            if obj["fields"]["user"][0] in user_ignore_list:
                                #print("permissions: ignoring %s for user %s ..." % (obj_id, obj["fields"]["user"][0]))
                                continue
                    perm_data.append(obj)
            # Inform the user
            print("%d project permissions were ignored because the projects do not exist in the current data." % len(ignored_project_pks))
            print("%d content permissions were ignored because the contents do not exist in the current data." % len(ignored_content_pks))
            print("Loading data from temporary file %s ..." % path_file1)
            # Loaddata from file 1
            call_command("loaddata", path_file1)
            # Users, media, contents and projects have now been saved, so we can fetch
            # the new pks for contents and projects. list() around the dict values keeps
            # this compatible with Python 3, where values() returns a view. We use
            # select_related("media_obj") because the media objects are needed below
            # when permissions are assigned to the new group.
            contents = Content.objects.filter(iri_id__in=list(content_pk_id.values())).select_related("media_obj")
            content_id_pk = {}
            for c in contents:
                content_id_pk[c.iri_id] = str(c.pk)
            projects = Project.objects.filter(ldt_id__in=list(project_pk_id.values())).values('pk', 'ldt_id')
            project_id_pk = {}
            for p in projects:
                project_id_pk[p["ldt_id"]] = str(p["pk"])
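            # content_id_pk / project_id_pk now map the natural ids to the new pks,
            # stored as strings because guardian keeps object_pk in a text field,
            # e.g. (hypothetical values): content_id_pk == {"e5f6a7b8...": "87"}.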
            # Reparse perm_data and update it with the new pks
            for obj in perm_data:
                obj_type = obj["fields"]["content_type"][1]
                obj_id = obj["fields"]["object_pk"]
                if obj_type == "project":
                    obj["fields"]["object_pk"] = project_id_pk[obj_id]
                elif obj_type == "content":
                    obj["fields"]["object_pk"] = content_id_pk[obj_id]
            # Save the data to a file so that we can simply call loaddata
            print("Writing %s..." % path_file2)
            file2 = open(path_file2, 'w')
            json.dump(perm_data, file2, indent=2)
            file2.close()
            print("Loading permissions from temporary file %s ..." % path_file2)
            call_command("loaddata", path_file2)
            # Remove the temporary files
            print("Removing temporary files...")
            try:
                os.remove(path_file1)
            except OSError:
                print("Removing temporary file %s failed" % path_file1)
            try:
                os.remove(path_file2)
            except OSError:
                print("Removing temporary file %s failed" % path_file2)
            # Now that all the data has been imported, create the new group and assign permissions if asked
            new_group = options.get('new_group', None)
            if new_group and len(usernames) > 0:
                print("Setting view permissions for the new group %s ..." % new_group)
                # Get or create the group
                new_grp, _ = Group.objects.get_or_create(name=new_group)
                # Add the imported users to the group
                users = User.objects.filter(username__in=usernames)
                for u in users:
                    new_grp.user_set.add(u)
                # Assign view permissions on all imported contents and their media
                for c in contents:
                    cached_assign('view_content', new_grp, c)
                    cached_assign('view_media', new_grp, c.media_obj)
            print("Indexing imported projects ...")
            call_command('reindex', projects=True, no_content=True)
            # This is the end
            print("This is the end")