# HG changeset patch
# User durandn
# Date 1480428866 -3600
# Node ID 1ed1a60c4fcc46b566553771d3145967ca2774c1
# Parent d00d1089b2c8792e046a5fddbe99858cf5b18499
config for different log file for import images command

diff -r d00d1089b2c8 -r 1ed1a60c4fcc src/iconolab/management/commands/importimages.py
--- a/src/iconolab/management/commands/importimages.py Tue Nov 29 10:50:38 2016 +0100
+++ b/src/iconolab/management/commands/importimages.py Tue Nov 29 15:14:26 2016 +0100
@@ -5,13 +5,18 @@
 from iconolab.models import Collection, Image, ImageStats, Item, ItemMetadata, MetaCategory
 from PIL import Image as ImagePIL
 from sorl.thumbnail import get_thumbnail
-import os, csv, pprint, re, json, shutil
+import os, csv, pprint, re, json, shutil, logging
+
+if settings.IMPORT_LOGGER_NAME and settings.LOGGING['loggers'].get(settings.IMPORT_LOGGER_NAME, ''):
+    logger = logging.getLogger(settings.IMPORT_LOGGER_NAME)
+else:
+    logger = logging.getLogger(__name__)
 
 class Command(BaseCommand):
-    help = "import images from a directory into the media folder and creates item and image objects"
+    help = 'import images from a directory into the media folder and creates item and image objects'
 
     def add_arguments(self, parser):
-        parser.add_argument("csv_path")
+        parser.add_argument('csv_path')
         parser.add_argument(
             '--jpeg-quality',
             dest='jpeg_quality',
@@ -81,76 +86,78 @@
         )
 
     def handle(self, *args, **options):
         try:
+            print('# Logging with logger '+logger.name)
+            logger.debug('# Initializing command with args: %r', options)
             # Check we have a collection to store data into:
-            source_dir = os.path.dirname(os.path.realpath(options.get("csv_path")))
-            print("# Checking collection args")
-            if options.get("collection_json"):
-                print("## Finding collection json data in "+source_dir)
-                collection_json_path = os.path.join(source_dir, options.get("collection_json"))
+            source_dir = os.path.dirname(os.path.realpath(options.get('csv_path')))
+            print('# Checking collection args')
+            if options.get('collection_json'):
+                print('## Finding collection json data in '+source_dir)
+                collection_json_path = os.path.join(source_dir, options.get('collection_json'))
                 if not os.path.isfile(collection_json_path):
-                    print("### No "+options.get("collection_json")+".json file was found in the source directory")
-                    raise ValueError("!!! Json file "+collection_json_path+" was not found !!!")
+                    print('### No '+options.get('collection_json')+'.json file was found in the source directory')
+                    raise ValueError('!!! Json file '+collection_json_path+' was not found !!!')
                 try:
                     with open(collection_json_path) as json_fixture_file:
                         collection_data = json.loads(json_fixture_file.read())
-                    for key in ["name", "verbose_name", "description", "image", "height", "width"]:
+                    for key in ['name', 'verbose_name', 'description', 'image', 'height', 'width']:
                         if not key in collection_data.keys():
-                            print("!!! Json file "+collection_json_path+" has no "+key+" field !!!")
+                            print('!!! Json file '+collection_json_path+' has no '+key+' field !!!')
                             raise ValueError()
-                    if not collection_data.get("name", ""):
-                        print("!!! Collection data key 'name' is empty")
+                    if not collection_data.get('name', ''):
+                        print('!!! Collection data key "name" is empty')
                         raise ValueError()
-                    if Collection.objects.filter(name=collection_data.get("name")).exists():
-                        print("!!! A Collection with the provided name already exists!")
+                    if Collection.objects.filter(name=collection_data.get('name')).exists():
+                        print('!!! A Collection with the provided name already exists!')
                        raise ValueError()
-                    if collection_data.get("image", "") and not (collection_data.get("width", 0) and collection_data.get("height", 0)):
-                        print("!!! Collection data has an image but no height and width")
+                    if collection_data.get('image', '') and not (collection_data.get('width', 0) and collection_data.get('height', 0)):
+                        print('!!! Collection data has an image but no height and width')
                         raise ValueError()
                 except ValueError as e:
-                    raise ValueError("!!! JSON Data is invalid. !!!")
-            elif options.get("collection_id"):
-                print("## Finding collection with id "+options.get("collection_id"))
+                    raise ValueError('!!! JSON Data is invalid. !!!')
+            elif options.get('collection_id'):
+                print('## Finding collection with id '+options.get('collection_id'))
                 try:
-                    collection = Collection.objects.get(pk=options.get("collection_id"))
+                    collection = Collection.objects.get(pk=options.get('collection_id'))
                 except Collection.DoesNotExist:
-                    raise ValueError("!!! Collection with primary key "+options.get("collection_id")+" was not found, aborting !!!")
+                    raise ValueError('!!! Collection with primary key '+options.get('collection_id')+' was not found, aborting !!!')
             else:
-                raise ValueError("!!! No collection fixture or collection id, aborting because we can't properly generate data. !!!")
+                raise ValueError('!!! No collection fixture or collection id, aborting because we can\'t properly generate data. !!!')
 
-            if options.get("metacategories_json"):
-                print("## Finding metacategories fixture json data in "+source_dir)
-                metacategories_json_path = os.path.join(source_dir, options.get("metacategories_json"))
+            if options.get('metacategories_json'):
+                print('## Finding metacategories fixture json data in '+source_dir)
+                metacategories_json_path = os.path.join(source_dir, options.get('metacategories_json'))
                 if not os.path.isfile(metacategories_json_path):
-                    print("### No "+options.get("metacategories_json")+".json file was found in the source directory")
-                    raise ValueError("!!! Fixture file "+metacategories_json_path+" was not found !!!")
+                    print('### No '+options.get('metacategories_json')+'.json file was found in the source directory')
+                    raise ValueError('!!! Fixture file '+metacategories_json_path+' was not found !!!')
                 with open(metacategories_json_path) as metacategories_json_file:
                     metacategories_data = json.loads(metacategories_json_file.read())
                     for metacategory in metacategories_data:
-                        if metacategory.get("label", None) is None:
-                            raise ValueError("!!! Metacategory without label !!!")
+                        if metacategory.get('label', None) is None:
+                            raise ValueError('!!! Metacategory without label !!!')
 
             # We read the csv
             delimiter = options.get('csv_delimiter')
-            if delimiter == "#9":
+            if delimiter == '#9':
                 delimiter = chr(9)
-            if delimiter == "#29":
+            if delimiter == '#29':
                 delimiter = chr(29)
-            if delimiter == "#30":
+            if delimiter == '#30':
                 delimiter = chr(30)
-            if delimiter == "#31":
+            if delimiter == '#31':
                 delimiter = chr(31)
-            csvreader = csv.DictReader(open(options.get("csv_path"), encoding=options.get("encoding")), delimiter=delimiter)
-            print("# Extracting data from csv file and storing it in standardized format")
+            csvreader = csv.DictReader(open(options.get('csv_path'), encoding=options.get('encoding')), delimiter=delimiter)
+            print('# Extracting data from csv file and storing it in standardized format')
             # We store data using the Jocondelab keys, as defined in settings.IMPORT_FIELDS_DICT
             cleaned_csv_data=[]
             for row in csvreader:
                 cleaned_row_data = {}
                 for key in settings.IMPORT_FIELDS_DICT.keys():
-                    cleaned_row_data[key] = ""
+                    cleaned_row_data[key] = ''
                    for row_key in row.keys():
                        if row_key in settings.IMPORT_FIELDS_DICT[key]:
-                            if key == "INV":
-                                inv_number, _, _ = row[row_key].partition(";")
+                            if key == 'INV':
+                                inv_number, _, _ = row[row_key].partition(';')
                                cleaned_row_data[key] = inv_number.rstrip()
                            else:
                                cleaned_row_data[key] = row[row_key]
@@ -160,7 +167,7 @@
             image_list = [
                 f for f in os.listdir(source_dir)
                 if os.path.isfile(os.path.join(source_dir, f))
-                and (f.endswith(".jpg") or f.endswith(".tif") or f.endswith(".bmp") or f.endswith(".png"))
+                and (f.endswith('.jpg') or f.endswith('.tif') or f.endswith('.bmp') or f.endswith('.png'))
             ] # Maybe check if image another way
             filtered_csv_data = []
             no_image_rows = []
@@ -168,12 +175,12 @@
             assigned_images = []
             # Now we trim the cleaned_csv_data dict to keep only entries that have at least one image
             for item in cleaned_csv_data:
-                item["SRC_IMG_FILES"] = []
+                item['SRC_IMG_FILES'] = []
                 has_image = False
                 for image in image_list:
-                    img_name_pattern = options.get("filename_regexp_prefix")+re.escape(item[options.get("img_filename_identifier")])+options.get("filename_regexp_suffix")
+                    img_name_pattern = options.get('filename_regexp_prefix')+re.escape(item[options.get('img_filename_identifier')])+options.get('filename_regexp_suffix')
                     if re.match(img_name_pattern, image):
-                        item["SRC_IMG_FILES"].append(image)
+                        item['SRC_IMG_FILES'].append(image)
                         assigned_images.append(image)
                         has_image = True
                 if has_image:
@@ -186,74 +193,74 @@
                 if image not in assigned_images:
                     no_data_images.append(image)
 
-            print("## found " + str(len(filtered_csv_data))+" items with at least one image")
-            print("# Importing data into Iconolab")
-            if options.get("collection_json"):
-                print("## Loading collection json")
+            print('## found ' + str(len(filtered_csv_data))+' items with at least one image')
+            print('# Importing data into Iconolab')
+            if options.get('collection_json'):
+                print('## Loading collection json')
                 collection = Collection.objects.create(
-                    name = collection_data.get("name"),
-                    verbose_name = collection_data.get("verbose_name", ""),
-                    description = collection_data.get("description", ""),
-                    image = collection_data.get("image", ""),
-                    height = collection_data.get("height", 0),
-                    width = collection_data.get("width", 0),
+                    name = collection_data.get('name'),
+                    verbose_name = collection_data.get('verbose_name', ''),
+                    description = collection_data.get('description', ''),
+                    image = collection_data.get('image', ''),
+                    height = collection_data.get('height', 0),
+                    width = collection_data.get('width', 0),
                 )
                 if collection.image:
                     collection_image_path = os.path.join(settings.MEDIA_ROOT, str(collection.image))
                    if not os.path.isfile(collection_image_path):
-                        print("### Moving collection image")
+                        print('### Moving collection image')
                        _ , collection_image_name = os.path.split(collection_image_path)
                        try:
                            col_im = ImagePIL.open(os.path.join(source_dir, collection_image_name))
-                            print("##### Generating or copying jpeg for "+collection_image_name)
+                            print('##### Generating or copying jpeg for '+collection_image_name)
                            col_im.thumbnail(col_im.size)
-                            col_im.save(collection_image_path, "JPEG", quality=options.get("jpeg_quality", settings.IMG_JPG_DEFAULT_QUALITY))
+                            col_im.save(collection_image_path, 'JPEG', quality=options.get('jpeg_quality', settings.IMG_JPG_DEFAULT_QUALITY))
                        except Exception as e:
                            print(e)
-            if options.get("metacategories_json"):
+            if options.get('metacategories_json'):
                for metacategory in metacategories_data:
                    MetaCategory.objects.create(
                        collection = collection,
-                        label = metacategory.get("label"),
-                        triggers_notifications = metacategory.get("triggers_notifications", 0)
+                        label = metacategory.get('label'),
+                        triggers_notifications = metacategory.get('triggers_notifications', 0)
                    )
-            print("## Converting image and moving it to static dir, creating Image and Item objects")
-            target_dir = os.path.join(settings.MEDIA_ROOT, "uploads")
-            print("### Images will be stored in "+target_dir)
+            print('## Converting image and moving it to static dir, creating Image and Item objects')
+            target_dir = os.path.join(settings.MEDIA_ROOT, 'uploads')
+            print('### Images will be stored in '+target_dir)
            for item in filtered_csv_data:
-                print("#### Computing metadatas for item "+item["INV"]+" (inv number)")
-                if not item["INV"]:
-                    print("#### No INV number, skipping")
+                print('#### Computing metadatas for item '+item['INV']+' (inv number)')
+                if not item['INV']:
+                    print('#### No INV number, skipping')
                    continue
-                item_authors = item["AUTR"]
-                item_school = item["ECOLE"]
-                item_designation = ""
-                if item.get("TITR", ""):
-                    item_designation = item["TITR"]
-                elif item.get("DENO", ""):
-                    item_designation = item["DENO"]
-                elif item.get("APPL", ""):
-                    item_designation = item["APPL"]
-                item_datation = ""
-                if item.get("PERI", ""):
-                    item_datation = item["PERI"]
-                elif item.get("MILL", ""):
-                    item_datation = item["MILL"]
-                elif item.get("EPOQ", ""):
-                    item_datation = item["EPOQ"]
-                item_technics = item["TECH"]
-                item_field = item["DOM"]
-                item_measurements = item["DIMS"]
-                item_create_or_usage_location = item["LIEUX"]
-                item_discovery_context = item["DECV"]
-                item_conservation_location = item["LOCA"]
-                item_photo_credits = item["PHOT"]
-                item_inventory_number = item["INV"]
-                item_joconde_ref = item["REF"]
+                item_authors = item['AUTR']
+                item_school = item['ECOLE']
+                item_designation = ''
+                if item.get('TITR', ''):
+                    item_designation = item['TITR']
+                elif item.get('DENO', ''):
+                    item_designation = item['DENO']
+                elif item.get('APPL', ''):
+                    item_designation = item['APPL']
+                item_datation = ''
+                if item.get('PERI', ''):
+                    item_datation = item['PERI']
+                elif item.get('MILL', ''):
+                    item_datation = item['MILL']
+                elif item.get('EPOQ', ''):
+                    item_datation = item['EPOQ']
+                item_technics = item['TECH']
+                item_field = item['DOM']
+                item_measurements = item['DIMS']
+                item_create_or_usage_location = item['LIEUX']
+                item_discovery_context = item['DECV']
+                item_conservation_location = item['LOCA']
+                item_photo_credits = item['PHOT']
+                item_inventory_number = item['INV']
+                item_joconde_ref = item['REF']
                if ItemMetadata.objects.filter(item__collection = collection, inventory_number = item_inventory_number).exists():
-                    print("#### An item with "+item["INV"]+" for inventory number, already exists in database in the import collection")
+                    print('#### An item with '+item['INV']+' for inventory number, already exists in database in the import collection')
                else:
-                    print("#### Creating item "+item["INV"]+" (inv number) in database")
+                    print('#### Creating item '+item['INV']+' (inv number) in database')
                    item_object = Item.objects.create(
                        collection = collection
                    )
@@ -273,11 +280,11 @@
                        inventory_number = item_inventory_number,
                        joconde_ref = item_joconde_ref
                    )
-                print("#### Computing item image(s)")
-                for image in item["SRC_IMG_FILES"]:
+                print('#### Computing item image(s)')
+                for image in item['SRC_IMG_FILES']:
                    (image_name, ext) = os.path.splitext(image)
-                    if options.get("no-jpg-conversion") or ext in settings.NO_IMG_CONVERSION_EXTS:
-                        print("##### Copying file "+str(image)+" without converting")
+                    if options.get('no-jpg-conversion') or ext in settings.NO_IMG_CONVERSION_EXTS:
+                        print('##### Copying file '+str(image)+' without converting')
                        image_path = os.path.join(target_dir, image)
                        new_image_name = image
                        shutil.copy(os.path.join(source_dir, image), target_dir)
@@ -288,10 +295,10 @@
                            print(e)
                            continue
                    else:
-                        image_path = os.path.join(target_dir, image_name) + ".jpg"
-                        new_image_name = image_name+".jpg"
+                        image_path = os.path.join(target_dir, image_name) + '.jpg'
+                        new_image_name = image_name+'.jpg'
                        if os.path.isfile(image_path):
-                            print("##### A jpeg file already exists in target dir for "+ image)
+                            print('##### A jpeg file already exists in target dir for '+ image)
                            try:
                                im = ImagePIL.open(image_path)
                                im_width, im_height = im.size
@@ -302,16 +309,16 @@
                            jpeg_img_path = image_path
                            try:
                                im = ImagePIL.open(os.path.join(source_dir, image))
-                                print("##### Generating or copying jpeg for "+image)
+                                print('##### Generating or copying jpeg for '+image)
                                im.thumbnail(im.size)
-                                im.save(jpeg_img_path, "JPEG", quality=options.get("jpeg_quality", settings.IMG_JPG_DEFAULT_QUALITY))
+                                im.save(jpeg_img_path, 'JPEG', quality=options.get('jpeg_quality', settings.IMG_JPG_DEFAULT_QUALITY))
                                im_width, im_height = im.size
                            except Exception as e:
                                print(e)
                                continue
                    new_image = Image.objects.create(
                        item = item_object,
-                        media = "uploads/"+new_image_name,
+                        media = 'uploads/'+new_image_name,
                        name = new_image_name,
                        height = im_height,
                        width = im_width
@@ -319,29 +326,38 @@
                    ImageStats.objects.create(
                        image = new_image
                    )
-                print("### Generating thumbnails for item "+item["INV"])
+                print('### Generating thumbnails for item '+item['INV'])
                for image in item_object.images.all():
                    for size in settings.PREGENERATE_THUMBNAILS_SIZES:
-                        print("#### Thumbnail for size "+size)
+                        print('#### Thumbnail for size '+size)
                        get_thumbnail(image.media, size, crop=False)
-            print("# All done!")
-            print("# Images without data: ")
+            print('# All done!')
+
+            logger.debug('# Recap for import command: ')
+            print('# Images without data: ')
+            logger.debug('## Checking images left without data')
            collection_image_file = os.path.split(str(collection.image))[1]
            if no_data_images and collection_image_file in no_data_images:
                no_data_images.remove(collection_image_file)
+
            if no_data_images:
                for image in no_data_images:
-                    print("## "+image)
+                    logger.debug('### %r', image)
+                    print('## '+image)
            else:
                print('## Each image has one corresponding row!')
+                logger.debug('### Each image has one corresponding row!')

            print('# CSV Items without image')
+            logger.debug('## Checking csv rows left without image')
            if no_image_rows:
                for item in no_image_rows:
-                    print('## Inv number: '+item["INV"])
+                    logger.debug('### %r', item['INV'])
+                    print('## Inv number: '+item['INV'])
            else:
                print('## Each row found at least one corresponding image!')
+                logger.debug('### Each row found at least one corresponding image!')
        except FileNotFoundError:
-            print("!!! File "+options.get("csv_path")+" does not exist. !!!")
+            print('!!! File '+options.get('csv_path')+' does not exist. !!!')
        except ValueError as e:
            print(str(e))
diff -r d00d1089b2c8 -r 1ed1a60c4fcc src/iconolab/settings/__init__.py
--- a/src/iconolab/settings/__init__.py Tue Nov 29 10:50:38 2016 +0100
+++ b/src/iconolab/settings/__init__.py Tue Nov 29 15:14:26 2016 +0100
@@ -168,8 +168,14 @@
 IMPORT_DEFAULT_FIELD_TO_FILENAME_IDENTIFIER = "INV"
 NO_IMG_CONVERSION_EXTS = [".jpg"]
 IMG_CONVERSION_EXTS = [".tif", ".tiff"]
-
 IMG_JPG_DEFAULT_QUALITY = 80
+PREGENERATE_THUMBNAILS_SIZES = [
+    # item_images_preview.html
+    "250x250",
+    "100x100",
+]
+IMPORT_LOG_FILE = ""
+IMPORT_LOGGER_NAME = ""
 
 DJANGO_RUNSERVER = (len(sys.argv)>1 and sys.argv[1] == 'runserver')
 
@@ -178,16 +184,4 @@
 
 # The different thumbnail sizes that we want to pre-generate when importing or when updating collections using commands
 # This allows to pre-calculate thumbnails for media-heavy pages such as collection_home
-PREGENERATE_THUMBNAILS_SIZES = [#{
-#    # Thumbnails that will always be generated without taking image format into account
-#    "all": [],
-#    # Thumbnails for images in portrait format
-#    "portrait": [],
-#    # Thumbnails for images in landscape format
-#    "landscape": [],
-#    }
-    # item_images_preview.html
-    "250x250",
-    "100x100",
-]
 
diff -r d00d1089b2c8 -r 1ed1a60c4fcc src/iconolab/settings/dev.py.tmpl
--- a/src/iconolab/settings/dev.py.tmpl Tue Nov 29 10:50:38 2016 +0100
+++ b/src/iconolab/settings/dev.py.tmpl Tue Nov 29 15:14:26 2016 +0100
@@ -131,6 +131,8 @@
 
 # Logging
 LOG_FILE = os.path.abspath(os.path.join(BASE_DIR,"../../run/log/log.txt"))
+IMPORT_LOG_FILE = os.path.abspath(os.path.join(BASE_DIR,"../../run/log/import_log.txt"))
+IMPORT_LOGGER_NAME = "import_command"
 LOG_LEVEL = logging.DEBUG
 LOGGING = {
     'version': 1,
@@ -164,6 +166,12 @@
             'filename': LOG_FILE,
             'formatter': 'semi-verbose',
         },
+        'import_file': {
+            'level': LOG_LEVEL,
+            'class': 'logging.FileHandler',
+            'filename': IMPORT_LOG_FILE,
+            'formatter': 'semi-verbose',
+        }
     },
     'loggers': {
         'django.request': {
@@ -176,10 +184,14 @@
             'level': LOG_LEVEL,
             'propagate': True,
         },
+        'import_command': {
+            'handlers': ['import_file'],
+            'level': LOG_LEVEL,
+            'propagate': True,
+        },
     }
 }
-
 
 # Haystack connection
 HAYSTACK_CONNECTIONS = {
     'default': {
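
Reviewer note on configuration (an illustrative sketch, not part of the changeset): with this patch, importimages only switches to a dedicated logger when settings.IMPORT_LOGGER_NAME is non-empty and names an entry in settings.LOGGING['loggers']; otherwise it falls back to logging.getLogger(__name__), so behaviour is unchanged for deployments that keep the empty defaults added to settings/__init__.py. A deployment settings module could opt in by mirroring the dev.py.tmpl hunk above, roughly as follows (BASE_DIR, LOG_LEVEL and the 'semi-verbose' formatter are assumed to exist already, as they do in dev.py.tmpl; the log path is only an example):

    import os

    # Opt-in settings for the dedicated import log (names taken from the dev.py.tmpl hunk above).
    IMPORT_LOG_FILE = os.path.abspath(os.path.join(BASE_DIR, "../../run/log/import_log.txt"))
    IMPORT_LOGGER_NAME = "import_command"

    # Extend the existing LOGGING dict: one file handler and one logger wired to it.
    LOGGING['handlers']['import_file'] = {
        'level': LOG_LEVEL,
        'class': 'logging.FileHandler',
        'filename': IMPORT_LOG_FILE,
        'formatter': 'semi-verbose',
    }
    LOGGING['loggers'][IMPORT_LOGGER_NAME] = {
        'handlers': ['import_file'],
        'level': LOG_LEVEL,
        'propagate': True,
    }

Leaving IMPORT_LOG_FILE and IMPORT_LOGGER_NAME at their empty defaults keeps the previous behaviour: the command's logger.debug recap goes through the module logger and whatever handlers the project already configures.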