--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/web/lib/django/db/models/base.py Wed Jun 02 18:57:35 2010 +0200
@@ -0,0 +1,974 @@
+import types
+import sys
+import os
+from itertools import izip
+import django.db.models.manager # Imported to register signal handler.
+from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned, FieldError, ValidationError, NON_FIELD_ERRORS
+from django.core import validators
+from django.db.models.fields import AutoField, FieldDoesNotExist
+from django.db.models.fields.related import OneToOneRel, ManyToOneRel, OneToOneField
+from django.db.models.query import delete_objects, Q
+from django.db.models.query_utils import CollectedObjects, DeferredAttribute
+from django.db.models.options import Options
+from django.db import connections, router, transaction, DatabaseError, DEFAULT_DB_ALIAS
+from django.db.models import signals
+from django.db.models.loading import register_models, get_model
+from django.utils.translation import ugettext_lazy as _
+import django.utils.copycompat as copy
+from django.utils.functional import curry, update_wrapper
+from django.utils.encoding import smart_str, force_unicode, smart_unicode
+from django.utils.text import get_text_list, capfirst
+from django.conf import settings
+
class ModelBase(type):
    """
    Metaclass for all models.
    """
    def __new__(cls, name, bases, attrs):
        """
        Creates the model class: installs ``_meta`` (from the ``Meta`` inner
        class), per-model ``DoesNotExist``/``MultipleObjectsReturned``
        exceptions, inherited fields and managers, and parent links for
        multi-table inheritance.  Abstract and proxy models get special
        treatment; an already-registered model is returned unchanged.
        """
        super_new = super(ModelBase, cls).__new__
        parents = [b for b in bases if isinstance(b, ModelBase)]
        if not parents:
            # If this isn't a subclass of Model, don't do anything special.
            return super_new(cls, name, bases, attrs)

        # Create the class.
        module = attrs.pop('__module__')
        new_class = super_new(cls, name, bases, {'__module__': module})
        attr_meta = attrs.pop('Meta', None)
        abstract = getattr(attr_meta, 'abstract', False)
        if not attr_meta:
            # No Meta declared locally: fall back to an inherited Meta.
            meta = getattr(new_class, 'Meta', None)
        else:
            meta = attr_meta
        base_meta = getattr(new_class, '_meta', None)

        if getattr(meta, 'app_label', None) is None:
            # Figure out the app_label by looking one level up.
            # For 'django.contrib.sites.models', this would be 'sites'.
            model_module = sys.modules[new_class.__module__]
            kwargs = {"app_label": model_module.__name__.split('.')[-2]}
        else:
            kwargs = {}

        new_class.add_to_class('_meta', Options(meta, **kwargs))
        if not abstract:
            # Concrete models get exception classes that inherit from the
            # corresponding exceptions of their concrete parents (or the
            # global ones for root models), so `except Parent.DoesNotExist`
            # also catches `Child.DoesNotExist`.
            new_class.add_to_class('DoesNotExist', subclass_exception('DoesNotExist',
                    tuple(x.DoesNotExist
                            for x in parents if hasattr(x, '_meta') and not x._meta.abstract)
                                    or (ObjectDoesNotExist,), module))
            new_class.add_to_class('MultipleObjectsReturned', subclass_exception('MultipleObjectsReturned',
                    tuple(x.MultipleObjectsReturned
                            for x in parents if hasattr(x, '_meta') and not x._meta.abstract)
                                    or (MultipleObjectsReturned,), module))
            if base_meta and not base_meta.abstract:
                # Non-abstract child classes inherit some attributes from their
                # non-abstract parent (unless an ABC comes before it in the
                # method resolution order).
                if not hasattr(meta, 'ordering'):
                    new_class._meta.ordering = base_meta.ordering
                if not hasattr(meta, 'get_latest_by'):
                    new_class._meta.get_latest_by = base_meta.get_latest_by

        is_proxy = new_class._meta.proxy

        if getattr(new_class, '_default_manager', None):
            if not is_proxy:
                # Multi-table inheritance doesn't inherit default manager from
                # parents.
                new_class._default_manager = None
                new_class._base_manager = None
            else:
                # Proxy classes do inherit parent's default manager, if none is
                # set explicitly.
                new_class._default_manager = new_class._default_manager._copy_to_model(new_class)
                new_class._base_manager = new_class._base_manager._copy_to_model(new_class)

        # Bail out early if we have already created this class.
        m = get_model(new_class._meta.app_label, name, False)
        if m is not None:
            return m

        # Add all attributes to the class.
        for obj_name, obj in attrs.items():
            new_class.add_to_class(obj_name, obj)

        # All the fields of any type declared on this model
        new_fields = new_class._meta.local_fields + \
                     new_class._meta.local_many_to_many + \
                     new_class._meta.virtual_fields
        field_names = set([f.name for f in new_fields])

        # Basic setup for proxy models: exactly one non-abstract model base
        # is required, and the proxy may not declare fields of its own.
        if is_proxy:
            base = None
            for parent in [cls for cls in parents if hasattr(cls, '_meta')]:
                if parent._meta.abstract:
                    if parent._meta.fields:
                        raise TypeError("Abstract base class containing model fields not permitted for proxy model '%s'." % name)
                    else:
                        continue
                if base is not None:
                    raise TypeError("Proxy model '%s' has more than one non-abstract model base class." % name)
                else:
                    base = parent
            if base is None:
                raise TypeError("Proxy model '%s' has no non-abstract model base class." % name)
            if (new_class._meta.local_fields or
                    new_class._meta.local_many_to_many):
                raise FieldError("Proxy model '%s' contains model fields." % name)
            while base._meta.proxy:
                base = base._meta.proxy_for_model
            new_class._meta.setup_proxy(base)

        # Do the appropriate setup for any model parents.
        o2o_map = dict([(f.rel.to, f) for f in new_class._meta.local_fields
                if isinstance(f, OneToOneField)])

        for base in parents:
            original_base = base
            if not hasattr(base, '_meta'):
                # Things without _meta aren't functional models, so they're
                # uninteresting parents.
                continue

            parent_fields = base._meta.local_fields + base._meta.local_many_to_many
            # Check for clashes between locally declared fields and those
            # on the base classes (we cannot handle shadowed fields at the
            # moment).
            for field in parent_fields:
                if field.name in field_names:
                    raise FieldError('Local field %r in class %r clashes '
                                     'with field of similar name from '
                                     'base class %r' %
                                        (field.name, name, base.__name__))
            if not base._meta.abstract:
                # Concrete classes...
                while base._meta.proxy:
                    # Skip over a proxy class to the "real" base it proxies.
                    base = base._meta.proxy_for_model
                if base in o2o_map:
                    field = o2o_map[base]
                elif not is_proxy:
                    # Implicit OneToOne parent link for multi-table
                    # inheritance, e.g. "place_ptr".
                    attr_name = '%s_ptr' % base._meta.module_name
                    field = OneToOneField(base, name=attr_name,
                            auto_created=True, parent_link=True)
                    new_class.add_to_class(attr_name, field)
                else:
                    field = None
                new_class._meta.parents[base] = field
            else:
                # .. and abstract ones: their fields are copied onto the
                # child as local fields.
                for field in parent_fields:
                    new_class.add_to_class(field.name, copy.deepcopy(field))

                # Pass any non-abstract parent classes onto child.
                new_class._meta.parents.update(base._meta.parents)

            # Inherit managers from the abstract base classes.
            new_class.copy_managers(base._meta.abstract_managers)

            # Proxy models inherit the non-abstract managers from their base,
            # unless they have redefined any of them.
            if is_proxy:
                new_class.copy_managers(original_base._meta.concrete_managers)

            # Inherit virtual fields (like GenericForeignKey) from the parent
            # class
            for field in base._meta.virtual_fields:
                if base._meta.abstract and field.name in field_names:
                    raise FieldError('Local field %r in class %r clashes '\
                                     'with field of similar name from '\
                                     'abstract base class %r' % \
                                        (field.name, name, base.__name__))
                new_class.add_to_class(field.name, copy.deepcopy(field))

        if abstract:
            # Abstract base models can't be instantiated and don't appear in
            # the list of models for an app. We do the final setup for them a
            # little differently from normal models.
            attr_meta.abstract = False
            new_class.Meta = attr_meta
            return new_class

        new_class._prepare()
        register_models(new_class._meta.app_label, new_class)

        # Because of the way imports happen (recursively), we may or may not be
        # the first time this model tries to register with the framework. There
        # should only be one class for each model, so we always return the
        # registered version.
        return get_model(new_class._meta.app_label, name, False)

    def copy_managers(cls, base_managers):
        """
        Copies the given (creation_counter, name, manager) entries onto cls,
        skipping names the class has already bound to something else.
        """
        # This is in-place sorting of an Options attribute, but that's fine.
        base_managers.sort()
        for _, mgr_name, manager in base_managers:
            val = getattr(cls, mgr_name, None)
            if not val or val is manager:
                new_manager = manager._copy_to_model(cls)
                cls.add_to_class(mgr_name, new_manager)

    def add_to_class(cls, name, value):
        """
        Attaches ``value`` to the class under ``name``, delegating to the
        value's own contribute_to_class() hook when it defines one (fields,
        managers, Options all do).
        """
        if hasattr(value, 'contribute_to_class'):
            value.contribute_to_class(cls, name)
        else:
            setattr(cls, name, value)

    def _prepare(cls):
        """
        Creates some methods once self._meta has been populated.
        """
        opts = cls._meta
        opts._prepare(cls)

        if opts.order_with_respect_to:
            # Navigation helpers on this model, plus get_/set_<name>_order
            # accessors on the model named by order_with_respect_to.
            cls.get_next_in_order = curry(cls._get_next_or_previous_in_order, is_next=True)
            cls.get_previous_in_order = curry(cls._get_next_or_previous_in_order, is_next=False)
            setattr(opts.order_with_respect_to.rel.to, 'get_%s_order' % cls.__name__.lower(), curry(method_get_order, cls))
            setattr(opts.order_with_respect_to.rel.to, 'set_%s_order' % cls.__name__.lower(), curry(method_set_order, cls))

        # Give the class a docstring -- its definition.
        if cls.__doc__ is None:
            cls.__doc__ = "%s(%s)" % (cls.__name__, ", ".join([f.attname for f in opts.fields]))

        if hasattr(cls, 'get_absolute_url'):
            # Wrap get_absolute_url so settings.ABSOLUTE_URL_OVERRIDES can
            # replace the model-defined implementation at call time.
            cls.get_absolute_url = update_wrapper(curry(get_absolute_url, opts, cls.get_absolute_url),
                                                  cls.get_absolute_url)

        signals.class_prepared.send(sender=cls)
+
class ModelState(object):
    """
    A class for storing instance state.

    Currently tracks only ``db`` -- the alias of the database this instance
    was loaded from or saved to (``None`` when not yet known).
    """
    def __init__(self, db=None):
        # Database alias associated with this instance; None until set.
        self.db = db
+
class Model(object):
    __metaclass__ = ModelBase
    # True only on the dynamically created subclasses used for deferred
    # field loading (see __reduce__).
    _deferred = False

    def __init__(self, *args, **kwargs):
        """
        Populates field values from positional and/or keyword arguments,
        sending the pre_init/post_init signals around the work.  Leftover
        keyword arguments that match neither a field nor a property raise
        TypeError.
        """
        signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs)

        # Set up the storage for instance state
        self._state = ModelState()

        # There is a rather weird disparity here; if kwargs, it's set, then args
        # overrides it. It should be one or the other; don't duplicate the work
        # The reason for the kwargs check is that standard iterator passes in by
        # args, and instantiation for iteration is 33% faster.
        args_len = len(args)
        if args_len > len(self._meta.fields):
            # Daft, but matches old exception sans the err msg.
            raise IndexError("Number of args exceeds number of fields")

        fields_iter = iter(self._meta.fields)
        if not kwargs:
            # The ordering of the izip calls matter - izip throws StopIteration
            # when an iter throws it. So if the first iter throws it, the second
            # is *not* consumed. We rely on this, so don't change the order
            # without changing the logic.
            for val, field in izip(args, fields_iter):
                setattr(self, field.attname, val)
        else:
            # Slower, kwargs-ready version.
            for val, field in izip(args, fields_iter):
                setattr(self, field.attname, val)
                kwargs.pop(field.name, None)
                # Maintain compatibility with existing calls.
                if isinstance(field.rel, ManyToOneRel):
                    kwargs.pop(field.attname, None)

        # Now we're left with the unprocessed fields that *must* come from
        # keywords, or default.

        for field in fields_iter:
            is_related_object = False
            # This slightly odd construct is so that we can access any
            # data-descriptor object (DeferredAttribute) without triggering its
            # __get__ method.
            if (field.attname not in kwargs and
                isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute)):
                # This field will be populated on request.
                continue
            if kwargs:
                if isinstance(field.rel, ManyToOneRel):
                    try:
                        # Assume object instance was passed in.
                        rel_obj = kwargs.pop(field.name)
                        is_related_object = True
                    except KeyError:
                        try:
                            # Object instance wasn't passed in -- must be an ID.
                            val = kwargs.pop(field.attname)
                        except KeyError:
                            val = field.get_default()
                    else:
                        # Object instance was passed in. Special case: You can
                        # pass in "None" for related objects if it's allowed.
                        if rel_obj is None and field.null:
                            val = None
                else:
                    try:
                        val = kwargs.pop(field.attname)
                    except KeyError:
                        # This is done with an exception rather than the
                        # default argument on pop because we don't want
                        # get_default() to be evaluated, and then not used.
                        # Refs #12057.
                        val = field.get_default()
            else:
                val = field.get_default()
            if is_related_object:
                # If we are passed a related instance, set it using the
                # field.name instead of field.attname (e.g. "user" instead of
                # "user_id") so that the object gets properly cached (and type
                # checked) by the RelatedObjectDescriptor.
                setattr(self, field.name, rel_obj)
            else:
                setattr(self, field.attname, val)

        if kwargs:
            # Remaining kwargs may target properties; anything else is an
            # error reported via TypeError below.
            for prop in kwargs.keys():
                try:
                    if isinstance(getattr(self.__class__, prop), property):
                        setattr(self, prop, kwargs.pop(prop))
                except AttributeError:
                    pass
            if kwargs:
                raise TypeError("'%s' is an invalid keyword argument for this function" % kwargs.keys()[0])
        signals.post_init.send(sender=self.__class__, instance=self)

    def __repr__(self):
        """Returns '<ClassName: unicode(self)>', guarding against bad Unicode."""
        try:
            u = unicode(self)
        except (UnicodeEncodeError, UnicodeDecodeError):
            u = '[Bad Unicode data]'
        return smart_str(u'<%s: %s>' % (self.__class__.__name__, u))

    def __str__(self):
        """UTF-8 encoding of __unicode__ when defined, else a generic label."""
        if hasattr(self, '__unicode__'):
            return force_unicode(self).encode('utf-8')
        return '%s object' % self.__class__.__name__

    def __eq__(self, other):
        # Instances compare equal when they are of the exact same class and
        # have the same primary key value.
        return isinstance(other, self.__class__) and self._get_pk_val() == other._get_pk_val()

    def __ne__(self, other):
        # Required alongside __eq__ on Python 2.
        return not self.__eq__(other)

    def __hash__(self):
        # Hash by primary key so equal instances hash equally.
        return hash(self._get_pk_val())

    def __reduce__(self):
        """
        Provide pickling support. Normally, this just dispatches to Python's
        standard handling. However, for models with deferred field loading, we
        need to do things manually, as they're dynamically created classes and
        only module-level classes can be pickled by the default path.
        """
        data = self.__dict__
        model = self.__class__
        # The obvious thing to do here is to invoke super().__reduce__()
        # for the non-deferred case. Don't do that.
        # On Python 2.4, there is something weird with __reduce__,
        # and as a result, the super call will cause an infinite recursion.
        # See #10547 and #12121.
        defers = []
        pk_val = None
        if self._deferred:
            from django.db.models.query_utils import deferred_class_factory
            factory = deferred_class_factory
            for field in self._meta.fields:
                if isinstance(self.__class__.__dict__.get(field.attname),
                        DeferredAttribute):
                    defers.append(field.attname)
                    if pk_val is None:
                        # The pk_val and model values are the same for all
                        # DeferredAttribute classes, so we only need to do this
                        # once.
                        obj = self.__class__.__dict__[field.attname]
                        model = obj.model_ref()
        else:
            factory = simple_class_factory
        return (model_unpickle, (model, defers, factory), data)

    def _get_pk_val(self, meta=None):
        # Returns the primary key value; pass an ancestor's _meta to read the
        # pk attribute that ancestor defines.
        if not meta:
            meta = self._meta
        return getattr(self, meta.pk.attname)

    def _set_pk_val(self, value):
        # Assigns the primary key attribute directly.
        return setattr(self, self._meta.pk.attname, value)

    # Alias so instances can read/write obj.pk regardless of the actual
    # primary key field name.
    pk = property(_get_pk_val, _set_pk_val)

    def serializable_value(self, field_name):
        """
        Returns the value of the field name for this instance. If the field is
        a foreign key, returns the id value, instead of the object. If there's
        no Field object with this name on the model, the model attribute's
        value is returned directly.

        Used to serialize a field's value (in the serializer, or form output,
        for example). Normally, you would just access the attribute directly
        and not use this method.
        """
        try:
            field = self._meta.get_field_by_name(field_name)[0]
        except FieldDoesNotExist:
            return getattr(self, field_name)
        return getattr(self, field.attname)

    def save(self, force_insert=False, force_update=False, using=None):
        """
        Saves the current instance. Override this in a subclass if you want to
        control the saving process.

        The 'force_insert' and 'force_update' parameters can be used to insist
        that the "save" must be an SQL insert or update (or equivalent for
        non-SQL backends), respectively. Normally, they should not be set.
        """
        if force_insert and force_update:
            raise ValueError("Cannot force both insert and updating in model saving.")
        self.save_base(using=using, force_insert=force_insert, force_update=force_update)

    # Marker consulted by callers that must not invoke data-modifying methods.
    save.alters_data = True

    def save_base(self, raw=False, cls=None, origin=None, force_insert=False,
            force_update=False, using=None):
        """
        Does the heavy-lifting involved in saving. Subclasses shouldn't need to
        override this method. It's separate from save() in order to hide the
        need for overrides of save() to pass around internal-only parameters
        ('raw', 'cls', and 'origin').
        """
        using = using or router.db_for_write(self.__class__, instance=self)
        connection = connections[using]
        assert not (force_insert and force_update)
        if cls is None:
            # Top-level call: start from the instance's own class; proxies
            # defer signal sending to the proxied class (origin stays None).
            cls = self.__class__
            meta = cls._meta
            if not meta.proxy:
                origin = cls
        else:
            meta = cls._meta

        if origin and not meta.auto_created:
            signals.pre_save.send(sender=origin, instance=self, raw=raw)

        # If we are in a raw save, save the object exactly as presented.
        # That means that we don't try to be smart about saving attributes
        # that might have come from the parent class - we just save the
        # attributes we have been given to the class we have been given.
        # We also go through this process to defer the save of proxy objects
        # to their actual underlying model.
        if not raw or meta.proxy:
            if meta.proxy:
                org = cls
            else:
                org = None
            for parent, field in meta.parents.items():
                # At this point, parent's primary key field may be unknown
                # (for example, from administration form which doesn't fill
                # this field). If so, fill it.
                if field and getattr(self, parent._meta.pk.attname) is None and getattr(self, field.attname) is not None:
                    setattr(self, parent._meta.pk.attname, getattr(self, field.attname))

                self.save_base(cls=parent, origin=org, using=using)

                if field:
                    # Propagate the parent's (possibly new) pk into the link.
                    setattr(self, field.attname, self._get_pk_val(parent._meta))
            if meta.proxy:
                return

        if not meta.proxy:
            non_pks = [f for f in meta.local_fields if not f.primary_key]

            # First, try an UPDATE. If that doesn't update anything, do an INSERT.
            pk_val = self._get_pk_val(meta)
            pk_set = pk_val is not None
            record_exists = True
            manager = cls._base_manager
            if pk_set:
                # Determine whether a record with the primary key already exists.
                if (force_update or (not force_insert and
                        manager.using(using).filter(pk=pk_val).exists())):
                    # It does already exist, so do an UPDATE.
                    if force_update or non_pks:
                        values = [(f, None, (raw and getattr(self, f.attname) or f.pre_save(self, False))) for f in non_pks]
                        rows = manager.using(using).filter(pk=pk_val)._update(values)
                        if force_update and not rows:
                            raise DatabaseError("Forced update did not affect any rows.")
                else:
                    record_exists = False
            if not pk_set or not record_exists:
                if meta.order_with_respect_to:
                    # If this is a model with an order_with_respect_to
                    # autopopulate the _order field
                    field = meta.order_with_respect_to
                    order_value = manager.using(using).filter(**{field.name: getattr(self, field.attname)}).count()
                    setattr(self, '_order', order_value)

                if not pk_set:
                    if force_update:
                        raise ValueError("Cannot force an update in save() with no primary key.")
                    values = [(f, f.get_db_prep_save(raw and getattr(self, f.attname) or f.pre_save(self, True), connection=connection))
                        for f in meta.local_fields if not isinstance(f, AutoField)]
                else:
                    values = [(f, f.get_db_prep_save(raw and getattr(self, f.attname) or f.pre_save(self, True), connection=connection))
                        for f in meta.local_fields]

                record_exists = False

                update_pk = bool(meta.has_auto_field and not pk_set)
                if values:
                    # Create a new record.
                    result = manager._insert(values, return_id=update_pk, using=using)
                else:
                    # Create a new record with defaults for everything.
                    result = manager._insert([(meta.pk, connection.ops.pk_default_value())], return_id=update_pk, raw_values=True, using=using)

                if update_pk:
                    setattr(self, meta.pk.attname, result)
            transaction.commit_unless_managed(using=using)

        # Store the database on which the object was saved
        self._state.db = using

        # Signal that the save is complete
        if origin and not meta.auto_created:
            signals.post_save.send(sender=origin, instance=self,
                created=(not record_exists), raw=raw)

    save_base.alters_data = True

    def _collect_sub_objects(self, seen_objs, parent=None, nullable=False):
        """
        Recursively populates seen_objs with all objects related to this
        object.

        When done, seen_objs.items() will be in the format:
            [(model_class, {pk_val: obj, pk_val: obj, ...}),
            (model_class, {pk_val: obj, pk_val: obj, ...}), ...]
        """
        pk_val = self._get_pk_val()
        # add() returns a truthy value when this object was already seen,
        # which terminates the recursion.
        if seen_objs.add(self.__class__, pk_val, self,
                         type(parent), parent, nullable):
            return

        for related in self._meta.get_all_related_objects():
            rel_opts_name = related.get_accessor_name()
            if not related.field.rel.multiple:
                # Reverse one-to-one: at most a single related object.
                try:
                    sub_obj = getattr(self, rel_opts_name)
                except ObjectDoesNotExist:
                    pass
                else:
                    sub_obj._collect_sub_objects(seen_objs, self, related.field.null)
            else:
                # To make sure we can access all elements, we can't use the
                # normal manager on the related object. So we work directly
                # with the descriptor object.
                for cls in self.__class__.mro():
                    if rel_opts_name in cls.__dict__:
                        rel_descriptor = cls.__dict__[rel_opts_name]
                        break
                else:
                    # in the case of a hidden fkey just skip it, it'll get
                    # processed as an m2m
                    if not related.field.rel.is_hidden():
                        raise AssertionError("Should never get here.")
                    else:
                        continue
                delete_qs = rel_descriptor.delete_manager(self).all()
                for sub_obj in delete_qs:
                    sub_obj._collect_sub_objects(seen_objs, self, related.field.null)

        for related in self._meta.get_all_related_many_to_many_objects():
            if related.field.rel.through:
                # Collect the rows of the intermediate (through) table that
                # reference this object.
                db = router.db_for_write(related.field.rel.through.__class__, instance=self)
                opts = related.field.rel.through._meta
                reverse_field_name = related.field.m2m_reverse_field_name()
                nullable = opts.get_field(reverse_field_name).null
                filters = {reverse_field_name: self}
                for sub_obj in related.field.rel.through._base_manager.using(db).filter(**filters):
                    sub_obj._collect_sub_objects(seen_objs, self, nullable)

        for f in self._meta.many_to_many:
            if f.rel.through:
                db = router.db_for_write(f.rel.through.__class__, instance=self)
                opts = f.rel.through._meta
                field_name = f.m2m_field_name()
                nullable = opts.get_field(field_name).null
                filters = {field_name: self}
                for sub_obj in f.rel.through._base_manager.using(db).filter(**filters):
                    sub_obj._collect_sub_objects(seen_objs, self, nullable)
            else:
                # m2m-ish but with no through table? GenericRelation: cascade delete
                for sub_obj in f.value_from_object(self).all():
                    # Generic relations not enforced by db constraints, thus we can set
                    # nullable=True, order does not matter
                    sub_obj._collect_sub_objects(seen_objs, self, True)

        # Handle any ancestors (for the model-inheritance case). We do this by
        # traversing to the most remote parent classes -- those with no parents
        # themselves -- and then adding those instances to the collection. That
        # will include all the child instances down to "self".
        parent_stack = [p for p in self._meta.parents.values() if p is not None]
        while parent_stack:
            link = parent_stack.pop()
            parent_obj = getattr(self, link.name)
            if parent_obj._meta.parents:
                parent_stack.extend(parent_obj._meta.parents.values())
                continue
            # At this point, parent_obj is base class (no ancestor models). So
            # delete it and all its descendents.
            parent_obj._collect_sub_objects(seen_objs)

    def delete(self, using=None):
        """
        Deletes this instance and every dependent related object, on the
        given database alias (or the one chosen by the write router).
        """
        using = using or router.db_for_write(self.__class__, instance=self)
        assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname)

        # Find all the objects that need to be deleted.
        seen_objs = CollectedObjects()
        self._collect_sub_objects(seen_objs)

        # Actually delete the objects.
        delete_objects(seen_objs, using)

    delete.alters_data = True

    def _get_FIELD_display(self, field):
        # Maps the stored value through the field's flatchoices, falling back
        # to the raw value when no choice matches.
        value = getattr(self, field.attname)
        return force_unicode(dict(field.flatchoices).get(value, value), strings_only=True)

    def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs):
        """
        Returns the neighbouring instance ordered by ``field`` (ties broken
        by pk), raising self.DoesNotExist when there is none.
        """
        op = is_next and 'gt' or 'lt'
        order = not is_next and '-' or ''
        param = smart_str(getattr(self, field.attname))
        q = Q(**{'%s__%s' % (field.name, op): param})
        q = q|Q(**{field.name: param, 'pk__%s' % op: self.pk})
        qs = self.__class__._default_manager.using(self._state.db).filter(**kwargs).filter(q).order_by('%s%s' % (order, field.name), '%spk' % order)
        try:
            return qs[0]
        except IndexError:
            raise self.DoesNotExist("%s matching query does not exist." % self.__class__._meta.object_name)

    def _get_next_or_previous_in_order(self, is_next):
        """
        Returns the adjacent object in the _order sequence maintained for
        order_with_respect_to models, caching the result on the instance.
        """
        cachename = "__%s_order_cache" % is_next
        if not hasattr(self, cachename):
            op = is_next and 'gt' or 'lt'
            order = not is_next and '-_order' or '_order'
            order_field = self._meta.order_with_respect_to
            obj = self._default_manager.filter(**{
                order_field.name: getattr(self, order_field.attname)
            }).filter(**{
                '_order__%s' % op: self._default_manager.values('_order').filter(**{
                    self._meta.pk.name: self.pk
                })
            }).order_by(order)[:1].get()
            setattr(self, cachename, obj)
        return getattr(self, cachename)

    def prepare_database_save(self, unused):
        # When this instance is assigned to a related field being saved, only
        # its primary key is written to the database.
        return self.pk

    def clean(self):
        """
        Hook for doing any extra model-wide validation after clean() has been
        called on every field by self.clean_fields. Any ValidationError raised
        by this method will not be associated with a particular field; it will
        have a special-case association with the field defined by NON_FIELD_ERRORS.
        """
        pass

    def validate_unique(self, exclude=None):
        """
        Checks unique constraints on the model and raises ``ValidationError``
        if any failed.
        """
        unique_checks, date_checks = self._get_unique_checks(exclude=exclude)

        errors = self._perform_unique_checks(unique_checks)
        date_errors = self._perform_date_checks(date_checks)

        # Merge the date-based errors into the uniqueness errors.
        for k, v in date_errors.items():
            errors.setdefault(k, []).extend(v)

        if errors:
            raise ValidationError(errors)

    def _get_unique_checks(self, exclude=None):
        """
        Gather a list of checks to perform. Since validate_unique could be
        called from a ModelForm, some fields may have been excluded; we can't
        perform a unique check on a model that is missing fields involved
        in that check.
        Fields that did not validate should also be excluded, but they need
        to be passed in via the exclude argument.
        """
        if exclude is None:
            exclude = []
        unique_checks = []

        unique_togethers = [(self.__class__, self._meta.unique_together)]
        for parent_class in self._meta.parents.keys():
            if parent_class._meta.unique_together:
                unique_togethers.append((parent_class, parent_class._meta.unique_together))

        for model_class, unique_together in unique_togethers:
            for check in unique_together:
                for name in check:
                    # If this is an excluded field, don't add this check.
                    if name in exclude:
                        break
                else:
                    unique_checks.append((model_class, tuple(check)))

        # These are checks for the unique_for_<date/year/month>.
        date_checks = []

        # Gather a list of checks for fields declared as unique and add them to
        # the list of checks.

        fields_with_class = [(self.__class__, self._meta.local_fields)]
        for parent_class in self._meta.parents.keys():
            fields_with_class.append((parent_class, parent_class._meta.local_fields))

        for model_class, fields in fields_with_class:
            for f in fields:
                name = f.name
                if name in exclude:
                    continue
                if f.unique:
                    unique_checks.append((model_class, (name,)))
                if f.unique_for_date:
                    date_checks.append((model_class, 'date', name, f.unique_for_date))
                if f.unique_for_year:
                    date_checks.append((model_class, 'year', name, f.unique_for_year))
                if f.unique_for_month:
                    date_checks.append((model_class, 'month', name, f.unique_for_month))
        return unique_checks, date_checks

    def _perform_unique_checks(self, unique_checks):
        """
        Runs each (model_class, field_names) uniqueness check against the
        database; returns a dict mapping field name (or NON_FIELD_ERRORS for
        unique_together) to a list of error messages.
        """
        errors = {}

        for model_class, unique_check in unique_checks:
            # Try to look up an existing object with the same values as this
            # object's values for all the unique field.

            lookup_kwargs = {}
            for field_name in unique_check:
                f = self._meta.get_field(field_name)
                lookup_value = getattr(self, f.attname)
                if lookup_value is None:
                    # no value, skip the lookup
                    continue
                if f.primary_key and not getattr(self, '_adding', False):
                    # no need to check for unique primary key when editing
                    continue
                lookup_kwargs[str(field_name)] = lookup_value

            # some fields were skipped, no reason to do the check
            if len(unique_check) != len(lookup_kwargs.keys()):
                continue

            qs = model_class._default_manager.filter(**lookup_kwargs)

            # Exclude the current object from the query if we are editing an
            # instance (as opposed to creating a new one)
            if not getattr(self, '_adding', False) and self.pk is not None:
                qs = qs.exclude(pk=self.pk)

            if qs.exists():
                if len(unique_check) == 1:
                    key = unique_check[0]
                else:
                    key = NON_FIELD_ERRORS
                errors.setdefault(key, []).append(self.unique_error_message(model_class, unique_check))

        return errors

    def _perform_date_checks(self, date_checks):
        """
        Runs the unique_for_date/year/month checks collected by
        _get_unique_checks; returns a dict mapping field name to a list of
        error messages.
        """
        errors = {}
        for model_class, lookup_type, field, unique_for in date_checks:
            lookup_kwargs = {}
            # there's a ticket to add a date lookup, we can remove this special
            # case if that makes its way in
            date = getattr(self, unique_for)
            if lookup_type == 'date':
                lookup_kwargs['%s__day' % unique_for] = date.day
                lookup_kwargs['%s__month' % unique_for] = date.month
                lookup_kwargs['%s__year' % unique_for] = date.year
            else:
                lookup_kwargs['%s__%s' % (unique_for, lookup_type)] = getattr(date, lookup_type)
            lookup_kwargs[field] = getattr(self, field)

            qs = model_class._default_manager.filter(**lookup_kwargs)
            # Exclude the current object from the query if we are editing an
            # instance (as opposed to creating a new one)
            if not getattr(self, '_adding', False) and self.pk is not None:
                qs = qs.exclude(pk=self.pk)

            if qs.exists():
                errors.setdefault(field, []).append(
                    self.date_error_message(lookup_type, field, unique_for)
                )
        return errors

    def date_error_message(self, lookup_type, field, unique_for):
        """Builds the message for a unique_for_<date/year/month> violation."""
        opts = self._meta
        return _(u"%(field_name)s must be unique for %(date_field)s %(lookup)s.") % {
            'field_name': unicode(capfirst(opts.get_field(field).verbose_name)),
            'date_field': unicode(capfirst(opts.get_field(unique_for).verbose_name)),
            'lookup': lookup_type,
        }

    def unique_error_message(self, model_class, unique_check):
        """Builds the message for a unique / unique_together violation."""
        opts = model_class._meta
        model_name = capfirst(opts.verbose_name)

        # A unique field
        if len(unique_check) == 1:
            field_name = unique_check[0]
            field_label = capfirst(opts.get_field(field_name).verbose_name)
            # Insert the error into the error dict, very sneaky
            return _(u"%(model_name)s with this %(field_label)s already exists.") % {
                'model_name': unicode(model_name),
                'field_label': unicode(field_label)
            }
        # unique_together
        else:
            field_labels = map(lambda f: capfirst(opts.get_field(f).verbose_name), unique_check)
            field_labels = get_text_list(field_labels, _('and'))
            return _(u"%(model_name)s with this %(field_label)s already exists.") % {
                'model_name': unicode(model_name),
                'field_label': unicode(field_labels)
            }

    def full_clean(self, exclude=None):
        """
        Calls clean_fields, clean, and validate_unique, on the model,
        and raises a ``ValidationError`` for any errors that occurred.
        """
        errors = {}
        if exclude is None:
            exclude = []

        try:
            self.clean_fields(exclude=exclude)
        except ValidationError, e:
            errors = e.update_error_dict(errors)

        # Form.clean() is run even if other validation fails, so do the
        # same with Model.clean() for consistency.
        try:
            self.clean()
        except ValidationError, e:
            errors = e.update_error_dict(errors)

        # Run unique checks, but only for fields that passed validation.
        for name in errors.keys():
            if name != NON_FIELD_ERRORS and name not in exclude:
                exclude.append(name)
        try:
            self.validate_unique(exclude=exclude)
        except ValidationError, e:
            errors = e.update_error_dict(errors)

        if errors:
            raise ValidationError(errors)

    def clean_fields(self, exclude=None):
        """
        Cleans all fields and raises a ValidationError containing message_dict
        of all validation errors if any occur.
        """
        if exclude is None:
            exclude = []

        errors = {}
        for f in self._meta.fields:
            if f.name in exclude:
                continue
            # Skip validation for empty fields with blank=True. The developer
            # is responsible for making sure they have a valid value.
            raw_value = getattr(self, f.attname)
            if f.blank and raw_value in validators.EMPTY_VALUES:
                continue
            try:
                setattr(self, f.attname, f.clean(raw_value, self))
            except ValidationError, e:
                errors[f.name] = e.messages

        if errors:
            raise ValidationError(errors)
+
+
+############################################
+# HELPER FUNCTIONS (CURRIED MODEL METHODS) #
+############################################
+
+# ORDERING METHODS #########################
+
def method_set_order(ordered_obj, self, id_list, using=None):
    """
    Curried onto the related model as set_<name>_order(); rewrites the
    _order column of the ordered_obj rows belonging to self so it follows
    the pk sequence given in id_list.
    """
    if using is None:
        using = DEFAULT_DB_ALIAS
    rel_val = getattr(self, ordered_obj._meta.order_with_respect_to.rel.field_name)
    order_name = ordered_obj._meta.order_with_respect_to.name
    # FIXME: It would be nice if there was an "update many" version of update
    # for situations like this.
    for i, j in enumerate(id_list):
        ordered_obj.objects.filter(**{'pk': j, order_name: rel_val}).update(_order=i)
    transaction.commit_unless_managed(using=using)
+
+
def method_get_order(ordered_obj, self):
    """
    Curried onto the related model as get_<name>_order(); returns the list
    of primary keys of the ordered_obj rows belonging to self.
    """
    rel_val = getattr(self, ordered_obj._meta.order_with_respect_to.rel.field_name)
    order_name = ordered_obj._meta.order_with_respect_to.name
    pk_name = ordered_obj._meta.pk.name
    # NOTE(review): result order relies on ordered_obj's default queryset
    # ordering (presumably _order) -- confirm against Options._prepare.
    return [r[pk_name] for r in
            ordered_obj.objects.filter(**{order_name: rel_val}).values(pk_name)]
+
+
+##############################################
+# HELPER FUNCTIONS (CURRIED MODEL FUNCTIONS) #
+##############################################
+
def get_absolute_url(opts, func, self, *args, **kwargs):
    """
    Curried wrapper installed by ModelBase._prepare(): consults
    settings.ABSOLUTE_URL_OVERRIDES under '<app_label>.<module_name>' and
    calls the override if present, otherwise the model's own
    get_absolute_url implementation (``func``).
    """
    return settings.ABSOLUTE_URL_OVERRIDES.get('%s.%s' % (opts.app_label, opts.module_name), func)(self, *args, **kwargs)
+
+
+########
+# MISC #
+########
+
class Empty(object):
    # Intentionally empty placeholder class.
    pass
+
def simple_class_factory(model, attrs):
    """Used to unpickle Models without deferred fields.

    The stored model class itself is the class to rebuild; ``attrs`` is
    ignored. The indirection exists because the default __reduce__ path
    cannot be used -- a __deepcopy__ problem on Python 2.4 breaks it --
    so unpickling always goes through a factory.
    """
    return model
+
def model_unpickle(model, attrs, factory):
    """
    Used to unpickle Model subclasses with deferred fields.

    ``factory`` maps (model, attrs) to the concrete class to instantiate;
    the instance is created via __new__ so __init__ never runs -- pickle
    restores the attribute dict afterwards.
    """
    klass = factory(model, attrs)
    return klass.__new__(klass)
model_unpickle.__safe_for_unpickle__ = True
+
if sys.version_info >= (2, 5):
    def subclass_exception(name, parents, module):
        """Dynamically builds an exception class *name* living in *module*."""
        return type(name, parents, {'__module__': module})
else:
    # Prior to Python 2.5, Exception was an old-style class, so dynamic
    # subclasses must be produced with types.ClassType; the module argument
    # is accepted but unused.
    def subclass_exception(name, parents, unused):
        return types.ClassType(name, parents, {})