import types
import sys
import os
from itertools import izip
import django.db.models.manager  # Imported to register signal handler.
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned, FieldError, ValidationError, NON_FIELD_ERRORS
from django.core import validators
from django.db.models.fields import AutoField, FieldDoesNotExist
from django.db.models.fields.related import OneToOneRel, ManyToOneRel, OneToOneField
from django.db.models.query import delete_objects, Q
from django.db.models.query_utils import CollectedObjects, DeferredAttribute
from django.db.models.options import Options
from django.db import connections, router, transaction, DatabaseError, DEFAULT_DB_ALIAS
from django.db.models import signals
from django.db.models.loading import register_models, get_model
from django.utils.translation import ugettext_lazy as _
import django.utils.copycompat as copy
from django.utils.functional import curry, update_wrapper
from django.utils.encoding import smart_str, force_unicode, smart_unicode
from django.utils.text import get_text_list, capfirst
from django.conf import settings

class ModelBase(type):
    """
    Metaclass for all models.
    """
    def __new__(cls, name, bases, attrs):
        super_new = super(ModelBase, cls).__new__
        parents = [b for b in bases if isinstance(b, ModelBase)]
        if not parents:
            # If this isn't a subclass of Model, don't do anything special.
            return super_new(cls, name, bases, attrs)

        # Create the class.
        module = attrs.pop('__module__')
        new_class = super_new(cls, name, bases, {'__module__': module})
        attr_meta = attrs.pop('Meta', None)
        abstract = getattr(attr_meta, 'abstract', False)
        if not attr_meta:
            meta = getattr(new_class, 'Meta', None)
        else:
            meta = attr_meta
        base_meta = getattr(new_class, '_meta', None)

        if getattr(meta, 'app_label', None) is None:
            # Figure out the app_label by looking one level up.
            # For 'django.contrib.sites.models', this would be 'sites'.
            model_module = sys.modules[new_class.__module__]
            kwargs = {"app_label": model_module.__name__.split('.')[-2]}
        else:
            kwargs = {}

        new_class.add_to_class('_meta', Options(meta, **kwargs))
        if not abstract:
            new_class.add_to_class('DoesNotExist', subclass_exception('DoesNotExist',
                    tuple(x.DoesNotExist
                            for x in parents if hasattr(x, '_meta') and not x._meta.abstract)
                                    or (ObjectDoesNotExist,), module))
            new_class.add_to_class('MultipleObjectsReturned', subclass_exception('MultipleObjectsReturned',
                    tuple(x.MultipleObjectsReturned
                            for x in parents if hasattr(x, '_meta') and not x._meta.abstract)
                                    or (MultipleObjectsReturned,), module))
            if base_meta and not base_meta.abstract:
                # Non-abstract child classes inherit some attributes from their
                # non-abstract parent (unless an ABC comes before it in the
                # method resolution order).
                if not hasattr(meta, 'ordering'):
                    new_class._meta.ordering = base_meta.ordering
                if not hasattr(meta, 'get_latest_by'):
                    new_class._meta.get_latest_by = base_meta.get_latest_by

        is_proxy = new_class._meta.proxy

        if getattr(new_class, '_default_manager', None):
            if not is_proxy:
                # Multi-table inheritance doesn't inherit default manager from
                # parents.
                new_class._default_manager = None
                new_class._base_manager = None
            else:
                # Proxy classes do inherit parent's default manager, if none is
                # set explicitly.
                new_class._default_manager = new_class._default_manager._copy_to_model(new_class)
                new_class._base_manager = new_class._base_manager._copy_to_model(new_class)

        # Bail out early if we have already created this class.
        m = get_model(new_class._meta.app_label, name, False)
        if m is not None:
            return m

        # Add all attributes to the class.
        for obj_name, obj in attrs.items():
            new_class.add_to_class(obj_name, obj)

        # All the fields of any type declared on this model
        new_fields = new_class._meta.local_fields + \
                     new_class._meta.local_many_to_many + \
                     new_class._meta.virtual_fields
        field_names = set([f.name for f in new_fields])

        # Basic setup for proxy models.
        if is_proxy:
            base = None
            for parent in [cls for cls in parents if hasattr(cls, '_meta')]:
                if parent._meta.abstract:
                    if parent._meta.fields:
                        raise TypeError("Abstract base class containing model fields not permitted for proxy model '%s'." % name)
                    else:
                        continue
                if base is not None:
                    raise TypeError("Proxy model '%s' has more than one non-abstract model base class." % name)
                else:
                    base = parent
            if base is None:
                raise TypeError("Proxy model '%s' has no non-abstract model base class." % name)
            if (new_class._meta.local_fields or
                    new_class._meta.local_many_to_many):
                raise FieldError("Proxy model '%s' contains model fields." % name)
            while base._meta.proxy:
                base = base._meta.proxy_for_model
            new_class._meta.setup_proxy(base)

        # Do the appropriate setup for any model parents.
        o2o_map = dict([(f.rel.to, f) for f in new_class._meta.local_fields
                if isinstance(f, OneToOneField)])

        for base in parents:
            original_base = base
            if not hasattr(base, '_meta'):
                # Things without _meta aren't functional models, so they're
                # uninteresting parents.
                continue

            parent_fields = base._meta.local_fields + base._meta.local_many_to_many
            # Check for clashes between locally declared fields and those
            # on the base classes (we cannot handle shadowed fields at the
            # moment).
            for field in parent_fields:
                if field.name in field_names:
                    raise FieldError('Local field %r in class %r clashes '
                                     'with field of similar name from '
                                     'base class %r' %
                                        (field.name, name, base.__name__))
            if not base._meta.abstract:
                # Concrete classes...
                while base._meta.proxy:
                    # Skip over a proxy class to the "real" base it proxies.
                    base = base._meta.proxy_for_model
                if base in o2o_map:
                    field = o2o_map[base]
                elif not is_proxy:
                    attr_name = '%s_ptr' % base._meta.module_name
                    field = OneToOneField(base, name=attr_name,
                            auto_created=True, parent_link=True)
                    new_class.add_to_class(attr_name, field)
                else:
                    field = None
                new_class._meta.parents[base] = field
            else:
                # ... and abstract ones.
                for field in parent_fields:
                    new_class.add_to_class(field.name, copy.deepcopy(field))

                # Pass any non-abstract parent classes onto child.
                new_class._meta.parents.update(base._meta.parents)

            # Inherit managers from the abstract base classes.
            new_class.copy_managers(base._meta.abstract_managers)

            # Proxy models inherit the non-abstract managers from their base,
            # unless they have redefined any of them.
            if is_proxy:
                new_class.copy_managers(original_base._meta.concrete_managers)

            # Inherit virtual fields (like GenericForeignKey) from the parent
            # class.
            for field in base._meta.virtual_fields:
                if base._meta.abstract and field.name in field_names:
                    raise FieldError('Local field %r in class %r clashes '
                                     'with field of similar name from '
                                     'abstract base class %r' %
                                        (field.name, name, base.__name__))
                new_class.add_to_class(field.name, copy.deepcopy(field))

        if abstract:
            # Abstract base models can't be instantiated and don't appear in
            # the list of models for an app. We do the final setup for them a
            # little differently from normal models.
            attr_meta.abstract = False
            new_class.Meta = attr_meta
            return new_class

        new_class._prepare()
        register_models(new_class._meta.app_label, new_class)

        # Because of the way imports happen (recursively), this may or may not
        # be the first time this model has tried to register with the
        # framework. There should only be one class for each model, so we
        # always return the registered version.
        return get_model(new_class._meta.app_label, name, False)

    def copy_managers(cls, base_managers):
        # This is an in-place sort of an Options attribute, but that's fine.
        base_managers.sort()
        for _, mgr_name, manager in base_managers:
            val = getattr(cls, mgr_name, None)
            if not val or val is manager:
                new_manager = manager._copy_to_model(cls)
                cls.add_to_class(mgr_name, new_manager)

    def add_to_class(cls, name, value):
        if hasattr(value, 'contribute_to_class'):
            value.contribute_to_class(cls, name)
        else:
            setattr(cls, name, value)

    def _prepare(cls):
        """
        Creates some methods once self._meta has been populated.
        """
        opts = cls._meta
        opts._prepare(cls)

        if opts.order_with_respect_to:
            cls.get_next_in_order = curry(cls._get_next_or_previous_in_order, is_next=True)
            cls.get_previous_in_order = curry(cls._get_next_or_previous_in_order, is_next=False)
            setattr(opts.order_with_respect_to.rel.to, 'get_%s_order' % cls.__name__.lower(), curry(method_get_order, cls))
            setattr(opts.order_with_respect_to.rel.to, 'set_%s_order' % cls.__name__.lower(), curry(method_set_order, cls))

        # Give the class a docstring -- its definition.
        if cls.__doc__ is None:
            cls.__doc__ = "%s(%s)" % (cls.__name__, ", ".join([f.attname for f in opts.fields]))

        if hasattr(cls, 'get_absolute_url'):
            cls.get_absolute_url = update_wrapper(curry(get_absolute_url, opts, cls.get_absolute_url),
                                                  cls.get_absolute_url)

        signals.class_prepared.send(sender=cls)
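
        # Example (sketch): third-party code can listen for the class_prepared
        # signal sent above. A hypothetical handler:
        #
        #     from django.db.models import signals
        #
        #     def announce(sender, **kwargs):
        #         print "Prepared model class: %s" % sender.__name__
        #
        #     signals.class_prepared.connect(announce)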

class ModelState(object):
    """
    A class for storing instance state
    """
    def __init__(self, db=None):
        self.db = db

class Model(object):
    __metaclass__ = ModelBase
    _deferred = False

    def __init__(self, *args, **kwargs):
        signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs)

        # Set up the storage for instance state
        self._state = ModelState()

        # There is a rather weird disparity here; if kwargs, it's set, then args
        # overrides it. It should be one or the other; don't duplicate the work.
        # The reason for the kwargs check is that the standard iterator passes
        # in by args, and instantiation for iteration is 33% faster.
        args_len = len(args)
        if args_len > len(self._meta.fields):
            # Daft, but matches old exception sans the err msg.
            raise IndexError("Number of args exceeds number of fields")

        fields_iter = iter(self._meta.fields)
        if not kwargs:
            # The ordering of the izip calls matters - izip throws StopIteration
            # when an iter throws it. So if the first iter throws it, the second
            # is *not* consumed. We rely on this, so don't change the order
            # without changing the logic.
            for val, field in izip(args, fields_iter):
                setattr(self, field.attname, val)
        else:
            # Slower, kwargs-ready version.
            for val, field in izip(args, fields_iter):
                setattr(self, field.attname, val)
                kwargs.pop(field.name, None)
                # Maintain compatibility with existing calls.
                if isinstance(field.rel, ManyToOneRel):
                    kwargs.pop(field.attname, None)

        # Now we're left with the unprocessed fields that *must* come from
        # keywords, or default.

        for field in fields_iter:
            is_related_object = False
            # This slightly odd construct is so that we can access any
            # data-descriptor object (DeferredAttribute) without triggering its
            # __get__ method.
            if (field.attname not in kwargs and
                    isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute)):
                # This field will be populated on request.
                continue
            if kwargs:
                if isinstance(field.rel, ManyToOneRel):
                    try:
                        # Assume object instance was passed in.
                        rel_obj = kwargs.pop(field.name)
                        is_related_object = True
                    except KeyError:
                        try:
                            # Object instance wasn't passed in -- must be an ID.
                            val = kwargs.pop(field.attname)
                        except KeyError:
                            val = field.get_default()
                    else:
                        # Object instance was passed in. Special case: You can
                        # pass in "None" for related objects if it's allowed.
                        if rel_obj is None and field.null:
                            val = None
                else:
                    try:
                        val = kwargs.pop(field.attname)
                    except KeyError:
                        # This is done with an exception rather than the
                        # default argument on pop because we don't want
                        # get_default() to be evaluated, and then not used.
                        # Refs #12057.
                        val = field.get_default()
            else:
                val = field.get_default()
            if is_related_object:
                # If we are passed a related instance, set it using the
                # field.name instead of field.attname (e.g. "user" instead of
                # "user_id") so that the object gets properly cached (and type
                # checked) by the RelatedObjectDescriptor.
                setattr(self, field.name, rel_obj)
            else:
                setattr(self, field.attname, val)

        if kwargs:
            for prop in kwargs.keys():
                try:
                    if isinstance(getattr(self.__class__, prop), property):
                        setattr(self, prop, kwargs.pop(prop))
                except AttributeError:
                    pass
            if kwargs:
                raise TypeError("'%s' is an invalid keyword argument for this function" % kwargs.keys()[0])
        signals.post_init.send(sender=self.__class__, instance=self)

    def __repr__(self):
        try:
            u = unicode(self)
        except (UnicodeEncodeError, UnicodeDecodeError):
            u = '[Bad Unicode data]'
        return smart_str(u'<%s: %s>' % (self.__class__.__name__, u))

    def __str__(self):
        if hasattr(self, '__unicode__'):
            return force_unicode(self).encode('utf-8')
        return '%s object' % self.__class__.__name__

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self._get_pk_val() == other._get_pk_val()

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        return hash(self._get_pk_val())

    def __reduce__(self):
        """
        Provide pickling support. Normally, this just dispatches to Python's
        standard handling. However, for models with deferred field loading, we
        need to do things manually, as they're dynamically created classes and
        only module-level classes can be pickled by the default path.
        """
        data = self.__dict__
        model = self.__class__
        # The obvious thing to do here is to invoke super().__reduce__()
        # for the non-deferred case. Don't do that.
        # On Python 2.4, there is something weird with __reduce__,
        # and as a result, the super call will cause an infinite recursion.
        # See #10547 and #12121.
        defers = []
        pk_val = None
        if self._deferred:
            from django.db.models.query_utils import deferred_class_factory
            factory = deferred_class_factory
            for field in self._meta.fields:
                if isinstance(self.__class__.__dict__.get(field.attname),
                        DeferredAttribute):
                    defers.append(field.attname)
                    if pk_val is None:
                        # The pk_val and model values are the same for all
                        # DeferredAttribute classes, so we only need to do this
                        # once.
                        obj = self.__class__.__dict__[field.attname]
                        model = obj.model_ref()
        else:
            factory = simple_class_factory
        return (model_unpickle, (model, defers, factory), data)
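
    # Example (sketch): __reduce__ makes instances picklable, including the
    # dynamically created deferred classes produced by QuerySet.defer()/only().
    # Assuming a hypothetical Article model with a "body" field:
    #
    #     import pickle
    #     article = Article.objects.defer("body").get(pk=1)
    #     restored = pickle.loads(pickle.dumps(article))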

    def _get_pk_val(self, meta=None):
        if not meta:
            meta = self._meta
        return getattr(self, meta.pk.attname)

    def _set_pk_val(self, value):
        return setattr(self, self._meta.pk.attname, value)

    pk = property(_get_pk_val, _set_pk_val)

    def serializable_value(self, field_name):
        """
        Returns the value of the field name for this instance. If the field is
        a foreign key, returns the id value, instead of the object. If there's
        no Field object with this name on the model, the model attribute's
        value is returned directly.

        Used to serialize a field's value (in the serializer, or form output,
        for example). Normally, you would just access the attribute directly
        and not use this method.
        """
        try:
            field = self._meta.get_field_by_name(field_name)[0]
        except FieldDoesNotExist:
            return getattr(self, field_name)
        return getattr(self, field.attname)
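
    # Example (sketch): assuming a hypothetical Entry model with a ForeignKey
    # named "author" and a "slug" field, serializable_value() returns the raw
    # column value rather than the related object:
    #
    #     entry.serializable_value('author')   # the author's primary key
    #     entry.serializable_value('slug')     # the slug string itself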

    def save(self, force_insert=False, force_update=False, using=None):
        """
        Saves the current instance. Override this in a subclass if you want to
        control the saving process.

        The 'force_insert' and 'force_update' parameters can be used to insist
        that the "save" must be an SQL insert or update (or equivalent for
        non-SQL backends), respectively. Normally, they should not be set.
        """
        if force_insert and force_update:
            raise ValueError("Cannot force both insert and updating in model saving.")
        self.save_base(using=using, force_insert=force_insert, force_update=force_update)

    save.alters_data = True
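
    # Usage sketch, assuming a hypothetical Article model:
    #
    #     article = Article(title=u"Hello")
    #     article.save()                    # INSERT or UPDATE, as appropriate
    #     article.save(force_update=True)   # insist on an UPDATE; raises
    #                                       # DatabaseError if no row is affected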

    def save_base(self, raw=False, cls=None, origin=None, force_insert=False,
            force_update=False, using=None):
        """
        Does the heavy-lifting involved in saving. Subclasses shouldn't need to
        override this method. It's separate from save() in order to hide the
        need for overrides of save() to pass around internal-only parameters
        ('raw', 'cls', and 'origin').
        """
        using = using or router.db_for_write(self.__class__, instance=self)
        connection = connections[using]
        assert not (force_insert and force_update)
        if cls is None:
            cls = self.__class__
            meta = cls._meta
            if not meta.proxy:
                origin = cls
        else:
            meta = cls._meta

        if origin and not meta.auto_created:
            signals.pre_save.send(sender=origin, instance=self, raw=raw)

        # If we are in a raw save, save the object exactly as presented.
        # That means that we don't try to be smart about saving attributes
        # that might have come from the parent class - we just save the
        # attributes we have been given to the class we have been given.
        # We also go through this process to defer the save of proxy objects
        # to their actual underlying model.
        if not raw or meta.proxy:
            if meta.proxy:
                org = cls
            else:
                org = None
            for parent, field in meta.parents.items():
                # At this point, parent's primary key field may be unknown
                # (for example, from an administration form which doesn't fill
                # this field). If so, fill it.
                if field and getattr(self, parent._meta.pk.attname) is None and getattr(self, field.attname) is not None:
                    setattr(self, parent._meta.pk.attname, getattr(self, field.attname))

                self.save_base(cls=parent, origin=org, using=using)

                if field:
                    setattr(self, field.attname, self._get_pk_val(parent._meta))
            if meta.proxy:
                return

        if not meta.proxy:
            non_pks = [f for f in meta.local_fields if not f.primary_key]

            # First, try an UPDATE. If that doesn't update anything, do an INSERT.
            pk_val = self._get_pk_val(meta)
            pk_set = pk_val is not None
            record_exists = True
            manager = cls._base_manager
            if pk_set:
                # Determine whether a record with the primary key already exists.
                if (force_update or (not force_insert and
                        manager.using(using).filter(pk=pk_val).exists())):
                    # It does already exist, so do an UPDATE.
                    if force_update or non_pks:
                        values = [(f, None, (raw and getattr(self, f.attname) or f.pre_save(self, False))) for f in non_pks]
                        rows = manager.using(using).filter(pk=pk_val)._update(values)
                        if force_update and not rows:
                            raise DatabaseError("Forced update did not affect any rows.")
                else:
                    record_exists = False
            if not pk_set or not record_exists:
                if meta.order_with_respect_to:
                    # If this is a model with an order_with_respect_to,
                    # autopopulate the _order field.
                    field = meta.order_with_respect_to
                    order_value = manager.using(using).filter(**{field.name: getattr(self, field.attname)}).count()
                    setattr(self, '_order', order_value)

                if not pk_set:
                    if force_update:
                        raise ValueError("Cannot force an update in save() with no primary key.")
                    values = [(f, f.get_db_prep_save(raw and getattr(self, f.attname) or f.pre_save(self, True), connection=connection))
                        for f in meta.local_fields if not isinstance(f, AutoField)]
                else:
                    values = [(f, f.get_db_prep_save(raw and getattr(self, f.attname) or f.pre_save(self, True), connection=connection))
                        for f in meta.local_fields]

                record_exists = False

                update_pk = bool(meta.has_auto_field and not pk_set)
                if values:
                    # Create a new record.
                    result = manager._insert(values, return_id=update_pk, using=using)
                else:
                    # Create a new record with defaults for everything.
                    result = manager._insert([(meta.pk, connection.ops.pk_default_value())], return_id=update_pk, raw_values=True, using=using)

                if update_pk:
                    setattr(self, meta.pk.attname, result)
            transaction.commit_unless_managed(using=using)

        # Store the database on which the object was saved
        self._state.db = using

        # Signal that the save is complete
        if origin and not meta.auto_created:
            signals.post_save.send(sender=origin, instance=self,
                created=(not record_exists), raw=raw)

    save_base.alters_data = True

    def _collect_sub_objects(self, seen_objs, parent=None, nullable=False):
        """
        Recursively populates seen_objs with all objects related to this
        object.

        When done, seen_objs.items() will be in the format:
            [(model_class, {pk_val: obj, pk_val: obj, ...}),
             (model_class, {pk_val: obj, pk_val: obj, ...}), ...]
        """
        pk_val = self._get_pk_val()
        if seen_objs.add(self.__class__, pk_val, self,
                         type(parent), parent, nullable):
            return

        for related in self._meta.get_all_related_objects():
            rel_opts_name = related.get_accessor_name()
            if not related.field.rel.multiple:
                try:
                    sub_obj = getattr(self, rel_opts_name)
                except ObjectDoesNotExist:
                    pass
                else:
                    sub_obj._collect_sub_objects(seen_objs, self, related.field.null)
            else:
                # To make sure we can access all elements, we can't use the
                # normal manager on the related object. So we work directly
                # with the descriptor object.
                for cls in self.__class__.mro():
                    if rel_opts_name in cls.__dict__:
                        rel_descriptor = cls.__dict__[rel_opts_name]
                        break
                else:
                    # In the case of a hidden fkey just skip it; it'll get
                    # processed as an m2m.
                    if not related.field.rel.is_hidden():
                        raise AssertionError("Should never get here.")
                    else:
                        continue
                delete_qs = rel_descriptor.delete_manager(self).all()
                for sub_obj in delete_qs:
                    sub_obj._collect_sub_objects(seen_objs, self, related.field.null)

        for related in self._meta.get_all_related_many_to_many_objects():
            if related.field.rel.through:
                opts = related.field.rel.through._meta
                reverse_field_name = related.field.m2m_reverse_field_name()
                nullable = opts.get_field(reverse_field_name).null
                filters = {reverse_field_name: self}
                for sub_obj in related.field.rel.through._base_manager.filter(**filters):
                    sub_obj._collect_sub_objects(seen_objs, self, nullable)

        for f in self._meta.many_to_many:
            if f.rel.through:
                opts = f.rel.through._meta
                field_name = f.m2m_field_name()
                nullable = opts.get_field(field_name).null
                filters = {field_name: self}
                for sub_obj in f.rel.through._base_manager.filter(**filters):
                    sub_obj._collect_sub_objects(seen_objs, self, nullable)
            else:
                # m2m-ish but with no through table? GenericRelation: cascade delete.
                for sub_obj in f.value_from_object(self).all():
                    # Generic relations are not enforced by db constraints, thus
                    # we can set nullable=True; order does not matter.
                    sub_obj._collect_sub_objects(seen_objs, self, True)

        # Handle any ancestors (for the model-inheritance case). We do this by
        # traversing to the most remote parent classes -- those with no parents
        # themselves -- and then adding those instances to the collection. That
        # will include all the child instances down to "self".
        parent_stack = [p for p in self._meta.parents.values() if p is not None]
        while parent_stack:
            link = parent_stack.pop()
            parent_obj = getattr(self, link.name)
            if parent_obj._meta.parents:
                parent_stack.extend(parent_obj._meta.parents.values())
                continue
            # At this point, parent_obj is the base class (no ancestor models),
            # so delete it and all its descendants.
            parent_obj._collect_sub_objects(seen_objs)

    def delete(self, using=None):
        using = using or router.db_for_write(self.__class__, instance=self)
        connection = connections[using]
        assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname)

        # Find all the objects that need to be deleted.
        seen_objs = CollectedObjects()
        self._collect_sub_objects(seen_objs)

        # Actually delete the objects.
        delete_objects(seen_objs, using)

    delete.alters_data = True

    def _get_FIELD_display(self, field):
        value = getattr(self, field.attname)
        return force_unicode(dict(field.flatchoices).get(value, value), strings_only=True)
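
    # Example (sketch): for a field declared with choices, Django exposes this
    # method as get_<field>_display(). Assuming a hypothetical "status" field
    # with choices=(('d', u'Draft'), ('p', u'Published')):
    #
    #     article.status                 # 'p'
    #     article.get_status_display()   # u'Published'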

    def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs):
        op = is_next and 'gt' or 'lt'
        order = not is_next and '-' or ''
        param = smart_str(getattr(self, field.attname))
        q = Q(**{'%s__%s' % (field.name, op): param})
        q = q | Q(**{field.name: param, 'pk__%s' % op: self.pk})
        qs = self.__class__._default_manager.using(self._state.db).filter(**kwargs).filter(q).order_by('%s%s' % (order, field.name), '%spk' % order)
        try:
            return qs[0]
        except IndexError:
            raise self.DoesNotExist("%s matching query does not exist." % self.__class__._meta.object_name)

    def _get_next_or_previous_in_order(self, is_next):
        cachename = "__%s_order_cache" % is_next
        if not hasattr(self, cachename):
            op = is_next and 'gt' or 'lt'
            order = not is_next and '-_order' or '_order'
            order_field = self._meta.order_with_respect_to
            obj = self._default_manager.filter(**{
                order_field.name: getattr(self, order_field.attname)
            }).filter(**{
                '_order__%s' % op: self._default_manager.values('_order').filter(**{
                    self._meta.pk.name: self.pk
                })
            }).order_by(order)[:1].get()
            setattr(self, cachename, obj)
        return getattr(self, cachename)
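
    # Example (sketch): these two helpers back the generated navigation methods.
    # Assuming a hypothetical Article model with a DateField "pub_date" and
    # Meta.order_with_respect_to = 'blog':
    #
    #     article.get_next_by_pub_date()     # via _get_next_or_previous_by_FIELD
    #     article.get_previous_in_order()    # via _get_next_or_previous_in_order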

    def prepare_database_save(self, unused):
        return self.pk

    def clean(self):
        """
        Hook for doing any extra model-wide validation after clean() has been
        called on every field by self.clean_fields. Any ValidationError raised
        by this method will not be associated with a particular field; it will
        have a special-case association with the field defined by NON_FIELD_ERRORS.
        """
        pass
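
    # Example (sketch): subclasses typically override clean() for validation
    # that spans several fields. Assuming a hypothetical Event model with
    # "start" and "end" datetime fields:
    #
    #     def clean(self):
    #         if self.start > self.end:
    #             raise ValidationError(u"start must come before end.")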

    def validate_unique(self, exclude=None):
        """
        Checks unique constraints on the model and raises ``ValidationError``
        if any failed.
        """
        unique_checks, date_checks = self._get_unique_checks(exclude=exclude)

        errors = self._perform_unique_checks(unique_checks)
        date_errors = self._perform_date_checks(date_checks)

        for k, v in date_errors.items():
            errors.setdefault(k, []).extend(v)

        if errors:
            raise ValidationError(errors)

    def _get_unique_checks(self, exclude=None):
        """
        Gather a list of checks to perform. Since validate_unique could be
        called from a ModelForm, some fields may have been excluded; we can't
        perform a unique check on a model that is missing fields involved
        in that check.
        Fields that did not validate should also be excluded, but they need
        to be passed in via the exclude argument.
        """
        if exclude is None:
            exclude = []
        unique_checks = []

        unique_togethers = [(self.__class__, self._meta.unique_together)]
        for parent_class in self._meta.parents.keys():
            if parent_class._meta.unique_together:
                unique_togethers.append((parent_class, parent_class._meta.unique_together))

        for model_class, unique_together in unique_togethers:
            for check in unique_together:
                for name in check:
                    # If this is an excluded field, don't add this check.
                    if name in exclude:
                        break
                else:
                    unique_checks.append((model_class, tuple(check)))

        # These are checks for the unique_for_<date/year/month>.
        date_checks = []

        # Gather a list of checks for fields declared as unique and add them to
        # the list of checks.

        fields_with_class = [(self.__class__, self._meta.local_fields)]
        for parent_class in self._meta.parents.keys():
            fields_with_class.append((parent_class, parent_class._meta.local_fields))

        for model_class, fields in fields_with_class:
            for f in fields:
                name = f.name
                if name in exclude:
                    continue
                if f.unique:
                    unique_checks.append((model_class, (name,)))
                if f.unique_for_date:
                    date_checks.append((model_class, 'date', name, f.unique_for_date))
                if f.unique_for_year:
                    date_checks.append((model_class, 'year', name, f.unique_for_year))
                if f.unique_for_month:
                    date_checks.append((model_class, 'month', name, f.unique_for_month))
        return unique_checks, date_checks

    def _perform_unique_checks(self, unique_checks):
        errors = {}

        for model_class, unique_check in unique_checks:
            # Try to look up an existing object with the same values as this
            # object's values for all the unique fields.

            lookup_kwargs = {}
            for field_name in unique_check:
                f = self._meta.get_field(field_name)
                lookup_value = getattr(self, f.attname)
                if lookup_value is None:
                    # no value, skip the lookup
                    continue
                if f.primary_key and not getattr(self, '_adding', False):
                    # no need to check for a unique primary key when editing
                    continue
                lookup_kwargs[str(field_name)] = lookup_value

            # some fields were skipped, no reason to do the check
            if len(unique_check) != len(lookup_kwargs.keys()):
                continue

            qs = model_class._default_manager.filter(**lookup_kwargs)

            # Exclude the current object from the query if we are editing an
            # instance (as opposed to creating a new one)
            if not getattr(self, '_adding', False) and self.pk is not None:
                qs = qs.exclude(pk=self.pk)

            if qs.exists():
                if len(unique_check) == 1:
                    key = unique_check[0]
                else:
                    key = NON_FIELD_ERRORS
                errors.setdefault(key, []).append(self.unique_error_message(model_class, unique_check))

        return errors

    def _perform_date_checks(self, date_checks):
        errors = {}
        for model_class, lookup_type, field, unique_for in date_checks:
            lookup_kwargs = {}
            # there's a ticket to add a date lookup, we can remove this special
            # case if that makes its way in
            date = getattr(self, unique_for)
            if lookup_type == 'date':
                lookup_kwargs['%s__day' % unique_for] = date.day
                lookup_kwargs['%s__month' % unique_for] = date.month
                lookup_kwargs['%s__year' % unique_for] = date.year
            else:
                lookup_kwargs['%s__%s' % (unique_for, lookup_type)] = getattr(date, lookup_type)
            lookup_kwargs[field] = getattr(self, field)

            qs = model_class._default_manager.filter(**lookup_kwargs)
            # Exclude the current object from the query if we are editing an
            # instance (as opposed to creating a new one)
            if not getattr(self, '_adding', False) and self.pk is not None:
                qs = qs.exclude(pk=self.pk)

            if qs.exists():
                errors.setdefault(field, []).append(
                    self.date_error_message(lookup_type, field, unique_for)
                )
        return errors

    def date_error_message(self, lookup_type, field, unique_for):
        opts = self._meta
        return _(u"%(field_name)s must be unique for %(date_field)s %(lookup)s.") % {
            'field_name': unicode(capfirst(opts.get_field(field).verbose_name)),
            'date_field': unicode(capfirst(opts.get_field(unique_for).verbose_name)),
            'lookup': lookup_type,
        }

    def unique_error_message(self, model_class, unique_check):
        opts = model_class._meta
        model_name = capfirst(opts.verbose_name)

        # A unique field
        if len(unique_check) == 1:
            field_name = unique_check[0]
            field_label = capfirst(opts.get_field(field_name).verbose_name)
            # Insert the error into the error dict, very sneaky
            return _(u"%(model_name)s with this %(field_label)s already exists.") % {
                'model_name': unicode(model_name),
                'field_label': unicode(field_label)
            }
        # unique_together
        else:
            field_labels = map(lambda f: capfirst(opts.get_field(f).verbose_name), unique_check)
            field_labels = get_text_list(field_labels, _('and'))
            return _(u"%(model_name)s with this %(field_label)s already exists.") % {
                'model_name': unicode(model_name),
                'field_label': unicode(field_labels)
            }

    def full_clean(self, exclude=None):
        """
        Calls clean_fields, clean, and validate_unique on the model,
        and raises a ``ValidationError`` for any errors that occurred.
        """
        errors = {}
        if exclude is None:
            exclude = []

        try:
            self.clean_fields(exclude=exclude)
        except ValidationError, e:
            errors = e.update_error_dict(errors)

        # Form.clean() is run even if other validation fails, so do the
        # same with Model.clean() for consistency.
        try:
            self.clean()
        except ValidationError, e:
            errors = e.update_error_dict(errors)

        # Run unique checks, but only for fields that passed validation.
        for name in errors.keys():
            if name != NON_FIELD_ERRORS and name not in exclude:
                exclude.append(name)
        try:
            self.validate_unique(exclude=exclude)
        except ValidationError, e:
            errors = e.update_error_dict(errors)

        if errors:
            raise ValidationError(errors)
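
    # Usage sketch: callers run full_clean() themselves when they want model
    # validation before saving (save() does not call it automatically).
    # Assuming a hypothetical Article instance:
    #
    #     try:
    #         article.full_clean()
    #     except ValidationError, e:
    #         print e.message_dict
    #     else:
    #         article.save()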

    def clean_fields(self, exclude=None):
        """
        Cleans all fields and raises a ValidationError containing message_dict
        of all validation errors if any occur.
        """
        if exclude is None:
            exclude = []

        errors = {}
        for f in self._meta.fields:
            if f.name in exclude:
                continue
            # Skip validation for empty fields with blank=True. The developer
            # is responsible for making sure they have a valid value.
            raw_value = getattr(self, f.attname)
            if f.blank and raw_value in validators.EMPTY_VALUES:
                continue
            try:
                setattr(self, f.attname, f.clean(raw_value, self))
            except ValidationError, e:
                errors[f.name] = e.messages

        if errors:
            raise ValidationError(errors)


############################################
# HELPER FUNCTIONS (CURRIED MODEL METHODS) #
############################################

# ORDERING METHODS #########################

def method_set_order(ordered_obj, self, id_list, using=None):
    if using is None:
        using = DEFAULT_DB_ALIAS
    rel_val = getattr(self, ordered_obj._meta.order_with_respect_to.rel.field_name)
    order_name = ordered_obj._meta.order_with_respect_to.name
    # FIXME: It would be nice if there were an "update many" version of update
    # for situations like this.
    for i, j in enumerate(id_list):
        ordered_obj.objects.filter(**{'pk': j, order_name: rel_val}).update(_order=i)
    transaction.commit_unless_managed(using=using)


def method_get_order(ordered_obj, self):
    rel_val = getattr(self, ordered_obj._meta.order_with_respect_to.rel.field_name)
    order_name = ordered_obj._meta.order_with_respect_to.name
    pk_name = ordered_obj._meta.pk.name
    return [r[pk_name] for r in
            ordered_obj.objects.filter(**{order_name: rel_val}).values(pk_name)]
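
# Example (sketch): with Meta.order_with_respect_to, ModelBase._prepare attaches
# these helpers to the *related* model as get_<model>_order() and
# set_<model>_order(). Assuming a hypothetical Question model whose Answer
# objects are ordered with respect to it:
#
#     question.get_answer_order()          # e.g. [3, 1, 2]
#     question.set_answer_order([1, 2, 3])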


##############################################
# HELPER FUNCTIONS (CURRIED MODEL FUNCTIONS) #
##############################################

def get_absolute_url(opts, func, self, *args, **kwargs):
    return settings.ABSOLUTE_URL_OVERRIDES.get('%s.%s' % (opts.app_label, opts.module_name), func)(self, *args, **kwargs)
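
# Example (sketch): the ABSOLUTE_URL_OVERRIDES setting consulted above lets a
# project replace a model's get_absolute_url() without editing the model. In
# settings.py, keyed by "app_label.model_name" (lower case), e.g. for a
# hypothetical blog.Article model:
#
#     ABSOLUTE_URL_OVERRIDES = {
#         'blog.article': lambda obj: '/articles/%s/' % obj.slug,
#     }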


########
# MISC #
########

class Empty(object):
    pass

def simple_class_factory(model, attrs):
    """Used to unpickle Models without deferred fields.

    We need to do this the hard way, rather than just using
    the default __reduce__ implementation, because of a
    __deepcopy__ problem in Python 2.4
    """
    return model

def model_unpickle(model, attrs, factory):
    """
    Used to unpickle Model subclasses with deferred fields.
    """
    cls = factory(model, attrs)
    return cls.__new__(cls)
model_unpickle.__safe_for_unpickle__ = True

if sys.version_info < (2, 5):
    # Prior to Python 2.5, Exception was an old-style class
    def subclass_exception(name, parents, unused):
        return types.ClassType(name, parents, {})
else:
    def subclass_exception(name, parents, module):
        return type(name, parents, {'__module__': module})