web/lib/django/db/models/query.py
changeset 29 cc9b7e14412b
parent 0 0d40e90630ef
28:b758351d191f 29:cc9b7e14412b
     1 """
     1 """
     2 The main QuerySet implementation. This provides the public API for the ORM.
     2 The main QuerySet implementation. This provides the public API for the ORM.
     3 """
     3 """
     4 
     4 
     5 try:
       
     6     set
       
     7 except NameError:
       
     8     from sets import Set as set     # Python 2.3 fallback
       
     9 
       
    10 from copy import deepcopy
     5 from copy import deepcopy
    11 
     6 from itertools import izip
    12 from django.db import connection, transaction, IntegrityError
     7 
       
     8 from django.db import connections, router, transaction, IntegrityError
    13 from django.db.models.aggregates import Aggregate
     9 from django.db.models.aggregates import Aggregate
    14 from django.db.models.fields import DateField
    10 from django.db.models.fields import DateField
    15 from django.db.models.query_utils import Q, select_related_descend, CollectedObjects, CyclicDependency, deferred_class_factory
    11 from django.db.models.query_utils import Q, select_related_descend, CollectedObjects, CyclicDependency, deferred_class_factory, InvalidQuery
    16 from django.db.models import signals, sql
    12 from django.db.models import signals, sql
    17 
    13 from django.utils.copycompat import deepcopy
    18 
    14 
    19 # Used to control how many objects are worked with at once in some cases (e.g.
    15 # Used to control how many objects are worked with at once in some cases (e.g.
    20 # when deleting objects).
    16 # when deleting objects).
    21 CHUNK_SIZE = 100
    17 CHUNK_SIZE = 100
    22 ITER_CHUNK_SIZE = CHUNK_SIZE
    18 ITER_CHUNK_SIZE = CHUNK_SIZE
    29 
    25 
    30 class QuerySet(object):
    26 class QuerySet(object):
    31     """
    27     """
    32     Represents a lazy database lookup for a set of objects.
    28     Represents a lazy database lookup for a set of objects.
    33     """
    29     """
    34     def __init__(self, model=None, query=None):
    30     def __init__(self, model=None, query=None, using=None):
    35         self.model = model
    31         self.model = model
    36         self.query = query or sql.Query(self.model, connection)
    32         # EmptyQuerySet instantiates QuerySet with model as None
       
    33         self._db = using
       
    34         self.query = query or sql.Query(self.model)
    37         self._result_cache = None
    35         self._result_cache = None
    38         self._iter = None
    36         self._iter = None
    39         self._sticky_filter = False
    37         self._sticky_filter = False
       
    38         self._for_write = False
    40 
    39 
    41     ########################
    40     ########################
    42     # PYTHON MAGIC METHODS #
    41     # PYTHON MAGIC METHODS #
    43     ########################
    42     ########################
    44 
    43 
    45     def __deepcopy__(self, memo):
    44     def __deepcopy__(self, memo):
    46         """
    45         """
    47         Deep copy of a QuerySet doesn't populate the cache
    46         Deep copy of a QuerySet doesn't populate the cache
    48         """
    47         """
    49         obj_dict = deepcopy(self.__dict__, memo)
       
    50         obj_dict['_iter'] = None
       
    51 
       
    52         obj = self.__class__()
    48         obj = self.__class__()
    53         obj.__dict__.update(obj_dict)
    49         for k,v in self.__dict__.items():
       
    50             if k in ('_iter','_result_cache'):
       
    51                 obj.__dict__[k] = None
       
    52             else:
       
    53                 obj.__dict__[k] = deepcopy(v, memo)
    54         return obj
    54         return obj
    55 
    55 
    56     def __getstate__(self):
    56     def __getstate__(self):
    57         """
    57         """
    58         Allows the QuerySet to be pickled.
    58         Allows the QuerySet to be pickled.
   111         try:
   111         try:
   112             iter(self).next()
   112             iter(self).next()
   113         except StopIteration:
   113         except StopIteration:
   114             return False
   114             return False
   115         return True
   115         return True
       
   116 
       
   117     def __contains__(self, val):
       
   118         # The 'in' operator works without this method, due to __iter__. This
       
   119         # implementation exists only to shortcut the creation of Model
       
   120         # instances, by bailing out early if we find a matching element.
       
   121         pos = 0
       
   122         if self._result_cache is not None:
       
   123             if val in self._result_cache:
       
   124                 return True
       
   125             elif self._iter is None:
       
   126                 # iterator is exhausted, so we have our answer
       
   127                 return False
       
   128             # remember not to check these again:
       
   129             pos = len(self._result_cache)
       
   130         else:
       
   131             # We need to start filling the result cache out. The following
       
   132             # ensures that self._iter is not None and self._result_cache is not
       
   133             # None
       
   134             it = iter(self)
       
   135 
       
   136         # Carry on, one result at a time.
       
   137         while True:
       
   138             if len(self._result_cache) <= pos:
       
   139                 self._fill_cache(num=1)
       
   140             if self._iter is None:
       
   141                 # we ran out of items
       
   142                 return False
       
   143             if self._result_cache[pos] == val:
       
   144                 return True
       
   145             pos += 1
   116 
   146 
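A behavioural sketch of the __contains__ hook added above, assuming a hypothetical Author model (not part of this changeset):

# `in` now fills the result cache one row at a time and returns as soon as a
# match is found, instead of evaluating the whole queryset first.
qs = Author.objects.order_by('id')
first = Author.objects.get(pk=1)
if first in qs:
    print 'found without materialising every row'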
   117     def __getitem__(self, k):
   147     def __getitem__(self, k):
   118         """
   148         """
   119         Retrieves an item or slice from the set of results.
   149         Retrieves an item or slice from the set of results.
   120         """
   150         """
   156         try:
   186         try:
   157             qs = self._clone()
   187             qs = self._clone()
   158             qs.query.set_limits(k, k + 1)
   188             qs.query.set_limits(k, k + 1)
   159             return list(qs)[0]
   189             return list(qs)[0]
   160         except self.model.DoesNotExist, e:
   190         except self.model.DoesNotExist, e:
   161             raise IndexError, e.args
   191             raise IndexError(e.args)
   162 
   192 
   163     def __and__(self, other):
   193     def __and__(self, other):
   164         self._merge_sanity_check(other)
   194         self._merge_sanity_check(other)
   165         if isinstance(other, EmptyQuerySet):
   195         if isinstance(other, EmptyQuerySet):
   166             return other._clone()
   196             return other._clone()
   233                     skip.add(field.attname)
   263                     skip.add(field.attname)
   234                 else:
   264                 else:
   235                     init_list.append(field.attname)
   265                     init_list.append(field.attname)
   236             model_cls = deferred_class_factory(self.model, skip)
   266             model_cls = deferred_class_factory(self.model, skip)
   237 
   267 
   238         for row in self.query.results_iter():
   268         compiler = self.query.get_compiler(using=self.db)
       
   269         for row in compiler.results_iter():
   239             if fill_cache:
   270             if fill_cache:
   240                 obj, _ = get_cached_row(self.model, row,
   271                 obj, _ = get_cached_row(self.model, row,
   241                             index_start, max_depth,
   272                             index_start, using=self.db, max_depth=max_depth,
   242                             requested=requested, offset=len(aggregate_select),
   273                             requested=requested, offset=len(aggregate_select),
   243                             only_load=only_load)
   274                             only_load=only_load)
   244             else:
   275             else:
   245                 if skip:
   276                 if skip:
   246                     row_data = row[index_start:aggregate_start]
   277                     row_data = row[index_start:aggregate_start]
   248                     obj = model_cls(**dict(zip(init_list, row_data)))
   279                     obj = model_cls(**dict(zip(init_list, row_data)))
   249                 else:
   280                 else:
   250                     # Omit aggregates in object creation.
   281                     # Omit aggregates in object creation.
   251                     obj = self.model(*row[index_start:aggregate_start])
   282                     obj = self.model(*row[index_start:aggregate_start])
   252 
   283 
       
   284                 # Store the source database of the object
       
   285                 obj._state.db = self.db
       
   286 
   253             for i, k in enumerate(extra_select):
   287             for i, k in enumerate(extra_select):
   254                 setattr(obj, k, row[i])
   288                 setattr(obj, k, row[i])
   255 
   289 
   256             # Add the aggregates to the model
   290             # Add the aggregates to the model
   257             for i, aggregate in enumerate(aggregate_select):
   291             for i, aggregate in enumerate(aggregate_select):
   262     def aggregate(self, *args, **kwargs):
   296     def aggregate(self, *args, **kwargs):
   263         """
   297         """
   264         Returns a dictionary containing the calculations (aggregation)
   298         Returns a dictionary containing the calculations (aggregation)
   265         over the current queryset
   299         over the current queryset
   266 
   300 
   267         If args is present the expression is passed as a kwarg ussing
   301         If args is present the expression is passed as a kwarg using
   268         the Aggregate object's default alias.
   302         the Aggregate object's default alias.
   269         """
   303         """
   270         for arg in args:
   304         for arg in args:
   271             kwargs[arg.default_alias] = arg
   305             kwargs[arg.default_alias] = arg
   272 
   306 
   274 
   308 
   275         for (alias, aggregate_expr) in kwargs.items():
   309         for (alias, aggregate_expr) in kwargs.items():
   276             query.add_aggregate(aggregate_expr, self.model, alias,
   310             query.add_aggregate(aggregate_expr, self.model, alias,
   277                 is_summary=True)
   311                 is_summary=True)
   278 
   312 
   279         return query.get_aggregation()
   313         return query.get_aggregation(using=self.db)
   280 
   314 
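A usage sketch of the positional-argument handling in aggregate() (model and field names are hypothetical):

from django.db.models import Avg, Max

# A positional Aggregate is stored under its default alias; keyword
# aggregates keep the alias supplied by the caller.
Author.objects.aggregate(Avg('age'), oldest=Max('age'))
# -> {'age__avg': ..., 'oldest': ...}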
   281     def count(self):
   315     def count(self):
   282         """
   316         """
   283         Performs a SELECT COUNT() and returns the number of records as an
   317         Performs a SELECT COUNT() and returns the number of records as an
   284         integer.
   318         integer.
   287         of the cached results set to avoid multiple SELECT COUNT(*) calls.
   321         of the cached results set to avoid multiple SELECT COUNT(*) calls.
   288         """
   322         """
   289         if self._result_cache is not None and not self._iter:
   323         if self._result_cache is not None and not self._iter:
   290             return len(self._result_cache)
   324             return len(self._result_cache)
   291 
   325 
   292         return self.query.get_count()
   326         return self.query.get_count(using=self.db)
   293 
   327 
   294     def get(self, *args, **kwargs):
   328     def get(self, *args, **kwargs):
   295         """
   329         """
   296         Performs the query and returns a single object matching the given
   330         Performs the query and returns a single object matching the given
   297         keyword arguments.
   331         keyword arguments.
   298         """
   332         """
   299         clone = self.filter(*args, **kwargs)
   333         clone = self.filter(*args, **kwargs)
       
   334         if self.query.can_filter():
       
   335             clone = clone.order_by()
   300         num = len(clone)
   336         num = len(clone)
   301         if num == 1:
   337         if num == 1:
   302             return clone._result_cache[0]
   338             return clone._result_cache[0]
   303         if not num:
   339         if not num:
   304             raise self.model.DoesNotExist("%s matching query does not exist."
   340             raise self.model.DoesNotExist("%s matching query does not exist."
   310         """
   346         """
   311         Creates a new object with the given kwargs, saving it to the database
   347         Creates a new object with the given kwargs, saving it to the database
   312         and returning the created object.
   348         and returning the created object.
   313         """
   349         """
   314         obj = self.model(**kwargs)
   350         obj = self.model(**kwargs)
   315         obj.save(force_insert=True)
   351         self._for_write = True
       
   352         obj.save(force_insert=True, using=self.db)
   316         return obj
   353         return obj
   317 
   354 
   318     def get_or_create(self, **kwargs):
   355     def get_or_create(self, **kwargs):
   319         """
   356         """
   320         Looks up an object with the given kwargs, creating one if necessary.
   357         Looks up an object with the given kwargs, creating one if necessary.
   323         """
   360         """
   324         assert kwargs, \
   361         assert kwargs, \
   325                 'get_or_create() must be passed at least one keyword argument'
   362                 'get_or_create() must be passed at least one keyword argument'
   326         defaults = kwargs.pop('defaults', {})
   363         defaults = kwargs.pop('defaults', {})
   327         try:
   364         try:
       
   365             self._for_write = True
   328             return self.get(**kwargs), False
   366             return self.get(**kwargs), False
   329         except self.model.DoesNotExist:
   367         except self.model.DoesNotExist:
   330             try:
   368             try:
   331                 params = dict([(k, v) for k, v in kwargs.items() if '__' not in k])
   369                 params = dict([(k, v) for k, v in kwargs.items() if '__' not in k])
   332                 params.update(defaults)
   370                 params.update(defaults)
   333                 obj = self.model(**params)
   371                 obj = self.model(**params)
   334                 sid = transaction.savepoint()
   372                 sid = transaction.savepoint(using=self.db)
   335                 obj.save(force_insert=True)
   373                 obj.save(force_insert=True, using=self.db)
   336                 transaction.savepoint_commit(sid)
   374                 transaction.savepoint_commit(sid, using=self.db)
   337                 return obj, True
   375                 return obj, True
   338             except IntegrityError, e:
   376             except IntegrityError, e:
   339                 transaction.savepoint_rollback(sid)
   377                 transaction.savepoint_rollback(sid, using=self.db)
   340                 try:
   378                 try:
   341                     return self.get(**kwargs), False
   379                     return self.get(**kwargs), False
   342                 except self.model.DoesNotExist:
   380                 except self.model.DoesNotExist:
   343                     raise e
   381                     raise e
   344 
   382 
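A usage sketch of get_or_create() (model and field names are hypothetical); the lookup kwargs select the row, while defaults is only applied when a new object has to be inserted on self.db:

author, created = Author.objects.get_or_create(
    name='Tom Stoppard',
    defaults={'alive': True},
)
# If a concurrent insert wins the race, the IntegrityError is caught, the
# savepoint is rolled back, and the existing row is returned instead.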
   361         Returns a dictionary mapping each of the given IDs to the object with
   399         Returns a dictionary mapping each of the given IDs to the object with
   362         that ID.
   400         that ID.
   363         """
   401         """
   364         assert self.query.can_filter(), \
   402         assert self.query.can_filter(), \
   365                 "Cannot use 'limit' or 'offset' with in_bulk"
   403                 "Cannot use 'limit' or 'offset' with in_bulk"
   366         assert isinstance(id_list, (tuple,  list)), \
   404         assert isinstance(id_list, (tuple,  list, set, frozenset)), \
   367                 "in_bulk() must be provided with a list of IDs."
   405                 "in_bulk() must be provided with a list of IDs."
   368         if not id_list:
   406         if not id_list:
   369             return {}
   407             return {}
   370         qs = self._clone()
   408         qs = self._clone()
   371         qs.query.add_filter(('pk__in', id_list))
   409         qs.query.add_filter(('pk__in', id_list))
   378         assert self.query.can_filter(), \
   416         assert self.query.can_filter(), \
   379                 "Cannot use 'limit' or 'offset' with delete."
   417                 "Cannot use 'limit' or 'offset' with delete."
   380 
   418 
   381         del_query = self._clone()
   419         del_query = self._clone()
   382 
   420 
       
   421         # The delete is actually 2 queries - one to find related objects,
       
   422         # and one to delete. Make sure that the discovery of related
       
   423         # objects is performed on the same database as the deletion.
       
   424         del_query._for_write = True
       
   425 
   383         # Disable non-supported fields.
   426         # Disable non-supported fields.
   384         del_query.query.select_related = False
   427         del_query.query.select_related = False
   385         del_query.query.clear_ordering()
   428         del_query.query.clear_ordering()
   386 
   429 
   387         # Delete objects in chunks to prevent the list of related objects from
   430         # Delete objects in chunks to prevent the list of related objects from
   388         # becoming too long.
   431         # becoming too long.
   389         seen_objs = None
   432         seen_objs = None
       
   433         del_itr = iter(del_query)
   390         while 1:
   434         while 1:
   391             # Collect all the objects to be deleted in this chunk, and all the
   435             # Collect a chunk of objects to be deleted, and then all the
   392             # objects that are related to the objects that are to be deleted.
   436             # objects that are related to the objects that are to be deleted.
       
   437             # The chunking *isn't* done by slicing the del_query because we
       
   438             # need to maintain the query cache on del_query (see #12328)
   393             seen_objs = CollectedObjects(seen_objs)
   439             seen_objs = CollectedObjects(seen_objs)
   394             for object in del_query[:CHUNK_SIZE]:
   440             for i, obj in izip(xrange(CHUNK_SIZE), del_itr):
   395                 object._collect_sub_objects(seen_objs)
   441                 obj._collect_sub_objects(seen_objs)
   396 
   442 
   397             if not seen_objs:
   443             if not seen_objs:
   398                 break
   444                 break
   399             delete_objects(seen_objs)
   445             delete_objects(seen_objs, del_query.db)
   400 
   446 
   401         # Clear the result cache, in case this QuerySet gets reused.
   447         # Clear the result cache, in case this QuerySet gets reused.
   402         self._result_cache = None
   448         self._result_cache = None
   403     delete.alters_data = True
   449     delete.alters_data = True
   404 
   450 
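The izip/xrange loop above pulls at most CHUNK_SIZE objects per pass from one shared iterator, so del_query's result cache is reused instead of being re-sliced. The same idiom in isolation (the helper name is made up):

from itertools import izip

def in_chunks(iterable, size):
    # Consume `iterable` in lists of at most `size` items without slicing it.
    it = iter(iterable)
    while True:
        chunk = [obj for _, obj in izip(xrange(size), it)]
        if not chunk:
            break
        yield chunk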
   407         Updates all elements in the current QuerySet, setting all the given
   453         Updates all elements in the current QuerySet, setting all the given
   408         fields to the appropriate values.
   454         fields to the appropriate values.
   409         """
   455         """
   410         assert self.query.can_filter(), \
   456         assert self.query.can_filter(), \
   411                 "Cannot update a query once a slice has been taken."
   457                 "Cannot update a query once a slice has been taken."
       
   458         self._for_write = True
   412         query = self.query.clone(sql.UpdateQuery)
   459         query = self.query.clone(sql.UpdateQuery)
   413         query.add_update_values(kwargs)
   460         query.add_update_values(kwargs)
   414         if not transaction.is_managed():
   461         if not transaction.is_managed(using=self.db):
   415             transaction.enter_transaction_management()
   462             transaction.enter_transaction_management(using=self.db)
   416             forced_managed = True
   463             forced_managed = True
   417         else:
   464         else:
   418             forced_managed = False
   465             forced_managed = False
   419         try:
   466         try:
   420             rows = query.execute_sql(None)
   467             rows = query.get_compiler(self.db).execute_sql(None)
   421             if forced_managed:
   468             if forced_managed:
   422                 transaction.commit()
   469                 transaction.commit(using=self.db)
   423             else:
   470             else:
   424                 transaction.commit_unless_managed()
   471                 transaction.commit_unless_managed(using=self.db)
   425         finally:
   472         finally:
   426             if forced_managed:
   473             if forced_managed:
   427                 transaction.leave_transaction_management()
   474                 transaction.leave_transaction_management(using=self.db)
   428         self._result_cache = None
   475         self._result_cache = None
   429         return rows
   476         return rows
   430     update.alters_data = True
   477     update.alters_data = True
   431 
   478 
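update() above (and delete_objects() further down) share the same "forced managed" transaction dance, now parameterised by database alias. A standalone sketch of that pattern; run_managed, work and alias are placeholders, not Django API:

from django.db import transaction

def run_managed(alias, work):
    # Enter transaction management only if the caller isn't already managing.
    forced_managed = not transaction.is_managed(using=alias)
    if forced_managed:
        transaction.enter_transaction_management(using=alias)
    try:
        result = work()
        if forced_managed:
            transaction.commit(using=alias)
        else:
            transaction.commit_unless_managed(using=alias)
        return result
    finally:
        if forced_managed:
            transaction.leave_transaction_management(using=alias)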
   432     def _update(self, values):
   479     def _update(self, values):
   439         assert self.query.can_filter(), \
   486         assert self.query.can_filter(), \
   440                 "Cannot update a query once a slice has been taken."
   487                 "Cannot update a query once a slice has been taken."
   441         query = self.query.clone(sql.UpdateQuery)
   488         query = self.query.clone(sql.UpdateQuery)
   442         query.add_update_fields(values)
   489         query.add_update_fields(values)
   443         self._result_cache = None
   490         self._result_cache = None
   444         return query.execute_sql(None)
   491         return query.get_compiler(self.db).execute_sql(None)
   445     _update.alters_data = True
   492     _update.alters_data = True
       
   493 
       
   494     def exists(self):
       
   495         if self._result_cache is None:
       
   496             return self.query.has_results(using=self.db)
       
   497         return bool(self._result_cache)
   446 
   498 
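exists() issues a lightweight has-results query unless the queryset was already evaluated, in which case the cached results answer it. Usage sketch with a hypothetical model:

# No model instances are built just to answer a yes/no question.
has_matches = Author.objects.filter(name__startswith='A').exists()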
   447     ##################################################
   499     ##################################################
   448     # PUBLIC METHODS THAT RETURN A QUERYSET SUBCLASS #
   500     # PUBLIC METHODS THAT RETURN A QUERYSET SUBCLASS #
   449     ##################################################
   501     ##################################################
   450 
   502 
   646             raise TypeError("Cannot pass None as an argument to only().")
   698             raise TypeError("Cannot pass None as an argument to only().")
   647         clone = self._clone()
   699         clone = self._clone()
   648         clone.query.add_immediate_loading(fields)
   700         clone.query.add_immediate_loading(fields)
   649         return clone
   701         return clone
   650 
   702 
       
   703     def using(self, alias):
       
   704         """
       
   705         Selects which database this QuerySet should execute its query against.
       
   706         """
       
   707         clone = self._clone()
       
   708         clone._db = alias
       
   709         return clone
       
   710 
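A usage sketch of using(); the 'replica' alias and the Author model are assumptions (aliases come from settings.DATABASES):

# The alias is stored on the clone, so it survives further filtering.
qs = Author.objects.using('replica').filter(name__startswith='A')
qs = qs.exclude(name='Anonymous')   # still routed to 'replica'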
   651     ###################################
   711     ###################################
   652     # PUBLIC INTROSPECTION ATTRIBUTES #
   712     # PUBLIC INTROSPECTION ATTRIBUTES #
   653     ###################################
   713     ###################################
   654 
   714 
   655     def ordered(self):
   715     def ordered(self):
   662         elif self.query.default_ordering and self.query.model._meta.ordering:
   722         elif self.query.default_ordering and self.query.model._meta.ordering:
   663             return True
   723             return True
   664         else:
   724         else:
   665             return False
   725             return False
   666     ordered = property(ordered)
   726     ordered = property(ordered)
       
   727 
       
   728     @property
       
   729     def db(self):
       
   730         "Return the database that will be used if this query is executed now"
       
   731         if self._for_write:
       
   732             return self._db or router.db_for_write(self.model)
       
   733         return self._db or router.db_for_read(self.model)
   667 
   734 
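The db property only consults the router when no explicit alias was set via using(). A minimal sketch of a router that the db_for_read/db_for_write calls above would consult (alias names are assumptions; routers are registered through settings.DATABASE_ROUTERS):

class PrimaryReplicaRouter(object):
    # Reads go to the replica; writes (querysets with _for_write set,
    # e.g. create(), update(), delete()) go to the primary.
    def db_for_read(self, model, **hints):
        return 'replica'

    def db_for_write(self, model, **hints):
        return 'default'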
   668     ###################
   735     ###################
   669     # PRIVATE METHODS #
   736     # PRIVATE METHODS #
   670     ###################
   737     ###################
   671 
   738 
   673         if klass is None:
   740         if klass is None:
   674             klass = self.__class__
   741             klass = self.__class__
   675         query = self.query.clone()
   742         query = self.query.clone()
   676         if self._sticky_filter:
   743         if self._sticky_filter:
   677             query.filter_is_sticky = True
   744             query.filter_is_sticky = True
   678         c = klass(model=self.model, query=query)
   745         c = klass(model=self.model, query=query, using=self._db)
       
   746         c._for_write = self._for_write
   679         c.__dict__.update(kwargs)
   747         c.__dict__.update(kwargs)
   680         if setup and hasattr(c, '_setup_query'):
   748         if setup and hasattr(c, '_setup_query'):
   681             c._setup_query()
   749             c._setup_query()
   682         return c
   750         return c
   683 
   751 
   723         if self.query.group_by is None:
   791         if self.query.group_by is None:
   724             field_names = [f.attname for f in opts.fields]
   792             field_names = [f.attname for f in opts.fields]
   725             self.query.add_fields(field_names, False)
   793             self.query.add_fields(field_names, False)
   726             self.query.set_group_by()
   794             self.query.set_group_by()
   727 
   795 
   728     def _as_sql(self):
   796     def _prepare(self):
       
   797         return self
       
   798 
       
   799     def _as_sql(self, connection):
   729         """
   800         """
   730         Returns the internal query's SQL and parameters (as a tuple).
   801         Returns the internal query's SQL and parameters (as a tuple).
   731         """
   802         """
   732         obj = self.values("pk")
   803         obj = self.values("pk")
   733         return obj.query.as_nested_sql()
   804         if obj._db is None or connection == connections[obj._db]:
       
   805             return obj.query.get_compiler(connection=connection).as_nested_sql()
       
   806         raise ValueError("Can't do subqueries with queries on different DBs.")
   734 
   807 
   735     # When used as part of a nested query, a queryset will never be an "always
   808     # When used as part of a nested query, a queryset will never be an "always
   736     # empty" result.
   809     # empty" result.
   737     value_annotation = True
   810     value_annotation = True
   738 
   811 
   751         field_names = self.field_names
   824         field_names = self.field_names
   752         aggregate_names = self.query.aggregate_select.keys()
   825         aggregate_names = self.query.aggregate_select.keys()
   753 
   826 
   754         names = extra_names + field_names + aggregate_names
   827         names = extra_names + field_names + aggregate_names
   755 
   828 
   756         for row in self.query.results_iter():
   829         for row in self.query.get_compiler(self.db).results_iter():
   757             yield dict(zip(names, row))
   830             yield dict(zip(names, row))
   758 
   831 
   759     def _setup_query(self):
   832     def _setup_query(self):
   760         """
   833         """
   761         Constructs the field_names list that the values query will be
   834         Constructs the field_names list that the values query will be
   834             self.aggregate_names.extend(aggregates)
   907             self.aggregate_names.extend(aggregates)
   835             self.query.set_aggregate_mask(self.aggregate_names)
   908             self.query.set_aggregate_mask(self.aggregate_names)
   836 
   909 
   837         super(ValuesQuerySet, self)._setup_aggregate_query(aggregates)
   910         super(ValuesQuerySet, self)._setup_aggregate_query(aggregates)
   838 
   911 
   839     def _as_sql(self):
   912     def _as_sql(self, connection):
   840         """
   913         """
   841         For ValuesQuerySet (and subclasses like ValuesListQuerySet), they can
   914         For ValuesQuerySet (and subclasses like ValuesListQuerySet), they can
   842         only be used as nested queries if they're already set up to select only
   915         only be used as nested queries if they're already set up to select only
   843         a single field (in which case, that is the field column that is
   916         a single field (in which case, that is the field column that is
   844         returned). This differs from QuerySet.as_sql(), where the column to
   917         returned). This differs from QuerySet.as_sql(), where the column to
   846         """
   919         """
   847         if ((self._fields and len(self._fields) > 1) or
   920         if ((self._fields and len(self._fields) > 1) or
   848                 (not self._fields and len(self.model._meta.fields) > 1)):
   921                 (not self._fields and len(self.model._meta.fields) > 1)):
   849             raise TypeError('Cannot use a multi-field %s as a filter value.'
   922             raise TypeError('Cannot use a multi-field %s as a filter value.'
   850                     % self.__class__.__name__)
   923                     % self.__class__.__name__)
   851         return self._clone().query.as_nested_sql()
   924 
       
   925         obj = self._clone()
       
   926         if obj._db is None or connection == connections[obj._db]:
       
   927             return obj.query.get_compiler(connection=connection).as_nested_sql()
       
   928         raise ValueError("Can't do subqueries with queries on different DBs.")
       
   929 
       
   930     def _prepare(self):
       
   931         """
       
   932         Validates that we aren't trying to do a query like
       
   933         value__in=qs.values('value1', 'value2'), which isn't valid.
       
   934         """
       
   935         if ((self._fields and len(self._fields) > 1) or
       
   936                 (not self._fields and len(self.model._meta.fields) > 1)):
       
   937             raise TypeError('Cannot use a multi-field %s as a filter value.'
       
   938                     % self.__class__.__name__)
       
   939         return self
   852 
   940 
   853 class ValuesListQuerySet(ValuesQuerySet):
   941 class ValuesListQuerySet(ValuesQuerySet):
   854     def iterator(self):
   942     def iterator(self):
   855         if self.flat and len(self._fields) == 1:
   943         if self.flat and len(self._fields) == 1:
   856             for row in self.query.results_iter():
   944             for row in self.query.get_compiler(self.db).results_iter():
   857                 yield row[0]
   945                 yield row[0]
   858         elif not self.query.extra_select and not self.query.aggregate_select:
   946         elif not self.query.extra_select and not self.query.aggregate_select:
   859             for row in self.query.results_iter():
   947             for row in self.query.get_compiler(self.db).results_iter():
   860                 yield tuple(row)
   948                 yield tuple(row)
   861         else:
   949         else:
   862             # When extra(select=...) or an annotation is involved, the extra
   950             # When extra(select=...) or an annotation is involved, the extra
   863             # cols are always at the start of the row, and we need to reorder
   951             # cols are always at the start of the row, and we need to reorder
   864             # the fields to match the order in self._fields.
   952             # the fields to match the order in self._fields.
   869             names = extra_names + field_names + aggregate_names
   957             names = extra_names + field_names + aggregate_names
   870 
   958 
   871             # If a field list has been specified, use it. Otherwise, use the
   959             # If a field list has been specified, use it. Otherwise, use the
   872             # full list of fields, including extras and aggregates.
   960             # full list of fields, including extras and aggregates.
   873             if self._fields:
   961             if self._fields:
   874                 fields = self._fields
   962                 fields = list(self._fields) + filter(lambda f: f not in self._fields,
       
   963                                                      aggregate_names)
   875             else:
   964             else:
   876                 fields = names
   965                 fields = names
   877 
   966 
   878             for row in self.query.results_iter():
   967             for row in self.query.get_compiler(self.db).results_iter():
   879                 data = dict(zip(names, row))
   968                 data = dict(zip(names, row))
   880                 yield tuple([data[f] for f in fields])
   969                 yield tuple([data[f] for f in fields])
   881 
   970 
   882     def _clone(self, *args, **kwargs):
   971     def _clone(self, *args, **kwargs):
   883         clone = super(ValuesListQuerySet, self)._clone(*args, **kwargs)
   972         clone = super(ValuesListQuerySet, self)._clone(*args, **kwargs)
   885         return clone
   974         return clone
   886 
   975 
   887 
   976 
   888 class DateQuerySet(QuerySet):
   977 class DateQuerySet(QuerySet):
   889     def iterator(self):
   978     def iterator(self):
   890         return self.query.results_iter()
   979         return self.query.get_compiler(self.db).results_iter()
   891 
   980 
   892     def _setup_query(self):
   981     def _setup_query(self):
   893         """
   982         """
   894         Sets up any special features of the query attribute.
   983         Sets up any special features of the query attribute.
   895 
   984 
   914             c._setup_query()
  1003             c._setup_query()
   915         return c
  1004         return c
   916 
  1005 
   917 
  1006 
   918 class EmptyQuerySet(QuerySet):
  1007 class EmptyQuerySet(QuerySet):
   919     def __init__(self, model=None, query=None):
  1008     def __init__(self, model=None, query=None, using=None):
   920         super(EmptyQuerySet, self).__init__(model, query)
  1009         super(EmptyQuerySet, self).__init__(model, query, using)
   921         self._result_cache = []
  1010         self._result_cache = []
   922 
  1011 
   923     def __and__(self, other):
  1012     def __and__(self, other):
   924         return self._clone()
  1013         return self._clone()
   925 
  1014 
   940     def iterator(self):
  1029     def iterator(self):
   941         # This slightly odd construction is because we need an empty generator
  1030         # This slightly odd construction is because we need an empty generator
   942         # (it raises StopIteration immediately).
  1031         # (it raises StopIteration immediately).
   943         yield iter([]).next()
  1032         yield iter([]).next()
   944 
  1033 
       
  1034     def all(self):
       
  1035         """
       
  1036         Always returns EmptyQuerySet.
       
  1037         """
       
  1038         return self
       
  1039 
       
  1040     def filter(self, *args, **kwargs):
       
  1041         """
       
  1042         Always returns EmptyQuerySet.
       
  1043         """
       
  1044         return self
       
  1045 
       
  1046     def exclude(self, *args, **kwargs):
       
  1047         """
       
  1048         Always returns EmptyQuerySet.
       
  1049         """
       
  1050         return self
       
  1051 
       
  1052     def complex_filter(self, filter_obj):
       
  1053         """
       
  1054         Always returns EmptyQuerySet.
       
  1055         """
       
  1056         return self
       
  1057 
       
  1058     def select_related(self, *fields, **kwargs):
       
  1059         """
       
  1060         Always returns EmptyQuerySet.
       
  1061         """
       
  1062         return self
       
  1063 
       
  1064     def annotate(self, *args, **kwargs):
       
  1065         """
       
  1066         Always returns EmptyQuerySet.
       
  1067         """
       
  1068         return self
       
  1069 
       
  1070     def order_by(self, *field_names):
       
  1071         """
       
  1072         Always returns EmptyQuerySet.
       
  1073         """
       
  1074         return self
       
  1075 
       
  1076     def distinct(self, true_or_false=True):
       
  1077         """
       
  1078         Always returns EmptyQuerySet.
       
  1079         """
       
  1080         return self
       
  1081 
       
  1082     def extra(self, select=None, where=None, params=None, tables=None,
       
  1083               order_by=None, select_params=None):
       
  1084         """
       
  1085         Always returns EmptyQuerySet.
       
  1086         """
       
  1087         assert self.query.can_filter(), \
       
  1088                 "Cannot change a query once a slice has been taken"
       
  1089         return self
       
  1090 
       
  1091     def reverse(self):
       
  1092         """
       
  1093         Always returns EmptyQuerySet.
       
  1094         """
       
  1095         return self
       
  1096 
       
  1097     def defer(self, *fields):
       
  1098         """
       
  1099         Always returns EmptyQuerySet.
       
  1100         """
       
  1101         return self
       
  1102 
       
  1103     def only(self, *fields):
       
  1104         """
       
  1105         Always returns EmptyQuerySet.
       
  1106         """
       
  1107         return self
       
  1108 
       
  1109     def update(self, **kwargs):
       
  1110         """
       
  1111         Don't update anything.
       
  1112         """
       
  1113         return 0
       
  1114 
   945     # EmptyQuerySet is always an empty result in where-clauses (and similar
  1115     # EmptyQuerySet is always an empty result in where-clauses (and similar
   946     # situations).
  1116     # situations).
   947     value_annotation = False
  1117     value_annotation = False
   948 
  1118 
   949 
  1119 
   950 def get_cached_row(klass, row, index_start, max_depth=0, cur_depth=0,
  1120 def get_cached_row(klass, row, index_start, using, max_depth=0, cur_depth=0,
   951                    requested=None, offset=0, only_load=None):
  1121                    requested=None, offset=0, only_load=None, local_only=False):
   952     """
  1122     """
   953     Helper function that recursively returns an object with the specified
  1123     Helper function that recursively returns an object with the specified
   954     related attributes already populated.
  1124     related attributes already populated.
       
  1125 
       
  1126     This method may be called recursively to populate deep select_related()
       
  1127     clauses.
       
  1128 
       
  1129     Arguments:
       
  1130      * klass - the class to retrieve (and instantiate)
       
  1131      * row - the row of data returned by the database cursor
       
  1132      * index_start - the index of the row at which data for this
       
  1133        object is known to start
       
  1134      * using - the database alias on which the query is being executed.
       
  1135      * max_depth - the maximum depth to which a select_related()
       
  1136        relationship should be explored.
       
  1137      * cur_depth - the current depth in the select_related() tree.
       
   1138        Used in recursive calls to determine if we should dig deeper.
       
  1139      * requested - A dictionary describing the select_related() tree
       
   1140        that is to be retrieved. Keys are field names; values are
       
  1141        dictionaries describing the keys on that related object that
       
  1142        are themselves to be select_related().
       
  1143      * offset - the number of additional fields that are known to
       
  1144        exist in `row` for `klass`. This usually means the number of
       
  1145        annotated results on `klass`.
       
  1146      * only_load - if the query has had only() or defer() applied,
       
  1147        this is the list of field names that will be returned. If None,
       
  1148        the full field list for `klass` can be assumed.
       
  1149      * local_only - Only populate local fields. This is used when building
       
  1150        following reverse select-related relations
   955     """
  1151     """
   956     if max_depth and requested is None and cur_depth > max_depth:
  1152     if max_depth and requested is None and cur_depth > max_depth:
   957         # We've recursed deeply enough; stop now.
  1153         # We've recursed deeply enough; stop now.
   958         return None
  1154         return None
   959 
  1155 
   960     restricted = requested is not None
  1156     restricted = requested is not None
   961     load_fields = only_load and only_load.get(klass) or None
  1157     if only_load:
       
  1158         load_fields = only_load.get(klass)
       
   1159         # When we create the object, we will also be populating
       
  1160         # all the parent classes, so traverse the parent classes looking
       
  1161         # for fields that must be included on load.
       
  1162         for parent in klass._meta.get_parent_list():
       
  1163             fields = only_load.get(parent)
       
  1164             if fields:
       
  1165                 load_fields.update(fields)
       
  1166     else:
       
  1167         load_fields = None
   962     if load_fields:
  1168     if load_fields:
   963         # Handle deferred fields.
  1169         # Handle deferred fields.
   964         skip = set()
  1170         skip = set()
   965         init_list = []
  1171         init_list = []
   966         pk_val = row[index_start + klass._meta.pk_index()]
  1172         # Build the list of fields that *haven't* been requested
   967         for field in klass._meta.fields:
  1173         for field, model in klass._meta.get_fields_with_model():
   968             if field.name not in load_fields:
  1174             if field.name not in load_fields:
   969                 skip.add(field.name)
  1175                 skip.add(field.name)
       
  1176             elif local_only and model is not None:
       
  1177                 continue
   970             else:
  1178             else:
   971                 init_list.append(field.attname)
  1179                 init_list.append(field.attname)
       
  1180         # Retrieve all the requested fields
   972         field_count = len(init_list)
  1181         field_count = len(init_list)
   973         fields = row[index_start : index_start + field_count]
  1182         fields = row[index_start : index_start + field_count]
       
  1183         # If all the select_related columns are None, then the related
       
  1184         # object must be non-existent - set the relation to None.
       
  1185         # Otherwise, construct the related object.
   974         if fields == (None,) * field_count:
  1186         if fields == (None,) * field_count:
   975             obj = None
  1187             obj = None
   976         elif skip:
  1188         elif skip:
   977             klass = deferred_class_factory(klass, skip)
  1189             klass = deferred_class_factory(klass, skip)
   978             obj = klass(**dict(zip(init_list, fields)))
  1190             obj = klass(**dict(zip(init_list, fields)))
   979         else:
  1191         else:
   980             obj = klass(*fields)
  1192             obj = klass(*fields)
       
  1193 
   981     else:
  1194     else:
   982         field_count = len(klass._meta.fields)
  1195         # Load all fields on klass
       
  1196         if local_only:
       
  1197             field_names = [f.attname for f in klass._meta.local_fields]
       
  1198         else:
       
  1199             field_names = [f.attname for f in klass._meta.fields]
       
  1200         field_count = len(field_names)
   983         fields = row[index_start : index_start + field_count]
  1201         fields = row[index_start : index_start + field_count]
       
  1202         # If all the select_related columns are None, then the related
       
  1203         # object must be non-existent - set the relation to None.
       
  1204         # Otherwise, construct the related object.
   984         if fields == (None,) * field_count:
  1205         if fields == (None,) * field_count:
   985             obj = None
  1206             obj = None
   986         else:
  1207         else:
   987             obj = klass(*fields)
  1208             obj = klass(**dict(zip(field_names, fields)))
       
  1209 
       
  1210     # If an object was retrieved, set the database state.
       
  1211     if obj:
       
  1212         obj._state.db = using
   988 
  1213 
   989     index_end = index_start + field_count + offset
  1214     index_end = index_start + field_count + offset
       
  1215     # Iterate over each related object, populating any
       
  1216     # select_related() fields
   990     for f in klass._meta.fields:
  1217     for f in klass._meta.fields:
   991         if not select_related_descend(f, restricted, requested):
  1218         if not select_related_descend(f, restricted, requested):
   992             continue
  1219             continue
   993         if restricted:
  1220         if restricted:
   994             next = requested[f.name]
  1221             next = requested[f.name]
   995         else:
  1222         else:
   996             next = None
  1223             next = None
   997         cached_row = get_cached_row(f.rel.to, row, index_end, max_depth,
  1224         # Recursively retrieve the data for the related object
   998                 cur_depth+1, next)
  1225         cached_row = get_cached_row(f.rel.to, row, index_end, using,
       
  1226                 max_depth, cur_depth+1, next, only_load=only_load)
       
  1227         # If the recursive descent found an object, populate the
       
  1228         # descriptor caches relevant to the object
   999         if cached_row:
  1229         if cached_row:
  1000             rel_obj, index_end = cached_row
  1230             rel_obj, index_end = cached_row
  1001             if obj is not None:
  1231             if obj is not None:
       
  1232                 # If the base object exists, populate the
       
  1233                 # descriptor cache
  1002                 setattr(obj, f.get_cache_name(), rel_obj)
  1234                 setattr(obj, f.get_cache_name(), rel_obj)
       
  1235             if f.unique and rel_obj is not None:
       
  1236                 # If the field is unique, populate the
       
  1237                 # reverse descriptor cache on the related object
       
  1238                 setattr(rel_obj, f.related.get_cache_name(), obj)
       
  1239 
       
  1240     # Now do the same, but for reverse related objects.
       
  1241     # Only handle the restricted case - i.e., don't do a depth
       
  1242     # descent into reverse relations unless explicitly requested
       
  1243     if restricted:
       
  1244         related_fields = [
       
  1245             (o.field, o.model)
       
  1246             for o in klass._meta.get_all_related_objects()
       
  1247             if o.field.unique
       
  1248         ]
       
  1249         for f, model in related_fields:
       
  1250             if not select_related_descend(f, restricted, requested, reverse=True):
       
  1251                 continue
       
  1252             next = requested[f.related_query_name()]
       
  1253             # Recursively retrieve the data for the related object
       
  1254             cached_row = get_cached_row(model, row, index_end, using,
       
  1255                 max_depth, cur_depth+1, next, only_load=only_load, local_only=True)
       
  1256             # If the recursive descent found an object, populate the
       
  1257             # descriptor caches relevant to the object
       
  1258             if cached_row:
       
  1259                 rel_obj, index_end = cached_row
       
  1260                 if obj is not None:
       
  1261                     # If the field is unique, populate the
       
  1262                     # reverse descriptor cache
       
  1263                     setattr(obj, f.related.get_cache_name(), rel_obj)
       
  1264                 if rel_obj is not None:
       
  1265                     # If the related object exists, populate
       
  1266                     # the descriptor cache.
       
  1267                     setattr(rel_obj, f.get_cache_name(), obj)
       
  1268                     # Now populate all the non-local field values
       
  1269                     # on the related object
       
  1270                     for rel_field,rel_model in rel_obj._meta.get_fields_with_model():
       
  1271                         if rel_model is not None:
       
  1272                             setattr(rel_obj, rel_field.attname, getattr(obj, rel_field.attname))
       
  1273                             # populate the field cache for any related object
       
  1274                             # that has already been retrieved
       
  1275                             if rel_field.rel:
       
  1276                                 try:
       
  1277                                     cached_obj = getattr(obj, rel_field.get_cache_name())
       
  1278                                     setattr(rel_obj, rel_field.get_cache_name(), cached_obj)
       
  1279                                 except AttributeError:
       
  1280                                     # Related object hasn't been cached yet
       
  1281                                     pass
  1003     return obj, index_end
  1282     return obj, index_end
  1004 
  1283 
  1005 def delete_objects(seen_objs):
  1284 def delete_objects(seen_objs, using):
  1006     """
  1285     """
  1007     Iterate through a list of seen classes, and remove any instances that are
  1286     Iterate through a list of seen classes, and remove any instances that are
  1008     referred to.
  1287     referred to.
  1009     """
  1288     """
  1010     if not transaction.is_managed():
  1289     connection = connections[using]
  1011         transaction.enter_transaction_management()
  1290     if not transaction.is_managed(using=using):
       
  1291         transaction.enter_transaction_management(using=using)
  1012         forced_managed = True
  1292         forced_managed = True
  1013     else:
  1293     else:
  1014         forced_managed = False
  1294         forced_managed = False
  1015     try:
  1295     try:
  1016         ordered_classes = seen_objs.keys()
  1296         ordered_classes = seen_objs.keys()
  1028             items.sort()
  1308             items.sort()
  1029             obj_pairs[cls] = items
  1309             obj_pairs[cls] = items
  1030 
  1310 
  1031             # Pre-notify all instances to be deleted.
  1311             # Pre-notify all instances to be deleted.
  1032             for pk_val, instance in items:
  1312             for pk_val, instance in items:
  1033                 signals.pre_delete.send(sender=cls, instance=instance)
  1313                 if not cls._meta.auto_created:
       
  1314                     signals.pre_delete.send(sender=cls, instance=instance)
  1034 
  1315 
  1035             pk_list = [pk for pk,instance in items]
  1316             pk_list = [pk for pk,instance in items]
  1036             del_query = sql.DeleteQuery(cls, connection)
  1317 
  1037             del_query.delete_batch_related(pk_list)
  1318             update_query = sql.UpdateQuery(cls)
  1038 
       
  1039             update_query = sql.UpdateQuery(cls, connection)
       
  1040             for field, model in cls._meta.get_fields_with_model():
  1319             for field, model in cls._meta.get_fields_with_model():
  1041                 if (field.rel and field.null and field.rel.to in seen_objs and
  1320                 if (field.rel and field.null and field.rel.to in seen_objs and
  1042                         filter(lambda f: f.column == field.rel.get_related_field().column,
  1321                         filter(lambda f: f.column == field.rel.get_related_field().column,
  1043                         field.rel.to._meta.fields)):
  1322                         field.rel.to._meta.fields)):
  1044                     if model:
  1323                     if model:
  1045                         sql.UpdateQuery(model, connection).clear_related(field,
  1324                         sql.UpdateQuery(model).clear_related(field, pk_list, using=using)
  1046                                 pk_list)
       
  1047                     else:
  1325                     else:
  1048                         update_query.clear_related(field, pk_list)
  1326                         update_query.clear_related(field, pk_list, using=using)
  1049 
  1327 
  1050         # Now delete the actual data.
  1328         # Now delete the actual data.
  1051         for cls in ordered_classes:
  1329         for cls in ordered_classes:
  1052             items = obj_pairs[cls]
  1330             items = obj_pairs[cls]
  1053             items.reverse()
  1331             items.reverse()
  1054 
  1332 
  1055             pk_list = [pk for pk,instance in items]
  1333             pk_list = [pk for pk,instance in items]
  1056             del_query = sql.DeleteQuery(cls, connection)
  1334             del_query = sql.DeleteQuery(cls)
  1057             del_query.delete_batch(pk_list)
  1335             del_query.delete_batch(pk_list, using=using)
  1058 
  1336 
  1059             # Last cleanup; set NULLs where there once was a reference to the
  1337             # Last cleanup; set NULLs where there once was a reference to the
  1060             # object, NULL the primary key of the found objects, and perform
  1338             # object, NULL the primary key of the found objects, and perform
  1061             # post-notification.
  1339             # post-notification.
  1062             for pk_val, instance in items:
  1340             for pk_val, instance in items:
  1063                 for field in cls._meta.fields:
  1341                 for field in cls._meta.fields:
  1064                     if field.rel and field.null and field.rel.to in seen_objs:
  1342                     if field.rel and field.null and field.rel.to in seen_objs:
  1065                         setattr(instance, field.attname, None)
  1343                         setattr(instance, field.attname, None)
  1066 
  1344 
  1067                 signals.post_delete.send(sender=cls, instance=instance)
  1345                 if not cls._meta.auto_created:
       
  1346                     signals.post_delete.send(sender=cls, instance=instance)
  1068                 setattr(instance, cls._meta.pk.attname, None)
  1347                 setattr(instance, cls._meta.pk.attname, None)
  1069 
  1348 
  1070         if forced_managed:
  1349         if forced_managed:
  1071             transaction.commit()
  1350             transaction.commit(using=using)
  1072         else:
  1351         else:
  1073             transaction.commit_unless_managed()
  1352             transaction.commit_unless_managed(using=using)
  1074     finally:
  1353     finally:
  1075         if forced_managed:
  1354         if forced_managed:
  1076             transaction.leave_transaction_management()
  1355             transaction.leave_transaction_management(using=using)
  1077 
  1356 
  1078 
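# A hedged sketch (not from this changeset; identifiers are hypothetical): the
# delete path above only fires pre_delete/post_delete for models that are not
# auto-created (so the implicit ManyToManyField "through" tables are skipped)
# and threads the ``using`` alias through every query it issues. Observing
# that from application code might look like:
#
#     from django.db.models.signals import pre_delete, post_delete
#
#     def log_delete(sender, instance, **kwargs):
#         print "deleting %s.%s pk=%s" % (
#             sender._meta.app_label, sender._meta.object_name, instance.pk)
#
#     pre_delete.connect(log_delete, sender=Entry)    # ``Entry`` is a hypothetical model
#     post_delete.connect(log_delete, sender=Entry)
#
#     Entry.objects.using('archive').filter(rating__lt=3).delete()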
  1357 class RawQuerySet(object):
  1079 def insert_query(model, values, return_id=False, raw_values=False):
  1358     """
       
  1359     Provides an iterator which converts the results of raw SQL queries into
       
  1360     annotated model instances.
       
  1361     """
       
  1362     def __init__(self, raw_query, model=None, query=None, params=None,
       
  1363         translations=None, using=None):
       
  1364         self.raw_query = raw_query
       
  1365         self.model = model
       
  1366         self._db = using
       
  1367         self.query = query or sql.RawQuery(sql=raw_query, using=self.db, params=params)
       
  1368         self.params = params or ()
       
  1369         self.translations = translations or {}
       
  1370 
       
  1371     def __iter__(self):
       
  1372         for row in self.query:
       
  1373             yield self.transform_results(row)
       
  1374 
       
  1375     def __repr__(self):
       
  1376         return "<RawQuerySet: %r>" % (self.raw_query % self.params)
       
  1377 
       
  1378     def __getitem__(self, k):
       
  1379         return list(self)[k]
       
  1380 
       
  1381     @property
       
  1382     def db(self):
       
  1383         "Return the database that will be used if this query is executed now"
       
  1384         return self._db or router.db_for_read(self.model)
       
  1385 
       
  1386     def using(self, alias):
       
  1387         """
       
  1388         Selects which database this RawQuerySet should execute its query against.
       
  1389         """
       
  1390         return RawQuerySet(self.raw_query, model=self.model,
       
  1391                 query=self.query.clone(using=alias),
       
  1392                 params=self.params, translations=self.translations,
       
  1393                 using=alias)
       
  1394 
       
  1395     @property
       
  1396     def columns(self):
       
  1397         """
       
  1398         A list of model field names in the order they'll appear in the
       
  1399         query results.
       
  1400         """
       
  1401         if not hasattr(self, '_columns'):
       
  1402             self._columns = self.query.get_columns()
       
  1403 
       
  1404             # Adjust any column names which don't match field names
       
  1405             for (query_name, model_name) in self.translations.items():
       
  1406                 try:
       
  1407                     index = self._columns.index(query_name)
       
  1408                     self._columns[index] = model_name
       
  1409                 except ValueError:
       
  1410                     # Ignore translations for non-existent column names
       
  1411                     pass
       
  1412 
       
  1413         return self._columns
       
  1414 
       
  1415     @property
       
  1416     def model_fields(self):
       
  1417         """
       
  1418         A dict mapping column names to model field names.
       
  1419         """
       
  1420         if not hasattr(self, '_model_fields'):
       
  1421             converter = connections[self.db].introspection.table_name_converter
       
  1422             self._model_fields = {}
       
  1423             for field in self.model._meta.fields:
       
  1424                 name, column = field.get_attname_column()
       
  1425                 self._model_fields[converter(column)] = field
       
  1426         return self._model_fields
       
  1427 
       
  1428     def transform_results(self, values):
       
  1429         model_init_kwargs = {}
       
  1430         annotations = ()
       
  1431 
       
  1432         # Perform database backend type resolution
       
  1433         connection = connections[self.db]
       
  1434         compiler = connection.ops.compiler('SQLCompiler')(self.query, connection, self.db)
       
  1435         if hasattr(compiler, 'resolve_columns'):
       
  1436             fields = [self.model_fields.get(c,None) for c in self.columns]
       
  1437             values = compiler.resolve_columns(values, fields)
       
  1438 
       
  1439         # Associate fields to values
       
  1440         for pos, value in enumerate(values):
       
  1441             column = self.columns[pos]
       
  1442 
       
  1443             # Separate properties from annotations
       
  1444             if column in self.model_fields.keys():
       
  1445                 model_init_kwargs[self.model_fields[column].attname] = value
       
  1446             else:
       
  1447                 annotations += (column, value),
       
  1448 
       
  1449         # Construct model instance and apply annotations
       
  1450         skip = set()
       
  1451         for field in self.model._meta.fields:
       
  1452             if field.attname not in model_init_kwargs.keys():
       
  1453                 skip.add(field.attname)
       
  1454 
       
  1455         if skip:
       
  1456             if self.model._meta.pk.attname in skip:
       
  1457                 raise InvalidQuery('Raw query must include the primary key')
       
  1458             model_cls = deferred_class_factory(self.model, skip)
       
  1459         else:
       
  1460             model_cls = self.model
       
  1461 
       
  1462         instance = model_cls(**model_init_kwargs)
       
  1463 
       
  1464         for field, value in annotations:
       
  1465             setattr(instance, field, value)
       
  1466 
       
  1467         instance._state.db = self.query.using
       
  1468 
       
  1469         return instance
       
  1470 
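# A hedged usage sketch (not from this changeset; ``Person``, ``some_table``
# and the ``'legacy'`` alias are hypothetical). Manager.raw() builds a
# RawQuerySet like the one above: ``params`` keeps values out of the SQL
# string, ``translations`` maps query columns onto model field names, and any
# field missing from the SELECT list is deferred, except the primary key,
# which must be present or transform_results() raises InvalidQuery:
#
#     people = Person.objects.raw(
#         "SELECT id, first, last FROM some_table WHERE last = %s",
#         params=['Lennon'],
#         translations={'first': 'first_name', 'last': 'last_name'},
#     ).using('legacy')
#
#     for p in people:
#         print p.first_name, p.last_name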
       
  1471 def insert_query(model, values, return_id=False, raw_values=False, using=None):
  1080     """
  1472     """
  1081     Inserts a new record for the given model. This provides an interface to
  1473     Inserts a new record for the given model. This provides an interface to
  1082     the InsertQuery class and is how Model.save() is implemented. It is not
  1474     the InsertQuery class and is how Model.save() is implemented. It is not
  1083     part of the public API.
  1475     part of the public API.
  1084     """
  1476     """
  1085     query = sql.InsertQuery(model, connection)
  1477     query = sql.InsertQuery(model)
  1086     query.insert_values(values, raw_values)
  1478     query.insert_values(values, raw_values)
  1087     return query.execute_sql(return_id)
  1479     return query.get_compiler(using=using).execute_sql(return_id)
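# A hedged note (not from this changeset): insert_query() is internal, as its
# docstring says; the supported route is Model.save(), which accepts a
# ``using`` alias in the multi-db API and reaches this function for new rows:
#
#     p = Person(first_name='John', last_name='Lennon')   # hypothetical model
#     p.save(using='legacy')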