5 themselves do not have to (and could be backed by things other than SQL |
5 themselves do not have to (and could be backed by things other than SQL |
6 databases). The abstraction barrier only works one way: this module has to know |
6 databases). The abstraction barrier only works one way: this module has to know |
7 all about the internals of models in order to get the information it needs. |
7 all about the internals of models in order to get the information it needs. |
8 """ |
8 """ |
9 |
9 |
10 from copy import deepcopy |
10 from django.utils.copycompat import deepcopy |
11 |
|
12 from django.utils.tree import Node |
11 from django.utils.tree import Node |
13 from django.utils.datastructures import SortedDict |
12 from django.utils.datastructures import SortedDict |
14 from django.utils.encoding import force_unicode |
13 from django.utils.encoding import force_unicode |
15 from django.db.backends.util import truncate_name |
14 from django.db import connections, DEFAULT_DB_ALIAS |
16 from django.db import connection |
|
17 from django.db.models import signals |
15 from django.db.models import signals |
18 from django.db.models.fields import FieldDoesNotExist |
16 from django.db.models.fields import FieldDoesNotExist |
19 from django.db.models.query_utils import select_related_descend |
17 from django.db.models.query_utils import select_related_descend, InvalidQuery |
20 from django.db.models.sql import aggregates as base_aggregates_module |
18 from django.db.models.sql import aggregates as base_aggregates_module |
|
19 from django.db.models.sql.constants import * |
|
20 from django.db.models.sql.datastructures import EmptyResultSet, Empty, MultiJoin |
21 from django.db.models.sql.expressions import SQLEvaluator |
21 from django.db.models.sql.expressions import SQLEvaluator |
22 from django.db.models.sql.where import WhereNode, Constraint, EverythingNode, AND, OR |
22 from django.db.models.sql.where import (WhereNode, Constraint, EverythingNode, |
|
23 ExtraWhere, AND, OR) |
23 from django.core.exceptions import FieldError |
24 from django.core.exceptions import FieldError |
24 from datastructures import EmptyResultSet, Empty, MultiJoin |
25 |
25 from constants import * |
26 __all__ = ['Query', 'RawQuery'] |
26 |
27 |
27 try: |
28 class RawQuery(object): |
28 set |
29 """ |
29 except NameError: |
30 A single raw SQL query |
30 from sets import Set as set # Python 2.3 fallback |
31 """ |
31 |
32 |
32 __all__ = ['Query', 'BaseQuery'] |
33 def __init__(self, sql, using, params=None): |
33 |
34 self.validate_sql(sql) |
34 class BaseQuery(object): |
35 self.params = params or () |
|
36 self.sql = sql |
|
37 self.using = using |
|
38 self.cursor = None |
|
39 |
|
40 # Mirror some properties of a normal query so that |
|
41 # the compiler can be used to process results. |
|
42 self.low_mark, self.high_mark = 0, None # Used for offset/limit |
|
43 self.extra_select = {} |
|
44 self.aggregate_select = {} |
|
45 |
|
46 def clone(self, using): |
|
47 return RawQuery(self.sql, using, params=self.params) |
|
48 |
|
49 def convert_values(self, value, field, connection): |
|
50 """Convert the database-returned value into a type that is consistent |
|
51 across database backends. |
|
52 |
|
53 By default, this defers to the underlying backend operations, but |
|
54 it can be overridden by Query classes for specific backends. |
|
55 """ |
|
56 return connection.ops.convert_values(value, field) |
|
57 |
|
58 def get_columns(self): |
|
59 if self.cursor is None: |
|
60 self._execute_query() |
|
61 converter = connections[self.using].introspection.table_name_converter |
|
62 return [converter(column_meta[0]) |
|
63 for column_meta in self.cursor.description] |
|
64 |
|
65 def validate_sql(self, sql): |
|
66 if not sql.lower().strip().startswith('select'): |
|
67 raise InvalidQuery('Raw queries are limited to SELECT queries. Use ' |
|
68 'connection.cursor directly for other types of queries.') |
|
69 |
|
70 def __iter__(self): |
|
71 # Always execute a new query for a new iterator. |
|
72 # This could be optimized with a cache at the expense of RAM. |
|
73 self._execute_query() |
|
74 if not connections[self.using].features.can_use_chunked_reads: |
|
75 # If the database can't use chunked reads we need to make sure we |
|
76 # evaluate the entire query up front. |
|
77 result = list(self.cursor) |
|
78 else: |
|
79 result = self.cursor |
|
80 return iter(result) |
|
81 |
|
82 def __repr__(self): |
|
83 return "<RawQuery: %r>" % (self.sql % self.params) |
|
84 |
|
85 def _execute_query(self): |
|
86 self.cursor = connections[self.using].cursor() |
|
87 self.cursor.execute(self.sql, self.params) |
|
88 |
|
89 |
|
90 class Query(object): |
35 """ |
91 """ |
36 A single SQL query. |
92 A single SQL query. |
37 """ |
93 """ |
38 # SQL join types. These are part of the class because their string forms |
94 # SQL join types. These are part of the class because their string forms |
39 # vary from database to database and can be customised by a subclass. |
95 # vary from database to database and can be customised by a subclass. |
145 name is not None and obj_dict['model']._meta.get_field(name) or None |
199 name is not None and obj_dict['model']._meta.get_field(name) or None |
146 for name in obj_dict['select_fields'] |
200 for name in obj_dict['select_fields'] |
147 ] |
201 ] |
148 |
202 |
149 self.__dict__.update(obj_dict) |
203 self.__dict__.update(obj_dict) |
150 # XXX: Need a better solution for this when multi-db stuff is |
204 |
151 # supported. It's the only class-reference to the module-level |
205 def prepare(self): |
152 # connection variable. |
206 return self |
153 self.connection = connection |
207 |
|
208 def get_compiler(self, using=None, connection=None): |
|
209 if using is None and connection is None: |
|
210 raise ValueError("Need either using or connection") |
|
211 if using: |
|
212 connection = connections[using] |
|
213 |
|
214 # Check that the compiler will be able to execute the query |
|
215 for alias, aggregate in self.aggregate_select.items(): |
|
216 connection.ops.check_aggregate_support(aggregate) |
|
217 |
|
218 return connection.ops.compiler(self.compiler)(self, connection, using) |
154 |
219 |
155 def get_meta(self): |
220 def get_meta(self): |
156 """ |
221 """ |
157 Returns the Options instance (the model._meta) from which to start |
222 Returns the Options instance (the model._meta) from which to start |
158 processing. Normally, this is self.model._meta, but it can be changed |
223 processing. Normally, this is self.model._meta, but it can be changed |
159 by subclasses. |
224 by subclasses. |
160 """ |
225 """ |
161 return self.model._meta |
226 return self.model._meta |
162 |
227 |
163 def quote_name_unless_alias(self, name): |
228 def clone(self, klass=None, memo=None, **kwargs): |
164 """ |
|
165 A wrapper around connection.ops.quote_name that doesn't quote aliases |
|
166 for table names. This avoids problems with some SQL dialects that treat |
|
167 quoted strings specially (e.g. PostgreSQL). |
|
168 """ |
|
169 if name in self.quote_cache: |
|
170 return self.quote_cache[name] |
|
171 if ((name in self.alias_map and name not in self.table_map) or |
|
172 name in self.extra_select): |
|
173 self.quote_cache[name] = name |
|
174 return name |
|
175 r = self.connection.ops.quote_name(name) |
|
176 self.quote_cache[name] = r |
|
177 return r |
|
178 |
|
179 def clone(self, klass=None, **kwargs): |
|
180 """ |
229 """ |
181 Creates a copy of the current instance. The 'kwargs' parameter can be |
230 Creates a copy of the current instance. The 'kwargs' parameter can be |
182 used by clients to update attributes after copying has taken place. |
231 used by clients to update attributes after copying has taken place. |
183 """ |
232 """ |
184 obj = Empty() |
233 obj = Empty() |
185 obj.__class__ = klass or self.__class__ |
234 obj.__class__ = klass or self.__class__ |
186 obj.model = self.model |
235 obj.model = self.model |
187 obj.connection = self.connection |
|
188 obj.alias_refcount = self.alias_refcount.copy() |
236 obj.alias_refcount = self.alias_refcount.copy() |
189 obj.alias_map = self.alias_map.copy() |
237 obj.alias_map = self.alias_map.copy() |
190 obj.table_map = self.table_map.copy() |
238 obj.table_map = self.table_map.copy() |
191 obj.join_map = self.join_map.copy() |
239 obj.join_map = self.join_map.copy() |
192 obj.rev_join_map = self.rev_join_map.copy() |
240 obj.rev_join_map = self.rev_join_map.copy() |
199 obj.select_fields = self.select_fields[:] |
247 obj.select_fields = self.select_fields[:] |
200 obj.related_select_fields = self.related_select_fields[:] |
248 obj.related_select_fields = self.related_select_fields[:] |
201 obj.dupe_avoidance = self.dupe_avoidance.copy() |
249 obj.dupe_avoidance = self.dupe_avoidance.copy() |
202 obj.select = self.select[:] |
250 obj.select = self.select[:] |
203 obj.tables = self.tables[:] |
251 obj.tables = self.tables[:] |
204 obj.where = deepcopy(self.where) |
252 obj.where = deepcopy(self.where, memo=memo) |
205 obj.where_class = self.where_class |
253 obj.where_class = self.where_class |
206 if self.group_by is None: |
254 if self.group_by is None: |
207 obj.group_by = None |
255 obj.group_by = None |
208 else: |
256 else: |
209 obj.group_by = self.group_by[:] |
257 obj.group_by = self.group_by[:] |
210 obj.having = deepcopy(self.having) |
258 obj.having = deepcopy(self.having, memo=memo) |
211 obj.order_by = self.order_by[:] |
259 obj.order_by = self.order_by[:] |
212 obj.low_mark, obj.high_mark = self.low_mark, self.high_mark |
260 obj.low_mark, obj.high_mark = self.low_mark, self.high_mark |
213 obj.distinct = self.distinct |
261 obj.distinct = self.distinct |
214 obj.select_related = self.select_related |
262 obj.select_related = self.select_related |
215 obj.related_select_cols = [] |
263 obj.related_select_cols = [] |
216 obj.aggregates = deepcopy(self.aggregates) |
264 obj.aggregates = deepcopy(self.aggregates, memo=memo) |
217 if self.aggregate_select_mask is None: |
265 if self.aggregate_select_mask is None: |
218 obj.aggregate_select_mask = None |
266 obj.aggregate_select_mask = None |
219 else: |
267 else: |
220 obj.aggregate_select_mask = self.aggregate_select_mask.copy() |
268 obj.aggregate_select_mask = self.aggregate_select_mask.copy() |
221 if self._aggregate_select_cache is None: |
269 # _aggregate_select_cache cannot be copied, as doing so breaks the |
222 obj._aggregate_select_cache = None |
270 # (necessary) state in which both aggregates and |
223 else: |
271 # _aggregate_select_cache point to the same underlying objects. |
224 obj._aggregate_select_cache = self._aggregate_select_cache.copy() |
272 # It will get re-populated in the cloned queryset the next time it's |
|
273 # used. |
|
274 obj._aggregate_select_cache = None |
225 obj.max_depth = self.max_depth |
275 obj.max_depth = self.max_depth |
226 obj.extra = self.extra.copy() |
276 obj.extra = self.extra.copy() |
227 if self.extra_select_mask is None: |
277 if self.extra_select_mask is None: |
228 obj.extra_select_mask = None |
278 obj.extra_select_mask = None |
229 else: |
279 else: |
231 if self._extra_select_cache is None: |
281 if self._extra_select_cache is None: |
232 obj._extra_select_cache = None |
282 obj._extra_select_cache = None |
233 else: |
283 else: |
234 obj._extra_select_cache = self._extra_select_cache.copy() |
284 obj._extra_select_cache = self._extra_select_cache.copy() |
235 obj.extra_tables = self.extra_tables |
285 obj.extra_tables = self.extra_tables |
236 obj.extra_where = self.extra_where |
|
237 obj.extra_params = self.extra_params |
|
238 obj.extra_order_by = self.extra_order_by |
286 obj.extra_order_by = self.extra_order_by |
239 obj.deferred_loading = deepcopy(self.deferred_loading) |
287 obj.deferred_loading = deepcopy(self.deferred_loading, memo=memo) |
240 if self.filter_is_sticky and self.used_aliases: |
288 if self.filter_is_sticky and self.used_aliases: |
241 obj.used_aliases = self.used_aliases.copy() |
289 obj.used_aliases = self.used_aliases.copy() |
242 else: |
290 else: |
243 obj.used_aliases = set() |
291 obj.used_aliases = set() |
244 obj.filter_is_sticky = False |
292 obj.filter_is_sticky = False |
245 obj.__dict__.update(kwargs) |
293 obj.__dict__.update(kwargs) |
246 if hasattr(obj, '_setup_query'): |
294 if hasattr(obj, '_setup_query'): |
247 obj._setup_query() |
295 obj._setup_query() |
248 return obj |
296 return obj |
249 |
297 |
250 def convert_values(self, value, field): |
298 def convert_values(self, value, field, connection): |
251 """Convert the database-returned value into a type that is consistent |
299 """Convert the database-returned value into a type that is consistent |
252 across database backends. |
300 across database backends. |
253 |
301 |
254 By default, this defers to the underlying backend operations, but |
302 By default, this defers to the underlying backend operations, but |
255 it can be overridden by Query classes for specific backends. |
303 it can be overridden by Query classes for specific backends. |
256 """ |
304 """ |
257 return self.connection.ops.convert_values(value, field) |
305 return connection.ops.convert_values(value, field) |
258 |
306 |
259 def resolve_aggregate(self, value, aggregate): |
307 def resolve_aggregate(self, value, aggregate, connection): |
260 """Resolve the value of aggregates returned by the database to |
308 """Resolve the value of aggregates returned by the database to |
261 consistent (and reasonable) types. |
309 consistent (and reasonable) types. |
262 |
310 |
263 This is required because of the predisposition of certain backends |
311 This is required because of the predisposition of certain backends |
264 to return Decimal and long types when they are not needed. |
312 to return Decimal and long types when they are not needed. |
367 from subqueries import AggregateQuery |
385 from subqueries import AggregateQuery |
368 subquery = obj |
386 subquery = obj |
369 subquery.clear_ordering(True) |
387 subquery.clear_ordering(True) |
370 subquery.clear_limits() |
388 subquery.clear_limits() |
371 |
389 |
372 obj = AggregateQuery(obj.model, obj.connection) |
390 obj = AggregateQuery(obj.model) |
373 obj.add_subquery(subquery) |
391 obj.add_subquery(subquery, using=using) |
374 |
392 |
375 obj.add_count_column() |
393 obj.add_count_column() |
376 number = obj.get_aggregation()[None] |
394 number = obj.get_aggregation(using=using)[None] |
377 |
395 |
378 # Apply offset and limit constraints manually, since using LIMIT/OFFSET |
396 # Apply offset and limit constraints manually, since using LIMIT/OFFSET |
379 # in SQL (in variants that provide them) doesn't change the COUNT |
397 # in SQL (in variants that provide them) doesn't change the COUNT |
380 # output. |
398 # output. |
381 number = max(0, number - self.low_mark) |
399 number = max(0, number - self.low_mark) |
382 if self.high_mark is not None: |
400 if self.high_mark is not None: |
383 number = min(number, self.high_mark - self.low_mark) |
401 number = min(number, self.high_mark - self.low_mark) |
384 |
402 |
385 return number |
403 return number |
386 |
404 |
387 def as_sql(self, with_limits=True, with_col_aliases=False): |
405 def has_results(self, using): |
388 """ |
406 q = self.clone() |
389 Creates the SQL for this query. Returns the SQL string and list of |
407 q.add_extra({'a': 1}, None, None, None, None, None) |
390 parameters. |
408 q.select = [] |
391 |
409 q.select_fields = [] |
392 If 'with_limits' is False, any limit/offset information is not included |
410 q.default_cols = False |
393 in the query. |
411 q.select_related = False |
394 """ |
412 q.set_extra_mask(('a',)) |
395 self.pre_sql_setup() |
413 q.set_aggregate_mask(()) |
396 out_cols = self.get_columns(with_col_aliases) |
414 q.clear_ordering(True) |
397 ordering, ordering_group_by = self.get_ordering() |
415 q.set_limits(high=1) |
398 |
416 compiler = q.get_compiler(using=using) |
399 # This must come after 'select' and 'ordering' -- see docstring of |
417 return bool(compiler.execute_sql(SINGLE)) |
400 # get_from_clause() for details. |
|
401 from_, f_params = self.get_from_clause() |
|
402 |
|
403 qn = self.quote_name_unless_alias |
|
404 where, w_params = self.where.as_sql(qn=qn) |
|
405 having, h_params = self.having.as_sql(qn=qn) |
|
406 params = [] |
|
407 for val in self.extra_select.itervalues(): |
|
408 params.extend(val[1]) |
|
409 |
|
410 result = ['SELECT'] |
|
411 if self.distinct: |
|
412 result.append('DISTINCT') |
|
413 result.append(', '.join(out_cols + self.ordering_aliases)) |
|
414 |
|
415 result.append('FROM') |
|
416 result.extend(from_) |
|
417 params.extend(f_params) |
|
418 |
|
419 if where: |
|
420 result.append('WHERE %s' % where) |
|
421 params.extend(w_params) |
|
422 if self.extra_where: |
|
423 if not where: |
|
424 result.append('WHERE') |
|
425 else: |
|
426 result.append('AND') |
|
427 result.append(' AND '.join(self.extra_where)) |
|
428 |
|
429 grouping, gb_params = self.get_grouping() |
|
430 if grouping: |
|
431 if ordering: |
|
432 # If the backend can't group by PK (i.e., any database |
|
433 # other than MySQL), then any fields mentioned in the |
|
434 # ordering clause needs to be in the group by clause. |
|
435 if not self.connection.features.allows_group_by_pk: |
|
436 for col, col_params in ordering_group_by: |
|
437 if col not in grouping: |
|
438 grouping.append(str(col)) |
|
439 gb_params.extend(col_params) |
|
440 else: |
|
441 ordering = self.connection.ops.force_no_ordering() |
|
442 result.append('GROUP BY %s' % ', '.join(grouping)) |
|
443 params.extend(gb_params) |
|
444 |
|
445 if having: |
|
446 result.append('HAVING %s' % having) |
|
447 params.extend(h_params) |
|
448 |
|
449 if ordering: |
|
450 result.append('ORDER BY %s' % ', '.join(ordering)) |
|
451 |
|
452 if with_limits: |
|
453 if self.high_mark is not None: |
|
454 result.append('LIMIT %d' % (self.high_mark - self.low_mark)) |
|
455 if self.low_mark: |
|
456 if self.high_mark is None: |
|
457 val = self.connection.ops.no_limit_value() |
|
458 if val: |
|
459 result.append('LIMIT %d' % val) |
|
460 result.append('OFFSET %d' % self.low_mark) |
|
461 |
|
462 params.extend(self.extra_params) |
|
463 return ' '.join(result), tuple(params) |
|
464 |
|
465 def as_nested_sql(self): |
|
466 """ |
|
467 Perform the same functionality as the as_sql() method, returning an |
|
468 SQL string and parameters. However, the alias prefixes are bumped |
|
469 beforehand (in a copy -- the current query isn't changed) and any |
|
470 ordering is removed. |
|
471 |
|
472 Used when nesting this query inside another. |
|
473 """ |
|
474 obj = self.clone() |
|
475 obj.clear_ordering(True) |
|
476 obj.bump_prefix() |
|
477 return obj.as_sql() |
|
478 |
418 |
479 def combine(self, rhs, connector): |
419 def combine(self, rhs, connector): |
480 """ |
420 """ |
481 Merge the 'rhs' query into the current one (with any 'rhs' effects |
421 Merge the 'rhs' query into the current one (with any 'rhs' effects |
482 being applied *after* (that is, "to the right of") anything in the |
422 being applied *after* (that is, "to the right of") anything in the |
552 # really make sense (or return consistent value sets). Not worth |
492 # really make sense (or return consistent value sets). Not worth |
553 # the extra complexity when you can write a real query instead. |
493 # the extra complexity when you can write a real query instead. |
554 if self.extra and rhs.extra: |
494 if self.extra and rhs.extra: |
555 raise ValueError("When merging querysets using 'or', you " |
495 raise ValueError("When merging querysets using 'or', you " |
556 "cannot have extra(select=...) on both sides.") |
496 "cannot have extra(select=...) on both sides.") |
557 if self.extra_where and rhs.extra_where: |
|
558 raise ValueError("When merging querysets using 'or', you " |
|
559 "cannot have extra(where=...) on both sides.") |
|
560 self.extra.update(rhs.extra) |
497 self.extra.update(rhs.extra) |
561 extra_select_mask = set() |
498 extra_select_mask = set() |
562 if self.extra_select_mask is not None: |
499 if self.extra_select_mask is not None: |
563 extra_select_mask.update(self.extra_select_mask) |
500 extra_select_mask.update(self.extra_select_mask) |
564 if rhs.extra_select_mask is not None: |
501 if rhs.extra_select_mask is not None: |
565 extra_select_mask.update(rhs.extra_select_mask) |
502 extra_select_mask.update(rhs.extra_select_mask) |
566 if extra_select_mask: |
503 if extra_select_mask: |
567 self.set_extra_mask(extra_select_mask) |
504 self.set_extra_mask(extra_select_mask) |
568 self.extra_tables += rhs.extra_tables |
505 self.extra_tables += rhs.extra_tables |
569 self.extra_where += rhs.extra_where |
|
570 self.extra_params += rhs.extra_params |
|
571 |
506 |
572 # Ordering uses the 'rhs' ordering, unless it has none, in which case |
507 # Ordering uses the 'rhs' ordering, unless it has none, in which case |
573 # the current ordering is used. |
508 # the current ordering is used. |
574 self.order_by = rhs.order_by and rhs.order_by[:] or self.order_by |
509 self.order_by = rhs.order_by and rhs.order_by[:] or self.order_by |
575 self.extra_order_by = rhs.extra_order_by or self.extra_order_by |
510 self.extra_order_by = rhs.extra_order_by or self.extra_order_by |
576 |
|
577 def pre_sql_setup(self): |
|
578 """ |
|
579 Does any necessary class setup immediately prior to producing SQL. This |
|
580 is for things that can't necessarily be done in __init__ because we |
|
581 might not have all the pieces in place at that time. |
|
582 """ |
|
583 if not self.tables: |
|
584 self.join((None, self.model._meta.db_table, None, None)) |
|
585 if (not self.select and self.default_cols and not |
|
586 self.included_inherited_models): |
|
587 self.setup_inherited_models() |
|
588 if self.select_related and not self.related_select_cols: |
|
589 self.fill_related_selections() |
|
590 |
511 |
591 def deferred_to_data(self, target, callback): |
512 def deferred_to_data(self, target, callback): |
592 """ |
513 """ |
593 Converts the self.deferred_loading data structure to an alternate data |
514 Converts the self.deferred_loading data structure to an alternate data |
594 structure, describing the field that *will* be loaded. This is used to |
515 structure, describing the field that *will* be loaded. This is used to |
685 if table not in target: |
597 if table not in target: |
686 target[table] = set() |
598 target[table] = set() |
687 for field in fields: |
599 for field in fields: |
688 target[table].add(field.column) |
600 target[table].add(field.column) |
689 |
601 |
690 def get_columns(self, with_aliases=False): |
|
691 """ |
|
692 Returns the list of columns to use in the select statement. If no |
|
693 columns have been specified, returns all columns relating to fields in |
|
694 the model. |
|
695 |
|
696 If 'with_aliases' is true, any column names that are duplicated |
|
697 (without the table names) are given unique aliases. This is needed in |
|
698 some cases to avoid ambiguity with nested queries. |
|
699 """ |
|
700 qn = self.quote_name_unless_alias |
|
701 qn2 = self.connection.ops.quote_name |
|
702 result = ['(%s) AS %s' % (col[0], qn2(alias)) for alias, col in self.extra_select.iteritems()] |
|
703 aliases = set(self.extra_select.keys()) |
|
704 if with_aliases: |
|
705 col_aliases = aliases.copy() |
|
706 else: |
|
707 col_aliases = set() |
|
708 if self.select: |
|
709 only_load = self.deferred_to_columns() |
|
710 for col in self.select: |
|
711 if isinstance(col, (list, tuple)): |
|
712 alias, column = col |
|
713 table = self.alias_map[alias][TABLE_NAME] |
|
714 if table in only_load and col not in only_load[table]: |
|
715 continue |
|
716 r = '%s.%s' % (qn(alias), qn(column)) |
|
717 if with_aliases: |
|
718 if col[1] in col_aliases: |
|
719 c_alias = 'Col%d' % len(col_aliases) |
|
720 result.append('%s AS %s' % (r, c_alias)) |
|
721 aliases.add(c_alias) |
|
722 col_aliases.add(c_alias) |
|
723 else: |
|
724 result.append('%s AS %s' % (r, qn2(col[1]))) |
|
725 aliases.add(r) |
|
726 col_aliases.add(col[1]) |
|
727 else: |
|
728 result.append(r) |
|
729 aliases.add(r) |
|
730 col_aliases.add(col[1]) |
|
731 else: |
|
732 result.append(col.as_sql(quote_func=qn)) |
|
733 |
|
734 if hasattr(col, 'alias'): |
|
735 aliases.add(col.alias) |
|
736 col_aliases.add(col.alias) |
|
737 |
|
738 elif self.default_cols: |
|
739 cols, new_aliases = self.get_default_columns(with_aliases, |
|
740 col_aliases) |
|
741 result.extend(cols) |
|
742 aliases.update(new_aliases) |
|
743 |
|
744 result.extend([ |
|
745 '%s%s' % ( |
|
746 aggregate.as_sql(quote_func=qn), |
|
747 alias is not None and ' AS %s' % qn(alias) or '' |
|
748 ) |
|
749 for alias, aggregate in self.aggregate_select.items() |
|
750 ]) |
|
751 |
|
752 for table, col in self.related_select_cols: |
|
753 r = '%s.%s' % (qn(table), qn(col)) |
|
754 if with_aliases and col in col_aliases: |
|
755 c_alias = 'Col%d' % len(col_aliases) |
|
756 result.append('%s AS %s' % (r, c_alias)) |
|
757 aliases.add(c_alias) |
|
758 col_aliases.add(c_alias) |
|
759 else: |
|
760 result.append(r) |
|
761 aliases.add(r) |
|
762 col_aliases.add(col) |
|
763 |
|
764 self._select_aliases = aliases |
|
765 return result |
|
766 |
|
767 def get_default_columns(self, with_aliases=False, col_aliases=None, |
|
768 start_alias=None, opts=None, as_pairs=False): |
|
769 """ |
|
770 Computes the default columns for selecting every field in the base |
|
771 model. Will sometimes be called to pull in related models (e.g. via |
|
772 select_related), in which case "opts" and "start_alias" will be given |
|
773 to provide a starting point for the traversal. |
|
774 |
|
775 Returns a list of strings, quoted appropriately for use in SQL |
|
776 directly, as well as a set of aliases used in the select statement (if |
|
777 'as_pairs' is True, returns a list of (alias, col_name) pairs instead |
|
778 of strings as the first component and None as the second component). |
|
779 """ |
|
780 result = [] |
|
781 if opts is None: |
|
782 opts = self.model._meta |
|
783 qn = self.quote_name_unless_alias |
|
784 qn2 = self.connection.ops.quote_name |
|
785 aliases = set() |
|
786 only_load = self.deferred_to_columns() |
|
787 # Skip all proxy to the root proxied model |
|
788 proxied_model = get_proxied_model(opts) |
|
789 |
|
790 if start_alias: |
|
791 seen = {None: start_alias} |
|
792 for field, model in opts.get_fields_with_model(): |
|
793 if start_alias: |
|
794 try: |
|
795 alias = seen[model] |
|
796 except KeyError: |
|
797 if model is proxied_model: |
|
798 alias = start_alias |
|
799 else: |
|
800 link_field = opts.get_ancestor_link(model) |
|
801 alias = self.join((start_alias, model._meta.db_table, |
|
802 link_field.column, model._meta.pk.column)) |
|
803 seen[model] = alias |
|
804 else: |
|
805 # If we're starting from the base model of the queryset, the |
|
806 # aliases will have already been set up in pre_sql_setup(), so |
|
807 # we can save time here. |
|
808 alias = self.included_inherited_models[model] |
|
809 table = self.alias_map[alias][TABLE_NAME] |
|
810 if table in only_load and field.column not in only_load[table]: |
|
811 continue |
|
812 if as_pairs: |
|
813 result.append((alias, field.column)) |
|
814 aliases.add(alias) |
|
815 continue |
|
816 if with_aliases and field.column in col_aliases: |
|
817 c_alias = 'Col%d' % len(col_aliases) |
|
818 result.append('%s.%s AS %s' % (qn(alias), |
|
819 qn2(field.column), c_alias)) |
|
820 col_aliases.add(c_alias) |
|
821 aliases.add(c_alias) |
|
822 else: |
|
823 r = '%s.%s' % (qn(alias), qn2(field.column)) |
|
824 result.append(r) |
|
825 aliases.add(r) |
|
826 if with_aliases: |
|
827 col_aliases.add(field.column) |
|
828 return result, aliases |
|
829 |
|
830 def get_from_clause(self): |
|
831 """ |
|
832 Returns a list of strings that are joined together to go after the |
|
833 "FROM" part of the query, as well as a list any extra parameters that |
|
834 need to be included. Sub-classes, can override this to create a |
|
835 from-clause via a "select". |
|
836 |
|
837 This should only be called after any SQL construction methods that |
|
838 might change the tables we need. This means the select columns and |
|
839 ordering must be done first. |
|
840 """ |
|
841 result = [] |
|
842 qn = self.quote_name_unless_alias |
|
843 qn2 = self.connection.ops.quote_name |
|
844 first = True |
|
845 for alias in self.tables: |
|
846 if not self.alias_refcount[alias]: |
|
847 continue |
|
848 try: |
|
849 name, alias, join_type, lhs, lhs_col, col, nullable = self.alias_map[alias] |
|
850 except KeyError: |
|
851 # Extra tables can end up in self.tables, but not in the |
|
852 # alias_map if they aren't in a join. That's OK. We skip them. |
|
853 continue |
|
854 alias_str = (alias != name and ' %s' % alias or '') |
|
855 if join_type and not first: |
|
856 result.append('%s %s%s ON (%s.%s = %s.%s)' |
|
857 % (join_type, qn(name), alias_str, qn(lhs), |
|
858 qn2(lhs_col), qn(alias), qn2(col))) |
|
859 else: |
|
860 connector = not first and ', ' or '' |
|
861 result.append('%s%s%s' % (connector, qn(name), alias_str)) |
|
862 first = False |
|
863 for t in self.extra_tables: |
|
864 alias, unused = self.table_alias(t) |
|
865 # Only add the alias if it's not already present (the table_alias() |
|
866 # calls increments the refcount, so an alias refcount of one means |
|
867 # this is the only reference. |
|
868 if alias not in self.alias_map or self.alias_refcount[alias] == 1: |
|
869 connector = not first and ', ' or '' |
|
870 result.append('%s%s' % (connector, qn(alias))) |
|
871 first = False |
|
872 return result, [] |
|
873 |
|
874 def get_grouping(self): |
|
875 """ |
|
876 Returns a tuple representing the SQL elements in the "group by" clause. |
|
877 """ |
|
878 qn = self.quote_name_unless_alias |
|
879 result, params = [], [] |
|
880 if self.group_by is not None: |
|
881 group_by = self.group_by or [] |
|
882 |
|
883 extra_selects = [] |
|
884 for extra_select, extra_params in self.extra_select.itervalues(): |
|
885 extra_selects.append(extra_select) |
|
886 params.extend(extra_params) |
|
887 for col in group_by + self.related_select_cols + extra_selects: |
|
888 if isinstance(col, (list, tuple)): |
|
889 result.append('%s.%s' % (qn(col[0]), qn(col[1]))) |
|
890 elif hasattr(col, 'as_sql'): |
|
891 result.append(col.as_sql(qn)) |
|
892 else: |
|
893 result.append(str(col)) |
|
894 return result, params |
|
895 |
|
    def get_ordering(self):
        """
        Returns a tuple containing a list representing the SQL elements in the
        "order by" clause, and the list of SQL elements that need to be added
        to the GROUP BY clause as a result of the ordering.

        Also sets the ordering_aliases attribute on this instance to a list of
        extra aliases needed in the select.

        Determining the ordering SQL can change the tables we need to include,
        so this should be run *before* get_from_clause().
        """
        # Ordering sources, in priority order: extra(order_by=...), an explicit
        # order_by() call (which also applies when default ordering has been
        # disabled), then the model's Meta.ordering as the fallback.
        if self.extra_order_by:
            ordering = self.extra_order_by
        elif not self.default_ordering:
            ordering = self.order_by
        else:
            ordering = self.order_by or self.model._meta.ordering
        qn = self.quote_name_unless_alias
        qn2 = self.connection.ops.quote_name
        distinct = self.distinct
        select_aliases = self._select_aliases
        result = []
        group_by = []
        ordering_aliases = []
        # standard_ordering is False when the queryset has been reversed(), in
        # which case every direction in the spec is flipped.
        if self.standard_ordering:
            asc, desc = ORDER_DIR['ASC']
        else:
            asc, desc = ORDER_DIR['DESC']

        # It's possible, due to model inheritance, that normal usage might try
        # to include the same field more than once in the ordering. We track
        # the table/column pairs we use and discard any after the first use.
        processed_pairs = set()

        for field in ordering:
            if field == '?':
                # Random ordering; backend-specific SQL function.
                result.append(self.connection.ops.random_function_sql())
                continue
            if isinstance(field, int):
                # An integer is a 1-based ordinal referring to a select
                # column; the sign carries the direction.
                if field < 0:
                    order = desc
                    field = -field
                else:
                    order = asc
                result.append('%s %s' % (field, order))
                group_by.append((field, []))
                continue
            col, order = get_order_dir(field, asc)
            if col in self.aggregate_select:
                # Aggregates are already aliased in the select; order by the
                # alias directly.
                result.append('%s %s' % (col, order))
                continue
            if '.' in field:
                # This came in through an extra(order_by=...) addition. Pass it
                # on verbatim.
                table, col = col.split('.', 1)
                if (table, col) not in processed_pairs:
                    elt = '%s.%s' % (qn(table), col)
                    processed_pairs.add((table, col))
                    if not distinct or elt in select_aliases:
                        result.append('%s %s' % (elt, order))
                        group_by.append((elt, []))
            elif get_order_dir(field)[0] not in self.extra_select:
                # 'col' is of the form 'field' or 'field1__field2' or
                # '-field1__field2__field', etc.
                for table, col, order in self.find_ordering_name(field,
                        self.model._meta, default_order=asc):
                    if (table, col) not in processed_pairs:
                        elt = '%s.%s' % (qn(table), qn2(col))
                        processed_pairs.add((table, col))
                        # Under DISTINCT, any ordering column not already in
                        # the select list must be added to it (via
                        # ordering_aliases) for the SQL to be valid.
                        if distinct and elt not in select_aliases:
                            ordering_aliases.append(elt)
                        result.append('%s %s' % (elt, order))
                        group_by.append((elt, []))
            else:
                # The name refers to an extra(select=...) entry.
                elt = qn2(col)
                if distinct and col not in select_aliases:
                    ordering_aliases.append(elt)
                result.append('%s %s' % (elt, order))
                group_by.append(self.extra_select[col])
        self.ordering_aliases = ordering_aliases
        return result, group_by
|
978 |
|
    def find_ordering_name(self, name, opts, alias=None, default_order='ASC',
            already_seen=None):
        """
        Returns the table alias (the name might be ambiguous, the alias will
        not be) and column name for ordering by the given 'name' parameter.
        The 'name' is of the form 'field1__field2__...__fieldN'.

        Returns a list of (alias, column, order) triples. When the name ends
        on a relation with its own default ordering, the related model's
        ordering is expanded recursively; 'already_seen' tracks the join
        tuples visited so far to detect ordering cycles.
        """
        name, order = get_order_dir(name, default_order)
        pieces = name.split(LOOKUP_SEP)
        if not alias:
            alias = self.get_initial_alias()
        field, target, opts, joins, last, extra = self.setup_joins(pieces,
                opts, alias, False)
        alias = joins[-1]
        col = target.column
        if not field.rel:
            # To avoid inadvertent trimming of a necessary alias, use the
            # refcount to show that we are referencing a non-relation field on
            # the model.
            self.ref_alias(alias)

        # Must use left outer joins for nullable fields and their relations.
        self.promote_alias_chain(joins,
            self.alias_map[joins[0]][JOIN_TYPE] == self.LOUTER)

        # If we get to this point and the field is a relation to another model,
        # append the default ordering for that model.
        if field.rel and len(joins) > 1 and opts.ordering:
            # Firstly, avoid infinite loops.
            if not already_seen:
                already_seen = set()
            join_tuple = tuple([self.alias_map[j][TABLE_NAME] for j in joins])
            if join_tuple in already_seen:
                raise FieldError('Infinite loop caused by ordering.')
            already_seen.add(join_tuple)

            # Recurse into the related model's Meta.ordering, carrying the
            # current direction and cycle-detection state along.
            results = []
            for item in opts.ordering:
                results.extend(self.find_ordering_name(item, opts, alias,
                        order, already_seen))
            return results

        if alias:
            # We have to do the same "final join" optimisation as in
            # add_filter, since the final column might not otherwise be part of
            # the select set (so we can't order on it).
            while 1:
                join = self.alias_map[alias]
                if col != join[RHS_JOIN_COL]:
                    break
                # The join's right-hand column is the one we want, so the
                # join itself is redundant for ordering: drop our reference
                # and order on the left-hand side instead.
                self.unref_alias(alias)
                alias = join[LHS_ALIAS]
                col = join[LHS_JOIN_COL]
        return [(alias, col, order)]
|
1033 |
602 |
1034 def table_alias(self, table_name, create=False): |
603 def table_alias(self, table_name, create=False): |
1035 """ |
604 """ |
1036 Returns a table alias for the given table_name and whether this is a |
605 Returns a table alias for the given table_name and whether this is a |
1037 new alias or not. |
606 new alias or not. |
1331 for key, alias in self.included_inherited_models.items(): |
900 for key, alias in self.included_inherited_models.items(): |
1332 if key: |
901 if key: |
1333 self.unref_alias(alias) |
902 self.unref_alias(alias) |
1334 self.included_inherited_models = {} |
903 self.included_inherited_models = {} |
1335 |
904 |
1336 def fill_related_selections(self, opts=None, root_alias=None, cur_depth=1, |
|
1337 used=None, requested=None, restricted=None, nullable=None, |
|
1338 dupe_set=None, avoid_set=None): |
|
1339 """ |
|
1340 Fill in the information needed for a select_related query. The current |
|
1341 depth is measured as the number of connections away from the root model |
|
1342 (for example, cur_depth=1 means we are looking at models with direct |
|
1343 connections to the root model). |
|
1344 """ |
|
1345 if not restricted and self.max_depth and cur_depth > self.max_depth: |
|
1346 # We've recursed far enough; bail out. |
|
1347 return |
|
1348 |
|
1349 if not opts: |
|
1350 opts = self.get_meta() |
|
1351 root_alias = self.get_initial_alias() |
|
1352 self.related_select_cols = [] |
|
1353 self.related_select_fields = [] |
|
1354 if not used: |
|
1355 used = set() |
|
1356 if dupe_set is None: |
|
1357 dupe_set = set() |
|
1358 if avoid_set is None: |
|
1359 avoid_set = set() |
|
1360 orig_dupe_set = dupe_set |
|
1361 |
|
1362 # Setup for the case when only particular related fields should be |
|
1363 # included in the related selection. |
|
1364 if requested is None and restricted is not False: |
|
1365 if isinstance(self.select_related, dict): |
|
1366 requested = self.select_related |
|
1367 restricted = True |
|
1368 else: |
|
1369 restricted = False |
|
1370 |
|
1371 for f, model in opts.get_fields_with_model(): |
|
1372 if not select_related_descend(f, restricted, requested): |
|
1373 continue |
|
1374 # The "avoid" set is aliases we want to avoid just for this |
|
1375 # particular branch of the recursion. They aren't permanently |
|
1376 # forbidden from reuse in the related selection tables (which is |
|
1377 # what "used" specifies). |
|
1378 avoid = avoid_set.copy() |
|
1379 dupe_set = orig_dupe_set.copy() |
|
1380 table = f.rel.to._meta.db_table |
|
1381 if nullable or f.null: |
|
1382 promote = True |
|
1383 else: |
|
1384 promote = False |
|
1385 if model: |
|
1386 int_opts = opts |
|
1387 alias = root_alias |
|
1388 alias_chain = [] |
|
1389 for int_model in opts.get_base_chain(model): |
|
1390 # Proxy model have elements in base chain |
|
1391 # with no parents, assign the new options |
|
1392 # object and skip to the next base in that |
|
1393 # case |
|
1394 if not int_opts.parents[int_model]: |
|
1395 int_opts = int_model._meta |
|
1396 continue |
|
1397 lhs_col = int_opts.parents[int_model].column |
|
1398 dedupe = lhs_col in opts.duplicate_targets |
|
1399 if dedupe: |
|
1400 avoid.update(self.dupe_avoidance.get(id(opts), lhs_col), |
|
1401 ()) |
|
1402 dupe_set.add((opts, lhs_col)) |
|
1403 int_opts = int_model._meta |
|
1404 alias = self.join((alias, int_opts.db_table, lhs_col, |
|
1405 int_opts.pk.column), exclusions=used, |
|
1406 promote=promote) |
|
1407 alias_chain.append(alias) |
|
1408 for (dupe_opts, dupe_col) in dupe_set: |
|
1409 self.update_dupe_avoidance(dupe_opts, dupe_col, alias) |
|
1410 if self.alias_map[root_alias][JOIN_TYPE] == self.LOUTER: |
|
1411 self.promote_alias_chain(alias_chain, True) |
|
1412 else: |
|
1413 alias = root_alias |
|
1414 |
|
1415 dedupe = f.column in opts.duplicate_targets |
|
1416 if dupe_set or dedupe: |
|
1417 avoid.update(self.dupe_avoidance.get((id(opts), f.column), ())) |
|
1418 if dedupe: |
|
1419 dupe_set.add((opts, f.column)) |
|
1420 |
|
1421 alias = self.join((alias, table, f.column, |
|
1422 f.rel.get_related_field().column), |
|
1423 exclusions=used.union(avoid), promote=promote) |
|
1424 used.add(alias) |
|
1425 columns, aliases = self.get_default_columns(start_alias=alias, |
|
1426 opts=f.rel.to._meta, as_pairs=True) |
|
1427 self.related_select_cols.extend(columns) |
|
1428 if self.alias_map[alias][JOIN_TYPE] == self.LOUTER: |
|
1429 self.promote_alias_chain(aliases, True) |
|
1430 self.related_select_fields.extend(f.rel.to._meta.fields) |
|
1431 if restricted: |
|
1432 next = requested.get(f.name, {}) |
|
1433 else: |
|
1434 next = False |
|
1435 if f.null is not None: |
|
1436 new_nullable = f.null |
|
1437 else: |
|
1438 new_nullable = None |
|
1439 for dupe_opts, dupe_col in dupe_set: |
|
1440 self.update_dupe_avoidance(dupe_opts, dupe_col, alias) |
|
1441 self.fill_related_selections(f.rel.to._meta, alias, cur_depth + 1, |
|
1442 used, next, restricted, new_nullable, dupe_set, avoid) |
|
1443 |
905 |
1444 def add_aggregate(self, aggregate, model, alias, is_summary): |
906 def add_aggregate(self, aggregate, model, alias, is_summary): |
1445 """ |
907 """ |
1446 Adds a single aggregate expression to the Query |
908 Adds a single aggregate expression to the Query |
1447 """ |
909 """ |
2341 select_alias = join_info[RHS_ALIAS] |
1791 select_alias = join_info[RHS_ALIAS] |
2342 select_col = join_info[RHS_JOIN_COL] |
1792 select_col = join_info[RHS_JOIN_COL] |
2343 self.select = [(select_alias, select_col)] |
1793 self.select = [(select_alias, select_col)] |
2344 self.remove_inherited_models() |
1794 self.remove_inherited_models() |
2345 |
1795 |
2346 def execute_sql(self, result_type=MULTI): |
|
2347 """ |
|
2348 Run the query against the database and returns the result(s). The |
|
2349 return value is a single data item if result_type is SINGLE, or an |
|
2350 iterator over the results if the result_type is MULTI. |
|
2351 |
|
2352 result_type is either MULTI (use fetchmany() to retrieve all rows), |
|
2353 SINGLE (only retrieve a single row), or None. In this last case, the |
|
2354 cursor is returned if any query is executed, since it's used by |
|
2355 subclasses such as InsertQuery). It's possible, however, that no query |
|
2356 is needed, as the filters describe an empty set. In that case, None is |
|
2357 returned, to avoid any unnecessary database interaction. |
|
2358 """ |
|
2359 try: |
|
2360 sql, params = self.as_sql() |
|
2361 if not sql: |
|
2362 raise EmptyResultSet |
|
2363 except EmptyResultSet: |
|
2364 if result_type == MULTI: |
|
2365 return empty_iter() |
|
2366 else: |
|
2367 return |
|
2368 cursor = self.connection.cursor() |
|
2369 cursor.execute(sql, params) |
|
2370 |
|
2371 if not result_type: |
|
2372 return cursor |
|
2373 if result_type == SINGLE: |
|
2374 if self.ordering_aliases: |
|
2375 return cursor.fetchone()[:-len(self.ordering_aliases)] |
|
2376 return cursor.fetchone() |
|
2377 |
|
2378 # The MULTI case. |
|
2379 if self.ordering_aliases: |
|
2380 result = order_modified_iter(cursor, len(self.ordering_aliases), |
|
2381 self.connection.features.empty_fetchmany_value) |
|
2382 else: |
|
2383 result = iter((lambda: cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE)), |
|
2384 self.connection.features.empty_fetchmany_value) |
|
2385 if not self.connection.features.can_use_chunked_reads: |
|
2386 # If we are using non-chunked reads, we return the same data |
|
2387 # structure as normally, but ensure it is all read into memory |
|
2388 # before going any further. |
|
2389 return list(result) |
|
2390 return result |
|
2391 |
|
# Use the backend's custom Query class if it defines one. Otherwise, use the
# default.
# NOTE(review): the import header of this file now pulls in ``connections`` /
# ``DEFAULT_DB_ALIAS`` rather than ``connection`` -- confirm that
# ``connection`` is still in scope at module level, otherwise this switch
# raises NameError at import time.
if connection.features.uses_custom_query_class:
    # The backend supplies a factory that wraps/extends the default class.
    Query = connection.ops.query_class(BaseQuery)
else:
    Query = BaseQuery
|
2398 |
1796 |
2399 def get_order_dir(field, default='ASC'): |
1797 def get_order_dir(field, default='ASC'): |
2400 """ |
1798 """ |
2401 Returns the field name and direction for an order specification. For |
1799 Returns the field name and direction for an order specification. For |
2402 example, '-foo' is returned as ('foo', 'DESC'). |
1800 example, '-foo' is returned as ('foo', 'DESC'). |