Coverage for /var/srv/projects/api.amasfac.comuna18.com/tmp/venv/lib/python3.9/site-packages/django/db/models/sql/compiler.py: 45%
915 statements
« prev ^ index » next coverage.py v6.4.4, created at 2023-07-17 14:22 -0600
1import collections
2import json
3import re
4from functools import partial
5from itertools import chain
7from django.core.exceptions import EmptyResultSet, FieldError
8from django.db import DatabaseError, NotSupportedError
9from django.db.models.constants import LOOKUP_SEP
10from django.db.models.expressions import F, OrderBy, RawSQL, Ref, Value
11from django.db.models.functions import Cast, Random
12from django.db.models.query_utils import select_related_descend
13from django.db.models.sql.constants import (
14 CURSOR,
15 GET_ITERATOR_CHUNK_SIZE,
16 MULTI,
17 NO_RESULTS,
18 ORDER_DIR,
19 SINGLE,
20)
21from django.db.models.sql.query import Query, get_order_dir
22from django.db.transaction import TransactionManagementError
23from django.utils.functional import cached_property
24from django.utils.hashable import make_hashable
25from django.utils.regex_helper import _lazy_re_compile
class SQLCompiler:
    # Regex used to strip the trailing ASC/DESC direction from a compiled
    # ORDER BY expression so duplicates can be detected regardless of
    # direction. Multiline ordering SQL clause may appear from RawSQL, hence
    # MULTILINE | DOTALL.
    ordering_parts = _lazy_re_compile(
        r"^(.*)\s(?:ASC|DESC).*",
        re.MULTILINE | re.DOTALL,
    )
    def __init__(self, query, connection, using, elide_empty=True):
        """
        Initialize the compiler for ``query`` against ``connection``.

        ``using`` is the database alias. When ``elide_empty`` is True,
        queries known to produce an empty result may raise EmptyResultSet
        instead of being executed.
        """
        self.query = query
        self.connection = connection
        self.using = using
        # Some queries, e.g. coalesced aggregation, need to be executed even if
        # they would return an empty result set.
        self.elide_empty = elide_empty
        # Cache for quote_name_unless_alias(); "*" must never be quoted.
        self.quote_cache = {"*": "*"}
        # The select, klass_info, and annotations are needed by QuerySet.iterator()
        # these are set as a side-effect of executing the query. Note that we calculate
        # separately a list of extra select columns needed for grammatical correctness
        # of the query, but these columns are not included in self.select.
        self.select = None
        self.annotation_col_map = None
        self.klass_info = None
        # Set by _order_by_pairs() when Meta.ordering is used.
        self._meta_ordering = None
    def setup_query(self):
        """
        Populate self.select, self.klass_info, and self.annotation_col_map
        (via get_select()) and record the column count. Ensures the query has
        an initial alias when no table alias is referenced yet.
        """
        if all(self.query.alias_refcount[a] == 0 for a in self.query.alias_map):
            self.query.get_initial_alias()
        self.select, self.klass_info, self.annotation_col_map = self.get_select()
        self.col_count = len(self.select)
    def pre_sql_setup(self):
        """
        Do any necessary class setup immediately prior to producing SQL. This
        is for things that can't necessarily be done in __init__ because we
        might not have all the pieces in place at that time.

        Return a (extra_select, order_by, group_by) triple used by as_sql().
        """
        self.setup_query()
        # Ordering must be resolved before the WHERE/HAVING split and the
        # GROUP BY computation, since both consume it below.
        order_by = self.get_order_by()
        self.where, self.having = self.query.where.split_having()
        extra_select = self.get_extra_select(order_by, self.select)
        self.has_extra_select = bool(extra_select)
        group_by = self.get_group_by(self.select + extra_select, order_by)
        return extra_select, order_by, group_by
    def get_group_by(self, select, order_by):
        """
        Return a list of 2-tuples of form (sql, params) for the GROUP BY
        clause, deduplicated by (sql, hashable params).

        The logic of what exactly the GROUP BY clause contains is hard
        to describe in other words than "if it passes the test suite,
        then it is correct".
        """
        # Some examples:
        #     SomeModel.objects.annotate(Count('somecol'))
        #     GROUP BY: all fields of the model
        #
        #    SomeModel.objects.values('name').annotate(Count('somecol'))
        #    GROUP BY: name
        #
        #    SomeModel.objects.annotate(Count('somecol')).values('name')
        #    GROUP BY: all cols of the model
        #
        #    SomeModel.objects.values('name', 'pk')
        #    .annotate(Count('somecol')).values('pk')
        #    GROUP BY: name, pk
        #
        #    SomeModel.objects.values('name').annotate(Count('somecol')).values('pk')
        #    GROUP BY: name, pk
        #
        # In fact, the self.query.group_by is the minimal set to GROUP BY. It
        # can't be ever restricted to a smaller set, but additional columns in
        # HAVING, ORDER BY, and SELECT clauses are added to it. Unfortunately
        # the end result is that it is impossible to force the query to have
        # a chosen GROUP BY clause - you can almost do this by using the form:
        #     .values(*wanted_cols).annotate(AnAggregate())
        # but any later annotations, extra selects, values calls that
        # refer some column outside of the wanted_cols, order_by, or even
        # filter calls can alter the GROUP BY clause.

        # The query.group_by is either None (no GROUP BY at all), True
        # (group by select fields), or a list of expressions to be added
        # to the group by.
        if self.query.group_by is None:
            return []
        expressions = []
        if self.query.group_by is not True:
            # If the group by is set to a list (by .values() call most likely),
            # then we need to add everything in it to the GROUP BY clause.
            # Backwards compatibility hack for setting query.group_by. Remove
            # when we have public API way of forcing the GROUP BY clause.
            # Converts string references to expressions.
            for expr in self.query.group_by:
                if not hasattr(expr, "as_sql"):
                    expressions.append(self.query.resolve_ref(expr))
                else:
                    expressions.append(expr)
        # Note that even if the group_by is set, it is only the minimal
        # set to group by. So, we need to add cols in select, order_by, and
        # having into the select in any case.
        ref_sources = {expr.source for expr in expressions if isinstance(expr, Ref)}
        for expr, _, _ in select:
            # Skip members of the select clause that are already included
            # by reference.
            if expr in ref_sources:
                continue
            cols = expr.get_group_by_cols()
            for col in cols:
                expressions.append(col)
        if not self._meta_ordering:
            for expr, (sql, params, is_ref) in order_by:
                # Skip references to the SELECT clause, as all expressions in
                # the SELECT clause are already part of the GROUP BY.
                if not is_ref:
                    expressions.extend(expr.get_group_by_cols())
        having_group_by = self.having.get_group_by_cols() if self.having else ()
        for expr in having_group_by:
            expressions.append(expr)
        result = []
        seen = set()
        expressions = self.collapse_group_by(expressions, having_group_by)

        for expr in expressions:
            sql, params = self.compile(expr)
            sql, params = expr.select_format(self, sql, params)
            # Params may be unhashable (lists), so hash a converted form.
            params_hash = make_hashable(params)
            if (sql, params_hash) not in seen:
                result.append((sql, params))
                seen.add((sql, params_hash))
        return result
    def collapse_group_by(self, expressions, having):
        """
        Reduce the GROUP BY expression list on backends that allow grouping
        by (selected) primary keys, keeping HAVING expressions intact.
        Return the (possibly) reduced list of expressions.
        """
        # If the DB can group by primary key, then group by the primary key of
        # query's main model. Note that for PostgreSQL the GROUP BY clause must
        # include the primary key of every table, but for MySQL it is enough to
        # have the main table's primary key.
        if self.connection.features.allows_group_by_pk:
            # Determine if the main model's primary key is in the query.
            pk = None
            for expr in expressions:
                # Is this a reference to query's base table primary key? If the
                # expression isn't a Col-like, then skip the expression.
                if (
                    getattr(expr, "target", None) == self.query.model._meta.pk
                    and getattr(expr, "alias", None) == self.query.base_table
                ):
                    pk = expr
                    break
            # If the main model's primary key is in the query, group by that
            # field, HAVING expressions, and expressions associated with tables
            # that don't have a primary key included in the grouped columns.
            if pk:
                pk_aliases = {
                    expr.alias
                    for expr in expressions
                    if hasattr(expr, "target") and expr.target.primary_key
                }
                expressions = [pk] + [
                    expr
                    for expr in expressions
                    if expr in having
                    or (
                        getattr(expr, "alias", None) is not None
                        and expr.alias not in pk_aliases
                    )
                ]
        elif self.connection.features.allows_group_by_selected_pks:
            # Filter out all expressions associated with a table's primary key
            # present in the grouped columns. This is done by identifying all
            # tables that have their primary key included in the grouped
            # columns and removing non-primary key columns referring to them.
            # Unmanaged models are excluded because they could be representing
            # database views on which the optimization might not be allowed.
            pks = {
                expr
                for expr in expressions
                if (
                    hasattr(expr, "target")
                    and expr.target.primary_key
                    and self.connection.features.allows_group_by_selected_pks_on_model(
                        expr.target.model
                    )
                )
            }
            aliases = {expr.alias for expr in pks}
            expressions = [
                expr
                for expr in expressions
                if expr in pks or getattr(expr, "alias", None) not in aliases
            ]
        return expressions
    def get_select(self):
        """
        Return three values:
        - a list of 3-tuples of (expression, (sql, params), alias)
        - a klass_info structure,
        - a dictionary of annotations

        The (sql, params) is what the expression will produce, and alias is the
        "AS alias" for the column (possibly None).

        The klass_info structure contains the following information:
        - The base model of the query.
        - Which columns for that model are present in the query (by
          position of the select clause).
        - related_klass_infos: [f, klass_info] to descent into

        The annotations is a dictionary of {'attname': column position} values.
        """
        select = []
        klass_info = None
        annotations = {}
        select_idx = 0
        # extra(select=...) columns come first.
        for alias, (sql, params) in self.query.extra_select.items():
            annotations[alias] = select_idx
            select.append((RawSQL(sql, params), alias))
            select_idx += 1
        # A query may use default model columns or an explicit select list,
        # never both.
        assert not (self.query.select and self.query.default_cols)
        if self.query.default_cols:
            cols = self.get_default_columns()
        else:
            # self.query.select is a special case. These columns never go to
            # any model.
            cols = self.query.select
        if cols:
            select_list = []
            for col in cols:
                select_list.append(select_idx)
                select.append((col, None))
                select_idx += 1
            klass_info = {
                "model": self.query.model,
                "select_fields": select_list,
            }
        # Annotations come last.
        for alias, annotation in self.query.annotation_select.items():
            annotations[alias] = select_idx
            select.append((annotation, alias))
            select_idx += 1

        if self.query.select_related:
            related_klass_infos = self.get_related_selections(select)
            klass_info["related_klass_infos"] = related_klass_infos

            def get_select_from_parent(klass_info):
                # Prepend parent select positions for children selected
                # from a parent model.
                for ki in klass_info["related_klass_infos"]:
                    if ki["from_parent"]:
                        ki["select_fields"] = (
                            klass_info["select_fields"] + ki["select_fields"]
                        )
                    get_select_from_parent(ki)

            get_select_from_parent(klass_info)

        ret = []
        for col, alias in select:
            try:
                sql, params = self.compile(col)
            except EmptyResultSet:
                empty_result_set_value = getattr(
                    col, "empty_result_set_value", NotImplemented
                )
                if empty_result_set_value is NotImplemented:
                    # Select a predicate that's always False.
                    sql, params = "0", ()
                else:
                    sql, params = self.compile(Value(empty_result_set_value))
            else:
                sql, params = col.select_format(self, sql, params)
            ret.append((col, (sql, params), alias))
        return ret, klass_info, annotations
    def _order_by_pairs(self):
        """
        Generate (OrderBy expression, is_ref) pairs for the query's ordering.

        is_ref is True when the expression is a reference (Ref) to a column
        already present in the SELECT clause.
        """
        if self.query.extra_order_by:
            ordering = self.query.extra_order_by
        elif not self.query.default_ordering:
            ordering = self.query.order_by
        elif self.query.order_by:
            ordering = self.query.order_by
        elif self.query.get_meta().ordering:
            ordering = self.query.get_meta().ordering
            # Remember that Meta.ordering was used; as_sql() drops ORDER BY
            # when grouping in that case.
            self._meta_ordering = ordering
        else:
            ordering = []
        if self.query.standard_ordering:
            default_order, _ = ORDER_DIR["ASC"]
        else:
            default_order, _ = ORDER_DIR["DESC"]

        for field in ordering:
            if hasattr(field, "resolve_expression"):
                if isinstance(field, Value):
                    # output_field must be resolved for constants.
                    field = Cast(field, field.output_field)
                if not isinstance(field, OrderBy):
                    field = field.asc()
                if not self.query.standard_ordering:
                    field = field.copy()
                    field.reverse_ordering()
                yield field, False
                continue
            if field == "?":  # random
                yield OrderBy(Random()), False
                continue

            col, order = get_order_dir(field, default_order)
            descending = order == "DESC"

            if col in self.query.annotation_select:
                # Reference to expression in SELECT clause
                yield (
                    OrderBy(
                        Ref(col, self.query.annotation_select[col]),
                        descending=descending,
                    ),
                    True,
                )
                continue
            if col in self.query.annotations:
                # References to an expression which is masked out of the SELECT
                # clause.
                if self.query.combinator and self.select:
                    # Don't use the resolved annotation because other
                    # combinated queries might define it differently.
                    expr = F(col)
                else:
                    expr = self.query.annotations[col]
                if isinstance(expr, Value):
                    # output_field must be resolved for constants.
                    expr = Cast(expr, expr.output_field)
                yield OrderBy(expr, descending=descending), False
                continue

            if "." in field:
                # This came in through an extra(order_by=...) addition. Pass it
                # on verbatim.
                table, col = col.split(".", 1)
                yield (
                    OrderBy(
                        RawSQL(
                            "%s.%s" % (self.quote_name_unless_alias(table), col), []
                        ),
                        descending=descending,
                    ),
                    False,
                )
                continue

            if self.query.extra and col in self.query.extra:
                if col in self.query.extra_select:
                    yield (
                        OrderBy(
                            Ref(col, RawSQL(*self.query.extra[col])),
                            descending=descending,
                        ),
                        True,
                    )
                else:
                    yield (
                        OrderBy(RawSQL(*self.query.extra[col]), descending=descending),
                        False,
                    )
            else:
                if self.query.combinator and self.select:
                    # Don't use the first model's field because other
                    # combinated queries might define it differently.
                    yield OrderBy(F(col), descending=descending), False
                else:
                    # 'col' is of the form 'field' or 'field1__field2' or
                    # '-field1__field2__field', etc.
                    yield from self.find_ordering_name(
                        field,
                        self.query.get_meta(),
                        default_order=default_order,
                    )
    def get_order_by(self):
        """
        Return a list of 2-tuples of the form (expr, (sql, params, is_ref)) for
        the ORDER BY clause.

        The order_by clause can alter the select clause (for example it can add
        aliases to clauses that do not yet have one, or it can add totally new
        select clauses).
        """
        result = []
        seen = set()

        for expr, is_ref in self._order_by_pairs():
            resolved = expr.resolve_expression(self.query, allow_joins=True, reuse=None)
            if self.query.combinator and self.select:
                src = resolved.get_source_expressions()[0]
                expr_src = expr.get_source_expressions()[0]
                # Relabel order by columns to raw numbers if this is a combined
                # query; necessary since the columns can't be referenced by the
                # fully qualified name and the simple column names may collide.
                for idx, (sel_expr, _, col_alias) in enumerate(self.select):
                    if is_ref and col_alias == src.refs:
                        src = src.source
                    elif col_alias and not (
                        isinstance(expr_src, F) and col_alias == expr_src.name
                    ):
                        continue
                    if src == sel_expr:
                        # ORDER BY <position> (1-based) instead of the name.
                        resolved.set_source_expressions([RawSQL("%d" % (idx + 1), ())])
                        break
                else:
                    if col_alias:
                        raise DatabaseError(
                            "ORDER BY term does not match any column in the result set."
                        )
                    # Add column used in ORDER BY clause to the selected
                    # columns and to each combined query.
                    order_by_idx = len(self.query.select) + 1
                    col_name = f"__orderbycol{order_by_idx}"
                    for q in self.query.combined_queries:
                        q.add_annotation(expr_src, col_name)
                    self.query.add_select_col(resolved, col_name)
                    resolved.set_source_expressions([RawSQL(f"{order_by_idx}", ())])
            sql, params = self.compile(resolved)
            # Don't add the same column twice, but the order direction is
            # not taken into account so we strip it. When this entire method
            # is refactored into expressions, then we can check each part as we
            # generate it.
            without_ordering = self.ordering_parts.search(sql)[1]
            params_hash = make_hashable(params)
            if (without_ordering, params_hash) in seen:
                continue
            seen.add((without_ordering, params_hash))
            result.append((resolved, (sql, params, is_ref)))
        return result
459 def get_extra_select(self, order_by, select):
460 extra_select = []
461 if self.query.distinct and not self.query.distinct_fields: 461 ↛ 462line 461 didn't jump to line 462, because the condition on line 461 was never true
462 select_sql = [t[1] for t in select]
463 for expr, (sql, params, is_ref) in order_by:
464 without_ordering = self.ordering_parts.search(sql)[1]
465 if not is_ref and (without_ordering, params) not in select_sql:
466 extra_select.append((expr, (without_ordering, params), None))
467 return extra_select
469 def quote_name_unless_alias(self, name):
470 """
471 A wrapper around connection.ops.quote_name that doesn't quote aliases
472 for table names. This avoids problems with some SQL dialects that treat
473 quoted strings specially (e.g. PostgreSQL).
474 """
475 if name in self.quote_cache:
476 return self.quote_cache[name]
477 if (
478 (name in self.query.alias_map and name not in self.query.table_map)
479 or name in self.query.extra_select
480 or (
481 self.query.external_aliases.get(name)
482 and name not in self.query.table_map
483 )
484 ):
485 self.quote_cache[name] = name
486 return name
487 r = self.connection.ops.quote_name(name)
488 self.quote_cache[name] = r
489 return r
491 def compile(self, node):
492 vendor_impl = getattr(node, "as_" + self.connection.vendor, None)
493 if vendor_impl: 493 ↛ 494line 493 didn't jump to line 494, because the condition on line 493 was never true
494 sql, params = vendor_impl(self, self.connection)
495 else:
496 sql, params = node.as_sql(self, self.connection)
497 return sql, params
    def get_combinator_sql(self, combinator, all):
        """
        Return (sql_parts, params) for a combined query (UNION, INTERSECT,
        EXCEPT/DIFFERENCE). ``all`` adds ALL to a UNION. Raise DatabaseError
        when the backend cannot express the combination, and EmptyResultSet
        when every combined query is empty.
        """
        features = self.connection.features
        compilers = [
            query.get_compiler(self.using, self.connection, self.elide_empty)
            for query in self.query.combined_queries
            if not query.is_empty()
        ]
        if not features.supports_slicing_ordering_in_compound:
            for query, compiler in zip(self.query.combined_queries, compilers):
                if query.low_mark or query.high_mark:
                    raise DatabaseError(
                        "LIMIT/OFFSET not allowed in subqueries of compound statements."
                    )
                if compiler.get_order_by():
                    raise DatabaseError(
                        "ORDER BY not allowed in subqueries of compound statements."
                    )
        parts = ()
        for compiler in compilers:
            try:
                # If the columns list is limited, then all combined queries
                # must have the same columns list. Set the selects defined on
                # the query on all combined queries, if not already set.
                if not compiler.query.values_select and self.query.values_select:
                    compiler.query = compiler.query.clone()
                    compiler.query.set_values(
                        (
                            *self.query.extra_select,
                            *self.query.values_select,
                            *self.query.annotation_select,
                        )
                    )
                part_sql, part_args = compiler.as_sql()
                if compiler.query.combinator:
                    # Wrap in a subquery if wrapping in parentheses isn't
                    # supported.
                    if not features.supports_parentheses_in_compound:
                        part_sql = "SELECT * FROM ({})".format(part_sql)
                    # Add parentheses when combining with compound query if not
                    # already added for all compound queries.
                    elif not features.supports_slicing_ordering_in_compound:
                        part_sql = "({})".format(part_sql)
                parts += ((part_sql, part_args),)
            except EmptyResultSet:
                # Omit the empty queryset with UNION and with DIFFERENCE if the
                # first queryset is nonempty.
                if combinator == "union" or (combinator == "difference" and parts):
                    continue
                raise
        if not parts:
            raise EmptyResultSet
        combinator_sql = self.connection.ops.set_operators[combinator]
        if all and combinator == "union":
            combinator_sql += " ALL"
        braces = "({})" if features.supports_slicing_ordering_in_compound else "{}"
        sql_parts, args_parts = zip(
            *((braces.format(sql), args) for sql, args in parts)
        )
        result = [" {} ".format(combinator_sql).join(sql_parts)]
        params = []
        for part in args_parts:
            params.extend(part)
        return result, params
    def as_sql(self, with_limits=True, with_col_aliases=False):
        """
        Create the SQL for this query. Return the SQL string and list of
        parameters.

        If 'with_limits' is False, any limit/offset information is not included
        in the query. If 'with_col_aliases' is True, every unaliased select
        column is given a generated "colN" alias.
        """
        refcounts_before = self.query.alias_refcount.copy()
        try:
            extra_select, order_by, group_by = self.pre_sql_setup()
            for_update_part = None
            # Is a LIMIT/OFFSET clause needed?
            with_limit_offset = with_limits and (
                self.query.high_mark is not None or self.query.low_mark
            )
            combinator = self.query.combinator
            features = self.connection.features
            if combinator:
                if not getattr(features, "supports_select_{}".format(combinator)):
                    raise NotSupportedError(
                        "{} is not supported on this database backend.".format(
                            combinator
                        )
                    )
                result, params = self.get_combinator_sql(
                    combinator, self.query.combinator_all
                )
            else:
                distinct_fields, distinct_params = self.get_distinct()
                # This must come after 'select', 'ordering', and 'distinct'
                # (see docstring of get_from_clause() for details).
                from_, f_params = self.get_from_clause()
                try:
                    where, w_params = (
                        self.compile(self.where) if self.where is not None else ("", [])
                    )
                except EmptyResultSet:
                    if self.elide_empty:
                        raise
                    # Use a predicate that's always False.
                    where, w_params = "0 = 1", []
                having, h_params = (
                    self.compile(self.having) if self.having is not None else ("", [])
                )
                result = ["SELECT"]
                params = []

                if self.query.distinct:
                    distinct_result, distinct_params = self.connection.ops.distinct_sql(
                        distinct_fields,
                        distinct_params,
                    )
                    result += distinct_result
                    params += distinct_params

                out_cols = []
                col_idx = 1
                for _, (s_sql, s_params), alias in self.select + extra_select:
                    if alias:
                        s_sql = "%s AS %s" % (
                            s_sql,
                            self.connection.ops.quote_name(alias),
                        )
                    elif with_col_aliases:
                        s_sql = "%s AS %s" % (
                            s_sql,
                            self.connection.ops.quote_name("col%d" % col_idx),
                        )
                        col_idx += 1
                    params.extend(s_params)
                    out_cols.append(s_sql)

                result += [", ".join(out_cols), "FROM", *from_]
                params.extend(f_params)

                if self.query.select_for_update and features.has_select_for_update:
                    if self.connection.get_autocommit():
                        raise TransactionManagementError(
                            "select_for_update cannot be used outside of a transaction."
                        )

                    if (
                        with_limit_offset
                        and not features.supports_select_for_update_with_limit
                    ):
                        raise NotSupportedError(
                            "LIMIT/OFFSET is not supported with "
                            "select_for_update on this database backend."
                        )
                    nowait = self.query.select_for_update_nowait
                    skip_locked = self.query.select_for_update_skip_locked
                    of = self.query.select_for_update_of
                    no_key = self.query.select_for_no_key_update
                    # If it's a NOWAIT/SKIP LOCKED/OF/NO KEY query but the
                    # backend doesn't support it, raise NotSupportedError to
                    # prevent a possible deadlock.
                    if nowait and not features.has_select_for_update_nowait:
                        raise NotSupportedError(
                            "NOWAIT is not supported on this database backend."
                        )
                    elif skip_locked and not features.has_select_for_update_skip_locked:
                        raise NotSupportedError(
                            "SKIP LOCKED is not supported on this database backend."
                        )
                    elif of and not features.has_select_for_update_of:
                        raise NotSupportedError(
                            "FOR UPDATE OF is not supported on this database backend."
                        )
                    elif no_key and not features.has_select_for_no_key_update:
                        raise NotSupportedError(
                            "FOR NO KEY UPDATE is not supported on this "
                            "database backend."
                        )
                    for_update_part = self.connection.ops.for_update_sql(
                        nowait=nowait,
                        skip_locked=skip_locked,
                        of=self.get_select_for_update_of_arguments(),
                        no_key=no_key,
                    )

                if for_update_part and features.for_update_after_from:
                    result.append(for_update_part)

                if where:
                    result.append("WHERE %s" % where)
                    params.extend(w_params)

                grouping = []
                for g_sql, g_params in group_by:
                    grouping.append(g_sql)
                    params.extend(g_params)
                if grouping:
                    if distinct_fields:
                        raise NotImplementedError(
                            "annotate() + distinct(fields) is not implemented."
                        )
                    order_by = order_by or self.connection.ops.force_no_ordering()
                    result.append("GROUP BY %s" % ", ".join(grouping))
                    if self._meta_ordering:
                        # Meta.ordering is dropped when grouping is involved.
                        order_by = None
                if having:
                    result.append("HAVING %s" % having)
                    params.extend(h_params)

            if self.query.explain_info:
                result.insert(
                    0,
                    self.connection.ops.explain_query_prefix(
                        self.query.explain_info.format,
                        **self.query.explain_info.options,
                    ),
                )

            if order_by:
                ordering = []
                for _, (o_sql, o_params, _) in order_by:
                    ordering.append(o_sql)
                    params.extend(o_params)
                result.append("ORDER BY %s" % ", ".join(ordering))

            if with_limit_offset:
                result.append(
                    self.connection.ops.limit_offset_sql(
                        self.query.low_mark, self.query.high_mark
                    )
                )

            if for_update_part and not features.for_update_after_from:
                result.append(for_update_part)

            if self.query.subquery and extra_select:
                # If the query is used as a subquery, the extra selects would
                # result in more columns than the left-hand side expression is
                # expecting. This can happen when a subquery uses a combination
                # of order_by() and distinct(), forcing the ordering expressions
                # to be selected as well. Wrap the query in another subquery
                # to exclude extraneous selects.
                sub_selects = []
                sub_params = []
                for index, (select, _, alias) in enumerate(self.select, start=1):
                    if not alias and with_col_aliases:
                        alias = "col%d" % index
                    if alias:
                        sub_selects.append(
                            "%s.%s"
                            % (
                                self.connection.ops.quote_name("subquery"),
                                self.connection.ops.quote_name(alias),
                            )
                        )
                    else:
                        select_clone = select.relabeled_clone(
                            {select.alias: "subquery"}
                        )
                        subselect, subparams = select_clone.as_sql(
                            self, self.connection
                        )
                        sub_selects.append(subselect)
                        sub_params.extend(subparams)
                return "SELECT %s FROM (%s) subquery" % (
                    ", ".join(sub_selects),
                    " ".join(result),
                ), tuple(sub_params + params)

            return " ".join(result), tuple(params)
        finally:
            # Finally do cleanup - get rid of the joins we created above.
            self.query.reset_refcounts(refcounts_before)
    def get_default_columns(self, start_alias=None, opts=None, from_parent=None):
        """
        Compute the default columns for selecting every field in the base
        model. Will sometimes be called to pull in related models (e.g. via
        select_related), in which case "opts" and "start_alias" will be given
        to provide a starting point for the traversal.

        Return a list of column expressions (one per selected concrete field,
        built with field.get_col()), ready to be compiled into SQL.
        """
        result = []
        if opts is None:
            opts = self.query.get_meta()
        only_load = self.deferred_to_columns()
        start_alias = start_alias or self.query.get_initial_alias()
        # The 'seen_models' is used to optimize checking the needed parent
        # alias for a given field. This also includes None -> start_alias to
        # be used by local fields.
        seen_models = {None: start_alias}

        for field in opts.concrete_fields:
            model = field.model._meta.concrete_model
            # A proxy model will have a different model and concrete_model. We
            # will assign None if the field belongs to this model.
            if model == opts.model:
                model = None
            if (
                from_parent
                and model is not None
                and issubclass(
                    from_parent._meta.concrete_model, model._meta.concrete_model
                )
            ):
                # Avoid loading data for already loaded parents.
                # We end up here in the case select_related() resolution
                # proceeds from parent model to child model. In that case the
                # parent model data is already present in the SELECT clause,
                # and we want to avoid reloading the same data again.
                continue
            # Honor deferred loading: skip fields not in the only_load set.
            if field.model in only_load and field.attname not in only_load[field.model]:
                continue
            alias = self.query.join_parent_model(opts, model, start_alias, seen_models)
            column = field.get_col(alias)
            result.append(column)
        return result
    def get_distinct(self):
        """
        Return a quoted list of fields to use in DISTINCT ON part of the query,
        as a (result, params) pair.

        This method can alter the tables in the query, and thus it must be
        called before get_from_clause().
        """
        result = []
        params = []
        opts = self.query.get_meta()

        for name in self.query.distinct_fields:
            # Resolve 'field1__field2__...' paths into joins/targets.
            parts = name.split(LOOKUP_SEP)
            _, targets, alias, joins, path, _, transform_function = self._setup_joins(
                parts, opts, None
            )
            targets, alias, _ = self.query.trim_joins(targets, joins, path)
            for target in targets:
                if name in self.query.annotation_select:
                    # Annotation aliases are emitted verbatim.
                    result.append(name)
                else:
                    r, p = self.compile(transform_function(target, alias))
                    result.append(r)
                    params.append(p)
        return result, params
    def find_ordering_name(
        self, name, opts, alias=None, default_order="ASC", already_seen=None
    ):
        """
        Return the table alias (the name might be ambiguous, the alias will
        not be) and column name for ordering by the given 'name' parameter.
        The 'name' is of the form 'field1__field2__...__fieldN'.

        Returns a list of (OrderBy expression, False) pairs. 'already_seen'
        is a set of join tuples used to detect ordering cycles across
        recursive calls.
        """
        # Strip a leading "-" (or apply default_order) to get direction.
        name, order = get_order_dir(name, default_order)
        descending = order == "DESC"
        pieces = name.split(LOOKUP_SEP)
        (
            field,
            targets,
            alias,
            joins,
            path,
            opts,
            transform_function,
        ) = self._setup_joins(pieces, opts, alias)

        # If we get to this point and the field is a relation to another model,
        # append the default ordering for that model unless it is the pk
        # shortcut or the attribute name of the field that is specified.
        if (
            field.is_relation
            and opts.ordering
            and getattr(field, "attname", None) != pieces[-1]
            and name != "pk"
        ):
            # Firstly, avoid infinite loops.
            already_seen = already_seen or set()
            join_tuple = tuple(
                getattr(self.query.alias_map[j], "join_cols", None) for j in joins
            )
            if join_tuple in already_seen:
                raise FieldError("Infinite loop caused by ordering.")
            already_seen.add(join_tuple)

            results = []
            for item in opts.ordering:
                # Resolvable expressions that aren't already OrderBy get
                # wrapped with the direction of the outer ordering request.
                if hasattr(item, "resolve_expression") and not isinstance(
                    item, OrderBy
                ):
                    item = item.desc() if descending else item.asc()
                if isinstance(item, OrderBy):
                    results.append((item, False))
                    continue
                # Plain field names recurse, carrying direction and the
                # cycle-detection set along.
                results.extend(
                    self.find_ordering_name(item, opts, alias, order, already_seen)
                )
            return results
        targets, alias, _ = self.query.trim_joins(targets, joins, path)
        return [
            (OrderBy(transform_function(t, alias), descending=descending), False)
            for t in targets
        ]
905 def _setup_joins(self, pieces, opts, alias):
906 """
907 Helper method for get_order_by() and get_distinct().
909 get_ordering() and get_distinct() must produce same target columns on
910 same input, as the prefixes of get_ordering() and get_distinct() must
911 match. Executing SQL where this is not true is an error.
912 """
913 alias = alias or self.query.get_initial_alias()
914 field, targets, opts, joins, path, transform_function = self.query.setup_joins(
915 pieces, opts, alias
916 )
917 alias = joins[-1]
918 return field, targets, alias, joins, path, opts, transform_function
920 def get_from_clause(self):
921 """
922 Return a list of strings that are joined together to go after the
923 "FROM" part of the query, as well as a list any extra parameters that
924 need to be included. Subclasses, can override this to create a
925 from-clause via a "select".
927 This should only be called after any SQL construction methods that
928 might change the tables that are needed. This means the select columns,
929 ordering, and distinct must be done first.
930 """
931 result = []
932 params = []
933 for alias in tuple(self.query.alias_map):
934 if not self.query.alias_refcount[alias]:
935 continue
936 try:
937 from_clause = self.query.alias_map[alias]
938 except KeyError:
939 # Extra tables can end up in self.tables, but not in the
940 # alias_map if they aren't in a join. That's OK. We skip them.
941 continue
942 clause_sql, clause_params = self.compile(from_clause)
943 result.append(clause_sql)
944 params.extend(clause_params)
945 for t in self.query.extra_tables: 945 ↛ 946line 945 didn't jump to line 946, because the loop on line 945 never started
946 alias, _ = self.query.table_alias(t)
947 # Only add the alias if it's not already present (the table_alias()
948 # call increments the refcount, so an alias refcount of one means
949 # this is the only reference).
950 if (
951 alias not in self.query.alias_map
952 or self.query.alias_refcount[alias] == 1
953 ):
954 result.append(", %s" % self.quote_name_unless_alias(alias))
955 return result, params
    def get_related_selections(
        self,
        select,
        opts=None,
        root_alias=None,
        cur_depth=1,
        requested=None,
        restricted=None,
    ):
        """
        Fill in the information needed for a select_related query. The current
        depth is measured as the number of connections away from the root model
        (for example, cur_depth=1 means we are looking at models with direct
        connections to the root model).

        Appends (column, None) entries to the mutable 'select' list and
        returns a list of klass_info dicts, each recording the related model,
        the joining field, its direction ('reverse'), setter callables used to
        cache related objects, and the indexes of its columns in 'select'.
        """

        def _get_field_choices():
            # Valid select_related() names: forward relations, unique reverse
            # relations, and filtered relations declared on the query.
            direct_choices = (f.name for f in opts.fields if f.is_relation)
            reverse_choices = (
                f.field.related_query_name()
                for f in opts.related_objects
                if f.field.unique
            )
            return chain(
                direct_choices, reverse_choices, self.query._filtered_relations
            )

        related_klass_infos = []
        if not restricted and cur_depth > self.query.max_depth:
            # We've recursed far enough; bail out.
            return related_klass_infos

        if not opts:
            # Topmost call: start from the query's root model and alias.
            opts = self.query.get_meta()
            root_alias = self.query.get_initial_alias()
        only_load = self.query.get_loaded_field_names()

        # Setup for the case when only particular related fields should be
        # included in the related selection.
        fields_found = set()
        if requested is None:
            restricted = isinstance(self.query.select_related, dict)
            if restricted:
                requested = self.query.select_related

        def get_related_klass_infos(klass_info, related_klass_infos):
            klass_info["related_klass_infos"] = related_klass_infos

        # Forward relations (FK / OneToOne declared on this model).
        for f in opts.fields:
            field_model = f.model._meta.concrete_model
            fields_found.add(f.name)

            if restricted:
                next = requested.get(f.name, {})
                if not f.is_relation:
                    # If a non-related field is used like a relation,
                    # or if a single non-relational field is given.
                    if next or f.name in requested:
                        raise FieldError(
                            "Non-relational field given in select_related: '%s'. "
                            "Choices are: %s"
                            % (
                                f.name,
                                ", ".join(_get_field_choices()) or "(none)",
                            )
                        )
            else:
                next = False

            if not select_related_descend(
                f, restricted, requested, only_load.get(field_model)
            ):
                continue
            klass_info = {
                "model": f.remote_field.model,
                "field": f,
                "reverse": False,
                "local_setter": f.set_cached_value,
                # Only unique forward relations have a usable reverse cache.
                "remote_setter": f.remote_field.set_cached_value
                if f.unique
                else lambda x, y: None,
                "from_parent": False,
            }
            related_klass_infos.append(klass_info)
            select_fields = []
            _, _, _, joins, _, _ = self.query.setup_joins([f.name], opts, root_alias)
            alias = joins[-1]
            columns = self.get_default_columns(
                start_alias=alias, opts=f.remote_field.model._meta
            )
            # Record each column's index in the shared select list.
            for col in columns:
                select_fields.append(len(select))
                select.append((col, None))
            klass_info["select_fields"] = select_fields
            next_klass_infos = self.get_related_selections(
                select,
                f.remote_field.model._meta,
                alias,
                cur_depth + 1,
                next,
                restricted,
            )
            get_related_klass_infos(klass_info, next_klass_infos)

        if restricted:
            # Reverse one-to-one relations (unique, non-m2m related objects).
            related_fields = [
                (o.field, o.related_model)
                for o in opts.related_objects
                if o.field.unique and not o.many_to_many
            ]
            for f, model in related_fields:
                if not select_related_descend(
                    f, restricted, requested, only_load.get(model), reverse=True
                ):
                    continue

                related_field_name = f.related_query_name()
                fields_found.add(related_field_name)

                join_info = self.query.setup_joins(
                    [related_field_name], opts, root_alias
                )
                alias = join_info.joins[-1]
                from_parent = issubclass(model, opts.model) and model is not opts.model
                klass_info = {
                    "model": model,
                    "field": f,
                    "reverse": True,
                    "local_setter": f.remote_field.set_cached_value,
                    "remote_setter": f.set_cached_value,
                    "from_parent": from_parent,
                }
                related_klass_infos.append(klass_info)
                select_fields = []
                columns = self.get_default_columns(
                    start_alias=alias, opts=model._meta, from_parent=opts.model
                )
                for col in columns:
                    select_fields.append(len(select))
                    select.append((col, None))
                klass_info["select_fields"] = select_fields
                next = requested.get(f.related_query_name(), {})
                next_klass_infos = self.get_related_selections(
                    select, model._meta, alias, cur_depth + 1, next, restricted
                )
                get_related_klass_infos(klass_info, next_klass_infos)

            def local_setter(obj, from_obj):
                # Set a reverse fk object when relation is non-empty.
                if from_obj:
                    f.remote_field.set_cached_value(from_obj, obj)

            def remote_setter(name, obj, from_obj):
                setattr(from_obj, name, obj)

            # Filtered relations requested by name.
            for name in list(requested):
                # Filtered relations work only on the topmost level.
                if cur_depth > 1:
                    break
                if name in self.query._filtered_relations:
                    fields_found.add(name)
                    f, _, join_opts, joins, _, _ = self.query.setup_joins(
                        [name], opts, root_alias
                    )
                    model = join_opts.model
                    alias = joins[-1]
                    from_parent = (
                        issubclass(model, opts.model) and model is not opts.model
                    )
                    klass_info = {
                        "model": model,
                        "field": f,
                        "reverse": True,
                        "local_setter": local_setter,
                        # Bind the relation name so the setter targets the
                        # right attribute on the parent instance.
                        "remote_setter": partial(remote_setter, name),
                        "from_parent": from_parent,
                    }
                    related_klass_infos.append(klass_info)
                    select_fields = []
                    columns = self.get_default_columns(
                        start_alias=alias,
                        opts=model._meta,
                        from_parent=opts.model,
                    )
                    for col in columns:
                        select_fields.append(len(select))
                        select.append((col, None))
                    klass_info["select_fields"] = select_fields
                    next_requested = requested.get(name, {})
                    next_klass_infos = self.get_related_selections(
                        select,
                        opts=model._meta,
                        root_alias=alias,
                        cur_depth=cur_depth + 1,
                        requested=next_requested,
                        restricted=restricted,
                    )
                    get_related_klass_infos(klass_info, next_klass_infos)
            # Any requested name not consumed above is invalid.
            fields_not_found = set(requested).difference(fields_found)
            if fields_not_found:
                invalid_fields = ("'%s'" % s for s in fields_not_found)
                raise FieldError(
                    "Invalid field name(s) given in select_related: %s. "
                    "Choices are: %s"
                    % (
                        ", ".join(invalid_fields),
                        ", ".join(_get_field_choices()) or "(none)",
                    )
                )
        return related_klass_infos
    def get_select_for_update_of_arguments(self):
        """
        Return a quoted list of arguments for the SELECT FOR UPDATE OF part of
        the query.

        Walks the klass_info tree produced by get_related_selections() to map
        each name in select_for_update(of=...) to a selected column; collects
        unknown names and raises FieldError listing the valid choices.
        """

        def _get_parent_klass_info(klass_info):
            # Yield synthetic klass_info dicts for the concrete model's
            # parents so inherited models can be traversed/locked too.
            concrete_model = klass_info["model"]._meta.concrete_model
            for parent_model, parent_link in concrete_model._meta.parents.items():
                parent_list = parent_model._meta.get_parent_list()
                yield {
                    "model": parent_model,
                    "field": parent_link,
                    "reverse": False,
                    "select_fields": [
                        select_index
                        for select_index in klass_info["select_fields"]
                        # Selected columns from a model or its parents.
                        if (
                            self.select[select_index][0].target.model == parent_model
                            or self.select[select_index][0].target.model in parent_list
                        )
                    ],
                }

        def _get_first_selected_col_from_model(klass_info):
            """
            Find the first selected column from a model. If it doesn't exist,
            don't lock a model.

            select_fields is filled recursively, so it also contains fields
            from the parent models.
            """
            concrete_model = klass_info["model"]._meta.concrete_model
            for select_index in klass_info["select_fields"]:
                if self.select[select_index][0].target.model == concrete_model:
                    return self.select[select_index][0]

        def _get_field_choices():
            """Yield all allowed field paths in breadth-first search order."""
            queue = collections.deque([(None, self.klass_info)])
            while queue:
                parent_path, klass_info = queue.popleft()
                if parent_path is None:
                    path = []
                    yield "self"
                else:
                    field = klass_info["field"]
                    if klass_info["reverse"]:
                        field = field.remote_field
                    path = parent_path + [field.name]
                    yield LOOKUP_SEP.join(path)
                queue.extend(
                    (path, klass_info)
                    for klass_info in _get_parent_klass_info(klass_info)
                )
                queue.extend(
                    (path, klass_info)
                    for klass_info in klass_info.get("related_klass_infos", [])
                )

        result = []
        invalid_names = []
        for name in self.query.select_for_update_of:
            klass_info = self.klass_info
            if name == "self":
                col = _get_first_selected_col_from_model(klass_info)
            else:
                # Follow each path segment through related and parent
                # klass_infos; the for/else marks unmatched segments.
                for part in name.split(LOOKUP_SEP):
                    klass_infos = (
                        *klass_info.get("related_klass_infos", []),
                        *_get_parent_klass_info(klass_info),
                    )
                    for related_klass_info in klass_infos:
                        field = related_klass_info["field"]
                        if related_klass_info["reverse"]:
                            field = field.remote_field
                        if field.name == part:
                            klass_info = related_klass_info
                            break
                    else:
                        klass_info = None
                        break
                if klass_info is None:
                    invalid_names.append(name)
                    continue
                col = _get_first_selected_col_from_model(klass_info)
            if col is not None:
                # Backends either lock by column or by table alias.
                if self.connection.features.select_for_update_of_column:
                    result.append(self.compile(col)[0])
                else:
                    result.append(self.quote_name_unless_alias(col.alias))
        if invalid_names:
            raise FieldError(
                "Invalid field name(s) given in select_for_update(of=(...)): %s. "
                "Only relational fields followed in the query are allowed. "
                "Choices are: %s."
                % (
                    ", ".join(invalid_names),
                    ", ".join(_get_field_choices()),
                )
            )
        return result
1272 def deferred_to_columns(self):
1273 """
1274 Convert the self.deferred_loading data structure to mapping of table
1275 names to sets of column names which are to be loaded. Return the
1276 dictionary.
1277 """
1278 columns = {}
1279 self.query.deferred_to_data(columns, self.query.get_loaded_field_names_cb)
1280 return columns
1282 def get_converters(self, expressions):
1283 converters = {}
1284 for i, expression in enumerate(expressions):
1285 if expression: 1285 ↛ 1284line 1285 didn't jump to line 1284, because the condition on line 1285 was never false
1286 backend_converters = self.connection.ops.get_db_converters(expression)
1287 field_converters = expression.get_db_converters(self.connection)
1288 if backend_converters or field_converters:
1289 converters[i] = (backend_converters + field_converters, expression)
1290 return converters
1292 def apply_converters(self, rows, converters):
1293 connection = self.connection
1294 converters = list(converters.items())
1295 for row in map(list, rows):
1296 for pos, (convs, expression) in converters:
1297 value = row[pos]
1298 for converter in convs:
1299 value = converter(value, expression, connection)
1300 row[pos] = value
1301 yield row
1303 def results_iter(
1304 self,
1305 results=None,
1306 tuple_expected=False,
1307 chunked_fetch=False,
1308 chunk_size=GET_ITERATOR_CHUNK_SIZE,
1309 ):
1310 """Return an iterator over the results from executing this query."""
1311 if results is None:
1312 results = self.execute_sql(
1313 MULTI, chunked_fetch=chunked_fetch, chunk_size=chunk_size
1314 )
1315 fields = [s[0] for s in self.select[0 : self.col_count]]
1316 converters = self.get_converters(fields)
1317 rows = chain.from_iterable(results)
1318 if converters:
1319 rows = self.apply_converters(rows, converters)
1320 if tuple_expected: 1320 ↛ 1321line 1320 didn't jump to line 1321, because the condition on line 1320 was never true
1321 rows = map(tuple, rows)
1322 return rows
1324 def has_results(self):
1325 """
1326 Backends (e.g. NoSQL) can override this in order to use optimized
1327 versions of "query has any results."
1328 """
1329 return bool(self.execute_sql(SINGLE))
    def execute_sql(
        self, result_type=MULTI, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE
    ):
        """
        Run the query against the database and return the result(s). The
        return value is a single data item if result_type is SINGLE, or an
        iterator over the results if the result_type is MULTI.

        result_type is either MULTI (use fetchmany() to retrieve all rows),
        SINGLE (only retrieve a single row), or None. In this last case, the
        cursor is returned if any query is executed, since it's used by
        subclasses such as InsertQuery). It's possible, however, that no query
        is needed, as the filters describe an empty set. In that case, None is
        returned, to avoid any unnecessary database interaction.
        """
        # A None result_type is treated as NO_RESULTS.
        result_type = result_type or NO_RESULTS
        try:
            sql, params = self.as_sql()
            if not sql:
                raise EmptyResultSet
        except EmptyResultSet:
            # No database interaction needed for a provably empty result set.
            if result_type == MULTI:
                return iter([])
            else:
                return
        if chunked_fetch:
            cursor = self.connection.chunked_cursor()
        else:
            cursor = self.connection.cursor()
        try:
            cursor.execute(sql, params)
        except Exception:
            # Might fail for server-side cursors (e.g. connection closed)
            cursor.close()
            raise

        if result_type == CURSOR:
            # Give the caller the cursor to process and close.
            return cursor
        if result_type == SINGLE:
            try:
                val = cursor.fetchone()
                if val:
                    # Trim trailing columns not part of the select.
                    return val[0 : self.col_count]
                return val
            finally:
                # done with the cursor
                cursor.close()
        if result_type == NO_RESULTS:
            cursor.close()
            return

        result = cursor_iter(
            cursor,
            self.connection.features.empty_fetchmany_value,
            self.col_count if self.has_extra_select else None,
            chunk_size,
        )
        if not chunked_fetch or not self.connection.features.can_use_chunked_reads:
            # If we are using non-chunked reads, we return the same data
            # structure as normally, but ensure it is all read into memory
            # before going any further. Use chunked_fetch if requested,
            # unless the database doesn't support it.
            return list(result)
        return result
1397 def as_subquery_condition(self, alias, columns, compiler):
1398 qn = compiler.quote_name_unless_alias
1399 qn2 = self.connection.ops.quote_name
1401 for index, select_col in enumerate(self.query.select):
1402 lhs_sql, lhs_params = self.compile(select_col)
1403 rhs = "%s.%s" % (qn(alias), qn2(columns[index]))
1404 self.query.where.add(RawSQL("%s = %s" % (lhs_sql, rhs), lhs_params), "AND")
1406 sql, params = self.as_sql()
1407 return "EXISTS (%s)" % sql, params
1409 def explain_query(self):
1410 result = list(self.execute_sql())
1411 # Some backends return 1 item tuples with strings, and others return
1412 # tuples with integers and strings. Flatten them out into strings.
1413 output_formatter = (
1414 json.dumps if self.query.explain_info.format == "json" else str
1415 )
1416 for row in result[0]:
1417 if not isinstance(row, str):
1418 yield " ".join(output_formatter(c) for c in row)
1419 else:
1420 yield row
class SQLInsertCompiler(SQLCompiler):
    # Set by execute_sql(): fields whose values the backend should hand back
    # (e.g. via RETURNING) after the insert, and any extra params needed to
    # fetch them.
    returning_fields = None
    returning_params = tuple()

    def field_as_sql(self, field, val):
        """
        Take a field and a value intended to be saved on that field, and
        return placeholder SQL and accompanying params. Check for raw values,
        expressions, and fields with get_placeholder() defined in that order.

        When field is None, consider the value raw and use it as the
        placeholder, with no corresponding parameters returned.
        """
        if field is None:
            # A field value of None means the value is raw.
            sql, params = val, []
        elif hasattr(val, "as_sql"):
            # This is an expression, let's compile it.
            sql, params = self.compile(val)
        elif hasattr(field, "get_placeholder"):
            # Some fields (e.g. geo fields) need special munging before
            # they can be inserted.
            sql, params = field.get_placeholder(val, self, self.connection), [val]
        else:
            # Return the common case for the placeholder
            sql, params = "%s", [val]

        # The following hook is only used by Oracle Spatial, which sometimes
        # needs to yield 'NULL' and [] as its placeholder and params instead
        # of '%s' and [None]. The 'NULL' placeholder is produced earlier by
        # OracleOperations.get_geom_placeholder(). The following line removes
        # the corresponding None parameter. See ticket #10888.
        params = self.connection.ops.modify_insert_params(sql, params)

        return sql, params

    def prepare_value(self, field, value):
        """
        Prepare a value to be used in a query by resolving it if it is an
        expression and otherwise calling the field's get_db_prep_save().

        Raises ValueError for column references and FieldError for aggregates
        or window expressions, none of which make sense in an INSERT.
        """
        if hasattr(value, "resolve_expression"):
            value = value.resolve_expression(
                self.query, allow_joins=False, for_save=True
            )
            # Don't allow values containing Col expressions. They refer to
            # existing columns on a row, but in the case of insert the row
            # doesn't exist yet.
            if value.contains_column_references:
                raise ValueError(
                    'Failed to insert expression "%s" on %s. F() expressions '
                    "can only be used to update, not to insert." % (value, field)
                )
            if value.contains_aggregate:
                raise FieldError(
                    "Aggregate functions are not allowed in this query "
                    "(%s=%r)." % (field.name, value)
                )
            if value.contains_over_clause:
                raise FieldError(
                    "Window expressions are not allowed in this query (%s=%r)."
                    % (field.name, value)
                )
        else:
            value = field.get_db_prep_save(value, connection=self.connection)
        return value

    def pre_save_val(self, field, obj):
        """
        Get the given field's value off the given obj. pre_save() is used for
        things like auto_now on DateTimeField. Skip it if this is a raw query.
        """
        if self.query.raw:
            return getattr(obj, field.attname)
        return field.pre_save(obj, add=True)

    def assemble_as_sql(self, fields, value_rows):
        """
        Take a sequence of N fields and a sequence of M rows of values, and
        generate placeholder SQL and parameters for each field and value.
        Return a pair containing:
         * a sequence of M rows of N SQL placeholder strings, and
         * a sequence of M rows of corresponding parameter values.

        Each placeholder string may contain any number of '%s' interpolation
        strings, and each parameter row will contain exactly as many params
        as the total number of '%s's in the corresponding placeholder row.
        """
        if not value_rows:
            return [], []

        # list of (sql, [params]) tuples for each object to be saved
        # Shape: [n_objs][n_fields][2]
        rows_of_fields_as_sql = (
            (self.field_as_sql(field, v) for field, v in zip(fields, row))
            for row in value_rows
        )

        # tuple like ([sqls], [[params]s]) for each object to be saved
        # Shape: [n_objs][2][n_fields]
        sql_and_param_pair_rows = (zip(*row) for row in rows_of_fields_as_sql)

        # Extract separate lists for placeholders and params.
        # Each of these has shape [n_objs][n_fields]
        placeholder_rows, param_rows = zip(*sql_and_param_pair_rows)

        # Params for each field are still lists, and need to be flattened.
        param_rows = [[p for ps in row for p in ps] for row in param_rows]

        return placeholder_rows, param_rows

    def as_sql(self):
        """Return a list of (sql, params) pairs, one per statement to run."""
        # We don't need quote_name_unless_alias() here, since these are all
        # going to be column names (so we can avoid the extra overhead).
        qn = self.connection.ops.quote_name
        opts = self.query.get_meta()
        insert_statement = self.connection.ops.insert_statement(
            ignore_conflicts=self.query.ignore_conflicts
        )
        result = ["%s %s" % (insert_statement, qn(opts.db_table))]
        fields = self.query.fields or [opts.pk]
        result.append("(%s)" % ", ".join(qn(f.column) for f in fields))

        if self.query.fields:
            value_rows = [
                [
                    self.prepare_value(field, self.pre_save_val(field, obj))
                    for field in fields
                ]
                for obj in self.query.objs
            ]
        else:
            # An empty object.
            value_rows = [
                [self.connection.ops.pk_default_value()] for _ in self.query.objs
            ]
            fields = [None]

        # Currently the backends just accept values when generating bulk
        # queries and generate their own placeholders. Doing that isn't
        # necessary and it should be possible to use placeholders and
        # expressions in bulk inserts too.
        can_bulk = (
            not self.returning_fields and self.connection.features.has_bulk_insert
        )

        placeholder_rows, param_rows = self.assemble_as_sql(fields, value_rows)

        ignore_conflicts_suffix_sql = self.connection.ops.ignore_conflicts_suffix_sql(
            ignore_conflicts=self.query.ignore_conflicts
        )
        # Backends that can return inserted columns build a single statement
        # with a RETURNING-style suffix.
        if (
            self.returning_fields
            and self.connection.features.can_return_columns_from_insert
        ):
            if self.connection.features.can_return_rows_from_bulk_insert:
                result.append(
                    self.connection.ops.bulk_insert_sql(fields, placeholder_rows)
                )
                params = param_rows
            else:
                result.append("VALUES (%s)" % ", ".join(placeholder_rows[0]))
                params = [param_rows[0]]
            if ignore_conflicts_suffix_sql:
                result.append(ignore_conflicts_suffix_sql)
            # Skip empty r_sql to allow subclasses to customize behavior for
            # 3rd party backends. Refs #19096.
            r_sql, self.returning_params = self.connection.ops.return_insert_columns(
                self.returning_fields
            )
            if r_sql:
                result.append(r_sql)
                params += [self.returning_params]
            return [(" ".join(result), tuple(chain.from_iterable(params)))]

        if can_bulk:
            # One multi-row VALUES statement for all objects.
            result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows))
            if ignore_conflicts_suffix_sql:
                result.append(ignore_conflicts_suffix_sql)
            return [(" ".join(result), tuple(p for ps in param_rows for p in ps))]
        else:
            if ignore_conflicts_suffix_sql:
                result.append(ignore_conflicts_suffix_sql)
            # One statement per object.
            return [
                (" ".join(result + ["VALUES (%s)" % ", ".join(p)]), vals)
                for p, vals in zip(placeholder_rows, param_rows)
            ]

    def execute_sql(self, returning_fields=None):
        """
        Execute the insert statement(s) and return rows of requested
        returning_fields values (empty list when none are requested).
        """
        assert not (
            returning_fields
            and len(self.query.objs) != 1
            and not self.connection.features.can_return_rows_from_bulk_insert
        )
        opts = self.query.get_meta()
        self.returning_fields = returning_fields
        with self.connection.cursor() as cursor:
            for sql, params in self.as_sql():
                cursor.execute(sql, params)
            if not self.returning_fields:
                return []
            # Fetch returned values, preferring the richest mechanism the
            # backend supports: bulk rows, single-row columns, last insert id.
            if (
                self.connection.features.can_return_rows_from_bulk_insert
                and len(self.query.objs) > 1
            ):
                rows = self.connection.ops.fetch_returned_insert_rows(cursor)
            elif self.connection.features.can_return_columns_from_insert:
                assert len(self.query.objs) == 1
                rows = [
                    self.connection.ops.fetch_returned_insert_columns(
                        cursor,
                        self.returning_params,
                    )
                ]
            else:
                rows = [
                    (
                        self.connection.ops.last_insert_id(
                            cursor,
                            opts.db_table,
                            opts.pk.column,
                        ),
                    )
                ]
        cols = [field.get_col(opts.db_table) for field in self.returning_fields]
        converters = self.get_converters(cols)
        if converters:
            rows = list(self.apply_converters(rows, converters))
        return rows
class SQLDeleteCompiler(SQLCompiler):
    @cached_property
    def single_alias(self):
        """True when exactly one table alias is referenced by the query."""
        # Ensure the base table has an alias entry before counting.
        self.query.get_initial_alias()
        active_aliases = [
            alias
            for alias in self.query.alias_map
            if self.query.alias_refcount[alias] > 0
        ]
        return len(active_aliases) == 1

    @classmethod
    def _expr_refs_base_model(cls, expr, base_model):
        """Return True if expr (or any nested expression) targets base_model."""
        if isinstance(expr, Query):
            return expr.model == base_model
        get_sources = getattr(expr, "get_source_expressions", None)
        if get_sources is None:
            return False
        return any(
            cls._expr_refs_base_model(child, base_model) for child in get_sources()
        )

    @cached_property
    def contains_self_reference_subquery(self):
        """True if an annotation or filter refers back to the model being deleted."""
        candidates = chain(self.query.annotations.values(), self.query.where.children)
        return any(
            self._expr_refs_base_model(expr, self.query.model) for expr in candidates
        )

    def _as_sql(self, query):
        """Build a plain single-table DELETE statement for the given query."""
        statement_parts = [
            "DELETE FROM %s" % self.quote_name_unless_alias(query.base_table)
        ]
        where_sql, where_params = self.compile(query.where)
        if where_sql:
            statement_parts.append("WHERE %s" % where_sql)
        return " ".join(statement_parts), tuple(where_params)

    def as_sql(self):
        """
        Create the SQL for this query. Return the SQL string and list of
        parameters.
        """
        # Fast path: a single-table delete without self-referential
        # subqueries can be emitted directly.
        if self.single_alias and not self.contains_self_reference_subquery:
            return self._as_sql(self.query)
        # Otherwise delete rows whose pk appears in an inner select derived
        # from a clone of this query.
        inner_query = self.query.clone()
        inner_query.__class__ = Query
        inner_query.clear_select_clause()
        pk = self.query.model._meta.pk
        inner_query.select = [pk.get_col(self.query.get_initial_alias())]
        outer_query = Query(self.query.model)
        if not self.connection.features.update_can_self_select:
            # Force the materialization of the inner query to allow reference
            # to the target table on MySQL.
            inner_sql, inner_params = inner_query.get_compiler(
                connection=self.connection
            ).as_sql()
            inner_query = RawSQL(
                "SELECT * FROM (%s) subquery" % inner_sql, inner_params
            )
        outer_query.add_filter("pk__in", inner_query)
        return self._as_sql(outer_query)
class SQLUpdateCompiler(SQLCompiler):
    def as_sql(self):
        """
        Create the SQL for this query. Return the SQL string and list of
        parameters.
        """
        self.pre_sql_setup()
        if not self.query.values:
            # Nothing to update.
            return "", ()
        qn = self.quote_name_unless_alias
        values, update_params = [], []
        for field, model, val in self.query.values:
            if hasattr(val, "resolve_expression"):
                # Expression values (F(), functions, ...) are resolved
                # against this query; joins and aggregates/windows are not
                # allowed in the SET clause.
                val = val.resolve_expression(
                    self.query, allow_joins=False, for_save=True
                )
                if val.contains_aggregate:
                    raise FieldError(
                        "Aggregate functions are not allowed in this query "
                        "(%s=%r)." % (field.name, val)
                    )
                if val.contains_over_clause:
                    raise FieldError(
                        "Window expressions are not allowed in this query "
                        "(%s=%r)." % (field.name, val)
                    )
            elif hasattr(val, "prepare_database_save"):
                # Model instances are only valid values for relation fields.
                if field.remote_field:
                    val = field.get_db_prep_save(
                        val.prepare_database_save(field),
                        connection=self.connection,
                    )
                else:
                    raise TypeError(
                        "Tried to update field %s with a model instance, %r. "
                        "Use a value compatible with %s."
                        % (field, val, field.__class__.__name__)
                    )
            else:
                val = field.get_db_prep_save(val, connection=self.connection)

            # Getting the placeholder for the field.
            if hasattr(field, "get_placeholder"):
                placeholder = field.get_placeholder(val, self, self.connection)
            else:
                placeholder = "%s"
            name = field.column
            if hasattr(val, "as_sql"):
                # SQL expression: inline the compiled SQL into the SET clause.
                sql, params = self.compile(val)
                values.append("%s = %s" % (qn(name), placeholder % sql))
                update_params.extend(params)
            elif val is not None:
                values.append("%s = %s" % (qn(name), placeholder))
                update_params.append(val)
            else:
                # None is rendered as a literal NULL, not a parameter.
                values.append("%s = NULL" % qn(name))
        table = self.query.base_table
        result = [
            "UPDATE %s SET" % qn(table),
            ", ".join(values),
        ]
        where, params = self.compile(self.query.where)
        if where:
            result.append("WHERE %s" % where)
        return " ".join(result), tuple(update_params + params)

    def execute_sql(self, result_type):
        """
        Execute the specified update. Return the number of rows affected by
        the primary update query. The "primary update query" is the first
        non-empty query that is executed. Row counts for any subsequent,
        related queries are not available.
        """
        cursor = super().execute_sql(result_type)
        try:
            rows = cursor.rowcount if cursor else 0
            is_empty = cursor is None
        finally:
            # Always release the cursor, even if rowcount access raised.
            if cursor:
                cursor.close()
        for query in self.query.get_related_updates():
            aux_rows = query.get_compiler(self.using).execute_sql(result_type)
            if is_empty and aux_rows:
                # First non-empty query supplies the reported row count.
                rows = aux_rows
                is_empty = False
        return rows

    def pre_sql_setup(self):
        """
        If the update depends on results from other tables, munge the "where"
        conditions to match the format required for (portable) SQL updates.

        If multiple updates are required, pull out the id values to update at
        this point so that they don't change as a result of the progressive
        updates.
        """
        refcounts_before = self.query.alias_refcount.copy()
        # Ensure base table is in the query
        self.query.get_initial_alias()
        count = self.query.count_active_tables()
        if not self.query.related_updates and count == 1:
            # Single-table update with no related updates: nothing to munge.
            return
        # Build a SELECT of the primary keys matching the original filters.
        query = self.query.chain(klass=Query)
        query.select_related = False
        query.clear_ordering(force=True)
        query.extra = {}
        query.select = []
        query.add_fields([query.get_meta().pk.name])
        super().pre_sql_setup()

        must_pre_select = (
            count > 1 and not self.connection.features.update_can_self_select
        )

        # Now we adjust the current query: reset the where clause and get rid
        # of all the tables we don't need (since they're in the sub-select).
        self.query.clear_where()
        if self.query.related_updates or must_pre_select:
            # Either we're using the idents in multiple update queries (so
            # don't want them to change), or the db backend doesn't support
            # selecting from the updating table (e.g. MySQL).
            idents = []
            for rows in query.get_compiler(self.using).execute_sql(MULTI):
                idents.extend(r[0] for r in rows)
            self.query.add_filter("pk__in", idents)
            self.query.related_ids = idents
        else:
            # The fast path. Filters and updates in one query.
            self.query.add_filter("pk__in", query)
        self.query.reset_refcounts(refcounts_before)
class SQLAggregateCompiler(SQLCompiler):
    def as_sql(self):
        """
        Create the SQL for this query. Return the SQL string and list of
        parameters.
        """
        sql, params = [], []
        # Compile every selected annotation into a column of the outer
        # SELECT that wraps the inner query.
        for annotation in self.query.annotation_select.values():
            ann_sql, ann_params = self.compile(annotation)
            ann_sql, ann_params = annotation.select_format(self, ann_sql, ann_params)
            sql.append(ann_sql)
            params.extend(ann_params)
        self.col_count = len(self.query.annotation_select)
        sql = ", ".join(sql)
        params = tuple(params)

        # Aggregate over the inner query as a subquery; column aliases are
        # required so the annotations can reference its output columns.
        inner_query_sql, inner_query_params = self.query.inner_query.get_compiler(
            self.using,
            elide_empty=self.elide_empty,
        ).as_sql(with_col_aliases=True)
        sql = "SELECT %s FROM (%s) subquery" % (sql, inner_query_sql)
        params = params + inner_query_params
        return sql, params
def cursor_iter(cursor, sentinel, col_count, itersize):
    """
    Yield blocks of rows from a cursor and ensure the cursor is closed when
    done.

    ``cursor.fetchmany(itersize)`` is called repeatedly until it returns
    ``sentinel`` (the backend's "no more rows" value). When ``col_count`` is
    not None, each row is truncated to its first ``col_count`` columns.
    """
    try:
        for rows in iter((lambda: cursor.fetchmany(itersize)), sentinel):
            yield rows if col_count is None else [r[:col_count] for r in rows]
    finally:
        # Close even if the consumer abandons the generator or an error
        # occurs mid-iteration.
        cursor.close()