Coverage for /var/srv/projects/api.amasfac.comuna18.com/tmp/venv/lib/python3.9/site-packages/django/db/backends/base/operations.py: 37%
263 statements
« prev ^ index » next coverage.py v6.4.4, created at 2023-07-17 14:22 -0600
1import datetime
2import decimal
3from importlib import import_module
5import sqlparse
7from django.conf import settings
8from django.db import NotSupportedError, transaction
9from django.db.backends import utils
10from django.utils import timezone
11from django.utils.encoding import force_str
12from django.utils.regex_helper import _lazy_re_compile
class BaseDatabaseOperations:
    """
    Encapsulate backend-specific differences, such as the way a backend
    performs ordering or calculates the ID of a recently-inserted row.
    """

    compiler_module = "django.db.models.sql.compiler"

    # Integer field safe ranges by `internal_type` as documented
    # in docs/ref/models/fields.txt.
    integer_field_ranges = {
        "SmallIntegerField": (-32768, 32767),
        "IntegerField": (-2147483648, 2147483647),
        "BigIntegerField": (-9223372036854775808, 9223372036854775807),
        "PositiveBigIntegerField": (0, 9223372036854775807),
        "PositiveSmallIntegerField": (0, 32767),
        "PositiveIntegerField": (0, 2147483647),
        "SmallAutoField": (-32768, 32767),
        "AutoField": (-2147483648, 2147483647),
        "BigAutoField": (-9223372036854775808, 9223372036854775807),
    }
    set_operators = {
        "union": "UNION",
        "intersection": "INTERSECT",
        "difference": "EXCEPT",
    }
    # Mapping of Field.get_internal_type() (typically the model field's class
    # name) to the data type to use for the Cast() function, if different from
    # DatabaseWrapper.data_types.
    cast_data_types = {}
    # CharField data type if the max_length argument isn't provided.
    cast_char_field_without_max_length = None

    # Start and end points for window expressions.
    PRECEDING = "PRECEDING"
    FOLLOWING = "FOLLOWING"
    UNBOUNDED_PRECEDING = "UNBOUNDED " + PRECEDING
    UNBOUNDED_FOLLOWING = "UNBOUNDED " + FOLLOWING
    CURRENT_ROW = "CURRENT ROW"

    # Prefix for EXPLAIN queries, or None if EXPLAIN isn't supported.
    explain_prefix = None

    extract_trunc_lookup_pattern = _lazy_re_compile(r"[\w\-_()]+")

    def __init__(self, connection):
        self.connection = connection
        # Lazily-populated cache for the compiler module (see compiler()).
        self._cache = None

    def autoinc_sql(self, table, column):
        """
        Return any SQL needed to support auto-incrementing primary keys, or
        None if no SQL is necessary.

        This SQL is executed when a table is created.
        """
        return None

    def bulk_batch_size(self, fields, objs):
        """
        Return the maximum allowed batch size for the backend. The fields
        are the fields going to be inserted in the batch, the objs contains
        all the objects to be inserted.
        """
        return len(objs)

    def cache_key_culling_sql(self):
        """
        Return an SQL query that retrieves the first cache key greater than the
        n smallest.

        This is used by the 'db' cache backend to determine where to start
        culling.
        """
        return "SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s"

    def unification_cast_sql(self, output_field):
        """
        Given a field instance, return the SQL that casts the result of a union
        to that type. The resulting string should contain a '%s' placeholder
        for the expression being cast.
        """
        return "%s"

    def date_extract_sql(self, lookup_type, field_name):
        """
        Given a lookup_type of 'year', 'month', or 'day', return the SQL that
        extracts a value from the given date field field_name.
        """
        raise NotImplementedError(
            "subclasses of BaseDatabaseOperations may require a date_extract_sql() "
            "method"
        )

    def date_trunc_sql(self, lookup_type, field_name, tzname=None):
        """
        Given a lookup_type of 'year', 'month', or 'day', return the SQL that
        truncates the given date or datetime field field_name to a date object
        with only the given specificity.

        If `tzname` is provided, the given value is truncated in a specific
        timezone.
        """
        raise NotImplementedError(
            "subclasses of BaseDatabaseOperations may require a date_trunc_sql() "
            "method."
        )

    def datetime_cast_date_sql(self, field_name, tzname):
        """
        Return the SQL to cast a datetime value to date value.
        """
        raise NotImplementedError(
            "subclasses of BaseDatabaseOperations may require a "
            "datetime_cast_date_sql() method."
        )

    def datetime_cast_time_sql(self, field_name, tzname):
        """
        Return the SQL to cast a datetime value to time value.
        """
        raise NotImplementedError(
            "subclasses of BaseDatabaseOperations may require a "
            "datetime_cast_time_sql() method"
        )

    def datetime_extract_sql(self, lookup_type, field_name, tzname):
        """
        Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute', or
        'second', return the SQL that extracts a value from the given
        datetime field field_name.
        """
        raise NotImplementedError(
            "subclasses of BaseDatabaseOperations may require a datetime_extract_sql() "
            "method"
        )

    def datetime_trunc_sql(self, lookup_type, field_name, tzname):
        """
        Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute', or
        'second', return the SQL that truncates the given datetime field
        field_name to a datetime object with only the given specificity.
        """
        raise NotImplementedError(
            "subclasses of BaseDatabaseOperations may require a datetime_trunc_sql() "
            "method"
        )

    def time_trunc_sql(self, lookup_type, field_name, tzname=None):
        """
        Given a lookup_type of 'hour', 'minute' or 'second', return the SQL
        that truncates the given time or datetime field field_name to a time
        object with only the given specificity.

        If `tzname` is provided, the given value is truncated in a specific
        timezone.
        """
        raise NotImplementedError(
            "subclasses of BaseDatabaseOperations may require a time_trunc_sql() method"
        )

    def time_extract_sql(self, lookup_type, field_name):
        """
        Given a lookup_type of 'hour', 'minute', or 'second', return the SQL
        that extracts a value from the given time field field_name.
        """
        # By default, time extraction is handled the same way as date
        # extraction; backends override this when the two differ.
        return self.date_extract_sql(lookup_type, field_name)

    def deferrable_sql(self):
        """
        Return the SQL to make a constraint "initially deferred" during a
        CREATE TABLE statement.
        """
        return ""

    def distinct_sql(self, fields, params):
        """
        Return an SQL DISTINCT clause which removes duplicate rows from the
        result set. If any fields are given, only check the given fields for
        duplicates.
        """
        if fields:
            raise NotSupportedError(
                "DISTINCT ON fields is not supported by this database backend"
            )
        else:
            return ["DISTINCT"], []

    def fetch_returned_insert_columns(self, cursor, returning_params):
        """
        Given a cursor object that has just performed an INSERT...RETURNING
        statement into a table, return the newly created data.
        """
        return cursor.fetchone()

    def field_cast_sql(self, db_type, internal_type):
        """
        Given a column type (e.g. 'BLOB', 'VARCHAR') and an internal type
        (e.g. 'GenericIPAddressField'), return the SQL to cast it before using
        it in a WHERE statement. The resulting string should contain a '%s'
        placeholder for the column being searched against.
        """
        return "%s"

    def force_no_ordering(self):
        """
        Return a list used in the "ORDER BY" clause to force no ordering at
        all. Return an empty list to include nothing in the ordering.
        """
        return []

    def for_update_sql(self, nowait=False, skip_locked=False, of=(), no_key=False):
        """
        Return the FOR UPDATE SQL clause to lock rows for an update operation.
        """
        return "FOR%s UPDATE%s%s%s" % (
            " NO KEY" if no_key else "",
            " OF %s" % ", ".join(of) if of else "",
            " NOWAIT" if nowait else "",
            " SKIP LOCKED" if skip_locked else "",
        )

    def _get_limit_offset_params(self, low_mark, high_mark):
        # Translate a queryset slice [low_mark:high_mark] into
        # (limit, offset); limit is None when no upper bound applies.
        offset = low_mark or 0
        if high_mark is not None:
            return (high_mark - offset), offset
        elif offset:
            # OFFSET without LIMIT requires the backend's "no limit" value.
            return self.connection.ops.no_limit_value(), offset
        return None, offset

    def limit_offset_sql(self, low_mark, high_mark):
        """Return LIMIT/OFFSET SQL clause."""
        limit, offset = self._get_limit_offset_params(low_mark, high_mark)
        return " ".join(
            sql
            for sql in (
                ("LIMIT %d" % limit) if limit else None,
                ("OFFSET %d" % offset) if offset else None,
            )
            if sql
        )

    def last_executed_query(self, cursor, sql, params):
        """
        Return a string of the query last executed by the given cursor, with
        placeholders replaced with actual values.

        `sql` is the raw query containing placeholders and `params` is the
        sequence of parameters. These are used by default, but this method
        exists for database backends to provide a better implementation
        according to their own quoting schemes.
        """
        # Convert params to contain string values.
        def to_string(s):
            return force_str(s, strings_only=True, errors="replace")

        if isinstance(params, (list, tuple)):
            u_params = tuple(to_string(val) for val in params)
        elif params is None:
            u_params = ()
        else:
            u_params = {to_string(k): to_string(v) for k, v in params.items()}

        return "QUERY = %r - PARAMS = %r" % (sql, u_params)

    def last_insert_id(self, cursor, table_name, pk_name):
        """
        Given a cursor object that has just performed an INSERT statement into
        a table that has an auto-incrementing ID, return the newly created ID.

        `pk_name` is the name of the primary-key column.
        """
        return cursor.lastrowid

    def lookup_cast(self, lookup_type, internal_type=None):
        """
        Return the string to use in a query when performing lookups
        ("contains", "like", etc.). It should contain a '%s' placeholder for
        the column being searched against.
        """
        return "%s"

    def max_in_list_size(self):
        """
        Return the maximum number of items that can be passed in a single 'IN'
        list condition, or None if the backend does not impose a limit.
        """
        return None

    def max_name_length(self):
        """
        Return the maximum length of table and column names, or None if there
        is no limit.
        """
        return None

    def no_limit_value(self):
        """
        Return the value to use for the LIMIT when we are wanting "LIMIT
        infinity". Return None if the limit clause can be omitted in this case.
        """
        raise NotImplementedError(
            "subclasses of BaseDatabaseOperations may require a no_limit_value() method"
        )

    def pk_default_value(self):
        """
        Return the value to use during an INSERT statement to specify that
        the field should use its default value.
        """
        return "DEFAULT"

    def prepare_sql_script(self, sql):
        """
        Take an SQL script that may contain multiple lines and return a list
        of statements to feed to successive cursor.execute() calls.

        Since few databases are able to process raw SQL scripts in a single
        cursor.execute() call and PEP 249 doesn't talk about this use case,
        the default implementation is conservative.
        """
        return [
            sqlparse.format(statement, strip_comments=True)
            for statement in sqlparse.split(sql)
            if statement
        ]

    def process_clob(self, value):
        """
        Return the value of a CLOB column, for backends that return a locator
        object that requires additional processing.
        """
        return value

    def return_insert_columns(self, fields):
        """
        For backends that support returning columns as part of an insert query,
        return the SQL and params to append to the INSERT query. The returned
        fragment should contain a format string to hold the appropriate column.
        """
        pass

    def compiler(self, compiler_name):
        """
        Return the SQLCompiler class corresponding to the given name,
        in the namespace corresponding to the `compiler_module` attribute
        on this backend.
        """
        if self._cache is None:
            self._cache = import_module(self.compiler_module)
        return getattr(self._cache, compiler_name)

    def quote_name(self, name):
        """
        Return a quoted version of the given table, index, or column name. Do
        not quote the given name if it's already been quoted.
        """
        raise NotImplementedError(
            "subclasses of BaseDatabaseOperations may require a quote_name() method"
        )

    def regex_lookup(self, lookup_type):
        """
        Return the string to use in a query when performing regular expression
        lookups (using "regex" or "iregex"). It should contain a '%s'
        placeholder for the column being searched against.

        If the feature is not supported (or part of it is not supported), raise
        NotImplementedError.
        """
        raise NotImplementedError(
            "subclasses of BaseDatabaseOperations may require a regex_lookup() method"
        )

    def savepoint_create_sql(self, sid):
        """
        Return the SQL for starting a new savepoint. Only required if the
        "uses_savepoints" feature is True. The "sid" parameter is a string
        for the savepoint id.
        """
        return "SAVEPOINT %s" % self.quote_name(sid)

    def savepoint_commit_sql(self, sid):
        """
        Return the SQL for committing the given savepoint.
        """
        return "RELEASE SAVEPOINT %s" % self.quote_name(sid)

    def savepoint_rollback_sql(self, sid):
        """
        Return the SQL for rolling back the given savepoint.
        """
        return "ROLLBACK TO SAVEPOINT %s" % self.quote_name(sid)

    def set_time_zone_sql(self):
        """
        Return the SQL that will set the connection's time zone.

        Return '' if the backend doesn't support time zones.
        """
        return ""

    def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False):
        """
        Return a list of SQL statements required to remove all data from
        the given database tables (without actually removing the tables
        themselves).

        The `style` argument is a Style object as returned by either
        color_style() or no_style() in django.core.management.color.

        If `reset_sequences` is True, the list includes SQL statements required
        to reset the sequences.

        The `allow_cascade` argument determines whether truncation may cascade
        to tables with foreign keys pointing the tables being truncated.
        PostgreSQL requires a cascade even if these tables are empty.
        """
        raise NotImplementedError(
            "subclasses of BaseDatabaseOperations must provide an sql_flush() method"
        )

    def execute_sql_flush(self, sql_list):
        """Execute a list of SQL statements to flush the database."""
        with transaction.atomic(
            using=self.connection.alias,
            savepoint=self.connection.features.can_rollback_ddl,
        ):
            with self.connection.cursor() as cursor:
                for sql in sql_list:
                    cursor.execute(sql)

    def sequence_reset_by_name_sql(self, style, sequences):
        """
        Return a list of the SQL statements required to reset sequences
        passed in `sequences`.

        The `style` argument is a Style object as returned by either
        color_style() or no_style() in django.core.management.color.
        """
        return []

    def sequence_reset_sql(self, style, model_list):
        """
        Return a list of the SQL statements required to reset sequences for
        the given models.

        The `style` argument is a Style object as returned by either
        color_style() or no_style() in django.core.management.color.
        """
        return []  # No sequence reset required by default.

    def start_transaction_sql(self):
        """Return the SQL statement required to start a transaction."""
        return "BEGIN;"

    def end_transaction_sql(self, success=True):
        """Return the SQL statement required to end a transaction."""
        if not success:
            return "ROLLBACK;"
        return "COMMIT;"

    def tablespace_sql(self, tablespace, inline=False):
        """
        Return the SQL that will be used in a query to define the tablespace.

        Return '' if the backend doesn't support tablespaces.

        If `inline` is True, append the SQL to a row; otherwise append it to
        the entire CREATE TABLE or CREATE INDEX statement.
        """
        return ""

    def prep_for_like_query(self, x):
        """Prepare a value for use in a LIKE query."""
        return str(x).replace("\\", "\\\\").replace("%", r"\%").replace("_", r"\_")

    # Same as prep_for_like_query(), but called for "iexact" matches, which
    # need not necessarily be implemented using "LIKE" in the backend.
    prep_for_iexact_query = prep_for_like_query

    def validate_autopk_value(self, value):
        """
        Certain backends do not accept some values for "serial" fields
        (for example zero in MySQL). Raise a ValueError if the value is
        invalid, otherwise return the validated value.
        """
        return value

    def adapt_unknown_value(self, value):
        """
        Transform a value to something compatible with the backend driver.

        This method only depends on the type of the value. It's designed for
        cases where the target type isn't known, such as .raw() SQL queries.
        As a consequence it may not work perfectly in all circumstances.
        """
        if isinstance(value, datetime.datetime):  # must be before date
            return self.adapt_datetimefield_value(value)
        elif isinstance(value, datetime.date):
            return self.adapt_datefield_value(value)
        elif isinstance(value, datetime.time):
            return self.adapt_timefield_value(value)
        elif isinstance(value, decimal.Decimal):
            return self.adapt_decimalfield_value(value)
        else:
            return value

    def adapt_datefield_value(self, value):
        """
        Transform a date value to an object compatible with what is expected
        by the backend driver for date columns.
        """
        if value is None:
            return None
        return str(value)

    def adapt_datetimefield_value(self, value):
        """
        Transform a datetime value to an object compatible with what is expected
        by the backend driver for datetime columns.
        """
        if value is None:
            return None
        return str(value)

    def adapt_timefield_value(self, value):
        """
        Transform a time value to an object compatible with what is expected
        by the backend driver for time columns.
        """
        if value is None:
            return None
        if timezone.is_aware(value):
            raise ValueError("Django does not support timezone-aware times.")
        return str(value)

    def adapt_decimalfield_value(self, value, max_digits=None, decimal_places=None):
        """
        Transform a decimal.Decimal value to an object compatible with what is
        expected by the backend driver for decimal (numeric) columns.
        """
        return utils.format_number(value, max_digits, decimal_places)

    def adapt_ipaddressfield_value(self, value):
        """
        Transform a string representation of an IP address into the expected
        type for the backend driver.
        """
        return value or None

    def year_lookup_bounds_for_date_field(self, value, iso_year=False):
        """
        Return a two-elements list with the lower and upper bound to be used
        with a BETWEEN operator to query a DateField value using a year
        lookup.

        `value` is an int, containing the looked-up year.
        If `iso_year` is True, return bounds for ISO-8601 week-numbering years.
        """
        if iso_year:
            first = datetime.date.fromisocalendar(value, 1, 1)
            second = datetime.date.fromisocalendar(
                value + 1, 1, 1
            ) - datetime.timedelta(days=1)
        else:
            first = datetime.date(value, 1, 1)
            second = datetime.date(value, 12, 31)
        first = self.adapt_datefield_value(first)
        second = self.adapt_datefield_value(second)
        return [first, second]

    def year_lookup_bounds_for_datetime_field(self, value, iso_year=False):
        """
        Return a two-elements list with the lower and upper bound to be used
        with a BETWEEN operator to query a DateTimeField value using a year
        lookup.

        `value` is an int, containing the looked-up year.
        If `iso_year` is True, return bounds for ISO-8601 week-numbering years.
        """
        if iso_year:
            first = datetime.datetime.fromisocalendar(value, 1, 1)
            second = datetime.datetime.fromisocalendar(
                value + 1, 1, 1
            ) - datetime.timedelta(microseconds=1)
        else:
            first = datetime.datetime(value, 1, 1)
            second = datetime.datetime(value, 12, 31, 23, 59, 59, 999999)
        if settings.USE_TZ:
            tz = timezone.get_current_timezone()
            first = timezone.make_aware(first, tz)
            second = timezone.make_aware(second, tz)
        first = self.adapt_datetimefield_value(first)
        second = self.adapt_datetimefield_value(second)
        return [first, second]

    def get_db_converters(self, expression):
        """
        Return a list of functions needed to convert field data.

        Some field types on some backends do not provide data in the correct
        format, this is the hook for converter functions.
        """
        return []

    def convert_durationfield_value(self, value, expression, connection):
        # `value` is a microsecond count; None passes through as None.
        if value is not None:
            return datetime.timedelta(0, 0, value)

    def check_expression_support(self, expression):
        """
        Check that the backend supports the provided expression.

        This is used on specific backends to rule out known expressions
        that have problematic or nonexistent implementations. If the
        expression has a known problem, the backend should raise
        NotSupportedError.
        """
        pass

    def conditional_expression_supported_in_where_clause(self, expression):
        """
        Return True, if the conditional expression is supported in the WHERE
        clause.
        """
        return True

    def combine_expression(self, connector, sub_expressions):
        """
        Combine a list of subexpressions into a single expression, using
        the provided connecting operator. This is required because operators
        can vary between backends (e.g., Oracle with %% and &) and between
        subexpression types (e.g., date expressions).
        """
        conn = " %s " % connector
        return conn.join(sub_expressions)

    def combine_duration_expression(self, connector, sub_expressions):
        return self.combine_expression(connector, sub_expressions)

    def binary_placeholder_sql(self, value):
        """
        Some backends require special syntax to insert binary content (MySQL
        for example uses '_binary %s').
        """
        return "%s"

    def modify_insert_params(self, placeholder, params):
        """
        Allow modification of insert parameters. Needed for Oracle Spatial
        backend due to #10888.
        """
        return params

    def integer_field_range(self, internal_type):
        """
        Given an integer field internal type (e.g. 'PositiveIntegerField'),
        return a tuple of the (min_value, max_value) form representing the
        range of the column type bound to the field.
        """
        return self.integer_field_ranges[internal_type]

    def subtract_temporals(self, internal_type, lhs, rhs):
        if self.connection.features.supports_temporal_subtraction:
            lhs_sql, lhs_params = lhs
            rhs_sql, rhs_params = rhs
            return "(%s - %s)" % (lhs_sql, rhs_sql), (*lhs_params, *rhs_params)
        raise NotSupportedError(
            "This backend does not support %s subtraction." % internal_type
        )

    def window_frame_start(self, start):
        if isinstance(start, int):
            if start < 0:
                return "%d %s" % (abs(start), self.PRECEDING)
            elif start == 0:
                return self.CURRENT_ROW
        elif start is None:
            return self.UNBOUNDED_PRECEDING
        raise ValueError(
            "start argument must be a negative integer, zero, or None, but got '%s'."
            % start
        )

    def window_frame_end(self, end):
        if isinstance(end, int):
            if end == 0:
                return self.CURRENT_ROW
            elif end > 0:
                return "%d %s" % (end, self.FOLLOWING)
        elif end is None:
            return self.UNBOUNDED_FOLLOWING
        raise ValueError(
            "end argument must be a positive integer, zero, or None, but got '%s'."
            % end
        )

    def window_frame_rows_start_end(self, start=None, end=None):
        """
        Return SQL for start and end points in an OVER clause window frame.
        """
        if not self.connection.features.supports_over_clause:
            raise NotSupportedError("This backend does not support window expressions.")
        return self.window_frame_start(start), self.window_frame_end(end)

    def window_frame_range_start_end(self, start=None, end=None):
        start_, end_ = self.window_frame_rows_start_end(start, end)
        features = self.connection.features
        if features.only_supports_unbounded_with_preceding_and_following and (
            (start and start < 0) or (end and end > 0)
        ):
            raise NotSupportedError(
                "%s only supports UNBOUNDED together with PRECEDING and "
                "FOLLOWING." % self.connection.display_name
            )
        return start_, end_

    def explain_query_prefix(self, format=None, **options):
        if not self.connection.features.supports_explaining_query_execution:
            raise NotSupportedError(
                "This backend does not support explaining query execution."
            )
        if format:
            supported_formats = self.connection.features.supported_explain_formats
            normalized_format = format.upper()
            if normalized_format not in supported_formats:
                msg = "%s is not a recognized format." % normalized_format
                if supported_formats:
                    msg += " Allowed formats: %s" % ", ".join(sorted(supported_formats))
                raise ValueError(msg)
        if options:
            raise ValueError("Unknown options: %s" % ", ".join(sorted(options.keys())))
        return self.explain_prefix

    def insert_statement(self, ignore_conflicts=False):
        return "INSERT INTO"

    def ignore_conflicts_suffix_sql(self, ignore_conflicts=None):
        return ""