Coverage for /var/srv/projects/api.amasfac.comuna18.com/tmp/venv/lib/python3.9/site-packages/django/db/models/base.py: 61%

1022 statements  

coverage.py v6.4.4, created at 2023-07-17 14:22 -0600

1import copy 

2import inspect 

3import warnings 

4from functools import partialmethod 

5from itertools import chain 

6 

7import django 

8from django.apps import apps 

9from django.conf import settings 

10from django.core import checks 

11from django.core.exceptions import ( 

12 NON_FIELD_ERRORS, 

13 FieldDoesNotExist, 

14 FieldError, 

15 MultipleObjectsReturned, 

16 ObjectDoesNotExist, 

17 ValidationError, 

18) 

19from django.db import ( 

20 DEFAULT_DB_ALIAS, 

21 DJANGO_VERSION_PICKLE_KEY, 

22 DatabaseError, 

23 connection, 

24 connections, 

25 router, 

26 transaction, 

27) 

28from django.db.models import NOT_PROVIDED, ExpressionWrapper, IntegerField, Max, Value 

29from django.db.models.constants import LOOKUP_SEP 

30from django.db.models.constraints import CheckConstraint, UniqueConstraint 

31from django.db.models.deletion import CASCADE, Collector 

32from django.db.models.fields.related import ( 

33 ForeignObjectRel, 

34 OneToOneField, 

35 lazy_related_operation, 

36 resolve_relation, 

37) 

38from django.db.models.functions import Coalesce 

39from django.db.models.manager import Manager 

40from django.db.models.options import Options 

41from django.db.models.query import F, Q 

42from django.db.models.signals import ( 

43 class_prepared, 

44 post_init, 

45 post_save, 

46 pre_init, 

47 pre_save, 

48) 

49from django.db.models.utils import make_model_tuple 

50from django.utils.encoding import force_str 

51from django.utils.hashable import make_hashable 

52from django.utils.text import capfirst, get_text_list 

53from django.utils.translation import gettext_lazy as _ 

54 

55 

56class Deferred: 

57 def __repr__(self): 

58 return "<Deferred field>" 

59 

60 def __str__(self): 

61 return "<Deferred field>" 

62 

63 

64DEFERRED = Deferred() 

65 

66 

67def subclass_exception(name, bases, module, attached_to): 

68 """ 

69 Create exception subclass. Used by ModelBase below. 

70 

71 The exception is created in a way that allows it to be pickled, assuming 

72 that the returned exception class will be added as an attribute to the 

73 'attached_to' class. 

74 """ 

75 return type( 

76 name, 

77 bases, 

78 { 

79 "__module__": module, 

80 "__qualname__": "%s.%s" % (attached_to.__qualname__, name), 

81 }, 

82 ) 
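
As a point of reference, a minimal sketch (not part of this file) of how these generated exceptions surface on a concrete model; `Article` is a hypothetical model:

    from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist

    # Both exception classes are attached to the model by ModelBase.__new__ below,
    # so they can be caught per model or via their django.core.exceptions bases.
    assert issubclass(Article.DoesNotExist, ObjectDoesNotExist)
    assert issubclass(Article.MultipleObjectsReturned, MultipleObjectsReturned)

    try:
        article = Article.objects.get(pk=42)
    except Article.DoesNotExist:
        article = None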

83 

84 

85def _has_contribute_to_class(value): 

86 # Only call contribute_to_class() if it's bound. 

87 return not inspect.isclass(value) and hasattr(value, "contribute_to_class") 

88 

89 

90class ModelBase(type): 

91 """Metaclass for all models.""" 

92 

93 def __new__(cls, name, bases, attrs, **kwargs): 

94 super_new = super().__new__ 

95 

96 # Also ensure initialization is only performed for subclasses of Model 

97 # (excluding Model class itself). 

98 parents = [b for b in bases if isinstance(b, ModelBase)] 

99 if not parents: 

100 return super_new(cls, name, bases, attrs) 

101 

102 # Create the class. 

103 module = attrs.pop("__module__") 

104 new_attrs = {"__module__": module} 

105 classcell = attrs.pop("__classcell__", None) 

106 if classcell is not None: 

107 new_attrs["__classcell__"] = classcell 

108 attr_meta = attrs.pop("Meta", None) 

109 # Pass all attrs without a (Django-specific) contribute_to_class() 

110 # method to type.__new__() so that they're properly initialized 

111 # (i.e. __set_name__()). 

112 contributable_attrs = {} 

113 for obj_name, obj in attrs.items(): 

114 if _has_contribute_to_class(obj): 

115 contributable_attrs[obj_name] = obj 

116 else: 

117 new_attrs[obj_name] = obj 

118 new_class = super_new(cls, name, bases, new_attrs, **kwargs) 

119 

120 abstract = getattr(attr_meta, "abstract", False) 

121 meta = attr_meta or getattr(new_class, "Meta", None) 

122 base_meta = getattr(new_class, "_meta", None) 

123 

124 app_label = None 

125 

126 # Look for an application configuration to attach the model to. 

127 app_config = apps.get_containing_app_config(module) 

128 

129 if getattr(meta, "app_label", None) is None: 

130 if app_config is None: 

131 if not abstract:  [131 ↛ 132: line 131 didn't jump to line 132, because the condition on line 131 was never true]

132 raise RuntimeError( 

133 "Model class %s.%s doesn't declare an explicit " 

134 "app_label and isn't in an application in " 

135 "INSTALLED_APPS." % (module, name) 

136 ) 

137 

138 else: 

139 app_label = app_config.label 

140 

141 new_class.add_to_class("_meta", Options(meta, app_label)) 

142 if not abstract: 

143 new_class.add_to_class( 

144 "DoesNotExist", 

145 subclass_exception( 

146 "DoesNotExist", 

147 tuple( 

148 x.DoesNotExist 

149 for x in parents 

150 if hasattr(x, "_meta") and not x._meta.abstract 

151 ) 

152 or (ObjectDoesNotExist,), 

153 module, 

154 attached_to=new_class, 

155 ), 

156 ) 

157 new_class.add_to_class( 

158 "MultipleObjectsReturned", 

159 subclass_exception( 

160 "MultipleObjectsReturned", 

161 tuple( 

162 x.MultipleObjectsReturned 

163 for x in parents 

164 if hasattr(x, "_meta") and not x._meta.abstract 

165 ) 

166 or (MultipleObjectsReturned,), 

167 module, 

168 attached_to=new_class, 

169 ), 

170 ) 

171 if base_meta and not base_meta.abstract: 

172 # Non-abstract child classes inherit some attributes from their 

173 # non-abstract parent (unless an ABC comes before it in the 

174 # method resolution order). 

175 if not hasattr(meta, "ordering"): 

176 new_class._meta.ordering = base_meta.ordering 

177 if not hasattr(meta, "get_latest_by"):  [177 ↛ 180: line 177 didn't jump to line 180, because the condition on line 177 was never false]

178 new_class._meta.get_latest_by = base_meta.get_latest_by 

179 

180 is_proxy = new_class._meta.proxy 

181 

182 # If the model is a proxy, ensure that the base class 

183 # hasn't been swapped out. 

184 if is_proxy and base_meta and base_meta.swapped:  [184 ↛ 185: line 184 didn't jump to line 185, because the condition on line 184 was never true]

185 raise TypeError( 

186 "%s cannot proxy the swapped model '%s'." % (name, base_meta.swapped) 

187 ) 

188 

189 # Add remaining attributes (those with a contribute_to_class() method) 

190 # to the class. 

191 for obj_name, obj in contributable_attrs.items(): 

192 new_class.add_to_class(obj_name, obj) 

193 

194 # All the fields of any type declared on this model 

195 new_fields = chain( 

196 new_class._meta.local_fields, 

197 new_class._meta.local_many_to_many, 

198 new_class._meta.private_fields, 

199 ) 

200 field_names = {f.name for f in new_fields} 

201 

202 # Basic setup for proxy models. 

203 if is_proxy: 

204 base = None 

205 for parent in [kls for kls in parents if hasattr(kls, "_meta")]: 

206 if parent._meta.abstract:  [206 ↛ 207: line 206 didn't jump to line 207, because the condition on line 206 was never true]

207 if parent._meta.fields: 

208 raise TypeError( 

209 "Abstract base class containing model fields not " 

210 "permitted for proxy model '%s'." % name 

211 ) 

212 else: 

213 continue 

214 if base is None:  [214 ↛ 216: line 214 didn't jump to line 216, because the condition on line 214 was never false]

215 base = parent 

216 elif parent._meta.concrete_model is not base._meta.concrete_model: 

217 raise TypeError( 

218 "Proxy model '%s' has more than one non-abstract model base " 

219 "class." % name 

220 ) 

221 if base is None:  [221 ↛ 222: line 221 didn't jump to line 222, because the condition on line 221 was never true]

222 raise TypeError( 

223 "Proxy model '%s' has no non-abstract model base class." % name 

224 ) 

225 new_class._meta.setup_proxy(base) 

226 new_class._meta.concrete_model = base._meta.concrete_model 

227 else: 

228 new_class._meta.concrete_model = new_class 

229 

230 # Collect the parent links for multi-table inheritance. 

231 parent_links = {} 

232 for base in reversed([new_class] + parents): 

233 # Conceptually equivalent to `if base is Model`. 

234 if not hasattr(base, "_meta"): 

235 continue 

236 # Skip concrete parent classes. 

237 if base != new_class and not base._meta.abstract: 

238 continue 

239 # Locate OneToOneField instances. 

240 for field in base._meta.local_fields: 

241 if isinstance(field, OneToOneField) and field.remote_field.parent_link: 

242 related = resolve_relation(new_class, field.remote_field.model) 

243 parent_links[make_model_tuple(related)] = field 

244 

245 # Track fields inherited from base models. 

246 inherited_attributes = set() 

247 # Do the appropriate setup for any model parents. 

248 for base in new_class.mro(): 

249 if base not in parents or not hasattr(base, "_meta"): 

250 # Things without _meta aren't functional models, so they're 

251 # uninteresting parents. 

252 inherited_attributes.update(base.__dict__) 

253 continue 

254 

255 parent_fields = base._meta.local_fields + base._meta.local_many_to_many 

256 if not base._meta.abstract: 

257 # Check for clashes between locally declared fields and those 

258 # on the base classes. 

259 for field in parent_fields: 

260 if field.name in field_names:  [260 ↛ 261: line 260 didn't jump to line 261, because the condition on line 260 was never true]

261 raise FieldError( 

262 "Local field %r in class %r clashes with field of " 

263 "the same name from base class %r." 

264 % ( 

265 field.name, 

266 name, 

267 base.__name__, 

268 ) 

269 ) 

270 else: 

271 inherited_attributes.add(field.name) 

272 

273 # Concrete classes... 

274 base = base._meta.concrete_model 

275 base_key = make_model_tuple(base) 

276 if base_key in parent_links: 

277 field = parent_links[base_key] 

278 elif not is_proxy: 

279 attr_name = "%s_ptr" % base._meta.model_name 

280 field = OneToOneField( 

281 base, 

282 on_delete=CASCADE, 

283 name=attr_name, 

284 auto_created=True, 

285 parent_link=True, 

286 ) 

287 

288 if attr_name in field_names:  [288 ↛ 289: line 288 didn't jump to line 289, because the condition on line 288 was never true]

289 raise FieldError( 

290 "Auto-generated field '%s' in class %r for " 

291 "parent_link to base class %r clashes with " 

292 "declared field of the same name." 

293 % ( 

294 attr_name, 

295 name, 

296 base.__name__, 

297 ) 

298 ) 

299 

300 # Only add the ptr field if it's not already present; 

301 # e.g. migrations will already have it specified 

302 if not hasattr(new_class, attr_name):  [302 ↛ 306: line 302 didn't jump to line 306, because the condition on line 302 was never false]

303 new_class.add_to_class(attr_name, field) 

304 else: 

305 field = None 

306 new_class._meta.parents[base] = field 

307 else: 

308 base_parents = base._meta.parents.copy() 

309 

310 # Add fields from abstract base class if it wasn't overridden. 

311 for field in parent_fields: 

312 if (  [312 ↛ 311: line 312 didn't jump to line 311]

313 field.name not in field_names 

314 and field.name not in new_class.__dict__ 

315 and field.name not in inherited_attributes 

316 ): 

317 new_field = copy.deepcopy(field) 

318 new_class.add_to_class(field.name, new_field) 

319 # Replace parent links defined on this base by the new 

320 # field. It will be appropriately resolved if required. 

321 if field.one_to_one:  [321 ↛ 322: line 321 didn't jump to line 322, because the condition on line 321 was never true]

322 for parent, parent_link in base_parents.items(): 

323 if field == parent_link: 

324 base_parents[parent] = new_field 

325 

326 # Pass any non-abstract parent classes onto child. 

327 new_class._meta.parents.update(base_parents) 

328 

329 # Inherit private fields (like GenericForeignKey) from the parent 

330 # class 

331 for field in base._meta.private_fields:  [331 ↛ 332: line 331 didn't jump to line 332, because the loop on line 331 never started]

332 if field.name in field_names: 

333 if not base._meta.abstract: 

334 raise FieldError( 

335 "Local field %r in class %r clashes with field of " 

336 "the same name from base class %r." 

337 % ( 

338 field.name, 

339 name, 

340 base.__name__, 

341 ) 

342 ) 

343 else: 

344 field = copy.deepcopy(field) 

345 if not base._meta.abstract: 

346 field.mti_inherited = True 

347 new_class.add_to_class(field.name, field) 

348 

349 # Copy indexes so that index names are unique when models extend an 

350 # abstract model. 

351 new_class._meta.indexes = [ 

352 copy.deepcopy(idx) for idx in new_class._meta.indexes 

353 ] 

354 

355 if abstract: 

356 # Abstract base models can't be instantiated and don't appear in 

357 # the list of models for an app. We do the final setup for them a 

358 # little differently from normal models. 

359 attr_meta.abstract = False 

360 new_class.Meta = attr_meta 

361 return new_class 

362 

363 new_class._prepare() 

364 new_class._meta.apps.register_model(new_class._meta.app_label, new_class) 

365 return new_class 

366 

367 def add_to_class(cls, name, value): 

368 if _has_contribute_to_class(value): 

369 value.contribute_to_class(cls, name) 

370 else: 

371 setattr(cls, name, value) 

372 

373 def _prepare(cls): 

374 """Create some methods once self._meta has been populated.""" 

375 opts = cls._meta 

376 opts._prepare(cls) 

377 

378 if opts.order_with_respect_to:  [378 ↛ 379: line 378 didn't jump to line 379, because the condition on line 378 was never true]

379 cls.get_next_in_order = partialmethod( 

380 cls._get_next_or_previous_in_order, is_next=True 

381 ) 

382 cls.get_previous_in_order = partialmethod( 

383 cls._get_next_or_previous_in_order, is_next=False 

384 ) 

385 

386 # Defer creating accessors on the foreign class until it has been 

387 # created and registered. If remote_field is None, we're ordering 

388 # with respect to a GenericForeignKey and don't know what the 

389 # foreign class is - we'll add those accessors later in 

390 # contribute_to_class(). 

391 if opts.order_with_respect_to.remote_field: 

392 wrt = opts.order_with_respect_to 

393 remote = wrt.remote_field.model 

394 lazy_related_operation(make_foreign_order_accessors, cls, remote) 

395 

396 # Give the class a docstring -- its definition. 

397 if cls.__doc__ is None: 

398 cls.__doc__ = "%s(%s)" % ( 

399 cls.__name__, 

400 ", ".join(f.name for f in opts.fields), 

401 ) 

402 

403 get_absolute_url_override = settings.ABSOLUTE_URL_OVERRIDES.get( 

404 opts.label_lower 

405 ) 

406 if get_absolute_url_override:  [406 ↛ 407: line 406 didn't jump to line 407, because the condition on line 406 was never true]

407 setattr(cls, "get_absolute_url", get_absolute_url_override) 

408 

409 if not opts.managers: 

410 if any(f.name == "objects" for f in opts.fields):  [410 ↛ 411: line 410 didn't jump to line 411, because the condition on line 410 was never true]

411 raise ValueError( 

412 "Model %s must specify a custom Manager, because it has a " 

413 "field named 'objects'." % cls.__name__ 

414 ) 

415 manager = Manager() 

416 manager.auto_created = True 

417 cls.add_to_class("objects", manager) 

418 

419 # Set the name of _meta.indexes. This can't be done in 

420 # Options.contribute_to_class() because fields haven't been added to 

421 # the model at that point. 

422 for index in cls._meta.indexes: 

423 if not index.name: 

424 index.set_name_with_model(cls) 

425 

426 class_prepared.send(sender=cls) 

427 

428 @property 

429 def _base_manager(cls): 

430 return cls._meta.base_manager 

431 

432 @property 

433 def _default_manager(cls): 

434 return cls._meta.default_manager 

435 

436 

437class ModelStateFieldsCacheDescriptor: 

438 def __get__(self, instance, cls=None): 

439 if instance is None:  [439 ↛ 440: line 439 didn't jump to line 440, because the condition on line 439 was never true]

440 return self 

441 res = instance.fields_cache = {} 

442 return res 

443 

444 

445class ModelState: 

446 """Store model instance state.""" 

447 

448 db = None 

449 # If true, uniqueness validation checks will consider this a new, unsaved 

450 # object. Necessary for correct validation of new instances of objects with 

451 # explicit (non-auto) PKs. This impacts validation only; it has no effect 

452 # on the actual save. 

453 adding = True 

454 fields_cache = ModelStateFieldsCacheDescriptor() 

455 

456 

457class Model(metaclass=ModelBase): 

458 def __init__(self, *args, **kwargs): 

459 # Alias some things as locals to avoid repeat global lookups 

460 cls = self.__class__ 

461 opts = self._meta 

462 _setattr = setattr 

463 _DEFERRED = DEFERRED 

464 if opts.abstract:  [464 ↛ 465: line 464 didn't jump to line 465, because the condition on line 464 was never true]

465 raise TypeError("Abstract models cannot be instantiated.") 

466 

467 pre_init.send(sender=cls, args=args, kwargs=kwargs) 

468 

469 # Set up the storage for instance state 

470 self._state = ModelState() 

471 

472 # There is a rather weird disparity here; if kwargs, it's set, then args 

473 # overrides it. It should be one or the other; don't duplicate the work 

474 # The reason for the kwargs check is that standard iterator passes in by 

475 # args, and instantiation for iteration is 33% faster. 

476 if len(args) > len(opts.concrete_fields):  [476 ↛ 478: line 476 didn't jump to line 478, because the condition on line 476 was never true]

477 # Daft, but matches old exception sans the err msg. 

478 raise IndexError("Number of args exceeds number of fields") 

479 

480 if not kwargs: 

481 fields_iter = iter(opts.concrete_fields) 

482 # The ordering of the zip calls matter - zip throws StopIteration 

483 # when an iter throws it. So if the first iter throws it, the second 

484 # is *not* consumed. We rely on this, so don't change the order 

485 # without changing the logic. 

486 for val, field in zip(args, fields_iter): 

487 if val is _DEFERRED: 

488 continue 

489 _setattr(self, field.attname, val) 

490 else: 

491 # Slower, kwargs-ready version. 

492 fields_iter = iter(opts.fields) 

493 for val, field in zip(args, fields_iter):  [493 ↛ 494: line 493 didn't jump to line 494, because the loop on line 493 never started]

494 if val is _DEFERRED: 

495 continue 

496 _setattr(self, field.attname, val) 

497 if kwargs.pop(field.name, NOT_PROVIDED) is not NOT_PROVIDED: 

498 raise TypeError( 

499 f"{cls.__qualname__}() got both positional and " 

500 f"keyword arguments for field '{field.name}'." 

501 ) 

502 

503 # Now we're left with the unprocessed fields that *must* come from 

504 # keywords, or default. 

505 

506 for field in fields_iter: 

507 is_related_object = False 

508 # Virtual field 

509 if field.attname not in kwargs and field.column is None:  [509 ↛ 510: line 509 didn't jump to line 510, because the condition on line 509 was never true]

510 continue 

511 if kwargs: 

512 if isinstance(field.remote_field, ForeignObjectRel): 

513 try: 

514 # Assume object instance was passed in. 

515 rel_obj = kwargs.pop(field.name) 

516 is_related_object = True 

517 except KeyError: 

518 try: 

519 # Object instance wasn't passed in -- must be an ID. 

520 val = kwargs.pop(field.attname) 

521 except KeyError: 

522 val = field.get_default() 

523 else: 

524 try: 

525 val = kwargs.pop(field.attname) 

526 except KeyError: 

527 # This is done with an exception rather than the 

528 # default argument on pop because we don't want 

529 # get_default() to be evaluated, and then not used. 

530 # Refs #12057. 

531 val = field.get_default() 

532 else: 

533 val = field.get_default() 

534 

535 if is_related_object: 

536 # If we are passed a related instance, set it using the 

537 # field.name instead of field.attname (e.g. "user" instead of 

538 # "user_id") so that the object gets properly cached (and type 

539 # checked) by the RelatedObjectDescriptor. 

540 if rel_obj is not _DEFERRED:  [540 ↛ 506: line 540 didn't jump to line 506, because the condition on line 540 was never false]

541 _setattr(self, field.name, rel_obj) 

542 else: 

543 if val is not _DEFERRED:  [543 ↛ 506: line 543 didn't jump to line 506, because the condition on line 543 was never false]

544 _setattr(self, field.attname, val) 

545 

546 if kwargs: 

547 property_names = opts._property_names 

548 for prop in tuple(kwargs): 

549 try: 

550 # Any remaining kwargs must correspond to properties or 

551 # virtual fields. 

552 if prop in property_names or opts.get_field(prop):  [552 ↛ 548: line 552 didn't jump to line 548, because the condition on line 552 was never false]

553 if kwargs[prop] is not _DEFERRED:  [553 ↛ 555: line 553 didn't jump to line 555, because the condition on line 553 was never false]

554 _setattr(self, prop, kwargs[prop]) 

555 del kwargs[prop] 

556 except (AttributeError, FieldDoesNotExist): 

557 pass 

558 for kwarg in kwargs:  [558 ↛ 559: line 558 didn't jump to line 559, because the loop on line 558 never started]

559 raise TypeError( 

560 "%s() got an unexpected keyword argument '%s'" 

561 % (cls.__name__, kwarg) 

562 ) 

563 super().__init__() 

564 post_init.send(sender=cls, instance=self) 

565 

566 @classmethod 

567 def from_db(cls, db, field_names, values): 

568 if len(values) != len(cls._meta.concrete_fields): 

569 values_iter = iter(values) 

570 values = [ 

571 next(values_iter) if f.attname in field_names else DEFERRED 

572 for f in cls._meta.concrete_fields 

573 ] 

574 new = cls(*values) 

575 new._state.adding = False 

576 new._state.db = db 

577 return new 
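
For orientation, a sketch (not from this file) of the documented pattern for overriding from_db() to remember the values loaded from the database; the model and attribute names are hypothetical:

    from django.db import models

    class TrackedArticle(models.Model):  # hypothetical model
        title = models.CharField(max_length=200)

        @classmethod
        def from_db(cls, db, field_names, values):
            instance = super().from_db(db, field_names, values)
            # field_names and the values passed in are aligned, so this records
            # what the row looked like when it was loaded.
            instance._loaded_values = dict(zip(field_names, values))
            return instance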

578 

579 def __repr__(self): 

580 return "<%s: %s>" % (self.__class__.__name__, self) 

581 

582 def __str__(self): 

583 return "%s object (%s)" % (self.__class__.__name__, self.pk) 

584 

585 def __eq__(self, other): 

586 if not isinstance(other, Model): 

587 return NotImplemented 

588 if self._meta.concrete_model != other._meta.concrete_model:  [588 ↛ 589: line 588 didn't jump to line 589, because the condition on line 588 was never true]

589 return False 

590 my_pk = self.pk 

591 if my_pk is None:  [591 ↛ 592: line 591 didn't jump to line 592, because the condition on line 591 was never true]

592 return self is other 

593 return my_pk == other.pk 

594 

595 def __hash__(self): 

596 if self.pk is None:  [596 ↛ 597: line 596 didn't jump to line 597, because the condition on line 596 was never true]

597 raise TypeError("Model instances without primary key value are unhashable") 

598 return hash(self.pk) 
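
In practice (a sketch assuming a hypothetical Article model), equality and hashing are driven by the primary key:

    a = Article.objects.get(pk=1)
    b = Article.objects.get(pk=1)
    assert a == b                # same concrete model and same pk
    assert len({a, b}) == 1      # __hash__ is pk-based, so they collapse in a set

    unsaved = Article()
    assert unsaved != Article()  # pk is None: equal only to the identical object
    # hash(unsaved) raises TypeError; unsaved instances are unhashable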

599 

600 def __reduce__(self): 

601 data = self.__getstate__() 

602 data[DJANGO_VERSION_PICKLE_KEY] = django.__version__ 

603 class_id = self._meta.app_label, self._meta.object_name 

604 return model_unpickle, (class_id,), data 

605 

606 def __getstate__(self): 

607 """Hook to allow choosing the attributes to pickle.""" 

608 state = self.__dict__.copy() 

609 state["_state"] = copy.copy(state["_state"]) 

610 state["_state"].fields_cache = state["_state"].fields_cache.copy() 

611 # memoryview cannot be pickled, so cast it to bytes and store 

612 # separately. 

613 _memoryview_attrs = [] 

614 for attr, value in state.items(): 

615 if isinstance(value, memoryview):  [615 ↛ 616: line 615 didn't jump to line 616, because the condition on line 615 was never true]

616 _memoryview_attrs.append((attr, bytes(value))) 

617 if _memoryview_attrs:  [617 ↛ 618: line 617 didn't jump to line 618, because the condition on line 617 was never true]

618 state["_memoryview_attrs"] = _memoryview_attrs 

619 for attr, value in _memoryview_attrs: 

620 state.pop(attr) 

621 return state 

622 

623 def __setstate__(self, state): 

624 pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY) 

625 if pickled_version:  [625 ↛ 635: line 625 didn't jump to line 635, because the condition on line 625 was never false]

626 if pickled_version != django.__version__:  [626 ↛ 627: line 626 didn't jump to line 627, because the condition on line 626 was never true]

627 warnings.warn( 

628 "Pickled model instance's Django version %s does not " 

629 "match the current version %s." 

630 % (pickled_version, django.__version__), 

631 RuntimeWarning, 

632 stacklevel=2, 

633 ) 

634 else: 

635 warnings.warn( 

636 "Pickled model instance's Django version is not specified.", 

637 RuntimeWarning, 

638 stacklevel=2, 

639 ) 

640 if "_memoryview_attrs" in state: 640 ↛ 641line 640 didn't jump to line 641, because the condition on line 640 was never true

641 for attr, value in state.pop("_memoryview_attrs"): 

642 state[attr] = memoryview(value) 

643 self.__dict__.update(state) 
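
A short sketch of the pickling round trip these hooks support (hypothetical Article model, not part of this file):

    import pickle

    article = Article.objects.get(pk=1)
    payload = pickle.dumps(article)    # __reduce__()/__getstate__() capture the instance state
    restored = pickle.loads(payload)   # rebuilt through model_unpickle() for the same model class
    assert restored.pk == article.pk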

644 

645 def _get_pk_val(self, meta=None): 

646 meta = meta or self._meta 

647 return getattr(self, meta.pk.attname) 

648 

649 def _set_pk_val(self, value): 

650 for parent_link in self._meta.parents.values():  [650 ↛ 651: line 650 didn't jump to line 651, because the loop on line 650 never started]

651 if parent_link and parent_link != self._meta.pk: 

652 setattr(self, parent_link.target_field.attname, value) 

653 return setattr(self, self._meta.pk.attname, value) 

654 

655 pk = property(_get_pk_val, _set_pk_val) 

656 

657 def get_deferred_fields(self): 

658 """ 

659 Return a set containing names of deferred fields for this instance. 

660 """ 

661 return { 

662 f.attname 

663 for f in self._meta.concrete_fields 

664 if f.attname not in self.__dict__ 

665 } 

666 

667 def refresh_from_db(self, using=None, fields=None): 

668 """ 

669 Reload field values from the database. 

670 

671 By default, the reloading happens from the database this instance was 

672 loaded from, or by the read router if this instance wasn't loaded from 

673 any database. The using parameter will override the default. 

674 

675 Fields can be used to specify which fields to reload. The fields 

676 should be an iterable of field attnames. If fields is None, then 

677 all non-deferred fields are reloaded. 

678 

679 When accessing deferred fields of an instance, the deferred loading 

680 of the field will call this method. 

681 """ 

682 if fields is None: 

683 self._prefetched_objects_cache = {} 

684 else: 

685 prefetched_objects_cache = getattr(self, "_prefetched_objects_cache", ()) 

686 for field in fields: 

687 if field in prefetched_objects_cache:  [687 ↛ 688: line 687 didn't jump to line 688, because the condition on line 687 was never true]

688 del prefetched_objects_cache[field] 

689 fields.remove(field) 

690 if not fields:  [690 ↛ 691: line 690 didn't jump to line 691, because the condition on line 690 was never true]

691 return 

692 if any(LOOKUP_SEP in f for f in fields):  [692 ↛ 693: line 692 didn't jump to line 693, because the condition on line 692 was never true]

693 raise ValueError( 

694 'Found "%s" in fields argument. Relations and transforms ' 

695 "are not allowed in fields." % LOOKUP_SEP 

696 ) 

697 

698 hints = {"instance": self} 

699 db_instance_qs = self.__class__._base_manager.db_manager( 

700 using, hints=hints 

701 ).filter(pk=self.pk) 

702 

703 # Use provided fields, if not set then reload all non-deferred fields. 

704 deferred_fields = self.get_deferred_fields() 

705 if fields is not None: 

706 fields = list(fields) 

707 db_instance_qs = db_instance_qs.only(*fields) 

708 elif deferred_fields:  [708 ↛ 709: line 708 didn't jump to line 709, because the condition on line 708 was never true]

709 fields = [ 

710 f.attname 

711 for f in self._meta.concrete_fields 

712 if f.attname not in deferred_fields 

713 ] 

714 db_instance_qs = db_instance_qs.only(*fields) 

715 

716 db_instance = db_instance_qs.get() 

717 non_loaded_fields = db_instance.get_deferred_fields() 

718 for field in self._meta.concrete_fields: 

719 if field.attname in non_loaded_fields: 

720 # This field wasn't refreshed - skip ahead. 

721 continue 

722 setattr(self, field.attname, getattr(db_instance, field.attname)) 

723 # Clear cached foreign keys. 

724 if field.is_relation and field.is_cached(self): 

725 field.delete_cached_value(self) 

726 

727 # Clear cached relations. 

728 for field in self._meta.related_objects: 

729 if field.is_cached(self):  [729 ↛ 730: line 729 didn't jump to line 730, because the condition on line 729 was never true]

730 field.delete_cached_value(self) 

731 

732 self._state.db = db_instance._state.db 
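
A minimal usage sketch for refresh_from_db(), assuming a hypothetical Article model:

    article = Article.objects.get(pk=1)
    # ... the row is changed elsewhere (another process, raw SQL, a signal handler) ...
    article.refresh_from_db()                  # reload all non-deferred fields
    article.refresh_from_db(fields=["title"])  # reload only the "title" column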

733 

734 def serializable_value(self, field_name): 

735 """ 

736 Return the value of the field name for this instance. If the field is 

737 a foreign key, return the id value instead of the object. If there's 

738 no Field object with this name on the model, return the model 

739 attribute's value. 

740 

741 Used to serialize a field's value (in the serializer, or form output, 

742 for example). Normally, you would just access the attribute directly 

743 and not use this method. 

744 """ 

745 try: 

746 field = self._meta.get_field(field_name) 

747 except FieldDoesNotExist: 

748 return getattr(self, field_name) 

749 return getattr(self, field.attname) 

750 

751 def save( 

752 self, force_insert=False, force_update=False, using=None, update_fields=None 

753 ): 

754 """ 

755 Save the current instance. Override this in a subclass if you want to 

756 control the saving process. 

757 

758 The 'force_insert' and 'force_update' parameters can be used to insist 

759 that the "save" must be an SQL insert or update (or equivalent for 

760 non-SQL backends), respectively. Normally, they should not be set. 

761 """ 

762 self._prepare_related_fields_for_save(operation_name="save") 

763 

764 using = using or router.db_for_write(self.__class__, instance=self) 

765 if force_insert and (force_update or update_fields):  [765 ↛ 766: line 765 didn't jump to line 766, because the condition on line 765 was never true]

766 raise ValueError("Cannot force both insert and updating in model saving.") 

767 

768 deferred_fields = self.get_deferred_fields() 

769 if update_fields is not None: 

770 # If update_fields is empty, skip the save. We do also check for 

771 # no-op saves later on for inheritance cases. This bailout is 

772 # still needed for skipping signal sending. 

773 if not update_fields:  [773 ↛ 774: line 773 didn't jump to line 774, because the condition on line 773 was never true]

774 return 

775 

776 update_fields = frozenset(update_fields) 

777 field_names = set() 

778 

779 for field in self._meta.concrete_fields: 

780 if not field.primary_key: 

781 field_names.add(field.name) 

782 

783 if field.name != field.attname: 

784 field_names.add(field.attname) 

785 

786 non_model_fields = update_fields.difference(field_names) 

787 

788 if non_model_fields:  [788 ↛ 789: line 788 didn't jump to line 789, because the condition on line 788 was never true]

789 raise ValueError( 

790 "The following fields do not exist in this model, are m2m " 

791 "fields, or are non-concrete fields: %s" 

792 % ", ".join(non_model_fields) 

793 ) 

794 

795 # If saving to the same database, and this model is deferred, then 

796 # automatically do an "update_fields" save on the loaded fields. 

797 elif not force_insert and deferred_fields and using == self._state.db:  [797 ↛ 798: line 797 didn't jump to line 798, because the condition on line 797 was never true]

798 field_names = set() 

799 for field in self._meta.concrete_fields: 

800 if not field.primary_key and not hasattr(field, "through"): 

801 field_names.add(field.attname) 

802 loaded_fields = field_names.difference(deferred_fields) 

803 if loaded_fields: 

804 update_fields = frozenset(loaded_fields) 

805 

806 self.save_base( 

807 using=using, 

808 force_insert=force_insert, 

809 force_update=force_update, 

810 update_fields=update_fields, 

811 ) 

812 

813 save.alters_data = True 
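
A usage sketch for the update_fields and force_insert paths described above (hypothetical Article model, not part of this file):

    article = Article.objects.get(pk=1)
    article.title = "Updated title"
    article.save(update_fields=["title"])   # UPDATE limited to the named fields
    article.save(update_fields=[])          # no-op: an empty update_fields skips the save

    Article(pk=99, title="Imported").save(force_insert=True)  # must INSERT, never UPDATE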

814 

815 def save_base( 

816 self, 

817 raw=False, 

818 force_insert=False, 

819 force_update=False, 

820 using=None, 

821 update_fields=None, 

822 ): 

823 """ 

824 Handle the parts of saving which should be done only once per save, 

825 yet need to be done in raw saves, too. This includes some sanity 

826 checks and signal sending. 

827 

828 The 'raw' argument is telling save_base not to save any parent 

829 models and not to do any changes to the values before save. This 

830 is used by fixture loading. 

831 """ 

832 using = using or router.db_for_write(self.__class__, instance=self) 

833 assert not (force_insert and (force_update or update_fields)) 

834 assert update_fields is None or update_fields 

835 cls = origin = self.__class__ 

836 # Skip proxies, but keep the origin as the proxy model. 

837 if cls._meta.proxy: 

838 cls = cls._meta.concrete_model 

839 meta = cls._meta 

840 if not meta.auto_created:  [840 ↛ 849: line 840 didn't jump to line 849, because the condition on line 840 was never false]

841 pre_save.send( 

842 sender=origin, 

843 instance=self, 

844 raw=raw, 

845 using=using, 

846 update_fields=update_fields, 

847 ) 

848 # A transaction isn't needed if one query is issued. 

849 if meta.parents: 

850 context_manager = transaction.atomic(using=using, savepoint=False) 

851 else: 

852 context_manager = transaction.mark_for_rollback_on_error(using=using) 

853 with context_manager: 

854 parent_inserted = False 

855 if not raw:  [855 ↛ 857: line 855 didn't jump to line 857, because the condition on line 855 was never false]

856 parent_inserted = self._save_parents(cls, using, update_fields) 

857 updated = self._save_table( 

858 raw, 

859 cls, 

860 force_insert or parent_inserted, 

861 force_update, 

862 using, 

863 update_fields, 

864 ) 

865 # Store the database on which the object was saved 

866 self._state.db = using 

867 # Once saved, this is no longer a to-be-added instance. 

868 self._state.adding = False 

869 

870 # Signal that the save is complete 

871 if not meta.auto_created:  [871 ↛ exit: line 871 didn't return from function 'save_base', because the condition on line 871 was never false]

872 post_save.send( 

873 sender=origin, 

874 instance=self, 

875 created=(not updated), 

876 update_fields=update_fields, 

877 raw=raw, 

878 using=using, 

879 ) 

880 

881 save_base.alters_data = True 

882 

883 def _save_parents(self, cls, using, update_fields): 

884 """Save all the parents of cls using values from self.""" 

885 meta = cls._meta 

886 inserted = False 

887 for parent, field in meta.parents.items(): 

888 # Make sure the link fields are synced between parent and self. 

889 if (  [889 ↛ 894: line 889 didn't jump to line 894]

890 field 

891 and getattr(self, parent._meta.pk.attname) is None 

892 and getattr(self, field.attname) is not None 

893 ): 

894 setattr(self, parent._meta.pk.attname, getattr(self, field.attname)) 

895 parent_inserted = self._save_parents( 

896 cls=parent, using=using, update_fields=update_fields 

897 ) 

898 updated = self._save_table( 

899 cls=parent, 

900 using=using, 

901 update_fields=update_fields, 

902 force_insert=parent_inserted, 

903 ) 

904 if not updated: 

905 inserted = True 

906 # Set the parent's PK value to self. 

907 if field:  [907 ↛ 887: line 907 didn't jump to line 887, because the condition on line 907 was never false]

908 setattr(self, field.attname, self._get_pk_val(parent._meta)) 

909 # Since we didn't have an instance of the parent handy set 

910 # attname directly, bypassing the descriptor. Invalidate 

911 # the related object cache, in case it's been accidentally 

912 # populated. A fresh instance will be re-built from the 

913 # database if necessary. 

914 if field.is_cached(self):  [914 ↛ 915: line 914 didn't jump to line 915, because the condition on line 914 was never true]

915 field.delete_cached_value(self) 

916 return inserted 

917 

918 def _save_table( 

919 self, 

920 raw=False, 

921 cls=None, 

922 force_insert=False, 

923 force_update=False, 

924 using=None, 

925 update_fields=None, 

926 ): 

927 """ 

928 Do the heavy-lifting involved in saving. Update or insert the data 

929 for a single table. 

930 """ 

931 meta = cls._meta 

932 non_pks = [f for f in meta.local_concrete_fields if not f.primary_key] 

933 

934 if update_fields: 

935 non_pks = [ 

936 f 

937 for f in non_pks 

938 if f.name in update_fields or f.attname in update_fields 

939 ] 

940 

941 pk_val = self._get_pk_val(meta) 

942 if pk_val is None: 

943 pk_val = meta.pk.get_pk_value_on_save(self) 

944 setattr(self, meta.pk.attname, pk_val) 

945 pk_set = pk_val is not None 

946 if not pk_set and (force_update or update_fields):  [946 ↛ 947: line 946 didn't jump to line 947, because the condition on line 946 was never true]

947 raise ValueError("Cannot force an update in save() with no primary key.") 

948 updated = False 

949 # Skip an UPDATE when adding an instance and primary key has a default. 

950 if (  [950 ↛ 957: line 950 didn't jump to line 957]

951 not raw 

952 and not force_insert 

953 and self._state.adding 

954 and meta.pk.default 

955 and meta.pk.default is not NOT_PROVIDED 

956 ): 

957 force_insert = True 

958 # If possible, try an UPDATE. If that doesn't update anything, do an INSERT. 

959 if pk_set and not force_insert: 

960 base_qs = cls._base_manager.using(using) 

961 values = [ 

962 ( 

963 f, 

964 None, 

965 (getattr(self, f.attname) if raw else f.pre_save(self, False)), 

966 ) 

967 for f in non_pks 

968 ] 

969 forced_update = update_fields or force_update 

970 updated = self._do_update( 

971 base_qs, using, pk_val, values, update_fields, forced_update 

972 ) 

973 if force_update and not updated:  [973 ↛ 974: line 973 didn't jump to line 974, because the condition on line 973 was never true]

974 raise DatabaseError("Forced update did not affect any rows.") 

975 if update_fields and not updated:  [975 ↛ 976: line 975 didn't jump to line 976, because the condition on line 975 was never true]

976 raise DatabaseError("Save with update_fields did not affect any rows.") 

977 if not updated: 

978 if meta.order_with_respect_to:  [978 ↛ 981: line 978 didn't jump to line 981, because the condition on line 978 was never true]

979 # If this is a model with an order_with_respect_to 

980 # autopopulate the _order field 

981 field = meta.order_with_respect_to 

982 filter_args = field.get_filter_kwargs_for_object(self) 

983 self._order = ( 

984 cls._base_manager.using(using) 

985 .filter(**filter_args) 

986 .aggregate( 

987 _order__max=Coalesce( 

988 ExpressionWrapper( 

989 Max("_order") + Value(1), output_field=IntegerField() 

990 ), 

991 Value(0), 

992 ), 

993 )["_order__max"] 

994 ) 

995 fields = meta.local_concrete_fields 

996 if not pk_set: 

997 fields = [f for f in fields if f is not meta.auto_field] 

998 

999 returning_fields = meta.db_returning_fields 

1000 results = self._do_insert( 

1001 cls._base_manager, using, fields, returning_fields, raw 

1002 ) 

1003 if results: 

1004 for value, field in zip(results[0], returning_fields): 

1005 setattr(self, field.attname, value) 

1006 return updated 

1007 

1008 def _do_update(self, base_qs, using, pk_val, values, update_fields, forced_update): 

1009 """ 

1010 Try to update the model. Return True if the model was updated (if an 

1011 update query was done and a matching row was found in the DB). 

1012 """ 

1013 filtered = base_qs.filter(pk=pk_val) 

1014 if not values:  [1014 ↛ 1020: line 1014 didn't jump to line 1020, because the condition on line 1014 was never true]

1015 # We can end up here when saving a model in inheritance chain where 

1016 # update_fields doesn't target any field in current model. In that 

1017 # case we just say the update succeeded. Another case ending up here 

1018 # is a model with just PK - in that case check that the PK still 

1019 # exists. 

1020 return update_fields is not None or filtered.exists() 

1021 if self._meta.select_on_save and not forced_update:  [1021 ↛ 1022: line 1021 didn't jump to line 1022, because the condition on line 1021 was never true]

1022 return ( 

1023 filtered.exists() 

1024 and 

1025 # It may happen that the object is deleted from the DB right after 

1026 # this check, causing the subsequent UPDATE to return zero matching 

1027 # rows. The same result can occur in some rare cases when the 

1028 # database returns zero despite the UPDATE being executed 

1029 # successfully (a row is matched and updated). In order to 

1030 # distinguish these two cases, the object's existence in the 

1031 # database is again checked for if the UPDATE query returns 0. 

1032 (filtered._update(values) > 0 or filtered.exists()) 

1033 ) 

1034 return filtered._update(values) > 0 

1035 

1036 def _do_insert(self, manager, using, fields, returning_fields, raw): 

1037 """ 

1038 Do an INSERT. If returning_fields is defined then this method should 

1039 return the newly created data for the model. 

1040 """ 

1041 return manager._insert( 

1042 [self], 

1043 fields=fields, 

1044 returning_fields=returning_fields, 

1045 using=using, 

1046 raw=raw, 

1047 ) 

1048 

1049 def _prepare_related_fields_for_save(self, operation_name): 

1050 # Ensure that a model instance without a PK hasn't been assigned to 

1051 # a ForeignKey or OneToOneField on this model. If the field is 

1052 # nullable, allowing the save would result in silent data loss. 

1053 for field in self._meta.concrete_fields: 

1054 # If the related field isn't cached, then an instance hasn't been 

1055 # assigned and there's no need to worry about this check. 

1056 if field.is_relation and field.is_cached(self): 

1057 obj = getattr(self, field.name, None) 

1058 if not obj: 

1059 continue 

1060 # A pk may have been assigned manually to a model instance not 

1061 # saved to the database (or auto-generated in a case like 

1062 # UUIDField), but we allow the save to proceed and rely on the 

1063 # database to raise an IntegrityError if applicable. If 

1064 # constraints aren't supported by the database, there's the 

1065 # unavoidable risk of data corruption. 

1066 if obj.pk is None:  [1066 ↛ 1068: line 1066 didn't jump to line 1068, because the condition on line 1066 was never true]

1067 # Remove the object from a related instance cache. 

1068 if not field.remote_field.multiple: 

1069 field.remote_field.delete_cached_value(obj) 

1070 raise ValueError( 

1071 "%s() prohibited to prevent data loss due to unsaved " 

1072 "related object '%s'." % (operation_name, field.name) 

1073 ) 

1074 elif getattr(self, field.attname) in field.empty_values:  [1074 ↛ 1077: line 1074 didn't jump to line 1077, because the condition on line 1074 was never true]

1075 # Use pk from related object if it has been saved after 

1076 # an assignment. 

1077 setattr(self, field.attname, obj.pk) 

1078 # If the relationship's pk/to_field was changed, clear the 

1079 # cached relationship. 

1080 if getattr(obj, field.target_field.attname) != getattr(  [1080 ↛ 1083: line 1080 didn't jump to line 1083, because the condition on line 1080 was never true]

1081 self, field.attname 

1082 ): 

1083 field.delete_cached_value(self) 

1084 

1085 def delete(self, using=None, keep_parents=False): 

1086 if self.pk is None:  [1086 ↛ 1087: line 1086 didn't jump to line 1087, because the condition on line 1086 was never true]

1087 raise ValueError( 

1088 "%s object can't be deleted because its %s attribute is set " 

1089 "to None." % (self._meta.object_name, self._meta.pk.attname) 

1090 ) 

1091 using = using or router.db_for_write(self.__class__, instance=self) 

1092 collector = Collector(using=using) 

1093 collector.collect([self], keep_parents=keep_parents) 

1094 return collector.delete() 

1095 

1096 delete.alters_data = True 
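
A sketch of what delete() returns, assuming a hypothetical Article model with cascading relations:

    article = Article.objects.get(pk=1)
    total, per_model = article.delete()
    # e.g. (3, {"blog.Article": 1, "blog.Comment": 2}) once the Collector cascades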

1097 

1098 def _get_FIELD_display(self, field): 

1099 value = getattr(self, field.attname) 

1100 choices_dict = dict(make_hashable(field.flatchoices)) 

1101 # force_str() to coerce lazy strings. 

1102 return force_str( 

1103 choices_dict.get(make_hashable(value), value), strings_only=True 

1104 ) 
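
For context, a sketch of the get_FOO_display() accessor that fields with choices build on top of _get_FIELD_display(); the Ticket model is hypothetical:

    from django.db import models

    class Ticket(models.Model):  # hypothetical model
        STATUS_CHOICES = [("o", "Open"), ("c", "Closed")]
        status = models.CharField(max_length=1, choices=STATUS_CHOICES)

    ticket = Ticket(status="c")
    assert ticket.get_status_display() == "Closed"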

1105 

1106 def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs): 

1107 if not self.pk: 

1108 raise ValueError("get_next/get_previous cannot be used on unsaved objects.") 

1109 op = "gt" if is_next else "lt" 

1110 order = "" if is_next else "-" 

1111 param = getattr(self, field.attname) 

1112 q = Q((field.name, param), (f"pk__{op}", self.pk), _connector=Q.AND) 

1113 q = Q(q, (f"{field.name}__{op}", param), _connector=Q.OR) 

1114 qs = ( 

1115 self.__class__._default_manager.using(self._state.db) 

1116 .filter(**kwargs) 

1117 .filter(q) 

1118 .order_by("%s%s" % (order, field.name), "%spk" % order) 

1119 ) 

1120 try: 

1121 return qs[0] 

1122 except IndexError: 

1123 raise self.DoesNotExist( 

1124 "%s matching query does not exist." % self.__class__._meta.object_name 

1125 ) 

1126 

1127 def _get_next_or_previous_in_order(self, is_next): 

1128 cachename = "__%s_order_cache" % is_next 

1129 if not hasattr(self, cachename): 

1130 op = "gt" if is_next else "lt" 

1131 order = "_order" if is_next else "-_order" 

1132 order_field = self._meta.order_with_respect_to 

1133 filter_args = order_field.get_filter_kwargs_for_object(self) 

1134 obj = ( 

1135 self.__class__._default_manager.filter(**filter_args) 

1136 .filter( 

1137 **{ 

1138 "_order__%s" 

1139 % op: self.__class__._default_manager.values("_order").filter( 

1140 **{self._meta.pk.name: self.pk} 

1141 ) 

1142 } 

1143 ) 

1144 .order_by(order)[:1] 

1145 .get() 

1146 ) 

1147 setattr(self, cachename, obj) 

1148 return getattr(self, cachename) 

1149 

1150 def prepare_database_save(self, field): 

1151 if self.pk is None: 

1152 raise ValueError( 

1153 "Unsaved model instance %r cannot be used in an ORM query." % self 

1154 ) 

1155 return getattr(self, field.remote_field.get_related_field().attname) 

1156 

1157 def clean(self): 

1158 """ 

1159 Hook for doing any extra model-wide validation after clean() has been 

1160 called on every field by self.clean_fields. Any ValidationError raised 

1161 by this method will not be associated with a particular field; it will 

1162 have a special-case association with the field defined by NON_FIELD_ERRORS. 

1163 """ 

1164 pass 

1165 

1166 def validate_unique(self, exclude=None): 

1167 """ 

1168 Check unique constraints on the model and raise ValidationError if any 

1169 failed. 

1170 """ 

1171 unique_checks, date_checks = self._get_unique_checks(exclude=exclude) 

1172 

1173 errors = self._perform_unique_checks(unique_checks) 

1174 date_errors = self._perform_date_checks(date_checks) 

1175 

1176 for k, v in date_errors.items(): 

1177 errors.setdefault(k, []).extend(v) 

1178 

1179 if errors: 

1180 raise ValidationError(errors) 

1181 

1182 def _get_unique_checks(self, exclude=None): 

1183 """ 

1184 Return a list of checks to perform. Since validate_unique() could be 

1185 called from a ModelForm, some fields may have been excluded; we can't 

1186 perform a unique check on a model that is missing fields involved 

1187 in that check. Fields that did not validate should also be excluded, 

1188 but they need to be passed in via the exclude argument. 

1189 """ 

1190 if exclude is None: 

1191 exclude = [] 

1192 unique_checks = [] 

1193 

1194 unique_togethers = [(self.__class__, self._meta.unique_together)] 

1195 constraints = [(self.__class__, self._meta.total_unique_constraints)] 

1196 for parent_class in self._meta.get_parent_list(): 

1197 if parent_class._meta.unique_together: 

1198 unique_togethers.append( 

1199 (parent_class, parent_class._meta.unique_together) 

1200 ) 

1201 if parent_class._meta.total_unique_constraints: 

1202 constraints.append( 

1203 (parent_class, parent_class._meta.total_unique_constraints) 

1204 ) 

1205 

1206 for model_class, unique_together in unique_togethers: 

1207 for check in unique_together: 

1208 if not any(name in exclude for name in check): 

1209 # Add the check if the field isn't excluded. 

1210 unique_checks.append((model_class, tuple(check))) 

1211 

1212 for model_class, model_constraints in constraints: 

1213 for constraint in model_constraints: 

1214 if not any(name in exclude for name in constraint.fields): 

1215 unique_checks.append((model_class, constraint.fields)) 

1216 

1217 # These are checks for the unique_for_<date/year/month>. 

1218 date_checks = [] 

1219 

1220 # Gather a list of checks for fields declared as unique and add them to 

1221 # the list of checks. 

1222 

1223 fields_with_class = [(self.__class__, self._meta.local_fields)] 

1224 for parent_class in self._meta.get_parent_list(): 

1225 fields_with_class.append((parent_class, parent_class._meta.local_fields)) 

1226 

1227 for model_class, fields in fields_with_class: 

1228 for f in fields: 

1229 name = f.name 

1230 if name in exclude: 

1231 continue 

1232 if f.unique: 

1233 unique_checks.append((model_class, (name,))) 

1234 if f.unique_for_date and f.unique_for_date not in exclude: 

1235 date_checks.append((model_class, "date", name, f.unique_for_date)) 

1236 if f.unique_for_year and f.unique_for_year not in exclude: 

1237 date_checks.append((model_class, "year", name, f.unique_for_year)) 

1238 if f.unique_for_month and f.unique_for_month not in exclude: 

1239 date_checks.append((model_class, "month", name, f.unique_for_month)) 

1240 return unique_checks, date_checks 

1241 

1242 def _perform_unique_checks(self, unique_checks): 

1243 errors = {} 

1244 

1245 for model_class, unique_check in unique_checks: 

1246 # Try to look up an existing object with the same values as this 

1247 # object's values for all the unique fields. 

1248 

1249 lookup_kwargs = {} 

1250 for field_name in unique_check: 

1251 f = self._meta.get_field(field_name) 

1252 lookup_value = getattr(self, f.attname) 

1253 # TODO: Handle multiple backends with different feature flags. 

1254 if lookup_value is None or ( 

1255 lookup_value == "" 

1256 and connection.features.interprets_empty_strings_as_nulls 

1257 ): 

1258 # no value, skip the lookup 

1259 continue 

1260 if f.primary_key and not self._state.adding: 

1261 # no need to check for unique primary key when editing 

1262 continue 

1263 lookup_kwargs[str(field_name)] = lookup_value 

1264 

1265 # some fields were skipped, no reason to do the check 

1266 if len(unique_check) != len(lookup_kwargs): 

1267 continue 

1268 

1269 qs = model_class._default_manager.filter(**lookup_kwargs) 

1270 

1271 # Exclude the current object from the query if we are editing an 

1272 # instance (as opposed to creating a new one) 

1273 # Note that we need to use the pk as defined by model_class, not 

1274 # self.pk. These can be different fields because model inheritance 

1275 # allows single model to have effectively multiple primary keys. 

1276 # Refs #17615. 

1277 model_class_pk = self._get_pk_val(model_class._meta) 

1278 if not self._state.adding and model_class_pk is not None: 

1279 qs = qs.exclude(pk=model_class_pk) 

1280 if qs.exists(): 

1281 if len(unique_check) == 1: 

1282 key = unique_check[0] 

1283 else: 

1284 key = NON_FIELD_ERRORS 

1285 errors.setdefault(key, []).append( 

1286 self.unique_error_message(model_class, unique_check) 

1287 ) 

1288 

1289 return errors 

1290 

1291 def _perform_date_checks(self, date_checks): 

1292 errors = {} 

1293 for model_class, lookup_type, field, unique_for in date_checks: 

1294 lookup_kwargs = {} 

1295 # there's a ticket to add a date lookup, we can remove this special 

1296 # case if that makes its way in 

1297 date = getattr(self, unique_for) 

1298 if date is None: 

1299 continue 

1300 if lookup_type == "date": 

1301 lookup_kwargs["%s__day" % unique_for] = date.day 

1302 lookup_kwargs["%s__month" % unique_for] = date.month 

1303 lookup_kwargs["%s__year" % unique_for] = date.year 

1304 else: 

1305 lookup_kwargs["%s__%s" % (unique_for, lookup_type)] = getattr( 

1306 date, lookup_type 

1307 ) 

1308 lookup_kwargs[field] = getattr(self, field) 

1309 

1310 qs = model_class._default_manager.filter(**lookup_kwargs) 

1311 # Exclude the current object from the query if we are editing an 

1312 # instance (as opposed to creating a new one) 

1313 if not self._state.adding and self.pk is not None: 

1314 qs = qs.exclude(pk=self.pk) 

1315 

1316 if qs.exists(): 

1317 errors.setdefault(field, []).append( 

1318 self.date_error_message(lookup_type, field, unique_for) 

1319 ) 

1320 return errors 

1321 

1322 def date_error_message(self, lookup_type, field_name, unique_for): 

1323 opts = self._meta 

1324 field = opts.get_field(field_name) 

1325 return ValidationError( 

1326 message=field.error_messages["unique_for_date"], 

1327 code="unique_for_date", 

1328 params={ 

1329 "model": self, 

1330 "model_name": capfirst(opts.verbose_name), 

1331 "lookup_type": lookup_type, 

1332 "field": field_name, 

1333 "field_label": capfirst(field.verbose_name), 

1334 "date_field": unique_for, 

1335 "date_field_label": capfirst(opts.get_field(unique_for).verbose_name), 

1336 }, 

1337 ) 

1338 

1339 def unique_error_message(self, model_class, unique_check): 

1340 opts = model_class._meta 

1341 

1342 params = { 

1343 "model": self, 

1344 "model_class": model_class, 

1345 "model_name": capfirst(opts.verbose_name), 

1346 "unique_check": unique_check, 

1347 } 

1348 

1349 # A unique field 

1350 if len(unique_check) == 1: 

1351 field = opts.get_field(unique_check[0]) 

1352 params["field_label"] = capfirst(field.verbose_name) 

1353 return ValidationError( 

1354 message=field.error_messages["unique"], 

1355 code="unique", 

1356 params=params, 

1357 ) 

1358 

1359 # unique_together 

1360 else: 

1361 field_labels = [ 

1362 capfirst(opts.get_field(f).verbose_name) for f in unique_check 

1363 ] 

1364 params["field_labels"] = get_text_list(field_labels, _("and")) 

1365 return ValidationError( 

1366 message=_("%(model_name)s with this %(field_labels)s already exists."), 

1367 code="unique_together", 

1368 params=params, 

1369 ) 

1370 

1371 def full_clean(self, exclude=None, validate_unique=True): 

1372 """ 

1373 Call clean_fields(), clean(), and validate_unique() on the model. 

1374 Raise a ValidationError for any errors that occur. 

1375 """ 

1376 errors = {} 

1377 if exclude is None: 

1378 exclude = [] 

1379 else: 

1380 exclude = list(exclude) 

1381 

1382 try: 

1383 self.clean_fields(exclude=exclude) 

1384 except ValidationError as e: 

1385 errors = e.update_error_dict(errors) 

1386 

1387 # Form.clean() is run even if other validation fails, so do the 

1388 # same with Model.clean() for consistency. 

1389 try: 

1390 self.clean() 

1391 except ValidationError as e: 

1392 errors = e.update_error_dict(errors) 

1393 

1394 # Run unique checks, but only for fields that passed validation. 

1395 if validate_unique: 

1396 for name in errors: 

1397 if name != NON_FIELD_ERRORS and name not in exclude: 

1398 exclude.append(name) 

1399 try: 

1400 self.validate_unique(exclude=exclude) 

1401 except ValidationError as e: 

1402 errors = e.update_error_dict(errors) 

1403 

1404 if errors: 

1405 raise ValidationError(errors) 
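
A typical calling pattern for full_clean(), sketched with a hypothetical Article model; note that save() never calls it implicitly:

    from django.core.exceptions import ValidationError

    article = Article(title="", pub_date=None)
    try:
        article.full_clean()       # clean_fields(), clean(), then validate_unique()
    except ValidationError as exc:
        errors = exc.message_dict  # e.g. {"title": ["This field cannot be blank."]}
    else:
        article.save()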

1406 

1407 def clean_fields(self, exclude=None): 

1408 """ 

1409 Clean all fields and raise a ValidationError containing a dict 

1410 of all validation errors if any occur. 

1411 """ 

1412 if exclude is None: 

1413 exclude = [] 

1414 

1415 errors = {} 

1416 for f in self._meta.fields: 

1417 if f.name in exclude: 

1418 continue 

1419 # Skip validation for empty fields with blank=True. The developer 

1420 # is responsible for making sure they have a valid value. 

1421 raw_value = getattr(self, f.attname) 

1422 if f.blank and raw_value in f.empty_values: 

1423 continue 

1424 try: 

1425 setattr(self, f.attname, f.clean(raw_value, self)) 

1426 except ValidationError as e: 

1427 errors[f.name] = e.error_list 

1428 

1429 if errors: 

1430 raise ValidationError(errors) 

1431 

1432 @classmethod 

1433 def check(cls, **kwargs): 

1434 errors = [ 

1435 *cls._check_swappable(), 

1436 *cls._check_model(), 

1437 *cls._check_managers(**kwargs), 

1438 ] 

1439 if not cls._meta.swapped:  [1439 ↛ 1467: line 1439 didn't jump to line 1467, because the condition on line 1439 was never false]

1440 databases = kwargs.get("databases") or [] 

1441 errors += [ 

1442 *cls._check_fields(**kwargs), 

1443 *cls._check_m2m_through_same_relationship(), 

1444 *cls._check_long_column_names(databases), 

1445 ] 

1446 clash_errors = ( 

1447 *cls._check_id_field(), 

1448 *cls._check_field_name_clashes(), 

1449 *cls._check_model_name_db_lookup_clashes(), 

1450 *cls._check_property_name_related_field_accessor_clashes(), 

1451 *cls._check_single_primary_key(), 

1452 ) 

1453 errors.extend(clash_errors) 

1454 # If there are field name clashes, hide consequent column name 

1455 # clashes. 

1456 if not clash_errors:  [1456 ↛ 1458: line 1456 didn't jump to line 1458, because the condition on line 1456 was never false]

1457 errors.extend(cls._check_column_name_clashes()) 

1458 errors += [ 

1459 *cls._check_index_together(), 

1460 *cls._check_unique_together(), 

1461 *cls._check_indexes(databases), 

1462 *cls._check_ordering(), 

1463 *cls._check_constraints(databases), 

1464 *cls._check_default_pk(), 

1465 ] 

1466 

1467 return errors 

1468 
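check() is the entry point the system check framework (for example `manage.py check`) calls for every installed model, and it can also be invoked directly. A hedged sketch inside a configured project; MyModel is a placeholder:

from django.core import checks

messages = MyModel.check(databases=["default"])
for msg in messages:
    # Each entry is a checks.Error or checks.Warning with an id such as
    # "models.E015" or "models.W042", as constructed in the methods below.
    print(msg.id, msg.msg)

fatal = [m for m in messages if isinstance(m, checks.Error)]
# Warnings can be silenced project-wide with the SILENCED_SYSTEM_CHECKS
# setting, e.g. SILENCED_SYSTEM_CHECKS = ["models.W042"].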

1469 @classmethod 

1470 def _check_default_pk(cls): 

1471 if ( 1471 ↛ 1484 (line 1471 didn't jump to line 1484) 

1472 not cls._meta.abstract 

1473 and cls._meta.pk.auto_created 

1474 and 

1475 # Inherited PKs are checked in parent models. 

1476 not ( 

1477 isinstance(cls._meta.pk, OneToOneField) 

1478 and cls._meta.pk.remote_field.parent_link 

1479 ) 

1480 and not settings.is_overridden("DEFAULT_AUTO_FIELD") 

1481 and cls._meta.app_config 

1482 and not cls._meta.app_config._is_default_auto_field_overridden 

1483 ): 

1484 return [ 

1485 checks.Warning( 

1486 f"Auto-created primary key used when not defining a " 

1487 f"primary key type, by default " 

1488 f"'{settings.DEFAULT_AUTO_FIELD}'.", 

1489 hint=( 

1490 f"Configure the DEFAULT_AUTO_FIELD setting or the " 

1491 f"{cls._meta.app_config.__class__.__qualname__}." 

1492 f"default_auto_field attribute to point to a subclass " 

1493 f"of AutoField, e.g. 'django.db.models.BigAutoField'." 

1494 ), 

1495 obj=cls, 

1496 id="models.W042", 

1497 ), 

1498 ] 

1499 return [] 

1500 
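W042 above fires when a model falls back to the implicit auto-created primary key. It is silenced by either of the settings named in the hint, project-wide or per application (the AppConfig class and app name below are illustrative):

# settings.py -- project-wide default for implicit primary keys:
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"

# apps.py -- or per application, on its AppConfig subclass:
from django.apps import AppConfig

class ShopConfig(AppConfig):
    name = "shop"
    default_auto_field = "django.db.models.BigAutoField"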

1501 @classmethod 

1502 def _check_swappable(cls): 

1503 """Check if the swapped model exists.""" 

1504 errors = [] 

1505 if cls._meta.swapped: 1505 ↛ 1506 (line 1505 didn't jump to line 1506, because the condition on line 1505 was never true) 

1506 try: 

1507 apps.get_model(cls._meta.swapped) 

1508 except ValueError: 

1509 errors.append( 

1510 checks.Error( 

1511 "'%s' is not of the form 'app_label.app_name'." 

1512 % cls._meta.swappable, 

1513 id="models.E001", 

1514 ) 

1515 ) 

1516 except LookupError: 

1517 app_label, model_name = cls._meta.swapped.split(".") 

1518 errors.append( 

1519 checks.Error( 

1520 "'%s' references '%s.%s', which has not been " 

1521 "installed, or is abstract." 

1522 % (cls._meta.swappable, app_label, model_name), 

1523 id="models.E002", 

1524 ) 

1525 ) 

1526 return errors 

1527 

1528 @classmethod 

1529 def _check_model(cls): 

1530 errors = [] 

1531 if cls._meta.proxy: 

1532 if cls._meta.local_fields or cls._meta.local_many_to_many: 1532 ↛ 1533 (line 1532 didn't jump to line 1533, because the condition on line 1532 was never true) 

1533 errors.append( 

1534 checks.Error( 

1535 "Proxy model '%s' contains model fields." % cls.__name__, 

1536 id="models.E017", 

1537 ) 

1538 ) 

1539 return errors 

1540 

1541 @classmethod 

1542 def _check_managers(cls, **kwargs): 

1543 """Perform all manager checks.""" 

1544 errors = [] 

1545 for manager in cls._meta.managers: 

1546 errors.extend(manager.check(**kwargs)) 

1547 return errors 

1548 

1549 @classmethod 

1550 def _check_fields(cls, **kwargs): 

1551 """Perform all field checks.""" 

1552 errors = [] 

1553 for field in cls._meta.local_fields: 

1554 errors.extend(field.check(**kwargs)) 

1555 for field in cls._meta.local_many_to_many: 

1556 errors.extend(field.check(from_model=cls, **kwargs)) 

1557 return errors 

1558 

1559 @classmethod 

1560 def _check_m2m_through_same_relationship(cls): 

1561 """Check if no relationship model is used by more than one m2m field.""" 

1562 

1563 errors = [] 

1564 seen_intermediary_signatures = [] 

1565 

1566 fields = cls._meta.local_many_to_many 

1567 

1568 # Skip when the target model wasn't found. 

1569 fields = (f for f in fields if isinstance(f.remote_field.model, ModelBase)) 

1570 

1571 # Skip when the relationship model wasn't found. 

1572 fields = (f for f in fields if isinstance(f.remote_field.through, ModelBase)) 

1573 

1574 for f in fields: 

1575 signature = ( 

1576 f.remote_field.model, 

1577 cls, 

1578 f.remote_field.through, 

1579 f.remote_field.through_fields, 

1580 ) 

1581 if signature in seen_intermediary_signatures: 1581 ↛ 1582 (line 1581 didn't jump to line 1582, because the condition on line 1581 was never true) 

1582 errors.append( 

1583 checks.Error( 

1584 "The model has two identical many-to-many relations " 

1585 "through the intermediate model '%s'." 

1586 % f.remote_field.through._meta.label, 

1587 obj=cls, 

1588 id="models.E003", 

1589 ) 

1590 ) 

1591 else: 

1592 seen_intermediary_signatures.append(signature) 

1593 return errors 

1594 

1595 @classmethod 

1596 def _check_id_field(cls): 

1597 """Check if `id` field is a primary key.""" 

1598 fields = [ 

1599 f for f in cls._meta.local_fields if f.name == "id" and f != cls._meta.pk 

1600 ] 

1601 # fields is empty or consists of the invalid "id" field 

1602 if fields and not fields[0].primary_key and cls._meta.pk.name == "id": 1602 ↛ 1603 (line 1602 didn't jump to line 1603, because the condition on line 1602 was never true) 

1603 return [ 

1604 checks.Error( 

1605 "'id' can only be used as a field name if the field also " 

1606 "sets 'primary_key=True'.", 

1607 obj=cls, 

1608 id="models.E004", 

1609 ) 

1610 ] 

1611 else: 

1612 return [] 

1613 

1614 @classmethod 

1615 def _check_field_name_clashes(cls): 

1616 """Forbid field shadowing in multi-table inheritance.""" 

1617 errors = [] 

1618 used_fields = {} # name or attname -> field 

1619 

1620 # Check that multi-inheritance doesn't cause field name shadowing. 

1621 for parent in cls._meta.get_parent_list(): 

1622 for f in parent._meta.local_fields: 

1623 clash = used_fields.get(f.name) or used_fields.get(f.attname) or None 

1624 if clash: 1624 ↛ 1625 (line 1624 didn't jump to line 1625, because the condition on line 1624 was never true) 

1625 errors.append( 

1626 checks.Error( 

1627 "The field '%s' from parent model " 

1628 "'%s' clashes with the field '%s' " 

1629 "from parent model '%s'." 

1630 % (clash.name, clash.model._meta, f.name, f.model._meta), 

1631 obj=cls, 

1632 id="models.E005", 

1633 ) 

1634 ) 

1635 used_fields[f.name] = f 

1636 used_fields[f.attname] = f 

1637 

1638 # Check that fields defined in the model don't clash with fields from 

1639 # parents, including auto-generated fields like multi-table inheritance 

1640 # child accessors. 

1641 for parent in cls._meta.get_parent_list(): 

1642 for f in parent._meta.get_fields(): 

1643 if f not in used_fields: 1643 ↛ 1642 (line 1643 didn't jump to line 1642, because the condition on line 1643 was never false) 

1644 used_fields[f.name] = f 

1645 

1646 for f in cls._meta.local_fields: 

1647 clash = used_fields.get(f.name) or used_fields.get(f.attname) or None 

1648 # Note that we may detect clash between user-defined non-unique 

1649 # field "id" and automatically added unique field "id", both 

1650 # defined at the same model. This special case is considered in 

1651 # _check_id_field and here we ignore it. 

1652 id_conflict = ( 

1653 f.name == "id" and clash and clash.name == "id" and clash.model == cls 

1654 ) 

1655 if clash and not id_conflict: 1655 ↛ 1656 (line 1655 didn't jump to line 1656, because the condition on line 1655 was never true) 

1656 errors.append( 

1657 checks.Error( 

1658 "The field '%s' clashes with the field '%s' " 

1659 "from model '%s'." % (f.name, clash.name, clash.model._meta), 

1660 obj=f, 

1661 id="models.E006", 

1662 ) 

1663 ) 

1664 used_fields[f.name] = f 

1665 used_fields[f.attname] = f 

1666 

1667 return errors 

1668 

1669 @classmethod 

1670 def _check_column_name_clashes(cls): 

1671 # Store a list of column names which have already been used by other fields. 

1672 used_column_names = [] 

1673 errors = [] 

1674 

1675 for f in cls._meta.local_fields: 

1676 _, column_name = f.get_attname_column() 

1677 

1678 # Ensure the column name is not already in use. 

1679 if column_name and column_name in used_column_names: 1679 ↛ 1680 (line 1679 didn't jump to line 1680, because the condition on line 1679 was never true) 

1680 errors.append( 

1681 checks.Error( 

1682 "Field '%s' has column name '%s' that is used by " 

1683 "another field." % (f.name, column_name), 

1684 hint="Specify a 'db_column' for the field.", 

1685 obj=cls, 

1686 id="models.E007", 

1687 ) 

1688 ) 

1689 else: 

1690 used_column_names.append(column_name) 

1691 

1692 return errors 

1693 

1694 @classmethod 

1695 def _check_model_name_db_lookup_clashes(cls): 

1696 errors = [] 

1697 model_name = cls.__name__ 

1698 if model_name.startswith("_") or model_name.endswith("_"): 1698 ↛ 1699 (line 1698 didn't jump to line 1699, because the condition on line 1698 was never true) 

1699 errors.append( 

1700 checks.Error( 

1701 "The model name '%s' cannot start or end with an underscore " 

1702 "as it collides with the query lookup syntax." % model_name, 

1703 obj=cls, 

1704 id="models.E023", 

1705 ) 

1706 ) 

1707 elif LOOKUP_SEP in model_name: 1707 ↛ 1708 (line 1707 didn't jump to line 1708, because the condition on line 1707 was never true) 

1708 errors.append( 

1709 checks.Error( 

1710 "The model name '%s' cannot contain double underscores as " 

1711 "it collides with the query lookup syntax." % model_name, 

1712 obj=cls, 

1713 id="models.E024", 

1714 ) 

1715 ) 

1716 return errors 

1717 

1718 @classmethod 

1719 def _check_property_name_related_field_accessor_clashes(cls): 

1720 errors = [] 

1721 property_names = cls._meta._property_names 

1722 related_field_accessors = ( 

1723 f.get_attname() 

1724 for f in cls._meta._get_fields(reverse=False) 

1725 if f.is_relation and f.related_model is not None 

1726 ) 

1727 for accessor in related_field_accessors: 

1728 if accessor in property_names: 1728 ↛ 1729 (line 1728 didn't jump to line 1729, because the condition on line 1728 was never true) 

1729 errors.append( 

1730 checks.Error( 

1731 "The property '%s' clashes with a related field " 

1732 "accessor." % accessor, 

1733 obj=cls, 

1734 id="models.E025", 

1735 ) 

1736 ) 

1737 return errors 

1738 

1739 @classmethod 

1740 def _check_single_primary_key(cls): 

1741 errors = [] 

1742 if sum(1 for f in cls._meta.local_fields if f.primary_key) > 1: 1742 ↛ 1743 (line 1742 didn't jump to line 1743, because the condition on line 1742 was never true) 

1743 errors.append( 

1744 checks.Error( 

1745 "The model cannot have more than one field with " 

1746 "'primary_key=True'.", 

1747 obj=cls, 

1748 id="models.E026", 

1749 ) 

1750 ) 

1751 return errors 

1752 

1753 @classmethod 

1754 def _check_index_together(cls): 

1755 """Check the value of "index_together" option.""" 

1756 if not isinstance(cls._meta.index_together, (tuple, list)): 1756 ↛ 1757 (line 1756 didn't jump to line 1757, because the condition on line 1756 was never true) 

1757 return [ 

1758 checks.Error( 

1759 "'index_together' must be a list or tuple.", 

1760 obj=cls, 

1761 id="models.E008", 

1762 ) 

1763 ] 

1764 

1765 elif any( 1765 ↛ 1768 (line 1765 didn't jump to line 1768, because the condition on line 1765 was never true) 

1766 not isinstance(fields, (tuple, list)) for fields in cls._meta.index_together 

1767 ): 

1768 return [ 

1769 checks.Error( 

1770 "All 'index_together' elements must be lists or tuples.", 

1771 obj=cls, 

1772 id="models.E009", 

1773 ) 

1774 ] 

1775 

1776 else: 

1777 errors = [] 

1778 for fields in cls._meta.index_together: 

1779 errors.extend(cls._check_local_fields(fields, "index_together")) 

1780 return errors 

1781 

1782 @classmethod 

1783 def _check_unique_together(cls): 

1784 """Check the value of "unique_together" option.""" 

1785 if not isinstance(cls._meta.unique_together, (tuple, list)): 1785 ↛ 1786 (line 1785 didn't jump to line 1786, because the condition on line 1785 was never true) 

1786 return [ 

1787 checks.Error( 

1788 "'unique_together' must be a list or tuple.", 

1789 obj=cls, 

1790 id="models.E010", 

1791 ) 

1792 ] 

1793 

1794 elif any( 1794 ↛ 1798 (line 1794 didn't jump to line 1798, because the condition on line 1794 was never true) 

1795 not isinstance(fields, (tuple, list)) 

1796 for fields in cls._meta.unique_together 

1797 ): 

1798 return [ 

1799 checks.Error( 

1800 "All 'unique_together' elements must be lists or tuples.", 

1801 obj=cls, 

1802 id="models.E011", 

1803 ) 

1804 ] 

1805 

1806 else: 

1807 errors = [] 

1808 for fields in cls._meta.unique_together: 

1809 errors.extend(cls._check_local_fields(fields, "unique_together")) 

1810 return errors 

1811 

1812 @classmethod 

1813 def _check_indexes(cls, databases): 

1814 """Check fields, names, and conditions of indexes.""" 

1815 errors = [] 

1816 references = set() 

1817 for index in cls._meta.indexes: 

1818 # Index name can't start with an underscore or a number, restricted 

1819 # for cross-database compatibility with Oracle. 

1820 if index.name[0] == "_" or index.name[0].isdigit(): 1820 ↛ 1821 (line 1820 didn't jump to line 1821, because the condition on line 1820 was never true) 

1821 errors.append( 

1822 checks.Error( 

1823 "The index name '%s' cannot start with an underscore " 

1824 "or a number." % index.name, 

1825 obj=cls, 

1826 id="models.E033", 

1827 ), 

1828 ) 

1829 if len(index.name) > index.max_name_length: 1829 ↛ 1830 (line 1829 didn't jump to line 1830, because the condition on line 1829 was never true) 

1830 errors.append( 

1831 checks.Error( 

1832 "The index name '%s' cannot be longer than %d " 

1833 "characters." % (index.name, index.max_name_length), 

1834 obj=cls, 

1835 id="models.E034", 

1836 ), 

1837 ) 

1838 if index.contains_expressions: 1838 ↛ 1839 (line 1838 didn't jump to line 1839, because the condition on line 1838 was never true) 

1839 for expression in index.expressions: 

1840 references.update( 

1841 ref[0] for ref in cls._get_expr_references(expression) 

1842 ) 

1843 for db in databases: 

1844 if not router.allow_migrate_model(db, cls): 1844 ↛ 1845 (line 1844 didn't jump to line 1845, because the condition on line 1844 was never true) 

1845 continue 

1846 connection = connections[db] 

1847 if not ( 1847 ↛ exit, 1847 ↛ 1851 (2 missed branches: 1) line 1847 didn't jump to the function exit, 2) line 1847 didn't jump to line 1851, because the condition on line 1847 was never true) 

1848 connection.features.supports_partial_indexes 

1849 or "supports_partial_indexes" in cls._meta.required_db_features 

1850 ) and any(index.condition is not None for index in cls._meta.indexes): 

1851 errors.append( 

1852 checks.Warning( 

1853 "%s does not support indexes with conditions." 

1854 % connection.display_name, 

1855 hint=( 

1856 "Conditions will be ignored. Silence this warning " 

1857 "if you don't care about it." 

1858 ), 

1859 obj=cls, 

1860 id="models.W037", 

1861 ) 

1862 ) 

1863 if not ( 1863 ↛ exit, 1863 ↛ 1867 (2 missed branches: 1) line 1863 didn't jump to the function exit, 2) line 1863 didn't jump to line 1867, because the condition on line 1863 was never true) 

1864 connection.features.supports_covering_indexes 

1865 or "supports_covering_indexes" in cls._meta.required_db_features 

1866 ) and any(index.include for index in cls._meta.indexes): 

1867 errors.append( 

1868 checks.Warning( 

1869 "%s does not support indexes with non-key columns." 

1870 % connection.display_name, 

1871 hint=( 

1872 "Non-key columns will be ignored. Silence this " 

1873 "warning if you don't care about it." 

1874 ), 

1875 obj=cls, 

1876 id="models.W040", 

1877 ) 

1878 ) 

1879 if not ( 1879 ↛ exit, 1879 ↛ 1883 (2 missed branches: 1) line 1879 didn't jump to the function exit, 2) line 1879 didn't jump to line 1883, because the condition on line 1879 was never true) 

1880 connection.features.supports_expression_indexes 

1881 or "supports_expression_indexes" in cls._meta.required_db_features 

1882 ) and any(index.contains_expressions for index in cls._meta.indexes): 

1883 errors.append( 

1884 checks.Warning( 

1885 "%s does not support indexes on expressions." 

1886 % connection.display_name, 

1887 hint=( 

1888 "An index won't be created. Silence this warning " 

1889 "if you don't care about it." 

1890 ), 

1891 obj=cls, 

1892 id="models.W043", 

1893 ) 

1894 ) 

1895 fields = [ 

1896 field for index in cls._meta.indexes for field, _ in index.fields_orders 

1897 ] 

1898 fields += [include for index in cls._meta.indexes for include in index.include] 

1899 fields += references 

1900 errors.extend(cls._check_local_fields(fields, "indexes")) 

1901 return errors 

1902 
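The index checks above validate Meta.indexes: names must not start with an underscore or a digit (E033), must fit within max_name_length (E034), and conditions, non-key columns, or expressions produce W037/W040/W043 on backends without support. A hypothetical Meta illustrating inputs under those rules (Ticket and the app label are illustrative names):

from django.db import models
from django.db.models import Q

class Ticket(models.Model):
    state = models.CharField(max_length=20)
    closed_at = models.DateTimeField(null=True)

    class Meta:
        app_label = "support"  # assumed app label for this sketch
        indexes = [
            # Valid name: starts with a letter and is well under max_name_length.
            models.Index(fields=["state"], name="ticket_state_idx"),
            # A partial index: on databases without supports_partial_indexes
            # this raises models.W037 in the loop above.
            models.Index(
                fields=["closed_at"],
                name="ticket_open_idx",
                condition=Q(closed_at__isnull=True),
            ),
        ]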

1903 @classmethod 

1904 def _check_local_fields(cls, fields, option): 

1905 from django.db import models 

1906 

1907 # In order to avoid hitting the relation tree prematurely, we use our 

1908 # own fields_map instead of using get_field() 

1909 forward_fields_map = {} 

1910 for field in cls._meta._get_fields(reverse=False): 

1911 forward_fields_map[field.name] = field 

1912 if hasattr(field, "attname"): 

1913 forward_fields_map[field.attname] = field 

1914 

1915 errors = [] 

1916 for field_name in fields: 

1917 try: 

1918 field = forward_fields_map[field_name] 

1919 except KeyError: 

1920 errors.append( 

1921 checks.Error( 

1922 "'%s' refers to the nonexistent field '%s'." 

1923 % ( 

1924 option, 

1925 field_name, 

1926 ), 

1927 obj=cls, 

1928 id="models.E012", 

1929 ) 

1930 ) 

1931 else: 

1932 if isinstance(field.remote_field, models.ManyToManyRel): 1932 ↛ 1933 (line 1932 didn't jump to line 1933, because the condition on line 1932 was never true) 

1933 errors.append( 

1934 checks.Error( 

1935 "'%s' refers to a ManyToManyField '%s', but " 

1936 "ManyToManyFields are not permitted in '%s'." 

1937 % ( 

1938 option, 

1939 field_name, 

1940 option, 

1941 ), 

1942 obj=cls, 

1943 id="models.E013", 

1944 ) 

1945 ) 

1946 elif field not in cls._meta.local_fields: 1946 ↛ 1947 (line 1946 didn't jump to line 1947, because the condition on line 1946 was never true) 

1947 errors.append( 

1948 checks.Error( 

1949 "'%s' refers to field '%s' which is not local to model " 

1950 "'%s'." % (option, field_name, cls._meta.object_name), 

1951 hint="This issue may be caused by multi-table inheritance.", 

1952 obj=cls, 

1953 id="models.E016", 

1954 ) 

1955 ) 

1956 return errors 

1957 

1958 @classmethod 

1959 def _check_ordering(cls): 

1960 """ 

1961 Check "ordering" option -- is it a list of strings and do all fields 

1962 exist? 

1963 """ 

1964 if cls._meta._ordering_clash: 1964 ↛ 1965 (line 1964 didn't jump to line 1965, because the condition on line 1964 was never true) 

1965 return [ 

1966 checks.Error( 

1967 "'ordering' and 'order_with_respect_to' cannot be used together.", 

1968 obj=cls, 

1969 id="models.E021", 

1970 ), 

1971 ] 

1972 

1973 if cls._meta.order_with_respect_to or not cls._meta.ordering: 

1974 return [] 

1975 

1976 if not isinstance(cls._meta.ordering, (list, tuple)): 1976 ↛ 1977 (line 1976 didn't jump to line 1977, because the condition on line 1976 was never true) 

1977 return [ 

1978 checks.Error( 

1979 "'ordering' must be a tuple or list (even if you want to order by " 

1980 "only one field).", 

1981 obj=cls, 

1982 id="models.E014", 

1983 ) 

1984 ] 

1985 

1986 errors = [] 

1987 fields = cls._meta.ordering 

1988 

1989 # Skip expressions and '?' fields. 

1990 fields = (f for f in fields if isinstance(f, str) and f != "?") 

1991 

1992 # Convert "-field" to "field". 

1993 fields = ((f[1:] if f.startswith("-") else f) for f in fields) 

1994 

1995 # Separate related fields and non-related fields. 

1996 _fields = [] 

1997 related_fields = [] 

1998 for f in fields: 

1999 if LOOKUP_SEP in f: 

2000 related_fields.append(f) 

2001 else: 

2002 _fields.append(f) 

2003 fields = _fields 

2004 

2005 # Check related fields. 

2006 for field in related_fields: 

2007 _cls = cls 

2008 fld = None 

2009 for part in field.split(LOOKUP_SEP): 

2010 try: 

2011 # pk is an alias that won't be found by opts.get_field. 

2012 if part == "pk": 2012 ↛ 2013 (line 2012 didn't jump to line 2013, because the condition on line 2012 was never true) 

2013 fld = _cls._meta.pk 

2014 else: 

2015 fld = _cls._meta.get_field(part) 

2016 if fld.is_relation: 

2017 _cls = fld.get_path_info()[-1].to_opts.model 

2018 else: 

2019 _cls = None 

2020 except (FieldDoesNotExist, AttributeError): 

2021 if fld is None or ( 

2022 fld.get_transform(part) is None and fld.get_lookup(part) is None 

2023 ): 

2024 errors.append( 

2025 checks.Error( 

2026 "'ordering' refers to the nonexistent field, " 

2027 "related field, or lookup '%s'." % field, 

2028 obj=cls, 

2029 id="models.E015", 

2030 ) 

2031 ) 

2032 

2033 # Skip ordering on pk. This is always a valid order_by field 

2034 # but is an alias and therefore won't be found by opts.get_field. 

2035 fields = {f for f in fields if f != "pk"} 

2036 

2037 # Check for invalid or nonexistent fields in ordering. 

2038 invalid_fields = [] 

2039 

2040 # Any field name that is not present in field_names does not exist. 

2041 # Also, ordering by m2m fields is not allowed. 

2042 opts = cls._meta 

2043 valid_fields = set( 

2044 chain.from_iterable( 

2045 (f.name, f.attname) 

2046 if not (f.auto_created and not f.concrete) 

2047 else (f.field.related_query_name(),) 

2048 for f in chain(opts.fields, opts.related_objects) 

2049 ) 

2050 ) 

2051 

2052 invalid_fields.extend(fields - valid_fields) 

2053 

2054 for invalid_field in invalid_fields: 2054 ↛ 2055 (line 2054 didn't jump to line 2055, because the loop on line 2054 never started) 

2055 errors.append( 

2056 checks.Error( 

2057 "'ordering' refers to the nonexistent field, related " 

2058 "field, or lookup '%s'." % invalid_field, 

2059 obj=cls, 

2060 id="models.E015", 

2061 ) 

2062 ) 

2063 return errors 

2064 
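The ordering check walks each related lookup segment by segment and separately verifies plain field names against the model's own fields and reverse accessors, skipping "?" expressions and the "pk" alias. A hypothetical Meta showing values the check accepts or rejects (Author, Book, and the app label are illustrative names):

from django.db import models

class Author(models.Model):
    name = models.CharField(max_length=50)

    class Meta:
        app_label = "library"  # assumed app label for this sketch

class Book(models.Model):
    author = models.ForeignKey(Author, models.CASCADE)
    published = models.DateField()

    class Meta:
        app_label = "library"
        # All of these pass _check_ordering():
        #   "-published"     (the leading "-" is stripped before lookup)
        #   "author__name"   (related lookup resolved through the FK)
        #   "pk"             (alias, skipped explicitly)
        ordering = ["-published", "author__name", "pk"]
        # ordering = ["publisher"] would instead yield models.E015:
        # "'ordering' refers to the nonexistent field, related field, or lookup 'publisher'."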

2065 @classmethod 

2066 def _check_long_column_names(cls, databases): 

2067 """ 

2068 Check that any auto-generated column names are shorter than the limits 

2069 for each database in which the model will be created. 

2070 """ 

2071 if not databases: 2071 ↛ 2072 (line 2071 didn't jump to line 2072, because the condition on line 2071 was never true) 

2072 return [] 

2073 errors = [] 

2074 allowed_len = None 

2075 db_alias = None 

2076 

2077 # Find the minimum max allowed length among all specified db_aliases. 

2078 for db in databases: 

2079 # skip databases where the model won't be created 

2080 if not router.allow_migrate_model(db, cls): 2080 ↛ 2081 (line 2080 didn't jump to line 2081, because the condition on line 2080 was never true) 

2081 continue 

2082 connection = connections[db] 

2083 max_name_length = connection.ops.max_name_length() 

2084 if max_name_length is None or connection.features.truncates_names: 2084 ↛ 2085 (line 2084 didn't jump to line 2085, because the condition on line 2084 was never true) 

2085 continue 

2086 else: 

2087 if allowed_len is None: 2087 ↛ 2090 (line 2087 didn't jump to line 2090, because the condition on line 2087 was never false) 

2088 allowed_len = max_name_length 

2089 db_alias = db 

2090 elif max_name_length < allowed_len: 

2091 allowed_len = max_name_length 

2092 db_alias = db 

2093 

2094 if allowed_len is None: 2094 ↛ 2095 (line 2094 didn't jump to line 2095, because the condition on line 2094 was never true) 

2095 return errors 

2096 

2097 for f in cls._meta.local_fields: 

2098 _, column_name = f.get_attname_column() 

2099 

2100 # Check if auto-generated name for the field is too long 

2101 # for the database. 

2102 if ( 2102 ↛ 2107 (line 2102 didn't jump to line 2107) 

2103 f.db_column is None 

2104 and column_name is not None 

2105 and len(column_name) > allowed_len 

2106 ): 

2107 errors.append( 

2108 checks.Error( 

2109 'Autogenerated column name too long for field "%s". ' 

2110 'Maximum length is "%s" for database "%s".' 

2111 % (column_name, allowed_len, db_alias), 

2112 hint="Set the column name manually using 'db_column'.", 

2113 obj=cls, 

2114 id="models.E018", 

2115 ) 

2116 ) 

2117 

2118 for f in cls._meta.local_many_to_many: 

2119 # Skip nonexistent models. 

2120 if isinstance(f.remote_field.through, str): 2120 ↛ 2121 (line 2120 didn't jump to line 2121, because the condition on line 2120 was never true) 

2121 continue 

2122 

2123 # Check if auto-generated name for the M2M field is too long 

2124 # for the database. 

2125 for m2m in f.remote_field.through._meta.local_fields: 

2126 _, rel_name = m2m.get_attname_column() 

2127 if ( 2127 ↛ 2132 (line 2127 didn't jump to line 2132) 

2128 m2m.db_column is None 

2129 and rel_name is not None 

2130 and len(rel_name) > allowed_len 

2131 ): 

2132 errors.append( 

2133 checks.Error( 

2134 "Autogenerated column name too long for M2M field " 

2135 '"%s". Maximum length is "%s" for database "%s".' 

2136 % (rel_name, allowed_len, db_alias), 

2137 hint=( 

2138 "Use 'through' to create a separate model for " 

2139 "M2M and then set column_name using 'db_column'." 

2140 ), 

2141 obj=cls, 

2142 id="models.E019", 

2143 ) 

2144 ) 

2145 

2146 return errors 

2147 

2148 @classmethod 

2149 def _get_expr_references(cls, expr): 

2150 if isinstance(expr, Q): 

2151 for child in expr.children: 

2152 if isinstance(child, tuple): 

2153 lookup, value = child 

2154 yield tuple(lookup.split(LOOKUP_SEP)) 

2155 yield from cls._get_expr_references(value) 

2156 else: 

2157 yield from cls._get_expr_references(child) 

2158 elif isinstance(expr, F): 

2159 yield tuple(expr.name.split(LOOKUP_SEP)) 

2160 elif hasattr(expr, "get_source_expressions"): 

2161 for src_expr in expr.get_source_expressions(): 

2162 yield from cls._get_expr_references(src_expr) 

2163 
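_get_expr_references() recursively yields one tuple of LOOKUP_SEP-split parts for every field referenced inside a Q object or expression; _check_constraints() below uses those tuples to validate field names and to reject joined references. A small sketch of the values it yields, assuming a model class CheckedModel with price and discount fields defined elsewhere (hypothetical names):

from django.db.models import F, Q

refs = list(CheckedModel._get_expr_references(Q(price__gt=F("discount"))))
# refs == [("price", "gt"), ("discount",)]
# The lookup "price__gt" is split into its parts, and the nested F("discount")
# contributes a single-element tuple for the referenced field.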

2164 @classmethod 

2165 def _check_constraints(cls, databases): 

2166 errors = [] 

2167 for db in databases: 

2168 if not router.allow_migrate_model(db, cls): 2168 ↛ 2169 (line 2168 didn't jump to line 2169, because the condition on line 2168 was never true) 

2169 continue 

2170 connection = connections[db] 

2171 if not ( 2171 ↛ exit, 2171 ↛ 2178 (2 missed branches: 1) line 2171 didn't jump to the function exit, 2) line 2171 didn't jump to line 2178, because the condition on line 2171 was never true) 

2172 connection.features.supports_table_check_constraints 

2173 or "supports_table_check_constraints" in cls._meta.required_db_features 

2174 ) and any( 

2175 isinstance(constraint, CheckConstraint) 

2176 for constraint in cls._meta.constraints 

2177 ): 

2178 errors.append( 

2179 checks.Warning( 

2180 "%s does not support check constraints." 

2181 % connection.display_name, 

2182 hint=( 

2183 "A constraint won't be created. Silence this " 

2184 "warning if you don't care about it." 

2185 ), 

2186 obj=cls, 

2187 id="models.W027", 

2188 ) 

2189 ) 

2190 if not ( 2190 ↛ exit, 2190 ↛ 2198 (2 missed branches: 1) line 2190 didn't jump to the function exit, 2) line 2190 didn't jump to line 2198, because the condition on line 2190 was never true) 

2191 connection.features.supports_partial_indexes 

2192 or "supports_partial_indexes" in cls._meta.required_db_features 

2193 ) and any( 

2194 isinstance(constraint, UniqueConstraint) 

2195 and constraint.condition is not None 

2196 for constraint in cls._meta.constraints 

2197 ): 

2198 errors.append( 

2199 checks.Warning( 

2200 "%s does not support unique constraints with " 

2201 "conditions." % connection.display_name, 

2202 hint=( 

2203 "A constraint won't be created. Silence this " 

2204 "warning if you don't care about it." 

2205 ), 

2206 obj=cls, 

2207 id="models.W036", 

2208 ) 

2209 ) 

2210 if not ( 2210 ↛ exit, 2210 ↛ 2219 (2 missed branches: 1) line 2210 didn't jump to the function exit, 2) line 2210 didn't jump to line 2219, because the condition on line 2210 was never true) 

2211 connection.features.supports_deferrable_unique_constraints 

2212 or "supports_deferrable_unique_constraints" 

2213 in cls._meta.required_db_features 

2214 ) and any( 

2215 isinstance(constraint, UniqueConstraint) 

2216 and constraint.deferrable is not None 

2217 for constraint in cls._meta.constraints 

2218 ): 

2219 errors.append( 

2220 checks.Warning( 

2221 "%s does not support deferrable unique constraints." 

2222 % connection.display_name, 

2223 hint=( 

2224 "A constraint won't be created. Silence this " 

2225 "warning if you don't care about it." 

2226 ), 

2227 obj=cls, 

2228 id="models.W038", 

2229 ) 

2230 ) 

2231 if not ( 2231 ↛ exit, 2231 ↛ 2238 (2 missed branches: 1) line 2231 didn't jump to the function exit, 2) line 2231 didn't jump to line 2238, because the condition on line 2231 was never true) 

2232 connection.features.supports_covering_indexes 

2233 or "supports_covering_indexes" in cls._meta.required_db_features 

2234 ) and any( 

2235 isinstance(constraint, UniqueConstraint) and constraint.include 

2236 for constraint in cls._meta.constraints 

2237 ): 

2238 errors.append( 

2239 checks.Warning( 

2240 "%s does not support unique constraints with non-key " 

2241 "columns." % connection.display_name, 

2242 hint=( 

2243 "A constraint won't be created. Silence this " 

2244 "warning if you don't care about it." 

2245 ), 

2246 obj=cls, 

2247 id="models.W039", 

2248 ) 

2249 ) 

2250 if not ( 2250 ↛ exit, 2250 ↛ 2258 (2 missed branches: 1) line 2250 didn't jump to the function exit, 2) line 2250 didn't jump to line 2258, because the condition on line 2250 was never true) 

2251 connection.features.supports_expression_indexes 

2252 or "supports_expression_indexes" in cls._meta.required_db_features 

2253 ) and any( 

2254 isinstance(constraint, UniqueConstraint) 

2255 and constraint.contains_expressions 

2256 for constraint in cls._meta.constraints 

2257 ): 

2258 errors.append( 

2259 checks.Warning( 

2260 "%s does not support unique constraints on " 

2261 "expressions." % connection.display_name, 

2262 hint=( 

2263 "A constraint won't be created. Silence this " 

2264 "warning if you don't care about it." 

2265 ), 

2266 obj=cls, 

2267 id="models.W044", 

2268 ) 

2269 ) 

2270 fields = set( 

2271 chain.from_iterable( 

2272 (*constraint.fields, *constraint.include) 

2273 for constraint in cls._meta.constraints 

2274 if isinstance(constraint, UniqueConstraint) 

2275 ) 

2276 ) 

2277 references = set() 

2278 for constraint in cls._meta.constraints: 2278 ↛ 2279 (line 2278 didn't jump to line 2279, because the loop on line 2278 never started) 

2279 if isinstance(constraint, UniqueConstraint): 

2280 if ( 

2281 connection.features.supports_partial_indexes 

2282 or "supports_partial_indexes" 

2283 not in cls._meta.required_db_features 

2284 ) and isinstance(constraint.condition, Q): 

2285 references.update( 

2286 cls._get_expr_references(constraint.condition) 

2287 ) 

2288 if ( 

2289 connection.features.supports_expression_indexes 

2290 or "supports_expression_indexes" 

2291 not in cls._meta.required_db_features 

2292 ) and constraint.contains_expressions: 

2293 for expression in constraint.expressions: 

2294 references.update(cls._get_expr_references(expression)) 

2295 elif isinstance(constraint, CheckConstraint): 

2296 if ( 

2297 connection.features.supports_table_check_constraints 

2298 or "supports_table_check_constraints" 

2299 not in cls._meta.required_db_features 

2300 ) and isinstance(constraint.check, Q): 

2301 references.update(cls._get_expr_references(constraint.check)) 

2302 for field_name, *lookups in references: 2302 ↛ 2304 (line 2302 didn't jump to line 2304, because the loop on line 2302 never started) 

2303 # pk is an alias that won't be found by opts.get_field. 

2304 if field_name != "pk": 

2305 fields.add(field_name) 

2306 if not lookups: 

2307 # If it has no lookups it cannot result in a JOIN. 

2308 continue 

2309 try: 

2310 if field_name == "pk": 

2311 field = cls._meta.pk 

2312 else: 

2313 field = cls._meta.get_field(field_name) 

2314 if not field.is_relation or field.many_to_many or field.one_to_many: 

2315 continue 

2316 except FieldDoesNotExist: 

2317 continue 

2318 # JOIN must happen at the first lookup. 

2319 first_lookup = lookups[0] 

2320 if ( 

2321 hasattr(field, "get_transform") 

2322 and hasattr(field, "get_lookup") 

2323 and field.get_transform(first_lookup) is None 

2324 and field.get_lookup(first_lookup) is None 

2325 ): 

2326 errors.append( 

2327 checks.Error( 

2328 "'constraints' refers to the joined field '%s'." 

2329 % LOOKUP_SEP.join([field_name] + lookups), 

2330 obj=cls, 

2331 id="models.E041", 

2332 ) 

2333 ) 

2334 errors.extend(cls._check_local_fields(fields, "constraints")) 

2335 return errors 

2336 
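A hypothetical Meta.constraints block touching the main cases checked above: a CheckConstraint (W027 on backends without table check constraints) and a conditional UniqueConstraint (W036 without partial-index support). The field names inside the Q objects are then verified through _get_expr_references() and _check_local_fields(). Discount and the app label are illustrative names only:

from django.db import models
from django.db.models import Q

class Discount(models.Model):
    percent = models.PositiveIntegerField()
    active = models.BooleanField(default=False)
    code = models.CharField(max_length=20)

    class Meta:
        app_label = "shop"  # assumed app label for this sketch
        constraints = [
            models.CheckConstraint(
                check=Q(percent__lte=100),
                name="discount_percent_lte_100",
            ),
            models.UniqueConstraint(
                fields=["code"],
                condition=Q(active=True),
                name="unique_active_code",
            ),
        ]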

2337 

2338############################################ 

2339# HELPER FUNCTIONS (CURRIED MODEL METHODS) # 

2340############################################ 

2341 

2342# ORDERING METHODS ######################### 

2343 

2344 

2345def method_set_order(self, ordered_obj, id_list, using=None): 

2346 if using is None: 

2347 using = DEFAULT_DB_ALIAS 

2348 order_wrt = ordered_obj._meta.order_with_respect_to 

2349 filter_args = order_wrt.get_forward_related_filter(self) 

2350 ordered_obj.objects.db_manager(using).filter(**filter_args).bulk_update( 

2351 [ordered_obj(pk=pk, _order=order) for order, pk in enumerate(id_list)], 

2352 ["_order"], 

2353 ) 

2354 

2355 

2356def method_get_order(self, ordered_obj): 

2357 order_wrt = ordered_obj._meta.order_with_respect_to 

2358 filter_args = order_wrt.get_forward_related_filter(self) 

2359 pk_name = ordered_obj._meta.pk.name 

2360 return ordered_obj.objects.filter(**filter_args).values_list(pk_name, flat=True) 

2361 

2362 

2363def make_foreign_order_accessors(model, related_model): 

2364 setattr( 

2365 related_model, 

2366 "get_%s_order" % model.__name__.lower(), 

2367 partialmethod(method_get_order, model), 

2368 ) 

2369 setattr( 

2370 related_model, 

2371 "set_%s_order" % model.__name__.lower(), 

2372 partialmethod(method_set_order, model), 

2373 ) 

2374 
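These helpers are attached when a model declares Meta.order_with_respect_to: the related model gains get_<model>_order() / set_<model>_order() methods, and the ordered model stores its position in a hidden _order column. A hedged sketch with hypothetical Question/Answer models:

from django.db import models

class Question(models.Model):
    text = models.TextField()

    class Meta:
        app_label = "faq"  # assumed app label for this sketch

class Answer(models.Model):
    question = models.ForeignKey(Question, models.CASCADE)

    class Meta:
        app_label = "faq"
        order_with_respect_to = "question"

# make_foreign_order_accessors() adds this pair of methods to Question:
#   question.get_answer_order()           -> queryset of Answer pks in _order
#   question.set_answer_order([3, 1, 2])  -> bulk_update()s the _order column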

2375 

2376######## 

2377# MISC # 

2378######## 

2379 

2380 

2381def model_unpickle(model_id): 

2382 """Used to unpickle Model subclasses with deferred fields.""" 

2383 if isinstance(model_id, tuple): 2383 ↛ 2387 (line 2383 didn't jump to line 2387, because the condition on line 2383 was never false) 

2384 model = apps.get_model(*model_id) 

2385 else: 

2386 # Backwards compat - the model was cached directly in earlier versions. 

2387 model = model_id 

2388 return model.__new__(model) 

2389 

2390 

2391model_unpickle.__safe_for_unpickle__ = True