Coverage for /var/srv/projects/api.amasfac.comuna18.com/tmp/venv/lib/python3.9/site-packages/numpy/core/_internal.py: 19%

447 statements  

coverage.py v6.4.4, created at 2023-07-17 14:22 -0600

1""" 

2A place for internal code 

3 

4Some things are more easily handled in Python.

5 

6""" 

7import ast 

8import re 

9import sys 

10import platform 

11import warnings 

12 

13from .multiarray import dtype, array, ndarray, promote_types 

14try: 

15 import ctypes 

16except ImportError: 

17 ctypes = None 

18 

19IS_PYPY = platform.python_implementation() == 'PyPy' 

20 

21if sys.byteorder == 'little':  # 21 ↛ 24: line 21 didn't jump to line 24, because the condition on line 21 was never false

22 _nbo = '<' 

23else: 

24 _nbo = '>' 

25 

26def _makenames_list(adict, align): 

27 allfields = [] 

28 

29 for fname, obj in adict.items(): 

30 n = len(obj) 

31 if not isinstance(obj, tuple) or n not in (2, 3): 

32 raise ValueError("entry not a 2- or 3- tuple") 

33 if n > 2 and obj[2] == fname: 

34 continue 

35 num = int(obj[1]) 

36 if num < 0: 

37 raise ValueError("invalid offset.") 

38 format = dtype(obj[0], align=align) 

39 if n > 2: 

40 title = obj[2] 

41 else: 

42 title = None 

43 allfields.append((fname, format, num, title)) 

44 # sort by offsets 

45 allfields.sort(key=lambda x: x[2]) 

46 names = [x[0] for x in allfields] 

47 formats = [x[1] for x in allfields] 

48 offsets = [x[2] for x in allfields] 

49 titles = [x[3] for x in allfields] 

50 

51 return names, formats, offsets, titles 

52 

53# Called in PyArray_DescrConverter function when 

54# a dictionary without "names" and "formats" 

55# fields is used as a data-type descriptor. 

56def _usefields(adict, align): 

57 try: 

58 names = adict[-1] 

59 except KeyError: 

60 names = None 

61 if names is None: 

62 names, formats, offsets, titles = _makenames_list(adict, align) 

63 else: 

64 formats = [] 

65 offsets = [] 

66 titles = [] 

67 for name in names: 

68 res = adict[name] 

69 formats.append(res[0]) 

70 offsets.append(res[1]) 

71 if len(res) > 2: 

72 titles.append(res[2]) 

73 else: 

74 titles.append(None) 

75 

76 return dtype({"names": names, 

77 "formats": formats, 

78 "offsets": offsets, 

79 "titles": titles}, align) 

80 

81 
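For context, a hedged sketch of the dict form these two helpers handle: a plain dict mapping field names to (format, offset) tuples, with no 'names'/'formats' keys. The layout below is illustrative, and acceptance/warnings for this form can vary between NumPy versions.

    import numpy as np

    # Routed through _usefields/_makenames_list inside PyArray_DescrConverter.
    dt = np.dtype({'x': ('<i4', 0), 'y': ('<f8', 4)})
    dt.names, dt.itemsize   # ('x', 'y'), 12 -- fields sorted by offset, no extra padding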

82# construct an array_protocol descriptor list 

83# from the fields attribute of a descriptor 

84# This calls itself recursively but should eventually hit 

85# a descriptor that has no fields and then return 

86# a simple typestring 

87 

88def _array_descr(descriptor): 

89 fields = descriptor.fields 

90 if fields is None: 

91 subdtype = descriptor.subdtype 

92 if subdtype is None: 

93 if descriptor.metadata is None: 

94 return descriptor.str 

95 else: 

96 new = descriptor.metadata.copy() 

97 if new: 

98 return (descriptor.str, new) 

99 else: 

100 return descriptor.str 

101 else: 

102 return (_array_descr(subdtype[0]), subdtype[1]) 

103 

104 names = descriptor.names 

105 ordered_fields = [fields[x] + (x,) for x in names] 

106 result = [] 

107 offset = 0 

108 for field in ordered_fields: 

109 if field[1] > offset: 

110 num = field[1] - offset 

111 result.append(('', f'|V{num}')) 

112 offset += num 

113 elif field[1] < offset: 

114 raise ValueError( 

115 "dtype.descr is not defined for types with overlapping or " 

116 "out-of-order fields") 

117 if len(field) > 3: 

118 name = (field[2], field[3]) 

119 else: 

120 name = field[2] 

121 if field[0].subdtype: 

122 tup = (name, _array_descr(field[0].subdtype[0]), 

123 field[0].subdtype[1]) 

124 else: 

125 tup = (name, _array_descr(field[0])) 

126 offset += field[0].itemsize 

127 result.append(tup) 

128 

129 if descriptor.itemsize > offset: 

130 num = descriptor.itemsize - offset 

131 result.append(('', f'|V{num}')) 

132 

133 return result 

134 
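A small sketch of what this recursion produces; dtype.descr is the public attribute backed by it (the '<' byte-order marks assume a little-endian platform):

    import numpy as np

    np.dtype([('a', 'i4'), ('b', 'f8')]).descr
    # [('a', '<i4'), ('b', '<f8')]
    np.dtype({'names': ['a'], 'formats': ['i4'], 'itemsize': 8}).descr
    # [('a', '<i4'), ('', '|V4')]   -- trailing padding appears as an unnamed void field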

135# Build a new array from the information in a pickle. 

136# Note that the name numpy.core._internal._reconstruct is embedded in 

137# pickles of ndarrays made with NumPy before release 1.0 

138# so don't remove the name here, or you'll 

139# break backward compatibility. 

140def _reconstruct(subtype, shape, dtype): 

141 return ndarray.__new__(subtype, shape, dtype) 

142 

143 

144# format_re was originally from numarray by J. Todd Miller 

145 

146format_re = re.compile(r'(?P<order1>[<>|=]?)' 

147 r'(?P<repeats> *[(]?[ ,0-9]*[)]? *)' 

148 r'(?P<order2>[<>|=]?)' 

149 r'(?P<dtype>[A-Za-z0-9.?]*(?:\[[a-zA-Z0-9,.]+\])?)') 

150sep_re = re.compile(r'\s*,\s*') 

151space_re = re.compile(r'\s+$') 

152 

153# astr is a string (perhaps comma separated) 

154 

155_convorder = {'=': _nbo} 

156 

157def _commastring(astr): 

158 startindex = 0 

159 result = [] 

160 while startindex < len(astr): 

161 mo = format_re.match(astr, pos=startindex) 

162 try: 

163 (order1, repeats, order2, dtype) = mo.groups() 

164 except (TypeError, AttributeError): 

165 raise ValueError( 

166 f'format number {len(result)+1} of "{astr}" is not recognized' 

167 ) from None 

168 startindex = mo.end() 

169 # Separator or ending padding 

170 if startindex < len(astr): 

171 if space_re.match(astr, pos=startindex): 

172 startindex = len(astr) 

173 else: 

174 mo = sep_re.match(astr, pos=startindex) 

175 if not mo: 

176 raise ValueError( 

177 'format number %d of "%s" is not recognized' % 

178 (len(result)+1, astr)) 

179 startindex = mo.end() 

180 

181 if order2 == '': 

182 order = order1 

183 elif order1 == '': 

184 order = order2 

185 else: 

186 order1 = _convorder.get(order1, order1) 

187 order2 = _convorder.get(order2, order2) 

188 if (order1 != order2): 

189 raise ValueError( 

190 'inconsistent byte-order specification %s and %s' % 

191 (order1, order2)) 

192 order = order1 

193 

194 if order in ('|', '=', _nbo): 

195 order = '' 

196 dtype = order + dtype 

197 if (repeats == ''): 

198 newitem = dtype 

199 else: 

200 newitem = (dtype, ast.literal_eval(repeats)) 

201 result.append(newitem) 

202 

203 return result 

204 
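A hedged sketch of the comma-separated format strings this parser handles; the return value of the private helper is shown for illustration only:

    import numpy as np
    from numpy.core._internal import _commastring

    _commastring('i4, (2,3)f8')   # -> ['i4', ('f8', (2, 3))]
    np.dtype('i4, (2,3)f8')       # public entry point: fields 'f0' (int32) and 'f1' (float64, shape (2, 3))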

205class dummy_ctype: 

206 def __init__(self, cls): 

207 self._cls = cls 

208 def __mul__(self, other): 

209 return self 

210 def __call__(self, *other): 

211 return self._cls(other) 

212 def __eq__(self, other): 

213 return self._cls == other._cls 

214 def __ne__(self, other): 

215 return self._cls != other._cls 

216 

217def _getintp_ctype(): 

218 val = _getintp_ctype.cache 

219 if val is not None:  # 219 ↛ 220: line 219 didn't jump to line 220, because the condition on line 219 was never true

220 return val 

221 if ctypes is None:  # 221 ↛ 222: line 221 didn't jump to line 222, because the condition on line 221 was never true

222 import numpy as np 

223 val = dummy_ctype(np.intp) 

224 else: 

225 char = dtype('p').char 

226 if char == 'i':  # 226 ↛ 227: line 226 didn't jump to line 227, because the condition on line 226 was never true

227 val = ctypes.c_int 

228 elif char == 'l':  # 228 ↛ 230: line 228 didn't jump to line 230, because the condition on line 228 was never false

229 val = ctypes.c_long 

230 elif char == 'q': 

231 val = ctypes.c_longlong 

232 else: 

233 val = ctypes.c_long 

234 _getintp_ctype.cache = val 

235 return val 

236_getintp_ctype.cache = None 

237 
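Roughly how these pieces are used further down: shape_as/strides_as multiply the ctypes (or dummy) integer type by ndim and call the resulting array type with the tuple. A sketch, assuming ctypes is available:

    from numpy.core._internal import _getintp_ctype

    intp_t = _getintp_ctype()    # e.g. ctypes.c_long on a typical 64-bit Linux build
    arr_t = intp_t * 2           # a ctypes array type of length 2
    arr_t(3, 4)[:]               # [3, 4]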

238# Used for .ctypes attribute of ndarray 

239 

240class _missing_ctypes: 

241 def cast(self, num, obj): 

242 return num.value 

243 

244 class c_void_p: 

245 def __init__(self, ptr): 

246 self.value = ptr 

247 

248 

249class _ctypes: 

250 def __init__(self, array, ptr=None): 

251 self._arr = array 

252 

253 if ctypes: 

254 self._ctypes = ctypes 

255 self._data = self._ctypes.c_void_p(ptr) 

256 else: 

257 # fake a pointer-like object that holds onto the reference 

258 self._ctypes = _missing_ctypes() 

259 self._data = self._ctypes.c_void_p(ptr) 

260 self._data._objects = array 

261 

262 if self._arr.ndim == 0: 

263 self._zerod = True 

264 else: 

265 self._zerod = False 

266 

267 def data_as(self, obj): 

268 """ 

269 Return the data pointer cast to a particular c-types object. 

270 For example, calling ``self._as_parameter_`` is equivalent to 

271 ``self.data_as(ctypes.c_void_p)``. Perhaps you want to use the data as a 

272 pointer to a ctypes array of floating-point data: 

273 ``self.data_as(ctypes.POINTER(ctypes.c_double))``. 

274 

275 The returned pointer will keep a reference to the array. 

276 """ 

277 # _ctypes.cast function causes a circular reference of self._data in 

278 # self._data._objects. Attributes of self._data cannot be released 

279 # until gc.collect is called. Make a copy of the pointer first then let 

280 # it hold the array reference. This is a workaround to circumvent the 

281 # CPython bug https://bugs.python.org/issue12836 

282 ptr = self._ctypes.cast(self._data, obj) 

283 ptr._arr = self._arr 

284 return ptr 

285 

286 def shape_as(self, obj): 

287 """ 

288 Return the shape tuple as an array of some other c-types 

289 type. For example: ``self.shape_as(ctypes.c_short)``. 

290 """ 

291 if self._zerod: 

292 return None 

293 return (obj*self._arr.ndim)(*self._arr.shape) 

294 

295 def strides_as(self, obj): 

296 """ 

297 Return the strides tuple as an array of some other 

298 c-types type. For example: ``self.strides_as(ctypes.c_longlong)``. 

299 """ 

300 if self._zerod: 

301 return None 

302 return (obj*self._arr.ndim)(*self._arr.strides) 

303 

304 @property 

305 def data(self): 

306 """ 

307 A pointer to the memory area of the array as a Python integer. 

308 This memory area may contain data that is not aligned, or not in correct 

309 byte-order. The memory area may not even be writeable. The array 

310 flags and data-type of this array should be respected when passing this 

311 attribute to arbitrary C-code to avoid trouble that can include Python 

312 crashing. User Beware! The value of this attribute is exactly the same 

313 as ``self._array_interface_['data'][0]``. 

314 

315 Note that unlike ``data_as``, a reference will not be kept to the array: 

316 code like ``ctypes.c_void_p((a + b).ctypes.data)`` will result in a 

317 pointer to a deallocated array, and should be spelt 

318 ``(a + b).ctypes.data_as(ctypes.c_void_p)`` 

319 """ 

320 return self._data.value 

321 

322 @property 

323 def shape(self): 

324 """ 

325 (c_intp*self.ndim): A ctypes array of length self.ndim where 

326 the basetype is the C-integer corresponding to ``dtype('p')`` on this 

327 platform (see `~numpy.ctypeslib.c_intp`). This base-type could be 

328 `ctypes.c_int`, `ctypes.c_long`, or `ctypes.c_longlong` depending on 

329 the platform. The ctypes array contains the shape of 

330 the underlying array. 

331 """ 

332 return self.shape_as(_getintp_ctype()) 

333 

334 @property 

335 def strides(self): 

336 """ 

337 (c_intp*self.ndim): A ctypes array of length self.ndim where 

338 the basetype is the same as for the shape attribute. This ctypes array 

339 contains the strides information from the underlying array. This strides 

340 information is important for showing how many bytes must be jumped to 

341 get to the next element in the array. 

342 """ 

343 return self.strides_as(_getintp_ctype()) 

344 

345 @property 

346 def _as_parameter_(self): 

347 """ 

348 Overrides the ctypes semi-magic method 

349 

350 Enables `c_func(some_array.ctypes)` 

351 """ 

352 return self.data_as(ctypes.c_void_p) 

353 

354 # Numpy 1.21.0, 2021-05-18 

355 

356 def get_data(self): 

357 """Deprecated getter for the `_ctypes.data` property. 

358 

359 .. deprecated:: 1.21 

360 """ 

361 warnings.warn('"get_data" is deprecated. Use "data" instead', 

362 DeprecationWarning, stacklevel=2) 

363 return self.data 

364 

365 def get_shape(self): 

366 """Deprecated getter for the `_ctypes.shape` property. 

367 

368 .. deprecated:: 1.21 

369 """ 

370 warnings.warn('"get_shape" is deprecated. Use "shape" instead', 

371 DeprecationWarning, stacklevel=2) 

372 return self.shape 

373 

374 def get_strides(self): 

375 """Deprecated getter for the `_ctypes.strides` property. 

376 

377 .. deprecated:: 1.21 

378 """ 

379 warnings.warn('"get_strides" is deprecated. Use "strides" instead', 

380 DeprecationWarning, stacklevel=2) 

381 return self.strides 

382 

383 def get_as_parameter(self): 

384 """Deprecated getter for the `_ctypes._as_parameter_` property. 

385 

386 .. deprecated:: 1.21 

387 """ 

388 warnings.warn( 

389 '"get_as_parameter" is deprecated. Use "_as_parameter_" instead', 

390 DeprecationWarning, stacklevel=2, 

391 ) 

392 return self._as_parameter_ 

393 

394 
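A short usage sketch of the ndarray.ctypes attribute this class implements (pointer values are platform- and run-dependent):

    import ctypes
    import numpy as np

    a = np.zeros((2, 3))
    a.ctypes.data                                          # buffer address as a Python int
    a.ctypes.shape[:]                                      # [2, 3] as a ctypes array of c_intp
    p = a.ctypes.data_as(ctypes.POINTER(ctypes.c_double))  # keeps a reference to the array alive
    p[0]                                                   # 0.0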

395def _newnames(datatype, order): 

396 """ 

397 Given a datatype and an order object, return a new names tuple, with the 

398 order indicated 

399 """ 

400 oldnames = datatype.names 

401 nameslist = list(oldnames) 

402 if isinstance(order, str): 

403 order = [order] 

404 seen = set() 

405 if isinstance(order, (list, tuple)): 

406 for name in order: 

407 try: 

408 nameslist.remove(name) 

409 except ValueError: 

410 if name in seen: 

411 raise ValueError(f"duplicate field name: {name}") from None 

412 else: 

413 raise ValueError(f"unknown field name: {name}") from None 

414 seen.add(name) 

415 return tuple(list(order) + nameslist) 

416 raise ValueError(f"unsupported order value: {order}") 

417 
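A sketch of the reordering rule: the requested names come first and the remaining fields keep their original order (calling the private helper directly, for illustration):

    import numpy as np
    from numpy.core._internal import _newnames

    dt = np.dtype([('a', 'i4'), ('b', 'f8'), ('c', 'u1')])
    _newnames(dt, ['b'])   # -> ('b', 'a', 'c')
    _newnames(dt, 'c')     # -> ('c', 'a', 'b')   a single string is treated as a one-element list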

418def _copy_fields(ary): 

419 """Return copy of structured array with padding between fields removed. 

420 

421 Parameters 

422 ---------- 

423 ary : ndarray 

424 Structured array from which to remove padding bytes 

425 

426 Returns 

427 ------- 

428 ary_copy : ndarray 

429 Copy of ary with padding bytes removed 

430 """ 

431 dt = ary.dtype 

432 copy_dtype = {'names': dt.names, 

433 'formats': [dt.fields[name][0] for name in dt.names]} 

434 return array(ary, dtype=copy_dtype, copy=True) 

435 
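A sketch of the padding removal; the itemsizes are what the aligned and packed layouts imply:

    import numpy as np
    from numpy.core._internal import _copy_fields

    aligned = np.zeros(3, dtype=np.dtype('u1, f8', align=True))   # itemsize 16: 7 padding bytes after the u1
    packed = _copy_fields(aligned)                                 # itemsize 9: same fields, padding dropped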

436def _promote_fields(dt1, dt2): 

437 """ Perform type promotion for two structured dtypes. 

438 

439 Parameters 

440 ---------- 

441 dt1 : structured dtype 

442 First dtype. 

443 dt2 : structured dtype 

444 Second dtype. 

445 

446 Returns 

447 ------- 

448 out : dtype 

449 The promoted dtype 

450 

451 Notes 

452 ----- 

453 If one of the inputs is aligned, the result will be. The titles of 

454 both descriptors must match (point to the same field). 

455 """ 

456 # Both must be structured and have the same names in the same order 

457 if (dt1.names is None or dt2.names is None) or dt1.names != dt2.names: 

458 raise TypeError("invalid type promotion") 

459 

460 # if both are identical, we can (maybe!) just return the same dtype. 

461 identical = dt1 is dt2 

462 new_fields = [] 

463 for name in dt1.names: 

464 field1 = dt1.fields[name] 

465 field2 = dt2.fields[name] 

466 new_descr = promote_types(field1[0], field2[0]) 

467 identical = identical and new_descr is field1[0] 

468 

469 # Check that the titles match (if given): 

470 if field1[2:] != field2[2:]: 

471 raise TypeError("invalid type promotion") 

472 if len(field1) == 2: 

473 new_fields.append((name, new_descr)) 

474 else: 

475 new_fields.append(((field1[2], name), new_descr)) 

476 

477 res = dtype(new_fields, align=dt1.isalignedstruct or dt2.isalignedstruct) 

478 

479 # Might as well preserve identity (and metadata) if the dtype is identical 

480 # and the itemsize, offsets are also unmodified. This could probably be 

481 # sped up, but also probably just be removed entirely. 

482 if identical and res.itemsize == dt1.itemsize: 

483 for name in dt1.names: 

484 if dt1.fields[name][1] != res.fields[name][1]: 

485 return res # the dtype changed. 

486 return dt1 

487 

488 return res 

489 

490 
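A sketch of the field-wise promotion, calling the private helper directly (the '<' byte-order marks assume a little-endian platform):

    import numpy as np
    from numpy.core._internal import _promote_fields

    dt1 = np.dtype([('a', 'i4'), ('b', 'f4')])
    dt2 = np.dtype([('a', 'i8'), ('b', 'f4')])
    _promote_fields(dt1, dt2)   # dtype([('a', '<i8'), ('b', '<f4')])
    _promote_fields(dt1, dt1)   # returns dt1 itself -- identity preserved when nothing changes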

491def _getfield_is_safe(oldtype, newtype, offset): 

492 """ Checks safety of getfield for object arrays. 

493 

494 As in _view_is_safe, we need to check that memory containing objects is not 

495 reinterpreted as a non-object datatype and vice versa. 

496 

497 Parameters 

498 ---------- 

499 oldtype : data-type 

500 Data type of the original ndarray. 

501 newtype : data-type 

502 Data type of the field being accessed by ndarray.getfield 

503 offset : int 

504 Offset of the field being accessed by ndarray.getfield 

505 

506 Raises 

507 ------ 

508 TypeError 

509 If the field access is invalid 

510 

511 """ 

512 if newtype.hasobject or oldtype.hasobject: 

513 if offset == 0 and newtype == oldtype: 

514 return 

515 if oldtype.names is not None: 

516 for name in oldtype.names: 

517 if (oldtype.fields[name][1] == offset and 

518 oldtype.fields[name][0] == newtype): 

519 return 

520 raise TypeError("Cannot get/set field of an object array") 

521 return 

522 

523def _view_is_safe(oldtype, newtype): 

524 """ Checks safety of a view involving object arrays, for example when 

525 doing:: 

526 

527 np.zeros(10, dtype=oldtype).view(newtype) 

528 

529 Parameters 

530 ---------- 

531 oldtype : data-type 

532 Data type of original ndarray 

533 newtype : data-type 

534 Data type of the view 

535 

536 Raises 

537 ------ 

538 TypeError 

539 If the new type is incompatible with the old type. 

540 

541 """ 

542 

543 # if the types are equivalent, there is no problem. 

544 # for example: dtype((np.record, 'i4,i4')) == dtype((np.void, 'i4,i4')) 

545 if oldtype == newtype: 

546 return 

547 

548 if newtype.hasobject or oldtype.hasobject: 

549 raise TypeError("Cannot change data-type for object array.") 

550 return 

551 
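What this check means in practice, as a sketch; the object-array case raises the TypeError defined above:

    import numpy as np

    a = np.empty(4, dtype=object)
    a.view(np.int64)              # TypeError: Cannot change data-type for object array.
    np.zeros(4, 'i8').view('f8')  # fine: no object memory is reinterpreted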

552# Given a string containing a PEP 3118 format specifier, 

553# construct a NumPy dtype 

554 

555_pep3118_native_map = { 

556 '?': '?', 

557 'c': 'S1', 

558 'b': 'b', 

559 'B': 'B', 

560 'h': 'h', 

561 'H': 'H', 

562 'i': 'i', 

563 'I': 'I', 

564 'l': 'l', 

565 'L': 'L', 

566 'q': 'q', 

567 'Q': 'Q', 

568 'e': 'e', 

569 'f': 'f', 

570 'd': 'd', 

571 'g': 'g', 

572 'Zf': 'F', 

573 'Zd': 'D', 

574 'Zg': 'G', 

575 's': 'S', 

576 'w': 'U', 

577 'O': 'O', 

578 'x': 'V', # padding 

579} 

580_pep3118_native_typechars = ''.join(_pep3118_native_map.keys()) 

581 

582_pep3118_standard_map = { 

583 '?': '?', 

584 'c': 'S1', 

585 'b': 'b', 

586 'B': 'B', 

587 'h': 'i2', 

588 'H': 'u2', 

589 'i': 'i4', 

590 'I': 'u4', 

591 'l': 'i4', 

592 'L': 'u4', 

593 'q': 'i8', 

594 'Q': 'u8', 

595 'e': 'f2', 

596 'f': 'f', 

597 'd': 'd', 

598 'Zf': 'F', 

599 'Zd': 'D', 

600 's': 'S', 

601 'w': 'U', 

602 'O': 'O', 

603 'x': 'V', # padding 

604} 

605_pep3118_standard_typechars = ''.join(_pep3118_standard_map.keys()) 

606 

607_pep3118_unsupported_map = { 

608 'u': 'UCS-2 strings', 

609 '&': 'pointers', 

610 't': 'bitfields', 

611 'X': 'function pointers', 

612} 

613 

614class _Stream: 

615 def __init__(self, s): 

616 self.s = s 

617 self.byteorder = '@' 

618 

619 def advance(self, n): 

620 res = self.s[:n] 

621 self.s = self.s[n:] 

622 return res 

623 

624 def consume(self, c): 

625 if self.s[:len(c)] == c: 

626 self.advance(len(c)) 

627 return True 

628 return False 

629 

630 def consume_until(self, c): 

631 if callable(c): 

632 i = 0 

633 while i < len(self.s) and not c(self.s[i]): 

634 i = i + 1 

635 return self.advance(i) 

636 else: 

637 i = self.s.index(c) 

638 res = self.advance(i) 

639 self.advance(len(c)) 

640 return res 

641 

642 @property 

643 def next(self): 

644 return self.s[0] 

645 

646 def __bool__(self): 

647 return bool(self.s) 

648 

649 

650def _dtype_from_pep3118(spec): 

651 stream = _Stream(spec) 

652 dtype, align = __dtype_from_pep3118(stream, is_subdtype=False) 

653 return dtype 

654 
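A couple of illustrative specs, calling the wrapper directly; the byte-order marks in the results assume a little-endian platform:

    from numpy.core._internal import _dtype_from_pep3118

    _dtype_from_pep3118('d')                # dtype('float64') -- a single unnamed field is unwrapped
    _dtype_from_pep3118('T{<i:x:<d:y:}')    # structured dtype with fields 'x' (<i4) and 'y' (<f8)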

655def __dtype_from_pep3118(stream, is_subdtype): 

656 field_spec = dict( 

657 names=[], 

658 formats=[], 

659 offsets=[], 

660 itemsize=0 

661 ) 

662 offset = 0 

663 common_alignment = 1 

664 is_padding = False 

665 

666 # Parse spec 

667 while stream: 

668 value = None 

669 

670 # End of structure, bail out to upper level 

671 if stream.consume('}'): 

672 break 

673 

674 # Sub-arrays (1) 

675 shape = None 

676 if stream.consume('('): 

677 shape = stream.consume_until(')') 

678 shape = tuple(map(int, shape.split(','))) 

679 

680 # Byte order 

681 if stream.next in ('@', '=', '<', '>', '^', '!'): 

682 byteorder = stream.advance(1) 

683 if byteorder == '!': 

684 byteorder = '>' 

685 stream.byteorder = byteorder 

686 

687 # Byte order characters also control native vs. standard type sizes 

688 if stream.byteorder in ('@', '^'): 

689 type_map = _pep3118_native_map 

690 type_map_chars = _pep3118_native_typechars 

691 else: 

692 type_map = _pep3118_standard_map 

693 type_map_chars = _pep3118_standard_typechars 

694 

695 # Item sizes 

696 itemsize_str = stream.consume_until(lambda c: not c.isdigit()) 

697 if itemsize_str: 

698 itemsize = int(itemsize_str) 

699 else: 

700 itemsize = 1 

701 

702 # Data types 

703 is_padding = False 

704 

705 if stream.consume('T{'): 

706 value, align = __dtype_from_pep3118( 

707 stream, is_subdtype=True) 

708 elif stream.next in type_map_chars: 

709 if stream.next == 'Z': 

710 typechar = stream.advance(2) 

711 else: 

712 typechar = stream.advance(1) 

713 

714 is_padding = (typechar == 'x') 

715 dtypechar = type_map[typechar] 

716 if dtypechar in 'USV': 

717 dtypechar += '%d' % itemsize 

718 itemsize = 1 

719 numpy_byteorder = {'@': '=', '^': '='}.get( 

720 stream.byteorder, stream.byteorder) 

721 value = dtype(numpy_byteorder + dtypechar) 

722 align = value.alignment 

723 elif stream.next in _pep3118_unsupported_map: 

724 desc = _pep3118_unsupported_map[stream.next] 

725 raise NotImplementedError( 

726 "Unrepresentable PEP 3118 data type {!r} ({})" 

727 .format(stream.next, desc)) 

728 else: 

729 raise ValueError("Unknown PEP 3118 data type specifier %r" % stream.s) 

730 

731 # 

732 # Native alignment may require padding 

733 # 

734 # Here we assume that the presence of a '@' character implicitly implies 

735 # that the start of the array is *already* aligned. 

736 # 

737 extra_offset = 0 

738 if stream.byteorder == '@': 

739 start_padding = (-offset) % align 

740 intra_padding = (-value.itemsize) % align 

741 

742 offset += start_padding 

743 

744 if intra_padding != 0: 

745 if itemsize > 1 or (shape is not None and _prod(shape) > 1): 

746 # Inject internal padding to the end of the sub-item 

747 value = _add_trailing_padding(value, intra_padding) 

748 else: 

749 # We can postpone the injection of internal padding, 

750 # as the item appears at most once 

751 extra_offset += intra_padding 

752 

753 # Update common alignment 

754 common_alignment = _lcm(align, common_alignment) 

755 

756 # Convert itemsize to sub-array 

757 if itemsize != 1: 

758 value = dtype((value, (itemsize,))) 

759 

760 # Sub-arrays (2) 

761 if shape is not None: 

762 value = dtype((value, shape)) 

763 

764 # Field name 

765 if stream.consume(':'): 

766 name = stream.consume_until(':') 

767 else: 

768 name = None 

769 

770 if not (is_padding and name is None): 

771 if name is not None and name in field_spec['names']: 

772 raise RuntimeError(f"Duplicate field name '{name}' in PEP3118 format") 

773 field_spec['names'].append(name) 

774 field_spec['formats'].append(value) 

775 field_spec['offsets'].append(offset) 

776 

777 offset += value.itemsize 

778 offset += extra_offset 

779 

780 field_spec['itemsize'] = offset 

781 

782 # extra final padding for aligned types 

783 if stream.byteorder == '@': 

784 field_spec['itemsize'] += (-offset) % common_alignment 

785 

786 # Check if this was a simple 1-item type, and unwrap it 

787 if (field_spec['names'] == [None] 

788 and field_spec['offsets'][0] == 0 

789 and field_spec['itemsize'] == field_spec['formats'][0].itemsize 

790 and not is_subdtype): 

791 ret = field_spec['formats'][0] 

792 else: 

793 _fix_names(field_spec) 

794 ret = dtype(field_spec) 

795 

796 # Finished 

797 return ret, common_alignment 

798 

799def _fix_names(field_spec): 

800 """ Replace names which are None with the next unused f%d name """ 

801 names = field_spec['names'] 

802 for i, name in enumerate(names): 

803 if name is not None: 

804 continue 

805 

806 j = 0 

807 while True: 

808 name = f'f{j}' 

809 if name not in names: 

810 break 

811 j = j + 1 

812 names[i] = name 

813 

814def _add_trailing_padding(value, padding): 

815 """Inject the specified number of padding bytes at the end of a dtype""" 

816 if value.fields is None: 

817 field_spec = dict( 

818 names=['f0'], 

819 formats=[value], 

820 offsets=[0], 

821 itemsize=value.itemsize 

822 ) 

823 else: 

824 fields = value.fields 

825 names = value.names 

826 field_spec = dict( 

827 names=names, 

828 formats=[fields[name][0] for name in names], 

829 offsets=[fields[name][1] for name in names], 

830 itemsize=value.itemsize 

831 ) 

832 

833 field_spec['itemsize'] += padding 

834 return dtype(field_spec) 

835 

836def _prod(a): 

837 p = 1 

838 for x in a: 

839 p *= x 

840 return p 

841 

842def _gcd(a, b): 

843 """Calculate the greatest common divisor of a and b""" 

844 while b: 

845 a, b = b, a % b 

846 return a 

847 

848def _lcm(a, b): 

849 return a // _gcd(a, b) * b 

850 

851def array_ufunc_errmsg_formatter(dummy, ufunc, method, *inputs, **kwargs): 

852 """ Format the error message for when __array_ufunc__ gives up. """ 

853 args_string = ', '.join(['{!r}'.format(arg) for arg in inputs] + 

854 ['{}={!r}'.format(k, v) 

855 for k, v in kwargs.items()]) 

856 args = inputs + kwargs.get('out', ()) 

857 types_string = ', '.join(repr(type(arg).__name__) for arg in args) 

858 return ('operand type(s) all returned NotImplemented from ' 

859 '__array_ufunc__({!r}, {!r}, {}): {}' 

860 .format(ufunc, method, args_string, types_string)) 

861 

862 

863def array_function_errmsg_formatter(public_api, types): 

864 """ Format the error message for when __array_ufunc__ gives up. """ 

865 func_name = '{}.{}'.format(public_api.__module__, public_api.__name__) 

866 return ("no implementation found for '{}' on types that implement " 

867 '__array_function__: {}'.format(func_name, list(types))) 

868 

869 

870def _ufunc_doc_signature_formatter(ufunc): 

871 """ 

872 Builds a signature string which resembles PEP 457 

873 

874 This is used to construct the first line of the docstring 

875 """ 

876 

877 # input arguments are simple 

878 if ufunc.nin == 1: 

879 in_args = 'x' 

880 else: 

881 in_args = ', '.join(f'x{i+1}' for i in range(ufunc.nin)) 

882 

883 # output arguments are both keyword or positional 

884 if ufunc.nout == 0:  # 884 ↛ 885: line 884 didn't jump to line 885, because the condition on line 884 was never true

885 out_args = ', /, out=()' 

886 elif ufunc.nout == 1:  # 886 ↛ 889: line 886 didn't jump to line 889, because the condition on line 886 was never false

887 out_args = ', /, out=None' 

888 else: 

889 out_args = '[, {positional}], / [, out={default}]'.format( 

890 positional=', '.join( 

891 'out{}'.format(i+1) for i in range(ufunc.nout)), 

892 default=repr((None,)*ufunc.nout) 

893 ) 

894 

895 # keyword only args depend on whether this is a gufunc 

896 kwargs = ( 

897 ", casting='same_kind'" 

898 ", order='K'" 

899 ", dtype=None" 

900 ", subok=True" 

901 ) 

902 

903 # NOTE: gufuncs may or may not support the `axis` parameter 

904 if ufunc.signature is None:  # 904 ↛ 907: line 904 didn't jump to line 907, because the condition on line 904 was never false

905 kwargs = f", where=True{kwargs}[, signature, extobj]" 

906 else: 

907 kwargs += "[, signature, extobj, axes, axis]" 

908 

909 # join all the parts together 

910 return '{name}({in_args}{out_args}, *{kwargs})'.format( 

911 name=ufunc.__name__, 

912 in_args=in_args, 

913 out_args=out_args, 

914 kwargs=kwargs 

915 ) 

916 
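For a binary ufunc this produces a first docstring line like the one below (reconstructed from the branches above, so treat it as a sketch):

    import numpy as np

    print(np.add.__doc__.splitlines()[0])
    # add(x1, x2, /, out=None, *, where=True, casting='same_kind', order='K', dtype=None, subok=True[, signature, extobj])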

917 

918def npy_ctypes_check(cls): 

919 # determine if a class comes from ctypes, in order to work around 

920 # a bug in the buffer protocol for those objects, bpo-10746 

921 try: 

922 # ctypes class are new-style, so have an __mro__. This probably fails 

923 # for ctypes classes with multiple inheritance. 

924 if IS_PYPY:  # 924 ↛ 926: line 924 didn't jump to line 926, because the condition on line 924 was never true

925 # (..., _ctypes.basics._CData, Bufferable, object) 

926 ctype_base = cls.__mro__[-3] 

927 else: 

928 # (..., _ctypes._CData, object) 

929 ctype_base = cls.__mro__[-2] 

930 # right now, they're part of the _ctypes module 

931 return '_ctypes' in ctype_base.__module__ 

932 except Exception: 

933 return False