Coverage for /var/srv/projects/api.amasfac.comuna18.com/tmp/venv/lib/python3.9/site-packages/numpy/core/shape_base.py: 15%

192 statements  

coverage.py v6.4.4, created at 2023-07-17 14:22 -0600

__all__ = ['atleast_1d', 'atleast_2d', 'atleast_3d', 'block', 'hstack',
           'stack', 'vstack']

import functools
import itertools
import operator
import warnings

from . import numeric as _nx
from . import overrides
from .multiarray import array, asanyarray, normalize_axis_index
from . import fromnumeric as _from_nx


array_function_dispatch = functools.partial(
    overrides.array_function_dispatch, module='numpy')

def _atleast_1d_dispatcher(*arys):
    return arys


@array_function_dispatch(_atleast_1d_dispatcher)
def atleast_1d(*arys):
    """
    Convert inputs to arrays with at least one dimension.

    Scalar inputs are converted to 1-dimensional arrays, whilst
    higher-dimensional inputs are preserved.

    Parameters
    ----------
    arys1, arys2, ... : array_like
        One or more input arrays.

    Returns
    -------
    ret : ndarray
        An array, or list of arrays, each with ``a.ndim >= 1``.
        Copies are made only if necessary.

    See Also
    --------
    atleast_2d, atleast_3d

    Examples
    --------
    >>> np.atleast_1d(1.0)
    array([1.])

    >>> x = np.arange(9.0).reshape(3,3)
    >>> np.atleast_1d(x)
    array([[0., 1., 2.],
           [3., 4., 5.],
           [6., 7., 8.]])
    >>> np.atleast_1d(x) is x
    True

    >>> np.atleast_1d(1, [3, 4])
    [array([1]), array([3, 4])]

    """
    res = []
    for ary in arys:
        ary = asanyarray(ary)
        if ary.ndim == 0:
            result = ary.reshape(1)
        else:
            result = ary
        res.append(result)
    if len(res) == 1:
        return res[0]
    else:
        return res

def _atleast_2d_dispatcher(*arys):
    return arys


@array_function_dispatch(_atleast_2d_dispatcher)
def atleast_2d(*arys):
    """
    View inputs as arrays with at least two dimensions.

    Parameters
    ----------
    arys1, arys2, ... : array_like
        One or more array-like sequences.  Non-array inputs are converted
        to arrays.  Arrays that already have two or more dimensions are
        preserved.

    Returns
    -------
    res, res2, ... : ndarray
        An array, or list of arrays, each with ``a.ndim >= 2``.
        Copies are avoided where possible, and views with two or more
        dimensions are returned.

    See Also
    --------
    atleast_1d, atleast_3d

    Examples
    --------
    >>> np.atleast_2d(3.0)
    array([[3.]])

    >>> x = np.arange(3.0)
    >>> np.atleast_2d(x)
    array([[0., 1., 2.]])
    >>> np.atleast_2d(x).base is x
    True

    >>> np.atleast_2d(1, [1, 2], [[1, 2]])
    [array([[1]]), array([[1, 2]]), array([[1, 2]])]

    """
    res = []
    for ary in arys:
        ary = asanyarray(ary)
        if ary.ndim == 0:
            result = ary.reshape(1, 1)
        elif ary.ndim == 1:
            result = ary[_nx.newaxis, :]
        else:
            result = ary
        res.append(result)
    if len(res) == 1:
        return res[0]
    else:
        return res

def _atleast_3d_dispatcher(*arys):
    return arys


@array_function_dispatch(_atleast_3d_dispatcher)
def atleast_3d(*arys):
    """
    View inputs as arrays with at least three dimensions.

    Parameters
    ----------
    arys1, arys2, ... : array_like
        One or more array-like sequences.  Non-array inputs are converted to
        arrays.  Arrays that already have three or more dimensions are
        preserved.

    Returns
    -------
    res1, res2, ... : ndarray
        An array, or list of arrays, each with ``a.ndim >= 3``.  Copies are
        avoided where possible, and views with three or more dimensions are
        returned.  For example, a 1-D array of shape ``(N,)`` becomes a view
        of shape ``(1, N, 1)``, and a 2-D array of shape ``(M, N)`` becomes a
        view of shape ``(M, N, 1)``.

    See Also
    --------
    atleast_1d, atleast_2d

    Examples
    --------
    >>> np.atleast_3d(3.0)
    array([[[3.]]])

    >>> x = np.arange(3.0)
    >>> np.atleast_3d(x).shape
    (1, 3, 1)

    >>> x = np.arange(12.0).reshape(4,3)
    >>> np.atleast_3d(x).shape
    (4, 3, 1)
    >>> np.atleast_3d(x).base is x.base  # x is a reshape, so not base itself
    True

    >>> for arr in np.atleast_3d([1, 2], [[1, 2]], [[[1, 2]]]):
    ...     print(arr, arr.shape)  # doctest: +SKIP
    ...
    [[[1]
      [2]]] (1, 2, 1)
    [[[1]
      [2]]] (1, 2, 1)
    [[[1 2]]] (1, 1, 2)

    """
    res = []
    for ary in arys:
        ary = asanyarray(ary)
        if ary.ndim == 0:
            result = ary.reshape(1, 1, 1)
        elif ary.ndim == 1:
            result = ary[_nx.newaxis, :, _nx.newaxis]
        elif ary.ndim == 2:
            result = ary[:, :, _nx.newaxis]
        else:
            result = ary
        res.append(result)
    if len(res) == 1:
        return res[0]
    else:
        return res

def _arrays_for_stack_dispatcher(arrays, stacklevel=4):
    if not hasattr(arrays, '__getitem__') and hasattr(arrays, '__iter__'):
        warnings.warn('arrays to stack must be passed as a "sequence" type '
                      'such as list or tuple. Support for non-sequence '
                      'iterables such as generators is deprecated as of '
                      'NumPy 1.16 and will raise an error in the future.',
                      FutureWarning, stacklevel=stacklevel)
        return ()
    return arrays
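# Illustrative sketch of the guard above (not a verified doctest): a generator
# has ``__iter__`` but no ``__getitem__``, so it triggers the FutureWarning and
# an empty tuple is returned, whereas list/tuple inputs pass through unchanged.
#
#   >>> _arrays_for_stack_dispatcher(x for x in (1, 2))
#   ()      # plus a FutureWarning emitted via warnings.warn
#   >>> _arrays_for_stack_dispatcher([1, 2])
#   [1, 2]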

def _vhstack_dispatcher(tup):
    return _arrays_for_stack_dispatcher(tup)

@array_function_dispatch(_vhstack_dispatcher)
def vstack(tup):
    """
    Stack arrays in sequence vertically (row wise).

    This is equivalent to concatenation along the first axis after 1-D arrays
    of shape `(N,)` have been reshaped to `(1,N)`. Rebuilds arrays divided by
    `vsplit`.

    This function makes most sense for arrays with up to 3 dimensions. For
    instance, for pixel-data with a height (first axis), width (second axis),
    and r/g/b channels (third axis). The functions `concatenate`, `stack` and
    `block` provide more general stacking and concatenation operations.

    Parameters
    ----------
    tup : sequence of ndarrays
        The arrays must have the same shape along all but the first axis.
        1-D arrays must have the same length.

    Returns
    -------
    stacked : ndarray
        The array formed by stacking the given arrays; it will be at least 2-D.

    See Also
    --------
    concatenate : Join a sequence of arrays along an existing axis.
    stack : Join a sequence of arrays along a new axis.
    block : Assemble an nd-array from nested lists of blocks.
    hstack : Stack arrays in sequence horizontally (column wise).
    dstack : Stack arrays in sequence depth wise (along third axis).
    column_stack : Stack 1-D arrays as columns into a 2-D array.
    vsplit : Split an array into multiple sub-arrays vertically (row-wise).

    Examples
    --------
    >>> a = np.array([1, 2, 3])
    >>> b = np.array([4, 5, 6])
    >>> np.vstack((a,b))
    array([[1, 2, 3],
           [4, 5, 6]])

    >>> a = np.array([[1], [2], [3]])
    >>> b = np.array([[4], [5], [6]])
    >>> np.vstack((a,b))
    array([[1],
           [2],
           [3],
           [4],
           [5],
           [6]])

    """
    if not overrides.ARRAY_FUNCTION_ENABLED:
        # raise warning if necessary
        _arrays_for_stack_dispatcher(tup, stacklevel=2)
    arrs = atleast_2d(*tup)
    if not isinstance(arrs, list):
        arrs = [arrs]
    return _nx.concatenate(arrs, 0)

@array_function_dispatch(_vhstack_dispatcher)
def hstack(tup):
    """
    Stack arrays in sequence horizontally (column wise).

    This is equivalent to concatenation along the second axis, except for 1-D
    arrays where it concatenates along the first axis. Rebuilds arrays divided
    by `hsplit`.

    This function makes most sense for arrays with up to 3 dimensions. For
    instance, for pixel-data with a height (first axis), width (second axis),
    and r/g/b channels (third axis). The functions `concatenate`, `stack` and
    `block` provide more general stacking and concatenation operations.

    Parameters
    ----------
    tup : sequence of ndarrays
        The arrays must have the same shape along all but the second axis,
        except 1-D arrays which can be any length.

    Returns
    -------
    stacked : ndarray
        The array formed by stacking the given arrays.

    See Also
    --------
    concatenate : Join a sequence of arrays along an existing axis.
    stack : Join a sequence of arrays along a new axis.
    block : Assemble an nd-array from nested lists of blocks.
    vstack : Stack arrays in sequence vertically (row wise).
    dstack : Stack arrays in sequence depth wise (along third axis).
    column_stack : Stack 1-D arrays as columns into a 2-D array.
    hsplit : Split an array into multiple sub-arrays horizontally (column-wise).

    Examples
    --------
    >>> a = np.array((1,2,3))
    >>> b = np.array((4,5,6))
    >>> np.hstack((a,b))
    array([1, 2, 3, 4, 5, 6])
    >>> a = np.array([[1],[2],[3]])
    >>> b = np.array([[4],[5],[6]])
    >>> np.hstack((a,b))
    array([[1, 4],
           [2, 5],
           [3, 6]])

    """
    if not overrides.ARRAY_FUNCTION_ENABLED:
        # raise warning if necessary
        _arrays_for_stack_dispatcher(tup, stacklevel=2)

    arrs = atleast_1d(*tup)
    if not isinstance(arrs, list):
        arrs = [arrs]
    # As a special case, dimension 0 of 1-dimensional arrays is "horizontal"
    if arrs and arrs[0].ndim == 1:
        return _nx.concatenate(arrs, 0)
    else:
        return _nx.concatenate(arrs, 1)

def _stack_dispatcher(arrays, axis=None, out=None):
    arrays = _arrays_for_stack_dispatcher(arrays, stacklevel=6)
    if out is not None:
        # optimize for the typical case where only arrays is provided
        arrays = list(arrays)
        arrays.append(out)
    return arrays

@array_function_dispatch(_stack_dispatcher)
def stack(arrays, axis=0, out=None):
    """
    Join a sequence of arrays along a new axis.

    The ``axis`` parameter specifies the index of the new axis in the
    dimensions of the result. For example, if ``axis=0`` it will be the first
    dimension and if ``axis=-1`` it will be the last dimension.

    .. versionadded:: 1.10.0

    Parameters
    ----------
    arrays : sequence of array_like
        Each array must have the same shape.

    axis : int, optional
        The axis in the result array along which the input arrays are stacked.

    out : ndarray, optional
        If provided, the destination to place the result. The shape must be
        correct, matching that of what stack would have returned if no
        out argument were specified.

    Returns
    -------
    stacked : ndarray
        The stacked array has one more dimension than the input arrays.

    See Also
    --------
    concatenate : Join a sequence of arrays along an existing axis.
    block : Assemble an nd-array from nested lists of blocks.
    split : Split array into a list of multiple sub-arrays of equal size.

    Examples
    --------
    >>> arrays = [np.random.randn(3, 4) for _ in range(10)]
    >>> np.stack(arrays, axis=0).shape
    (10, 3, 4)

    >>> np.stack(arrays, axis=1).shape
    (3, 10, 4)

    >>> np.stack(arrays, axis=2).shape
    (3, 4, 10)

    >>> a = np.array([1, 2, 3])
    >>> b = np.array([4, 5, 6])
    >>> np.stack((a, b))
    array([[1, 2, 3],
           [4, 5, 6]])

    >>> np.stack((a, b), axis=-1)
    array([[1, 4],
           [2, 5],
           [3, 6]])

    """
    if not overrides.ARRAY_FUNCTION_ENABLED:
        # raise warning if necessary
        _arrays_for_stack_dispatcher(arrays, stacklevel=2)

    arrays = [asanyarray(arr) for arr in arrays]
    if not arrays:
        raise ValueError('need at least one array to stack')

    shapes = {arr.shape for arr in arrays}
    if len(shapes) != 1:
        raise ValueError('all input arrays must have the same shape')

    result_ndim = arrays[0].ndim + 1
    axis = normalize_axis_index(axis, result_ndim)

    sl = (slice(None),) * axis + (_nx.newaxis,)
    expanded_arrays = [arr[sl] for arr in arrays]
    return _nx.concatenate(expanded_arrays, axis=axis, out=out)
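# A minimal worked sketch of the slice-expansion trick above (illustration
# only, assuming two 2-D inputs of shape (3, 4) and axis=1): ``sl`` becomes
# ``(slice(None), newaxis)``, so each ``arr[sl]`` is a (3, 1, 4) view, and
# concatenating those views along axis 1 gives the stacked (3, 2, 4) result.
#
#   >>> a, b = np.ones((3, 4)), np.zeros((3, 4))
#   >>> a[:, _nx.newaxis].shape
#   (3, 1, 4)
#   >>> np.stack((a, b), axis=1).shape
#   (3, 2, 4)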

# Internal functions to eliminate the overhead of repeated dispatch in one of
# the two possible paths inside np.block.
# Use getattr to protect against __array_function__ being disabled.
_size = getattr(_from_nx.size, '__wrapped__', _from_nx.size)
_ndim = getattr(_from_nx.ndim, '__wrapped__', _from_nx.ndim)
_concatenate = getattr(_from_nx.concatenate, '__wrapped__', _from_nx.concatenate)


def _block_format_index(index):
    """
    Convert a list of indices ``[0, 1, 2]`` into ``"arrays[0][1][2]"``.
    """
    idx_str = ''.join('[{}]'.format(i) for i in index if i is not None)
    return 'arrays' + idx_str
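# Illustrative sketch (not a verified doctest): trailing ``None`` entries,
# which mark an empty list at the bottom of the nesting, are skipped when the
# index string is built.
#
#   >>> _block_format_index([0, 1])
#   'arrays[0][1]'
#   >>> _block_format_index([0, 1, None])
#   'arrays[0][1]'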


def _block_check_depths_match(arrays, parent_index=[]):
    """
    Recursive function checking that the depths of nested lists in `arrays`
    all match. Mismatch raises a ValueError as described in the block
    docstring below.

    The entire index (rather than just the depth) needs to be calculated
    for each innermost list, in case an error needs to be raised, so that
    the index of the offending list can be printed as part of the error.

    Parameters
    ----------
    arrays : nested list of arrays
        The arrays to check
    parent_index : list of int
        The full index of `arrays` within the nested lists passed to
        `_block_check_depths_match` at the top of the recursion.

    Returns
    -------
    first_index : list of int
        The full index of an element from the bottom of the nesting in
        `arrays`. If any element at the bottom is an empty list, this will
        refer to it, and the last index along the empty axis will be None.
    max_arr_ndim : int
        The maximum of the ndims of the arrays nested in `arrays`.
    final_size : int
        The number of elements in the final array. This is used to motivate
        the choice of algorithm, based on benchmarking wisdom.

    """
    if type(arrays) is tuple:
        # not strictly necessary, but saves us from:
        #  - more than one way to do things - no point treating tuples like
        #    lists
        #  - horribly confusing behaviour that results when tuples are
        #    treated like ndarray
        raise TypeError(
            '{} is a tuple. '
            'Only lists can be used to arrange blocks, and np.block does '
            'not allow implicit conversion from tuple to ndarray.'.format(
                _block_format_index(parent_index)
            )
        )
    elif type(arrays) is list and len(arrays) > 0:
        idxs_ndims = (_block_check_depths_match(arr, parent_index + [i])
                      for i, arr in enumerate(arrays))

        first_index, max_arr_ndim, final_size = next(idxs_ndims)
        for index, ndim, size in idxs_ndims:
            final_size += size
            if ndim > max_arr_ndim:
                max_arr_ndim = ndim
            if len(index) != len(first_index):
                raise ValueError(
                    "List depths are mismatched. First element was at depth "
                    "{}, but there is an element at depth {} ({})".format(
                        len(first_index),
                        len(index),
                        _block_format_index(index)
                    )
                )
            # propagate our flag that indicates an empty list at the bottom
            if index[-1] is None:
                first_index = index

        return first_index, max_arr_ndim, final_size
    elif type(arrays) is list and len(arrays) == 0:
        # We've 'bottomed out' on an empty list
        return parent_index + [None], 0, 0
    else:
        # We've 'bottomed out' - arrays is either a scalar or an array
        size = _size(arrays)
        return parent_index, _ndim(arrays), size
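# Rough illustration of the depth check (sketch, not a verified doctest): for
# a consistent two-level nesting of scalars the bottom-most index, the maximum
# ndim of the leaves and the total element count are returned; mismatched
# depths raise.
#
#   >>> _block_check_depths_match([[1, 2], [3]])
#   ([0, 0], 0, 3)
#   >>> _block_check_depths_match([[1, 2], 3])
#   ValueError: List depths are mismatched. ...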


def _atleast_nd(a, ndim):
    # Ensures `a` has at least `ndim` dimensions by prepending
    # ones to `a.shape` as necessary
    return array(a, ndmin=ndim, copy=False, subok=True)
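# Sketch of the ndmin behaviour relied on above (illustration only): the new
# axes are prepended, so a length-3 1-D input promoted to 3 dimensions has
# shape (1, 1, 3).
#
#   >>> _atleast_nd(np.arange(3), 3).shape
#   (1, 1, 3)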


def _accumulate(values):
    return list(itertools.accumulate(values))


def _concatenate_shapes(shapes, axis):
    """Given array shapes, return the resulting shape and slices prefixes.

    These help in nested concatenation.

    Returns
    -------
    shape : tuple of int
        This tuple satisfies::

            shape, _ = _concatenate_shapes([arr.shape for arr in arrs], axis)
            shape == concatenate(arrs, axis).shape

    slice_prefixes : tuple of (slice(start, end), )
        For a list of arrays being concatenated, this returns the slice
        in the larger array at axis that needs to be sliced into.

        For example, the following holds::

            ret = concatenate([a, b, c], axis)
            _, (sl_a, sl_b, sl_c) = _concatenate_shapes(
                [a.shape, b.shape, c.shape], axis)

            ret[(slice(None),) * axis + sl_a] == a
            ret[(slice(None),) * axis + sl_b] == b
            ret[(slice(None),) * axis + sl_c] == c

        These are called slice prefixes since they are used in the recursive
        blocking algorithm to compute the left-most slices during the
        recursion. Therefore, they must be prepended to the rest of the slice
        that was computed deeper in the recursion.

        These are returned as tuples to ensure that they can quickly be added
        to an existing slice tuple without creating a new tuple every time.

    """
    # Cache a result that will be reused.
    shape_at_axis = [shape[axis] for shape in shapes]

    # Take a shape, any shape
    first_shape = shapes[0]
    first_shape_pre = first_shape[:axis]
    first_shape_post = first_shape[axis+1:]

    if any(shape[:axis] != first_shape_pre or
           shape[axis+1:] != first_shape_post for shape in shapes):
        raise ValueError(
            'Mismatched array shapes in block along axis {}.'.format(axis))

    shape = (first_shape_pre + (sum(shape_at_axis),) + first_shape[axis+1:])

    offsets_at_axis = _accumulate(shape_at_axis)
    slice_prefixes = [(slice(start, end),)
                      for start, end in zip([0] + offsets_at_axis,
                                            offsets_at_axis)]
    return shape, slice_prefixes
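# Worked sketch (illustration only): concatenating shapes (2, 3) and (2, 4)
# along axis 1 yields the combined shape and the per-input slice prefixes
# along that axis.
#
#   >>> _concatenate_shapes([(2, 3), (2, 4)], axis=1)
#   ((2, 7), [(slice(0, 3),), (slice(3, 7),)])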


def _block_info_recursion(arrays, max_depth, result_ndim, depth=0):
    """
    Returns the shape of the final array, along with a list
    of slices and a list of arrays that can be used for assignment inside the
    new array

    Parameters
    ----------
    arrays : nested list of arrays
        The arrays to check
    max_depth : int
        The number of nested lists
    result_ndim : int
        The number of dimensions in the final array.

    Returns
    -------
    shape : tuple of int
        The shape that the final array will take on.
    slices : list of tuple of slices
        The slices into the full array required for assignment. These are
        required to be prepended with ``(Ellipsis, )`` to obtain the correct
        final index.
    arrays : list of ndarray
        The data to assign to each slice of the full array

    """
    if depth < max_depth:
        shapes, slices, arrays = zip(
            *[_block_info_recursion(arr, max_depth, result_ndim, depth+1)
              for arr in arrays])

        axis = result_ndim - max_depth + depth
        shape, slice_prefixes = _concatenate_shapes(shapes, axis)

        # Prepend the slice prefix and flatten the slices
        slices = [slice_prefix + the_slice
                  for slice_prefix, inner_slices in zip(slice_prefixes, slices)
                  for the_slice in inner_slices]

        # Flatten the array list
        arrays = functools.reduce(operator.add, arrays)

        return shape, slices, arrays
    else:
        # We've 'bottomed out' - arrays is either a scalar or an array
        # type(arrays) is not list
        # Return the slice and the array inside a list to be consistent with
        # the recursive case.
        arr = _atleast_nd(arrays, result_ndim)
        return arr.shape, [()], [arr]
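# Rough sketch of one level of the recursion (illustration only): for a flat
# list containing a (2, 2) and a (2, 3) block, the final shape, the assignment
# slices and the (at-least-2d) blocks themselves are returned.
#
#   >>> a, b = np.ones((2, 2)), np.zeros((2, 3))
#   >>> shape, slices, arrs = _block_info_recursion([a, b], 1, 2)
#   >>> shape, slices
#   ((2, 5), [(slice(0, 2),), (slice(2, 5),)])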


def _block(arrays, max_depth, result_ndim, depth=0):
    """
    Internal implementation of block based on repeated concatenation.
    `arrays` is the argument passed to
    block. `max_depth` is the depth of nested lists within `arrays` and
    `result_ndim` is the greatest of the dimensions of the arrays in
    `arrays` and the depth of the lists in `arrays` (see block docstring
    for details).
    """
    if depth < max_depth:
        arrs = [_block(arr, max_depth, result_ndim, depth+1)
                for arr in arrays]
        return _concatenate(arrs, axis=-(max_depth-depth))
    else:
        # We've 'bottomed out' - arrays is either a scalar or an array
        # type(arrays) is not list
        return _atleast_nd(arrays, result_ndim)
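# Axis-order sketch (illustration only, assuming 2-D blocks and max_depth=2):
# the innermost lists are concatenated first along axis -1, then those results
# are concatenated along axis -2, matching the description in the block()
# docstring below.
#
#   >>> A, B = np.ones((2, 2)), np.zeros((2, 2))
#   >>> _block([[A, B], [B, A]], 2, 2).shape
#   (4, 4)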


def _block_dispatcher(arrays):
    # Use type(...) is list to match the behavior of np.block(), which special
    # cases list specifically rather than allowing for generic iterables or
    # tuple. Also, we know that list.__array_function__ will never exist.
    if type(arrays) is list:
        for subarrays in arrays:
            yield from _block_dispatcher(subarrays)
    else:
        yield arrays
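# Dispatch sketch (illustration only): the generator yields every non-list
# leaf of the nesting, so __array_function__ overrides can be discovered on
# any of them.
#
#   >>> a, b = np.ones(2), np.zeros(2)
#   >>> list(_block_dispatcher([[a], [b, 1]]))
#   [array([1., 1.]), array([0., 0.]), 1]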


@array_function_dispatch(_block_dispatcher)
def block(arrays):
    """
    Assemble an nd-array from nested lists of blocks.

    Blocks in the innermost lists are concatenated (see `concatenate`) along
    the last dimension (-1), then these are concatenated along the
    second-last dimension (-2), and so on until the outermost list is reached.

    Blocks can be of any dimension, but will not be broadcasted using the normal
    rules. Instead, leading axes of size 1 are inserted, to make ``block.ndim``
    the same for all blocks. This is primarily useful for working with scalars,
    and means that code like ``np.block([v, 1])`` is valid, where
    ``v.ndim == 1``.

    When the nested list is two levels deep, this allows block matrices to be
    constructed from their components.

    .. versionadded:: 1.13.0

    Parameters
    ----------
    arrays : nested list of array_like or scalars (but not tuples)
        If passed a single ndarray or scalar (a nested list of depth 0), this
        is returned unmodified (and not copied).

        Element shapes must match along the appropriate axes (without
        broadcasting), but leading 1s will be prepended to the shape as
        necessary to make the dimensions match.

    Returns
    -------
    block_array : ndarray
        The array assembled from the given blocks.

        The dimensionality of the output is equal to the greatest of:
        * the dimensionality of all the inputs
        * the depth to which the input list is nested

    Raises
    ------
    ValueError
        * If list depths are mismatched - for instance, ``[[a, b], c]`` is
          illegal, and should be spelt ``[[a, b], [c]]``
        * If lists are empty - for instance, ``[[a, b], []]``

    See Also
    --------
    concatenate : Join a sequence of arrays along an existing axis.
    stack : Join a sequence of arrays along a new axis.
    vstack : Stack arrays in sequence vertically (row wise).
    hstack : Stack arrays in sequence horizontally (column wise).
    dstack : Stack arrays in sequence depth wise (along third axis).
    column_stack : Stack 1-D arrays as columns into a 2-D array.
    vsplit : Split an array into multiple sub-arrays vertically (row-wise).

    Notes
    -----

    When called with only scalars, ``np.block`` is equivalent to an ndarray
    call. So ``np.block([[1, 2], [3, 4]])`` is equivalent to
    ``np.array([[1, 2], [3, 4]])``.

    This function does not enforce that the blocks lie on a fixed grid.
    ``np.block([[a, b], [c, d]])`` is not restricted to arrays of the form::

        AAAbb
        AAAbb
        cccDD

    But is also allowed to produce, for some ``a, b, c, d``::

        AAAbb
        AAAbb
        cDDDD

    Since concatenation happens along the last axis first, `block` is _not_
    capable of producing the following directly::

        AAAbb
        cccbb
        cccDD

    Matlab's "square bracket stacking", ``[A, B, ...; p, q, ...]``, is
    equivalent to ``np.block([[A, B, ...], [p, q, ...]])``.

    Examples
    --------
    The most common use of this function is to build a block matrix

    >>> A = np.eye(2) * 2
    >>> B = np.eye(3) * 3
    >>> np.block([
    ...     [A,               np.zeros((2, 3))],
    ...     [np.ones((3, 2)), B               ]
    ... ])
    array([[2., 0., 0., 0., 0.],
           [0., 2., 0., 0., 0.],
           [1., 1., 3., 0., 0.],
           [1., 1., 0., 3., 0.],
           [1., 1., 0., 0., 3.]])

    With a list of depth 1, `block` can be used as `hstack`

    >>> np.block([1, 2, 3])              # hstack([1, 2, 3])
    array([1, 2, 3])

    >>> a = np.array([1, 2, 3])
    >>> b = np.array([4, 5, 6])
    >>> np.block([a, b, 10])             # hstack([a, b, 10])
    array([ 1,  2,  3,  4,  5,  6, 10])

    >>> A = np.ones((2, 2), int)
    >>> B = 2 * A
    >>> np.block([A, B])                 # hstack([A, B])
    array([[1, 1, 2, 2],
           [1, 1, 2, 2]])

    With a list of depth 2, `block` can be used in place of `vstack`:

    >>> a = np.array([1, 2, 3])
    >>> b = np.array([4, 5, 6])
    >>> np.block([[a], [b]])             # vstack([a, b])
    array([[1, 2, 3],
           [4, 5, 6]])

    >>> A = np.ones((2, 2), int)
    >>> B = 2 * A
    >>> np.block([[A], [B]])             # vstack([A, B])
    array([[1, 1],
           [1, 1],
           [2, 2],
           [2, 2]])

    It can also be used in place of `atleast_1d` and `atleast_2d`

    >>> a = np.array(0)
    >>> b = np.array([1])
    >>> np.block([a])                    # atleast_1d(a)
    array([0])
    >>> np.block([b])                    # atleast_1d(b)
    array([1])

    >>> np.block([[a]])                  # atleast_2d(a)
    array([[0]])
    >>> np.block([[b]])                  # atleast_2d(b)
    array([[1]])

    """
    arrays, list_ndim, result_ndim, final_size = _block_setup(arrays)

    # It was found through benchmarking that making an array of final size
    # around 256x256 was faster by straight concatenation on an
    # i7-7700HQ processor with dual-channel 2400 MHz RAM.
    # The dtype used did not seem to matter much.
    #
    # A 2D array using repeated concatenation requires 2 copies of the array.
    #
    # The fastest algorithm will depend on the ratio of CPU power to memory
    # speed.
    # One can monitor the results of the benchmark
    # https://pv.github.io/numpy-bench/#bench_shape_base.Block2D.time_block2d
    # to tune this parameter until a C version of the `_block_info_recursion`
    # algorithm is implemented which would likely be faster than the python
    # version.
    if list_ndim * final_size > (2 * 512 * 512):
        return _block_slicing(arrays, list_ndim, result_ndim)
    else:
        return _block_concatenate(arrays, list_ndim, result_ndim)


# These helper functions are mostly used for testing.
# They allow us to write tests that directly call `_block_slicing`
# or `_block_concatenate` without blocking large arrays to force the wisdom
# to trigger the desired path.
def _block_setup(arrays):
    """
    Returns
    (`arrays`, list_ndim, result_ndim, final_size)
    """
    bottom_index, arr_ndim, final_size = _block_check_depths_match(arrays)
    list_ndim = len(bottom_index)
    if bottom_index and bottom_index[-1] is None:
        raise ValueError(
            'List at {} cannot be empty'.format(
                _block_format_index(bottom_index)
            )
        )
    result_ndim = max(arr_ndim, list_ndim)
    return arrays, list_ndim, result_ndim, final_size
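# Setup sketch (illustration only): for a doubly nested single 2x2 block, the
# nesting depth is 2, the result will be 2-D and the final array holds 4
# elements.
#
#   >>> arrays = [[np.ones((2, 2))]]
#   >>> _, list_ndim, result_ndim, final_size = _block_setup(arrays)
#   >>> list_ndim, result_ndim, final_size
#   (2, 2, 4)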


def _block_slicing(arrays, list_ndim, result_ndim):
    shape, slices, arrays = _block_info_recursion(
        arrays, list_ndim, result_ndim)
    dtype = _nx.result_type(*[arr.dtype for arr in arrays])

    # Test preferring F only in the case that all input arrays are F
    F_order = all(arr.flags['F_CONTIGUOUS'] for arr in arrays)
    C_order = all(arr.flags['C_CONTIGUOUS'] for arr in arrays)
    order = 'F' if F_order and not C_order else 'C'
    result = _nx.empty(shape=shape, dtype=dtype, order=order)
    # Note: In a c implementation, the function
    # PyArray_CreateMultiSortedStridePerm could be used for more advanced
    # guessing of the desired order.

    for the_slice, arr in zip(slices, arrays):
        result[(Ellipsis,) + the_slice] = arr
    return result


def _block_concatenate(arrays, list_ndim, result_ndim):
    result = _block(arrays, list_ndim, result_ndim)
    if list_ndim == 0:
        # Catch an edge case where _block returns a view because
        # `arrays` is a single numpy array and not a list of numpy arrays.
        # This might copy scalars or lists twice, but this isn't a likely
        # usecase for those interested in performance
        result = result.copy()
    return result