// faer/mat/matown.rs

1use super::*;
2use crate::internal_prelude::*;
3use crate::{Idx, IdxInc, TryReserveError};
4use core::alloc::Layout;
5use dyn_stack::StackReq;
6use faer_traits::ComplexField;
7
#[inline]
pub(crate) fn align_for(size: usize, align: usize, needs_drop: bool) -> usize {
	// plain (non-`Drop`) element types whose size is a power of two get promoted to at
	// least 64-byte alignment; every other element type keeps its natural alignment
	let cache_line_candidate = size.is_power_of_two() && !needs_drop;
	if !cache_line_candidate { align } else { Ord::max(align, 64) }
}
16
17// CURSED: currently avoiding inlining to get noalias annotations in llvm
18#[inline(never)]
19unsafe fn noalias_annotate<T, Rows: Shape, Cols: Shape>(
20	iter: &mut [core::mem::MaybeUninit<T>],
21	new_nrows: IdxInc<Rows>,
22	old_nrows: IdxInc<Rows>,
23	f: &mut impl FnMut(Idx<Rows>, Idx<Cols>) -> T,
24	j: Idx<Cols>,
25) {
26	let ptr = iter.as_mut_ptr();
27	let iter = core::slice::from_raw_parts_mut(ptr, new_nrows.unbound() - old_nrows.unbound());
28
29	let mut guard = DropCol {
30		ptr: ptr as *mut T,
31		nrows: 0,
32	};
33	for i in Rows::indices(old_nrows, new_nrows) {
34		let ptr = iter.as_mut_ptr().add(i.unbound()) as *mut T;
35		ptr.write((*f)(i, j));
36		guard.nrows += 1;
37	}
38	core::mem::forget(guard);
39}
40
/// iterator wrapper that drains and drops every remaining item when it is itself dropped.
/// if dropping one item panics, the remaining items are still drained during unwinding
pub(crate) struct DropIter<I: Iterator>(pub I);
impl<I: Iterator> Drop for DropIter<I> {
	#[inline]
	fn drop(&mut self) {
		// inner guard: if `for_each(drop)` below unwinds because an item's destructor
		// panicked, this guard's own `Drop` resumes draining the rest of the iterator
		pub struct DropIterRetry<'a, I: Iterator>(pub &'a mut I);
		impl<I: Iterator> Drop for DropIterRetry<'_, I> {
			#[inline]
			fn drop(&mut self) {
				self.0.for_each(drop);
			}
		}

		let in_case_of_panic = DropIterRetry(&mut self.0);
		in_case_of_panic.0.for_each(drop);
		// normal path: everything was dropped, disarm the retry guard
		core::mem::forget(in_case_of_panic);
	}
}
58
59extern crate alloc;
60
/// owning handle to a single uninitialized column-major allocation.
/// its `Drop` deallocates the memory without running element destructors
struct RawMatUnit<T> {
	// non-null, aligned base pointer; dangling (but aligned) when nothing was allocated
	ptr: NonNull<T>,
	// number of element slots per column (also the column stride, in elements)
	row_capacity: usize,
	// number of columns the allocation can hold
	col_capacity: usize,
	// size/alignment record of the live allocation; zero-sized when nothing was allocated
	layout: StackReq,
	__marker: PhantomData<T>,
}
68
/// owning handle to the matrix backing buffer; same layout and ownership semantics as
/// [`RawMatUnit`], used as the storage field of [`Own`]
struct RawMat<T> {
	// non-null, aligned base pointer; dangling (but aligned) when nothing was allocated
	ptr: NonNull<T>,
	// number of element slots per column (also the column stride, in elements)
	row_capacity: usize,
	// number of columns the allocation can hold
	col_capacity: usize,
	// size/alignment record of the live allocation; zero-sized when nothing was allocated
	layout: StackReq,
	__marker: PhantomData<T>,
}
76
impl<T> RawMatUnit<T> {
	/// allocates an uninitialized buffer for `row_capacity × col_capacity` elements,
	/// rounding `row_capacity` up when needed so every column base stays aligned.
	///
	/// # errors
	/// returns `TryReserveError::CapacityOverflow` if the byte count overflows, or
	/// `TryReserveError::AllocError` if the allocator fails
	fn try_with_capacity(mut row_capacity: usize, col_capacity: usize) -> Result<Self, TryReserveError> {
		let size = core::mem::size_of::<T>();
		let prev_align = core::mem::align_of::<T>();
		// possibly over-align to a cache line (see `align_for`)
		let align = align_for(size, prev_align, core::mem::needs_drop::<T>());

		if align > size {
			// pad the column length so `row_capacity * size` is a multiple of `align`,
			// keeping every column start aligned
			row_capacity = row_capacity
				.msrv_checked_next_multiple_of(align / size)
				.ok_or(TryReserveError::CapacityOverflow)?;
		}

		// total byte count, with overflow checks at each step
		let size = size
			.checked_mul(row_capacity)
			.and_then(|size| size.checked_mul(col_capacity))
			.ok_or(TryReserveError::CapacityOverflow)?;

		let layout;
		let ptr = if size == 0 {
			// nothing to allocate: use a dangling, non-null, well-aligned pointer
			layout = StackReq::empty();
			core::ptr::null_mut::<u8>().wrapping_add(align)
		} else {
			let new_layout = Layout::from_size_align(size, align).map_err(|_| TryReserveError::CapacityOverflow)?;
			layout = StackReq::new_aligned::<u8>(new_layout.size(), new_layout.align());
			let ptr = unsafe { alloc::alloc::alloc(new_layout) };
			if ptr.is_null() {
				return Err(TryReserveError::AllocError { layout: new_layout });
			}
			ptr
		};
		let ptr = ptr as *mut T;

		Ok(Self {
			// SAFETY: both branches above produce a non-null pointer
			ptr: unsafe { NonNull::new_unchecked(ptr) },
			row_capacity,
			col_capacity,
			layout,
			__marker: PhantomData,
		})
	}

	/// disassembles `self` without running its destructor, transferring ownership of the
	/// allocation to the caller
	fn into_raw_parts(self) -> (NonNull<T>, usize, usize, StackReq) {
		let this = core::mem::ManuallyDrop::new(self);
		(this.ptr, this.row_capacity, this.col_capacity, this.layout)
	}
}
123
impl<T> Drop for RawMatUnit<T> {
	#[inline]
	fn drop(&mut self) {
		// a zero-sized layout means the pointer is dangling and nothing was allocated
		if self.layout.size_bytes() > 0 {
			unsafe {
				// SAFETY: `layout` records the exact size/alignment that `try_with_capacity`
				// passed to `alloc`
				alloc::alloc::dealloc(
					self.ptr.as_ptr() as *mut u8,
					Layout::from_size_align_unchecked(self.layout.size_bytes(), self.layout.align_bytes()),
				)
			};
		}
	}
}
137
138impl<T> RawMat<T> {
139	#[cold]
140	fn try_with_capacity(row_capacity: usize, col_capacity: usize) -> Result<Self, TryReserveError> {
141		let mut err = None;
142		let alloc = {
143			let alloc = RawMatUnit::<T>::try_with_capacity(row_capacity, col_capacity);
144			if let Err(alloc_err) = &alloc {
145				err = Some(*alloc_err);
146			}
147			alloc
148		};
149		if let Some(err) = err {
150			return Err(err);
151		}
152
153		let layout;
154		let row_capacity;
155		let ptr = {
156			let (ptr, new_row_capacity, _, unit_layout) = alloc.unwrap().into_raw_parts();
157			row_capacity = new_row_capacity;
158			layout = unit_layout;
159			ptr
160		};
161
162		Ok(Self {
163			ptr,
164			row_capacity,
165			col_capacity,
166			layout,
167			__marker: PhantomData,
168		})
169	}
170
171	#[cold]
172	fn do_reserve_with(&mut self, nrows: usize, ncols: usize, new_row_capacity: usize, new_col_capacity: usize) -> Result<(), TryReserveError> {
173		let old_row_capacity = self.row_capacity;
174		let size = core::mem::size_of::<T>();
175
176		let new = Self::try_with_capacity(new_row_capacity, new_col_capacity)?;
177		let new_row_capacity = new.row_capacity;
178
179		unsafe fn move_mat(mut new: *mut u8, mut old: *const u8, col_bytes: usize, ncols: usize, new_byte_stride: isize, old_byte_stride: isize) {
180			for _ in 0..ncols {
181				core::ptr::copy_nonoverlapping(old, new, col_bytes);
182				new = new.wrapping_offset(new_byte_stride);
183				old = old.wrapping_offset(old_byte_stride);
184			}
185		}
186
187		{
188			let new = new.ptr;
189			let old = self.ptr;
190
191			let new = new.as_ptr() as *mut u8;
192			let old = old.as_ptr() as *const u8;
193
194			unsafe {
195				move_mat(
196					new,
197					old,
198					nrows * size,
199					ncols,
200					(new_row_capacity * size) as isize,
201					(old_row_capacity * size) as isize,
202				)
203			};
204		};
205
206		*self = new;
207		Ok(())
208	}
209
210	fn try_reserve(&mut self, nrows: usize, ncols: usize, new_row_capacity: usize, new_col_capacity: usize) -> Result<(), TryReserveError> {
211		let new_row_capacity = Ord::max(new_row_capacity, nrows);
212		let new_col_capacity = Ord::max(new_col_capacity, ncols);
213
214		if new_row_capacity > self.row_capacity || new_col_capacity > self.col_capacity {
215			self.do_reserve_with(nrows, ncols, new_row_capacity, new_col_capacity)?
216		}
217		Ok(())
218	}
219}
impl<T> Drop for RawMat<T> {
	fn drop(&mut self) {
		let ptr = self.ptr;
		// reassemble a `RawMatUnit` and let its `Drop` free the allocation; element
		// destructors are the responsibility of the owner (`Own`), which runs first
		drop(RawMatUnit {
			ptr,
			row_capacity: self.row_capacity,
			col_capacity: self.col_capacity,
			layout: self.layout,
			__marker: PhantomData,
		});
	}
}
232
/// see [`super::Mat`]
pub struct Own<T, Rows: Shape = usize, Cols: Shape = usize> {
	// column-major backing allocation; may be larger than `nrows × ncols`
	raw: RawMat<T>,
	// number of initialized rows
	nrows: Rows,
	// number of initialized columns
	ncols: Cols,
}
239
// SAFETY: `Own` uniquely owns its buffer of `T`s, so it is `Send`/`Sync` exactly when `T` is
unsafe impl<T: Send, Rows: Shape, Cols: Shape> Send for Own<T, Rows, Cols> {}
unsafe impl<T: Sync, Rows: Shape, Cols: Shape> Sync for Own<T, Rows, Cols> {}
242
/// cleanup guard: dropping this drops `nrows` contiguous initialized elements starting at `ptr`
pub(crate) struct DropCol<T> {
	ptr: *mut T,
	nrows: usize,
}

/// cleanup guard: dropping this drops an `nrows × ncols` block of initialized elements,
/// where consecutive column bases are `byte_col_stride` bytes apart
pub(crate) struct DropMat<T> {
	ptr: *mut T,
	nrows: usize,
	ncols: usize,
	// column stride in *bytes*, not elements
	byte_col_stride: usize,
}
254
impl<T> Drop for DropCol<T> {
	#[inline]
	fn drop(&mut self) {
		// compiled out entirely for trivially-destructible element types
		if try_const! { core::mem::needs_drop::<T>() } {
			unsafe {
				// SAFETY: whoever created this guard guarantees `ptr..ptr + nrows` is a
				// contiguous run of initialized elements
				let slice = core::slice::from_raw_parts_mut(self.ptr, self.nrows);
				core::ptr::drop_in_place(slice);
			}
		}
	}
}
266
impl<T> Drop for DropMat<T> {
	#[inline]
	fn drop(&mut self) {
		if try_const! { core::mem::needs_drop::<T>() } {
			let mut ptr = self.ptr;

			if self.nrows > 0 {
				// the `DropIter` temporary is dropped at the end of this statement; its
				// `Drop` impl drains the `map` iterator, running the closure once per
				// column and continuing past a panicking element destructor
				DropIter((0..self.ncols).map(|_| {
					// constructing the `DropCol` and immediately discarding it drops this
					// column's `nrows` elements right here
					DropCol { ptr, nrows: self.nrows };
					// advance to the next column base (stride is in bytes)
					ptr = ptr.wrapping_byte_add(self.byte_col_stride);
				}));
			}
		}
	}
}
282
impl<T, Rows: Shape, Cols: Shape> Drop for Own<T, Rows, Cols> {
	#[inline]
	fn drop(&mut self) {
		// only element destructors run here; the allocation itself is freed afterwards by
		// the `Drop` impl of the `raw: RawMat` field
		if try_const! { core::mem::needs_drop::<T>() } {
			if self.nrows.unbound() > 0 && self.ncols.unbound() > 0 {
				let size = core::mem::size_of::<T>();
				let ptr = self.raw.ptr.as_ptr();
				let row_capacity = self.raw.row_capacity;
				// column stride in bytes, as `DropMat` expects
				let stride = row_capacity * size;

				drop(DropMat {
					ptr,
					nrows: self.nrows.unbound(),
					ncols: self.ncols.unbound(),
					byte_col_stride: stride,
				})
			}
		}
	}
}
303
impl<T> Mat<T> {
	/// returns an empty matrix of dimension `0×0`.
	#[inline]
	pub const fn new() -> Self {
		Self(Own {
			raw: RawMat {
				// dangling but well-aligned; no allocation is made for a `0×0` matrix
				ptr: NonNull::dangling(),
				row_capacity: 0,
				col_capacity: 0,
				layout: StackReq::EMPTY,
				__marker: PhantomData,
			},
			nrows: 0,
			ncols: 0,
		})
	}

	/// reserves the minimum capacity for `row_capacity` rows and `col_capacity`
	/// columns without reallocating. does nothing if the capacity is already sufficient
	///
	/// # panics
	/// panics if the allocation fails (see [`Mat::reserve`])
	#[track_caller]
	pub fn with_capacity(row_capacity: usize, col_capacity: usize) -> Self {
		let mut me = Self::new();
		me.reserve(row_capacity, col_capacity);
		me
	}
}
330
331impl<T, Rows: Shape, Cols: Shape> Mat<T, Rows, Cols> {
332	unsafe fn init_with(
333		ptr: *mut T,
334		old_nrows: IdxInc<Rows>,
335		old_ncols: IdxInc<Cols>,
336		new_nrows: IdxInc<Rows>,
337		new_ncols: IdxInc<Cols>,
338		row_capacity: usize,
339		f: &mut impl FnMut(Idx<Rows>, Idx<Cols>) -> T,
340	) {
341		let stride = row_capacity;
342
343		let mut ptr = ptr.wrapping_add(stride * old_ncols.unbound());
344		let mut col_guard = DropMat {
345			ptr,
346			nrows: new_nrows.unbound() - old_nrows.unbound(),
347			ncols: 0,
348			byte_col_stride: stride,
349		};
350
351		for j in Cols::indices(old_ncols, new_ncols) {
352			let old = ptr;
353
354			noalias_annotate::<T, Rows, Cols>(
355				core::slice::from_raw_parts_mut(ptr as *mut _, new_nrows.unbound() - old_nrows.unbound()),
356				new_nrows,
357				old_nrows,
358				f,
359				j,
360			);
361
362			col_guard.ncols += 1;
363			ptr = old.wrapping_add(stride);
364		}
365		core::mem::forget(col_guard);
366	}
367
	/// returns a new matrix with dimensions `(nrows, ncols)`, filled with the provided function
	pub fn from_fn(nrows: Rows, ncols: Cols, f: impl FnMut(Idx<Rows>, Idx<Cols>) -> T) -> Self {
		unsafe {
			// infallible API: allocation failure panics here; use `try_reserve` for the
			// fallible path
			let raw = RawMat::<T>::try_with_capacity(nrows.unbound(), ncols.unbound()).unwrap();

			let ptr = raw.ptr.as_ptr();
			// fill the whole `nrows × ncols` rectangle; `init_with` cleans up if `f` panics
			Self::init_with(ptr, Rows::start(), Cols::start(), nrows.end(), ncols.end(), raw.row_capacity, &mut { f });

			Self(Own { raw, nrows, ncols })
		}
	}
379
380	/// returns a new matrix with dimensions `(nrows, ncols)`, filled with zeros
381	#[inline]
382	pub fn zeros(nrows: Rows, ncols: Cols) -> Self
383	where
384		T: ComplexField,
385	{
386		Self::from_fn(nrows, ncols, |_, _| T::zero_impl())
387	}
388
389	/// returns a new matrix with dimensions `(nrows, ncols)`, filled with ones
390	#[inline]
391	pub fn ones(nrows: Rows, ncols: Cols) -> Self
392	where
393		T: ComplexField,
394	{
395		Self::from_fn(nrows, ncols, |_, _| T::one_impl())
396	}
397
398	/// returns a new identity matrix, with ones on the diagonal and zeros everywhere else
399	#[inline]
400	pub fn identity(nrows: Rows, ncols: Cols) -> Self
401	where
402		T: ComplexField,
403	{
404		Self::from_fn(
405			nrows,
406			ncols,
407			|i, j| if i.unbound() == j.unbound() { T::one_impl() } else { T::zero_impl() },
408		)
409	}
410
411	/// returns a new matrix with dimensions `(nrows, ncols)`, filled with `value`
412	#[inline]
413	pub fn full(nrows: Rows, ncols: Cols, value: T) -> Self
414	where
415		T: Clone,
416	{
417		Self::from_fn(nrows, ncols, |_, _| value.clone())
418	}
419
420	/// reserves the minimum capacity for `new_row_capacity` rows and `new_col_capacity`
421	/// columns without reallocating, or returns an error in case of failure. does nothing if the
422	/// capacity is already sufficient
423	pub fn try_reserve(&mut self, new_row_capacity: usize, new_col_capacity: usize) -> Result<(), TryReserveError> {
424		self.0
425			.raw
426			.try_reserve(self.0.nrows.unbound(), self.0.ncols.unbound(), new_row_capacity, new_col_capacity)
427	}
428
429	/// reserves the minimum capacity for `new_row_capacity` rows and `new_col_capacity`
430	/// columns without reallocating. does nothing if the capacity is already sufficient
431	#[track_caller]
432	pub fn reserve(&mut self, new_row_capacity: usize, new_col_capacity: usize) {
433		self.try_reserve(new_row_capacity, new_col_capacity).unwrap()
434	}
435
	/// resizes the matrix in-place so that the new dimensions are `(new_nrows, new_ncols)`.
	/// new elements are created with the given function `f`, so that elements at index `(i, j)`
	/// are created by calling `f(i, j)`.
	pub fn resize_with(&mut self, new_nrows: Rows, new_ncols: Cols, f: impl FnMut(Idx<Rows>, Idx<Cols>) -> T) {
		unsafe {
			let this = &mut *self;

			if new_nrows == this.0.nrows && new_ncols == this.0.ncols {
				return;
			}

			// shrink first: drops elements along any dimension that got smaller
			// (axes that grow are left untouched by `truncate`)
			this.truncate(new_nrows, new_ncols);

			if new_nrows > this.0.nrows || new_ncols > this.0.ncols {
				this.reserve(new_nrows.unbound(), new_ncols.unbound());
			}

			let mut f = f;

			// grow rows: fill rows `old..new` across the currently existing columns
			if new_nrows > this.0.nrows {
				Self::init_with(
					this.0.raw.ptr.as_ptr(),
					this.0.nrows.end(),
					Cols::start(),
					new_nrows.end(),
					this.0.ncols.end(),
					this.0.raw.row_capacity,
					&mut f,
				);
				this.0.nrows = new_nrows;
			}

			// grow columns: fill the new columns over the full (new) height
			if new_ncols > this.0.ncols {
				Self::init_with(
					this.0.raw.ptr.as_ptr(),
					Rows::start(),
					this.0.ncols.end(),
					new_nrows.end(),
					new_ncols.end(),
					this.0.raw.row_capacity,
					&mut f,
				);
				this.0.ncols = new_ncols;
			}
		};
	}
482
483	/// truncates the matrix so that its new dimensions are `new_nrows` and `new_ncols`.  
484	/// both of the new dimensions must be smaller than or equal to the current dimensions
485	///
486	/// # panics
487	/// the function panics if any of the following conditions are violated:
488	/// - `new_nrows > self.nrows()`
489	/// - `new_ncols > self.ncols()`
490	pub fn truncate(&mut self, new_nrows: Rows, new_ncols: Cols) {
491		if new_ncols < self.0.ncols {
492			let stride = self.0.raw.row_capacity;
493
494			drop(DropMat {
495				ptr: self.0.raw.ptr.as_ptr().wrapping_add(stride * new_ncols.unbound()),
496				nrows: self.0.nrows.unbound(),
497				ncols: self.0.ncols.unbound() - new_ncols.unbound(),
498				byte_col_stride: stride,
499			});
500			self.0.ncols = new_ncols;
501		}
502		if new_nrows < self.0.nrows {
503			let size = core::mem::size_of::<T>();
504			let stride = size * self.0.raw.row_capacity;
505
506			drop(DropMat {
507				ptr: self.0.raw.ptr.as_ptr().wrapping_add(new_nrows.unbound()),
508				nrows: self.0.nrows.unbound() - new_nrows.unbound(),
509				ncols: self.0.ncols.unbound(),
510				byte_col_stride: stride,
511			});
512			self.0.nrows = new_nrows;
513		}
514	}
515
516	/// see [`MatRef::as_shape`]
517	pub fn into_shape<V: Shape, H: Shape>(self, nrows: V, ncols: H) -> Mat<T, V, H> {
518		let this = core::mem::ManuallyDrop::new(self);
519
520		Mat {
521			0: Own {
522				raw: RawMat {
523					ptr: this.0.raw.ptr,
524					row_capacity: this.0.raw.row_capacity,
525					col_capacity: this.0.raw.col_capacity,
526					layout: this.0.raw.layout,
527					__marker: PhantomData,
528				},
529				nrows,
530				ncols,
531			},
532		}
533	}
534
	/// set the dimensions of the matrix without initializing or dropping any elements.
	///
	/// # safety
	/// the behavior is undefined unless all of the following hold:
	/// - `nrows` does not exceed the row capacity
	/// - `ncols` does not exceed the column capacity
	/// - the elements that were previously out of bounds but are now in bounds must be
	/// initialized
	///
	/// note that initialized elements falling out of the new bounds are leaked rather
	/// than dropped (the owner only drops the `nrows × ncols` prefix)
	pub unsafe fn set_dims(&mut self, nrows: Rows, ncols: Cols) {
		self.0.nrows = nrows;
		self.0.ncols = ncols;
	}
547
548	/// returns a reference to a slice over the column at the given index
549	pub fn col_as_slice(&self, j: Idx<Cols>) -> &[T] {
550		self.col(j).try_as_col_major().unwrap().as_slice()
551	}
552
553	/// returns a reference to a slice over the column at the given index
554	pub fn col_as_slice_mut(&mut self, j: Idx<Cols>) -> &mut [T] {
555		self.col_mut(j).try_as_col_major_mut().unwrap().as_slice_mut()
556	}
557}
558
impl<T, Rows: Shape, Cols: Shape> Mat<T, Rows, Cols> {
	// dimension getters; the shape types are returned by value from `&self`, so they are `Copy`

	/// returns the number of rows of the matrix
	#[inline]
	pub fn nrows(&self) -> Rows {
		self.0.nrows
	}

	/// returns the number of columns of the matrix
	#[inline]
	pub fn ncols(&self) -> Cols {
		self.0.ncols
	}
}
572
impl<T: Clone, Rows: Shape, Cols: Shape> Clone for Own<T, Rows, Cols> {
	#[inline]
	fn clone(&self) -> Self {
		// view `self` through the public `Mat` wrapper so the element accessors are available
		let __self__ = Mat::from_inner_ref(self);
		// bind the runtime dimensions as type-level dims for checked element access
		with_dim!(M, __self__.nrows().unbound());
		with_dim!(N, __self__.ncols().unbound());
		let this = __self__.as_ref().as_shape(M, N);
		// element-by-element clone into a fresh allocation, then restore the original
		// shape types and unwrap back to the inner `Own`
		Mat::from_fn(this.nrows(), this.ncols(), |i, j| this.at(i, j).clone())
			.into_shape(__self__.nrows(), __self__.ncols())
			.0
	}
}
585
impl<T: core::fmt::Debug, Rows: Shape, Cols: Shape> core::fmt::Debug for Own<T, Rows, Cols> {
	fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
		// format through the borrowed matrix view
		self.rb().fmt(f)
	}
}
591
impl<T, Rows: Shape, Cols: Shape> Mat<T, Rows, Cols> {
	// read-only accessors: unless noted otherwise, each method below forwards to the
	// `MatRef` view returned by `self.as_ref()`; see the referenced item for full docs

	/// returns a pointer to the matrix data
	#[inline(always)]
	pub fn as_ptr(&self) -> *const T {
		self.as_ref().as_ptr()
	}

	/// returns the number of rows and columns of the matrix
	#[inline(always)]
	pub fn shape(&self) -> (Rows, Cols) {
		(self.nrows(), self.ncols())
	}

	/// returns the row stride of the matrix, specified in number of elements, not in bytes
	#[inline(always)]
	pub fn row_stride(&self) -> isize {
		// owned matrices are always column-major with contiguous columns
		1
	}

	/// returns the column stride of the matrix, specified in number of elements, not in bytes
	#[inline(always)]
	pub fn col_stride(&self) -> isize {
		// the allocation's row capacity, which may exceed `nrows`
		self.0.raw.row_capacity as isize
	}

	/// returns a raw pointer to the element at the given index
	#[inline(always)]
	pub fn ptr_at(&self, row: IdxInc<Rows>, col: IdxInc<Cols>) -> *const T {
		self.as_ref().ptr_at(row, col)
	}

	/// returns a raw pointer to the element at the given index, assuming the provided index
	/// is within the matrix bounds
	///
	/// # safety
	/// the behavior is undefined if any of the following conditions are violated:
	/// * `row < self.nrows()`
	/// * `col < self.ncols()`
	#[inline(always)]
	#[track_caller]
	pub unsafe fn ptr_inbounds_at(&self, row: Idx<Rows>, col: Idx<Cols>) -> *const T {
		self.as_ref().ptr_inbounds_at(row, col)
	}

	#[inline]
	#[track_caller]
	/// see [`MatRef::split_at`]
	pub fn split_at(
		&self,
		row: IdxInc<Rows>,
		col: IdxInc<Cols>,
	) -> (
		MatRef<'_, T, usize, usize>,
		MatRef<'_, T, usize, usize>,
		MatRef<'_, T, usize, usize>,
		MatRef<'_, T, usize, usize>,
	) {
		self.as_ref().split_at(row, col)
	}

	#[inline]
	#[track_caller]
	/// see [`MatRef::split_at_row`]
	pub fn split_at_row(&self, row: IdxInc<Rows>) -> (MatRef<'_, T, usize, Cols>, MatRef<'_, T, usize, Cols>) {
		self.as_ref().split_at_row(row)
	}

	#[inline]
	#[track_caller]
	/// see [`MatRef::split_at_col`]
	pub fn split_at_col(&self, col: IdxInc<Cols>) -> (MatRef<'_, T, Rows, usize>, MatRef<'_, T, Rows, usize>) {
		self.as_ref().split_at_col(col)
	}

	#[inline(always)]
	/// see [`MatRef::transpose`]
	pub fn transpose(&self) -> MatRef<'_, T, Cols, Rows> {
		self.as_ref().transpose()
	}

	#[inline(always)]
	/// see [`MatRef::conjugate`]
	pub fn conjugate(&self) -> MatRef<'_, T::Conj, Rows, Cols>
	where
		T: Conjugate,
	{
		self.as_ref().conjugate()
	}

	#[inline(always)]
	/// see [`MatRef::canonical`]
	pub fn canonical(&self) -> MatRef<'_, T::Canonical, Rows, Cols>
	where
		T: Conjugate,
	{
		self.as_ref().canonical()
	}

	#[inline(always)]
	/// see [`MatRef::adjoint`]
	pub fn adjoint(&self) -> MatRef<'_, T::Conj, Cols, Rows>
	where
		T: Conjugate,
	{
		self.as_ref().adjoint()
	}

	#[inline]
	/// see [`MatRef::reverse_rows`]
	pub fn reverse_rows(&self) -> MatRef<'_, T, Rows, Cols> {
		self.as_ref().reverse_rows()
	}

	#[inline]
	/// see [`MatRef::reverse_cols`]
	pub fn reverse_cols(&self) -> MatRef<'_, T, Rows, Cols> {
		self.as_ref().reverse_cols()
	}

	#[inline]
	/// see [`MatRef::reverse_rows_and_cols`]
	pub fn reverse_rows_and_cols(&self) -> MatRef<'_, T, Rows, Cols> {
		self.as_ref().reverse_rows_and_cols()
	}

	#[inline]
	/// see [`MatRef::submatrix`]
	pub fn submatrix<V: Shape, H: Shape>(&self, row_start: IdxInc<Rows>, col_start: IdxInc<Cols>, nrows: V, ncols: H) -> MatRef<'_, T, V, H> {
		self.as_ref().submatrix(row_start, col_start, nrows, ncols)
	}

	#[inline]
	/// see [`MatRef::subrows`]
	pub fn subrows<V: Shape>(&self, row_start: IdxInc<Rows>, nrows: V) -> MatRef<'_, T, V, Cols> {
		self.as_ref().subrows(row_start, nrows)
	}

	#[inline]
	/// see [`MatRef::subcols`]
	pub fn subcols<H: Shape>(&self, col_start: IdxInc<Cols>, ncols: H) -> MatRef<'_, T, Rows, H> {
		self.as_ref().subcols(col_start, ncols)
	}

	#[inline]
	/// see [`MatRef::as_shape`]
	pub fn as_shape<V: Shape, H: Shape>(&self, nrows: V, ncols: H) -> MatRef<'_, T, V, H> {
		self.as_ref().as_shape(nrows, ncols)
	}

	#[inline]
	/// see [`MatRef::as_row_shape`]
	pub fn as_row_shape<V: Shape>(&self, nrows: V) -> MatRef<'_, T, V, Cols> {
		self.as_ref().as_row_shape(nrows)
	}

	#[inline]
	/// see [`MatRef::as_col_shape`]
	pub fn as_col_shape<H: Shape>(&self, ncols: H) -> MatRef<'_, T, Rows, H> {
		self.as_ref().as_col_shape(ncols)
	}

	#[inline]
	/// see [`MatRef::as_dyn_stride`]
	pub fn as_dyn_stride(&self) -> MatRef<'_, T, Rows, Cols, isize, isize> {
		self.as_ref().as_dyn_stride()
	}

	#[inline]
	/// see [`MatRef::as_dyn`]
	pub fn as_dyn(&self) -> MatRef<'_, T, usize, usize> {
		self.as_ref().as_dyn()
	}

	#[inline]
	/// see [`MatRef::as_dyn_rows`]
	pub fn as_dyn_rows(&self) -> MatRef<'_, T, usize, Cols> {
		self.as_ref().as_dyn_rows()
	}

	#[inline]
	/// see [`MatRef::as_dyn_cols`]
	pub fn as_dyn_cols(&self) -> MatRef<'_, T, Rows, usize> {
		self.as_ref().as_dyn_cols()
	}

	#[inline]
	/// see [`MatRef::row`]
	pub fn row(&self, i: Idx<Rows>) -> RowRef<'_, T, Cols> {
		self.as_ref().row(i)
	}

	#[inline]
	#[track_caller]
	/// see [`MatRef::col`]
	pub fn col(&self, j: Idx<Cols>) -> ColRef<'_, T, Rows> {
		self.as_ref().col(j)
	}

	#[inline]
	/// see [`MatRef::col_iter`]
	pub fn col_iter(&self) -> impl '_ + ExactSizeIterator + DoubleEndedIterator<Item = ColRef<'_, T, Rows>> {
		self.as_ref().col_iter()
	}

	#[inline]
	/// see [`MatRef::row_iter`]
	pub fn row_iter(&self) -> impl '_ + ExactSizeIterator + DoubleEndedIterator<Item = RowRef<'_, T, Cols>> {
		self.as_ref().row_iter()
	}

	#[inline]
	#[track_caller]
	#[cfg(feature = "rayon")]
	/// see [`MatRef::par_col_iter`]
	pub fn par_col_iter(&self) -> impl '_ + rayon::iter::IndexedParallelIterator<Item = ColRef<'_, T, Rows>>
	where
		T: Sync,
	{
		self.as_ref().par_col_iter()
	}

	#[inline]
	#[track_caller]
	#[cfg(feature = "rayon")]
	/// see [`MatRef::par_row_iter`]
	pub fn par_row_iter(&self) -> impl '_ + rayon::iter::IndexedParallelIterator<Item = RowRef<'_, T, Cols>>
	where
		T: Sync,
	{
		self.as_ref().par_row_iter()
	}

	#[inline]
	#[track_caller]
	#[cfg(feature = "rayon")]
	/// see [`MatRef::par_col_chunks`]
	pub fn par_col_chunks(&self, chunk_size: usize) -> impl '_ + rayon::iter::IndexedParallelIterator<Item = MatRef<'_, T, Rows, usize>>
	where
		T: Sync,
	{
		self.as_ref().par_col_chunks(chunk_size)
	}

	#[inline]
	#[track_caller]
	#[cfg(feature = "rayon")]
	/// see [`MatRef::par_col_partition`]
	pub fn par_col_partition(&self, count: usize) -> impl '_ + rayon::iter::IndexedParallelIterator<Item = MatRef<'_, T, Rows, usize>>
	where
		T: Sync,
	{
		self.as_ref().par_col_partition(count)
	}

	#[inline]
	#[track_caller]
	#[cfg(feature = "rayon")]
	/// see [`MatRef::par_row_chunks`]
	pub fn par_row_chunks(&self, chunk_size: usize) -> impl '_ + rayon::iter::IndexedParallelIterator<Item = MatRef<'_, T, usize, Cols>>
	where
		T: Sync,
	{
		self.as_ref().par_row_chunks(chunk_size)
	}

	#[inline]
	#[track_caller]
	#[cfg(feature = "rayon")]
	/// see [`MatRef::par_row_partition`]
	pub fn par_row_partition(&self, count: usize) -> impl '_ + rayon::iter::IndexedParallelIterator<Item = MatRef<'_, T, usize, Cols>>
	where
		T: Sync,
	{
		self.as_ref().par_row_partition(count)
	}

	#[inline]
	/// see [`MatRef::try_as_col_major`]
	pub fn try_as_col_major(&self) -> Option<MatRef<'_, T, Rows, Cols, ContiguousFwd>> {
		self.as_ref().try_as_col_major()
	}

	#[inline]
	/// see [`MatRef::try_as_row_major`]
	pub fn try_as_row_major(&self) -> Option<MatRef<'_, T, Rows, Cols, isize, ContiguousFwd>> {
		self.as_ref().try_as_row_major()
	}

	#[track_caller]
	#[inline]
	/// see [`MatRef::get`]
	pub fn get<RowRange, ColRange>(&self, row: RowRange, col: ColRange) -> <MatRef<'_, T, Rows, Cols> as MatIndex<RowRange, ColRange>>::Target
	where
		for<'a> MatRef<'a, T, Rows, Cols>: MatIndex<RowRange, ColRange>,
	{
		<MatRef<'_, T, Rows, Cols> as MatIndex<RowRange, ColRange>>::get(self.as_ref(), row, col)
	}

	#[track_caller]
	#[inline]
	/// see [`MatRef::get_unchecked`]
	pub unsafe fn get_unchecked<RowRange, ColRange>(
		&self,
		row: RowRange,
		col: ColRange,
	) -> <MatRef<'_, T, Rows, Cols> as MatIndex<RowRange, ColRange>>::Target
	where
		for<'a> MatRef<'a, T, Rows, Cols>: MatIndex<RowRange, ColRange>,
	{
		unsafe { <MatRef<'_, T, Rows, Cols> as MatIndex<RowRange, ColRange>>::get_unchecked(self.as_ref(), row, col) }
	}

	#[track_caller]
	#[inline]
	/// see [`MatMut::get_mut`]
	pub fn get_mut<RowRange, ColRange>(&mut self, row: RowRange, col: ColRange) -> <MatMut<'_, T, Rows, Cols> as MatIndex<RowRange, ColRange>>::Target
	where
		for<'a> MatMut<'a, T, Rows, Cols>: MatIndex<RowRange, ColRange>,
	{
		<MatMut<'_, T, Rows, Cols> as MatIndex<RowRange, ColRange>>::get(self.as_mut(), row, col)
	}

	#[track_caller]
	#[inline]
	/// see [`MatMut::get_mut_unchecked`]
	pub unsafe fn get_mut_unchecked<RowRange, ColRange>(
		&mut self,
		row: RowRange,
		col: ColRange,
	) -> <MatMut<'_, T, Rows, Cols> as MatIndex<RowRange, ColRange>>::Target
	where
		for<'a> MatMut<'a, T, Rows, Cols>: MatIndex<RowRange, ColRange>,
	{
		unsafe { <MatMut<'_, T, Rows, Cols> as MatIndex<RowRange, ColRange>>::get_unchecked(self.as_mut(), row, col) }
	}
}
928
929impl<T, Rows: Shape, Cols: Shape> Mat<T, Rows, Cols> {
930	#[inline(always)]
931	/// returns a pointer to the matrix data
932	pub fn as_ptr_mut(&mut self) -> *mut T {
933		self.as_mut().as_ptr_mut()
934	}
935
936	#[inline(always)]
937	/// returns a raw pointer to the element at the given index
938	pub fn ptr_at_mut(&mut self, row: IdxInc<Rows>, col: IdxInc<Cols>) -> *mut T {
939		self.as_mut().ptr_at_mut(row, col)
940	}
941
942	#[inline(always)]
943	#[track_caller]
944	/// returns a raw pointer to the element at the given index, assuming the provided index
945	/// is within the matrix bounds
946	///
947	/// # safety
948	/// the behavior is undefined if any of the following conditions are violated:
949	/// * `row < self.nrows()`
950	/// * `col < self.ncols()`
951	pub unsafe fn ptr_inbounds_at_mut(&mut self, row: Idx<Rows>, col: Idx<Cols>) -> *mut T {
952		self.as_mut().ptr_inbounds_at_mut(row, col)
953	}
954
955	#[inline]
956	#[track_caller]
957	/// see [`MatMut::split_at_mut`]
958	pub fn split_at_mut(
959		&mut self,
960		row: IdxInc<Rows>,
961		col: IdxInc<Cols>,
962	) -> (
963		MatMut<'_, T, usize, usize>,
964		MatMut<'_, T, usize, usize>,
965		MatMut<'_, T, usize, usize>,
966		MatMut<'_, T, usize, usize>,
967	) {
968		self.as_mut().split_at_mut(row, col)
969	}
970
971	#[inline]
972	#[track_caller]
973	/// see [`MatMut::split_at_row_mut`]
974	pub fn split_at_row_mut(&mut self, row: IdxInc<Rows>) -> (MatMut<'_, T, usize, Cols>, MatMut<'_, T, usize, Cols>) {
975		self.as_mut().split_at_row_mut(row)
976	}
977
978	#[inline]
979	#[track_caller]
980	/// see [`MatMut::split_at_col_mut`]
981	pub fn split_at_col_mut(&mut self, col: IdxInc<Cols>) -> (MatMut<'_, T, Rows, usize>, MatMut<'_, T, Rows, usize>) {
982		self.as_mut().split_at_col_mut(col)
983	}
984
985	#[inline(always)]
986	/// see [`MatMut::transpose_mut`]
987	pub fn transpose_mut(&mut self) -> MatMut<'_, T, Cols, Rows> {
988		self.as_mut().transpose_mut()
989	}
990
991	#[inline(always)]
992	/// see [`MatMut::conjugate_mut`]
993	pub fn conjugate_mut(&mut self) -> MatMut<'_, T::Conj, Rows, Cols>
994	where
995		T: Conjugate,
996	{
997		self.as_mut().conjugate_mut()
998	}
999
1000	#[inline(always)]
1001	/// see [`MatMut::canonical_mut`]
1002	pub fn canonical_mut(&mut self) -> MatMut<'_, T::Canonical, Rows, Cols>
1003	where
1004		T: Conjugate,
1005	{
1006		self.as_mut().canonical_mut()
1007	}
1008
1009	#[inline(always)]
1010	/// see [`MatMut::adjoint_mut`]
1011	pub fn adjoint_mut(&mut self) -> MatMut<'_, T::Conj, Cols, Rows>
1012	where
1013		T: Conjugate,
1014	{
1015		self.as_mut().adjoint_mut()
1016	}
1017
1018	#[inline]
1019	/// see [`MatMut::reverse_rows_mut`]
1020	pub fn reverse_rows_mut(&mut self) -> MatMut<'_, T, Rows, Cols> {
1021		self.as_mut().reverse_rows_mut()
1022	}
1023
1024	#[inline]
1025	/// see [`MatMut::reverse_cols_mut`]
1026	pub fn reverse_cols_mut(&mut self) -> MatMut<'_, T, Rows, Cols> {
1027		self.as_mut().reverse_cols_mut()
1028	}
1029
1030	#[inline]
1031	/// see [`MatMut::reverse_rows_and_cols_mut`]
1032	pub fn reverse_rows_and_cols_mut(&mut self) -> MatMut<'_, T, Rows, Cols> {
1033		self.as_mut().reverse_rows_and_cols_mut()
1034	}
1035
1036	#[inline]
1037	/// see [`MatMut::submatrix_mut`]
1038	pub fn submatrix_mut<V: Shape, H: Shape>(&mut self, row_start: IdxInc<Rows>, col_start: IdxInc<Cols>, nrows: V, ncols: H) -> MatMut<'_, T, V, H> {
1039		self.as_mut().submatrix_mut(row_start, col_start, nrows, ncols)
1040	}
1041
1042	#[inline]
1043	/// see [`MatMut::subrows_mut`]
1044	pub fn subrows_mut<V: Shape>(&mut self, row_start: IdxInc<Rows>, nrows: V) -> MatMut<'_, T, V, Cols> {
1045		self.as_mut().subrows_mut(row_start, nrows)
1046	}
1047
1048	#[inline]
1049	/// see [`MatMut::subcols_mut`]
1050	pub fn subcols_mut<H: Shape>(&mut self, col_start: IdxInc<Cols>, ncols: H) -> MatMut<'_, T, Rows, H> {
1051		self.as_mut().subcols_mut(col_start, ncols)
1052	}
1053
1054	#[inline]
1055	#[track_caller]
1056	/// see [`MatMut::as_shape_mut`]
1057	pub fn as_shape_mut<V: Shape, H: Shape>(&mut self, nrows: V, ncols: H) -> MatMut<'_, T, V, H> {
1058		self.as_mut().as_shape_mut(nrows, ncols)
1059	}
1060
1061	#[inline]
1062	/// see [`MatMut::as_row_shape_mut`]
1063	pub fn as_row_shape_mut<V: Shape>(&mut self, nrows: V) -> MatMut<'_, T, V, Cols> {
1064		self.as_mut().as_row_shape_mut(nrows)
1065	}
1066
1067	#[inline]
1068	/// see [`MatMut::as_col_shape_mut`]
1069	pub fn as_col_shape_mut<H: Shape>(&mut self, ncols: H) -> MatMut<'_, T, Rows, H> {
1070		self.as_mut().as_col_shape_mut(ncols)
1071	}
1072
1073	#[inline]
1074	/// see [`MatMut::as_dyn_stride_mut`]
1075	pub fn as_dyn_stride_mut(&mut self) -> MatMut<'_, T, Rows, Cols, isize, isize> {
1076		self.as_mut().as_dyn_stride_mut()
1077	}
1078
	#[inline]
	/// see [`MatMut::as_dyn_mut`]
	///
	/// erases both compile-time dimension types to `usize` in the returned view's type
	pub fn as_dyn_mut(&mut self) -> MatMut<'_, T, usize, usize> {
		self.as_mut().as_dyn_mut()
	}
1084
	#[inline]
	/// see [`MatMut::as_dyn_rows_mut`]
	///
	/// erases only the row-dimension type to `usize` in the returned view's type
	pub fn as_dyn_rows_mut(&mut self) -> MatMut<'_, T, usize, Cols> {
		self.as_mut().as_dyn_rows_mut()
	}
1090
	#[inline]
	/// see [`MatMut::as_dyn_cols_mut`]
	///
	/// erases only the column-dimension type to `usize` in the returned view's type
	pub fn as_dyn_cols_mut(&mut self) -> MatMut<'_, T, Rows, usize> {
		self.as_mut().as_dyn_cols_mut()
	}
1096
	#[inline]
	/// see [`MatMut::row_mut`]
	///
	/// the returned row view mutably borrows `self`
	pub fn row_mut(&mut self, i: Idx<Rows>) -> RowMut<'_, T, Cols> {
		self.as_mut().row_mut(i)
	}
1102
	#[inline]
	/// see [`MatMut::col_mut`]
	///
	/// the returned column view mutably borrows `self`
	pub fn col_mut(&mut self, j: Idx<Cols>) -> ColMut<'_, T, Rows> {
		self.as_mut().col_mut(j)
	}
1108
	#[inline]
	/// see [`MatMut::col_iter_mut`]
	///
	/// every yielded column view mutably borrows `self` for the iterator's lifetime
	pub fn col_iter_mut(&mut self) -> impl '_ + ExactSizeIterator + DoubleEndedIterator<Item = ColMut<'_, T, Rows>> {
		self.as_mut().col_iter_mut()
	}
1114
	#[inline]
	/// see [`MatMut::row_iter_mut`]
	///
	/// every yielded row view mutably borrows `self` for the iterator's lifetime
	pub fn row_iter_mut(&mut self) -> impl '_ + ExactSizeIterator + DoubleEndedIterator<Item = RowMut<'_, T, Cols>> {
		self.as_mut().row_iter_mut()
	}
1120
	#[inline]
	#[track_caller]
	#[cfg(feature = "rayon")]
	/// see [`MatMut::par_col_iter_mut`]
	///
	/// available only with the `rayon` feature; `T: Send` is required by the signature
	pub fn par_col_iter_mut(&mut self) -> impl '_ + rayon::iter::IndexedParallelIterator<Item = ColMut<'_, T, Rows>>
	where
		T: Send,
	{
		self.as_mut().par_col_iter_mut()
	}
1131
	#[inline]
	#[track_caller]
	#[cfg(feature = "rayon")]
	/// see [`MatMut::par_row_iter_mut`]
	///
	/// available only with the `rayon` feature; `T: Send` is required by the signature
	pub fn par_row_iter_mut(&mut self) -> impl '_ + rayon::iter::IndexedParallelIterator<Item = RowMut<'_, T, Cols>>
	where
		T: Send,
	{
		self.as_mut().par_row_iter_mut()
	}
1142
	#[inline]
	#[track_caller]
	#[cfg(feature = "rayon")]
	/// see [`MatMut::par_col_chunks_mut`]
	///
	/// available only with the `rayon` feature; may panic — see the referenced method (`#[track_caller]`)
	pub fn par_col_chunks_mut(&mut self, chunk_size: usize) -> impl '_ + rayon::iter::IndexedParallelIterator<Item = MatMut<'_, T, Rows, usize>>
	where
		T: Send,
	{
		self.as_mut().par_col_chunks_mut(chunk_size)
	}
1153
	#[inline]
	#[track_caller]
	#[cfg(feature = "rayon")]
	/// see [`MatMut::par_col_partition_mut`]
	///
	/// available only with the `rayon` feature; may panic — see the referenced method (`#[track_caller]`)
	pub fn par_col_partition_mut(&mut self, count: usize) -> impl '_ + rayon::iter::IndexedParallelIterator<Item = MatMut<'_, T, Rows, usize>>
	where
		T: Send,
	{
		self.as_mut().par_col_partition_mut(count)
	}
1164
	#[inline]
	#[track_caller]
	#[cfg(feature = "rayon")]
	/// see [`MatMut::par_row_chunks_mut`]
	///
	/// available only with the `rayon` feature; may panic — see the referenced method (`#[track_caller]`)
	pub fn par_row_chunks_mut(&mut self, chunk_size: usize) -> impl '_ + rayon::iter::IndexedParallelIterator<Item = MatMut<'_, T, usize, Cols>>
	where
		T: Send,
	{
		self.as_mut().par_row_chunks_mut(chunk_size)
	}
1175
	#[inline]
	#[track_caller]
	#[cfg(feature = "rayon")]
	/// see [`MatMut::par_row_partition_mut`]
	///
	/// available only with the `rayon` feature; may panic — see the referenced method (`#[track_caller]`)
	pub fn par_row_partition_mut(&mut self, count: usize) -> impl '_ + rayon::iter::IndexedParallelIterator<Item = MatMut<'_, T, usize, Cols>>
	where
		T: Send,
	{
		self.as_mut().par_row_partition_mut(count)
	}
1186
	#[inline]
	/// see [`MatMut::split_first_row_mut`]
	///
	/// returns `None` when there is no row to split off — see the referenced method
	pub fn split_first_row_mut(&mut self) -> Option<(RowMut<'_, T, Cols>, MatMut<'_, T, usize, Cols>)> {
		self.as_mut().split_first_row_mut()
	}
1192
	#[inline]
	/// see [`MatMut::try_as_col_major_mut`]
	///
	/// fallible: yields a `ContiguousFwd`-row-stride view only when the layout permits it
	pub fn try_as_col_major_mut(&mut self) -> Option<MatMut<'_, T, Rows, Cols, ContiguousFwd>> {
		self.as_mut().try_as_col_major_mut()
	}
1198
	#[inline]
	/// see [`MatMut::try_as_row_major_mut`]
	///
	/// fallible: yields a `ContiguousFwd`-col-stride view only when the layout permits it
	pub fn try_as_row_major_mut(&mut self) -> Option<MatMut<'_, T, Rows, Cols, isize, ContiguousFwd>> {
		self.as_mut().try_as_row_major_mut()
	}
1204
	#[inline]
	#[track_caller]
	/// see [`MatMut::two_cols_mut`]
	///
	/// may panic — see the referenced method; `#[track_caller]` reports any panic at the call site
	pub fn two_cols_mut(&mut self, i0: Idx<Cols>, i1: Idx<Cols>) -> (ColMut<'_, T, Rows>, ColMut<'_, T, Rows>) {
		self.as_mut().two_cols_mut(i0, i1)
	}
1211
	#[inline]
	#[track_caller]
	/// see [`MatMut::two_rows_mut`]
	///
	/// may panic — see the referenced method; `#[track_caller]` reports any panic at the call site
	pub fn two_rows_mut(&mut self, i0: Idx<Rows>, i1: Idx<Rows>) -> (RowMut<'_, T, Cols>, RowMut<'_, T, Cols>) {
		self.as_mut().two_rows_mut(i0, i1)
	}
1218}
1219
// diagonal accessors, available only when the row and column dimensions share
// the same type parameter (square-typed matrices)
impl<T, Dim: Shape> Mat<T, Dim, Dim> {
	#[inline]
	/// see [`MatRef::diagonal`]
	pub fn diagonal(&self) -> DiagRef<'_, T, Dim, isize> {
		self.as_ref().diagonal()
	}

	#[inline]
	/// see [`MatMut::diagonal_mut`]
	///
	/// the returned diagonal view mutably borrows `self`
	pub fn diagonal_mut(&mut self) -> DiagMut<'_, T, Dim, isize> {
		self.as_mut().diagonal_mut()
	}
}
1233
impl<'short, T, Rows: Shape, Cols: Shape> Reborrow<'short> for Own<T, Rows, Cols> {
	type Target = Ref<'short, T, Rows, Cols>;

	#[inline]
	fn rb(&'short self) -> Self::Target {
		// build a shared view directly over the owned storage: elements within a
		// column are contiguous (`row_stride: 1`), and consecutive columns are
		// separated by the allocated row capacity (the leading dimension), which
		// may be larger than `nrows` when the allocation is padded
		Ref {
			imp: MatView {
				ptr: self.raw.ptr,
				nrows: self.nrows,
				ncols: self.ncols,
				row_stride: 1,
				col_stride: self.raw.row_capacity as isize,
			},
			__marker: PhantomData,
		}
	}
}
impl<'short, T, Rows: Shape, Cols: Shape> ReborrowMut<'short> for Own<T, Rows, Cols> {
	type Target = Mut<'short, T, Rows, Cols>;

	#[inline]
	fn rb_mut(&'short mut self) -> Self::Target {
		// mutable counterpart of `rb`: same column-major layout, with the
		// allocated row capacity used as the column stride (leading dimension)
		Mut {
			imp: MatView {
				ptr: self.raw.ptr,
				nrows: self.nrows,
				ncols: self.ncols,
				row_stride: 1,
				col_stride: self.raw.row_capacity as isize,
			},
			__marker: PhantomData,
		}
	}
}
1268
1269impl<T, Cols: Shape> Mat<T, usize, Cols> {
1270	/// inserts a row at the end of the matrix
1271	/// # panics
1272	/// The function panics if the number of columns in the row does not match the number of columns
1273	/// in the matrix
1274	pub fn push_row(&mut self, row: RowRef<'_, T, Cols>)
1275	where
1276		T: Clone,
1277	{
1278		self::assert!(
1279			self.ncols() == row.ncols(),
1280			"row ncols ({:?}) must match matrix ncols ({:?})",
1281			row.ncols(),
1282			self.ncols()
1283		);
1284
1285		self.resize_with(self.nrows() + 1, self.ncols(), |_, j| row[j].clone());
1286	}
1287}
1288
1289impl<T, Rows: Shape> Mat<T, Rows, usize> {
1290	/// inserts a col at the end of the matrix
1291	/// # panics
1292	/// The function panics if the number of rows in the col does not match the number of rows in
1293	/// the matrix
1294	pub fn push_col(&mut self, col: ColRef<'_, T, Rows>)
1295	where
1296		T: Clone,
1297	{
1298		self::assert!(
1299			self.nrows() == col.nrows(),
1300			"col nrows ({:?}) must match matrix nrows ({:?})",
1301			col.nrows(),
1302			self.nrows()
1303		);
1304
1305		self.resize_with(self.nrows(), self.ncols() + 1, |i, _| col[i].clone());
1306	}
1307}
1308
impl<T, Rows: Shape, Cols: Shape> Mat<T, Rows, Cols>
where
	T: RealField,
{
	/// see [MatRef::min]
	///
	/// note: consumes `self`
	pub fn min(self) -> Option<T> {
		MatRef::internal_min(self.as_dyn())
	}

	/// see [MatRef::max]
	///
	/// note: consumes `self`
	pub fn max(self) -> Option<T> {
		MatRef::internal_max(self.as_dyn())
	}
}
1323
/// unit tests for resizing, row/column pushing, and min/max reductions
#[cfg(test)]
mod tests {
	use crate::{assert, mat};

	#[test]
	fn test_resize() {
		// Create a matrix
		let mut m = mat![
			[1.0, 2.0, 3.0],
			[4.0, 5.0, 6.0],
			[7.0, 8.0, 9.0],
			[10.0, 11.0, 12.0], //
		];

		// grow by one row; new entries are filled with 99.9
		m.resize_with(m.nrows() + 1, m.ncols(), |_, _| 99.9);

		let target = mat![
			[1.0, 2.0, 3.0],
			[4.0, 5.0, 6.0],
			[7.0, 8.0, 9.0],
			[10.0, 11.0, 12.0],
			[99.9, 99.9, 99.9], //
		];

		assert!(m == target);
	}

	#[test]
	fn test_resize_5() {
		// Create a matrix
		let mut m = mat![
			[1.0, 2.0, 3.0],
			[4.0, 5.0, 6.0],
			[7.0, 8.0, 9.0],
			[10.0, 11.0, 12.0], //
		];

		// grow by five rows at once; exercises reallocation beyond a single row
		m.resize_with(m.nrows() + 5, m.ncols(), |_, _| 99.9);

		let target = mat![
			[1.0, 2.0, 3.0],
			[4.0, 5.0, 6.0],
			[7.0, 8.0, 9.0],
			[10.0, 11.0, 12.0],
			[99.9, 99.9, 99.9], //
			[99.9, 99.9, 99.9], //
			[99.9, 99.9, 99.9], //
			[99.9, 99.9, 99.9], //
			[99.9, 99.9, 99.9], //
		];

		assert!(m == target);
	}

	#[test]
	fn test_resize_5_1() {
		// Create a matrix
		let mut m = mat![
			[1.0, 2.0, 3.0],
			[4.0, 5.0, 6.0],
			[7.0, 8.0, 9.0],
			[10.0, 11.0, 12.0], //
		];

		// grow both dimensions simultaneously (5 rows, 1 column)
		m.resize_with(m.nrows() + 5, m.ncols() + 1, |_, _| 99.9);

		let target = mat![
			[1.0, 2.0, 3.0, 99.9],
			[4.0, 5.0, 6.0, 99.9],
			[7.0, 8.0, 9.0, 99.9],
			[10.0, 11.0, 12.0, 99.9],
			[99.9, 99.9, 99.9, 99.9], //
			[99.9, 99.9, 99.9, 99.9], //
			[99.9, 99.9, 99.9, 99.9], //
			[99.9, 99.9, 99.9, 99.9], //
			[99.9, 99.9, 99.9, 99.9], //
		];

		assert!(m == target);
	}

	#[test]
	fn test_push_row() {
		let mut m = mat![[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0],];

		// NOTE(review): `row!`/`col!` are used without an explicit `use` here —
		// presumably in scope via a crate-level macro export; confirm
		let row = row![10.0, 11.0, 12.0];

		m.push_row(row.as_ref());

		let target = mat![[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0], [10.0, 11.0, 12.0],];

		assert!(m == target);
	}

	#[test]
	#[should_panic]
	fn test_push_row_panic() {
		let mut m = mat![[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0],];

		// row has one too many columns
		let row = row![10.0, 11.0, 12.0, 13.0];

		m.push_row(row.as_ref());
	}

	#[test]
	fn test_push_col() {
		let mut m = mat![[1.0, 2.0, 3.0], [1.0, 2.0, 3.0], [1.0, 2.0, 3.0],];

		let col = col![4.0, 4.0, 4.0];

		m.push_col(col.as_ref());

		let target = mat![[1.0, 2.0, 3.0, 4.0], [1.0, 2.0, 3.0, 4.0], [1.0, 2.0, 3.0, 4.0],];

		assert!(m == target);
	}

	#[test]
	#[should_panic]
	fn test_push_col_panic() {
		let mut m = mat![[1.0, 2.0, 3.0], [1.0, 2.0, 3.0], [1.0, 2.0, 3.0],];

		// col has one too many rows
		let col = col![4.0, 4.0, 4.0, 4.0];

		m.push_col(col.as_ref());
	}

	#[test]
	fn test_min() {
		use crate::Mat;
		let m = mat![
			[1.0, 5.0, 3.0],
			[4.0, 2.0, 9.0],
			[7.0, 8.0, 6.0], //
		];

		assert_eq!(m.min(), Some(1.0));

		// an empty matrix has no minimum
		let empty: Mat<f64> = Mat::new();
		assert_eq!(empty.min(), None);
	}

	#[test]
	fn test_max() {
		use crate::Mat;
		let m = mat![
			[1.0, 5.0, 3.0],
			[4.0, 2.0, 9.0],
			[7.0, 8.0, 6.0], //
		];

		assert_eq!(m.max(), Some(9.0));

		// an empty matrix has no maximum
		let empty: Mat<f64> = Mat::new();
		assert_eq!(empty.max(), None);
	}
}