#[cfg(feature = "abomonation-serialize")]
use std::io::{Result as IOResult, Write};
#[cfg(all(feature = "alloc", not(feature = "std")))]
use alloc::vec::Vec;
use crate::base::allocator::Allocator;
use crate::base::constraint::{SameNumberOfRows, ShapeConstraint};
use crate::base::default_allocator::DefaultAllocator;
use crate::base::dimension::{Dim, DimName, Dynamic, U1};
use crate::base::storage::{
ContiguousStorage, ContiguousStorageMut, Owned, ReshapableStorage, Storage, StorageMut,
};
use crate::base::{Scalar, Vector};
#[cfg(feature = "abomonation-serialize")]
use abomonation::Abomonation;
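
/// A `Vec`-based matrix data storage. It may be dynamically-sized.
///
/// Elements are stored in column-major order: the element at row `i` and
/// column `j` lives at index `i + j * nrows` of the underlying `Vec`.
///
/// # Example
/// A minimal usage sketch (assuming the crate-root re-exports of
/// `VecStorage` and `Dynamic`):
///
/// ```
/// use nalgebra::{Dynamic, VecStorage};
///
/// // A 2×3 storage holding the columns [1, 2], [3, 4] and [5, 6].
/// let storage = VecStorage::new(Dynamic::new(2), Dynamic::new(3), vec![1, 2, 3, 4, 5, 6]);
/// assert_eq!(storage.len(), 6);
/// assert_eq!(storage.as_vec()[2], 3); // First element of the second column.
/// ```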
#[repr(C)]
#[derive(Eq, Debug, Clone, PartialEq)]
#[cfg_attr(feature = "serde-serialize", derive(Serialize, Deserialize))]
pub struct VecStorage<N, R: Dim, C: Dim> {
data: Vec<N>,
nrows: R,
ncols: C,
}
#[deprecated(note = "renamed to `VecStorage`")]
pub type MatrixVec<N, R, C> = VecStorage<N, R, C>;
impl<N, R: Dim, C: Dim> VecStorage<N, R, C> {
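    /// Creates a new dynamic matrix data storage from the given vector and shape.
    ///
    /// # Panics
    /// Panics if `nrows.value() * ncols.value()` does not equal `data.len()`.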
#[inline]
pub fn new(nrows: R, ncols: C, data: Vec<N>) -> Self {
assert!(
nrows.value() * ncols.value() == data.len(),
"Data storage buffer dimension mismatch."
);
Self { data, nrows, ncols }
}
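    /// The underlying data storage.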
#[inline]
pub fn as_vec(&self) -> &Vec<N> {
&self.data
}
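    /// The underlying mutable data storage.
    ///
    /// # Safety
    /// This is unsafe because it may cause undefined behavior if the length of
    /// the vector is changed by the caller: it must remain `nrows * ncols`.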
#[inline]
pub unsafe fn as_vec_mut(&mut self) -> &mut Vec<N> {
&mut self.data
}
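    /// Resizes the underlying data storage to `sz` elements and unwraps it.
    ///
    /// # Safety
    /// If `sz` is larger than the current size, the added elements are
    /// uninitialized and must be written before being read. If `sz` is smaller
    /// than the current size, the truncated elements are forgotten without
    /// their destructors running.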
#[inline]
    pub unsafe fn resize(mut self, sz: usize) -> Vec<N> {
        let len = self.len();
        if sz < len {
            // Shrink: forget the tail elements (their destructors do not run)
            // and release the excess capacity.
            self.data.set_len(sz);
            self.data.shrink_to_fit();
        } else {
            // Grow: reserve exactly the missing capacity, then expose the
            // new, still uninitialized, elements.
            self.data.reserve_exact(sz - len);
            self.data.set_len(sz);
        }
        self.data
    }
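    /// The number of elements in the underlying vector.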
#[inline]
pub fn len(&self) -> usize {
self.data.len()
}
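    /// Returns `true` if the underlying vector contains no elements.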
#[inline]
pub fn is_empty(&self) -> bool {
self.len() == 0
}
}
impl<N, R: Dim, C: Dim> From<VecStorage<N, R, C>> for Vec<N> {
    fn from(vec: VecStorage<N, R, C>) -> Self {
        vec.data
    }
}
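
// SAFETY: `VecStorage` owns its buffer, which is contiguous and column-major:
// the element at row `i`, column `j` lives at index `i + j * nrows`. Hence the
// row stride is always `U1` and the column stride is the number of rows. The
// impls below cover the dynamic-rows and dynamic-columns cases separately.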
unsafe impl<N: Scalar, C: Dim> Storage<N, Dynamic, C> for VecStorage<N, Dynamic, C>
where
DefaultAllocator: Allocator<N, Dynamic, C, Buffer = Self>,
{
type RStride = U1;
type CStride = Dynamic;
#[inline]
fn ptr(&self) -> *const N {
self.data.as_ptr()
}
#[inline]
fn shape(&self) -> (Dynamic, C) {
(self.nrows, self.ncols)
}
#[inline]
fn strides(&self) -> (Self::RStride, Self::CStride) {
(Self::RStride::name(), self.nrows)
}
#[inline]
fn is_contiguous(&self) -> bool {
true
}
#[inline]
fn into_owned(self) -> Owned<N, Dynamic, C>
where
DefaultAllocator: Allocator<N, Dynamic, C>,
{
self
}
#[inline]
fn clone_owned(&self) -> Owned<N, Dynamic, C>
where
DefaultAllocator: Allocator<N, Dynamic, C>,
{
self.clone()
}
#[inline]
fn as_slice(&self) -> &[N] {
&self.data
}
}
unsafe impl<N: Scalar, R: DimName> Storage<N, R, Dynamic> for VecStorage<N, R, Dynamic>
where
DefaultAllocator: Allocator<N, R, Dynamic, Buffer = Self>,
{
type RStride = U1;
type CStride = R;
#[inline]
fn ptr(&self) -> *const N {
self.data.as_ptr()
}
#[inline]
fn shape(&self) -> (R, Dynamic) {
(self.nrows, self.ncols)
}
#[inline]
fn strides(&self) -> (Self::RStride, Self::CStride) {
(Self::RStride::name(), self.nrows)
}
#[inline]
fn is_contiguous(&self) -> bool {
true
}
#[inline]
fn into_owned(self) -> Owned<N, R, Dynamic>
where
DefaultAllocator: Allocator<N, R, Dynamic>,
{
self
}
#[inline]
fn clone_owned(&self) -> Owned<N, R, Dynamic>
where
DefaultAllocator: Allocator<N, R, Dynamic>,
{
self.clone()
}
#[inline]
fn as_slice(&self) -> &[N] {
&self.data
}
}
unsafe impl<N: Scalar, C: Dim> StorageMut<N, Dynamic, C> for VecStorage<N, Dynamic, C>
where
DefaultAllocator: Allocator<N, Dynamic, C, Buffer = Self>,
{
#[inline]
fn ptr_mut(&mut self) -> *mut N {
self.data.as_mut_ptr()
}
#[inline]
fn as_mut_slice(&mut self) -> &mut [N] {
&mut self.data[..]
}
}
unsafe impl<N: Scalar, C: Dim> ContiguousStorage<N, Dynamic, C> for VecStorage<N, Dynamic, C> where
DefaultAllocator: Allocator<N, Dynamic, C, Buffer = Self>
{
}
unsafe impl<N: Scalar, C: Dim> ContiguousStorageMut<N, Dynamic, C> for VecStorage<N, Dynamic, C> where
DefaultAllocator: Allocator<N, Dynamic, C, Buffer = Self>
{
}
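
// Reshaping reuses the same data buffer: only the dimensions are
// reinterpreted, still in column-major order, so no element is moved or
// copied.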
impl<N, C1, C2> ReshapableStorage<N, Dynamic, C1, Dynamic, C2> for VecStorage<N, Dynamic, C1>
where
N: Scalar,
C1: Dim,
C2: Dim,
{
type Output = VecStorage<N, Dynamic, C2>;
fn reshape_generic(self, nrows: Dynamic, ncols: C2) -> Self::Output {
assert_eq!(nrows.value() * ncols.value(), self.data.len());
VecStorage {
data: self.data,
nrows,
ncols,
}
}
}
impl<N, C1, R2> ReshapableStorage<N, Dynamic, C1, R2, Dynamic> for VecStorage<N, Dynamic, C1>
where
N: Scalar,
C1: Dim,
R2: DimName,
{
type Output = VecStorage<N, R2, Dynamic>;
fn reshape_generic(self, nrows: R2, ncols: Dynamic) -> Self::Output {
assert_eq!(nrows.value() * ncols.value(), self.data.len());
VecStorage {
data: self.data,
nrows,
ncols,
}
}
}
unsafe impl<N: Scalar, R: DimName> StorageMut<N, R, Dynamic> for VecStorage<N, R, Dynamic>
where
DefaultAllocator: Allocator<N, R, Dynamic, Buffer = Self>,
{
#[inline]
fn ptr_mut(&mut self) -> *mut N {
self.data.as_mut_ptr()
}
#[inline]
fn as_mut_slice(&mut self) -> &mut [N] {
&mut self.data[..]
}
}
impl<N, R1, C2> ReshapableStorage<N, R1, Dynamic, Dynamic, C2> for VecStorage<N, R1, Dynamic>
where
N: Scalar,
R1: DimName,
C2: Dim,
{
type Output = VecStorage<N, Dynamic, C2>;
fn reshape_generic(self, nrows: Dynamic, ncols: C2) -> Self::Output {
assert_eq!(nrows.value() * ncols.value(), self.data.len());
VecStorage {
data: self.data,
nrows,
ncols,
}
}
}
impl<N, R1, R2> ReshapableStorage<N, R1, Dynamic, R2, Dynamic> for VecStorage<N, R1, Dynamic>
where
N: Scalar,
R1: DimName,
R2: DimName,
{
type Output = VecStorage<N, R2, Dynamic>;
fn reshape_generic(self, nrows: R2, ncols: Dynamic) -> Self::Output {
assert_eq!(nrows.value() * ncols.value(), self.data.len());
VecStorage {
data: self.data,
nrows,
ncols,
}
}
}
#[cfg(feature = "abomonation-serialize")]
impl<N: Abomonation, R: Dim, C: Dim> Abomonation for VecStorage<N, R, C> {
unsafe fn entomb<W: Write>(&self, writer: &mut W) -> IOResult<()> {
self.data.entomb(writer)
}
unsafe fn exhume<'a, 'b>(&'a mut self, bytes: &'b mut [u8]) -> Option<&'b mut [u8]> {
self.data.exhume(bytes)
}
fn extent(&self) -> usize {
self.data.extent()
}
}
unsafe impl<N: Scalar, R: DimName> ContiguousStorage<N, R, Dynamic> for VecStorage<N, R, Dynamic> where
DefaultAllocator: Allocator<N, R, Dynamic, Buffer = Self>
{
}
unsafe impl<N: Scalar, R: DimName> ContiguousStorageMut<N, R, Dynamic> for VecStorage<N, R, Dynamic> where
DefaultAllocator: Allocator<N, R, Dynamic, Buffer = Self>
{
}
impl<N, R: Dim> Extend<N> for VecStorage<N, R, Dynamic> {
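    /// Extends the number of columns of the `VecStorage` with elements
    /// from the given iterator.
    ///
    /// # Panics
    /// This function panics if the number of elements yielded by the
    /// given iterator is not a multiple of the number of rows of this
    /// `VecStorage`.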
fn extend<I: IntoIterator<Item = N>>(&mut self, iter: I) {
self.data.extend(iter);
self.ncols = Dynamic::new(self.data.len() / self.nrows.value());
        assert!(
            self.data.len() % self.nrows.value() == 0,
            "The number of elements produced by the given iterator was not a multiple of the number of rows."
        );
}
}
impl<'a, N: 'a + Copy, R: Dim> Extend<&'a N> for VecStorage<N, R, Dynamic> {
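    /// Extends the number of columns of the `VecStorage` with copies of
    /// elements from the given iterator.
    ///
    /// # Panics
    /// This function panics if the number of elements yielded by the
    /// given iterator is not a multiple of the number of rows of this
    /// `VecStorage`.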
fn extend<I: IntoIterator<Item = &'a N>>(&mut self, iter: I) {
self.extend(iter.into_iter().copied())
}
}
impl<N, R, RV, SV> Extend<Vector<N, RV, SV>> for VecStorage<N, R, Dynamic>
where
N: Scalar,
R: Dim,
RV: Dim,
SV: Storage<N, RV>,
ShapeConstraint: SameNumberOfRows<R, RV>,
{
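    /// Extends the number of columns of the `VecStorage` with vectors
    /// from the given iterator.
    ///
    /// # Panics
    /// This function panics if the number of rows of each `Vector` yielded
    /// by the iterator is not equal to the number of rows of this `VecStorage`.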
    fn extend<I: IntoIterator<Item = Vector<N, RV, SV>>>(&mut self, iter: I) {
        let nrows = self.nrows.value();
        let iter = iter.into_iter();
        let (lower, _upper) = iter.size_hint();
        // Reserve room for at least `lower` full columns up front.
        self.data.reserve(nrows * lower);
        for vector in iter {
            // Every appended vector must be a full column of this storage.
            assert_eq!(nrows, vector.shape().0);
            self.data.extend(vector.iter().cloned());
        }
        self.ncols = Dynamic::new(self.data.len() / nrows);
    }
}
impl<N> Extend<N> for VecStorage<N, Dynamic, U1> {
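    /// Extends the number of rows of the `VecStorage` with elements
    /// from the given iterator.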
fn extend<I: IntoIterator<Item = N>>(&mut self, iter: I) {
self.data.extend(iter);
self.nrows = Dynamic::new(self.data.len());
}
}
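
#[cfg(test)]
mod tests {
    //! Illustrative sanity checks for `VecStorage`. These are sketches that
    //! exercise only the public API defined in this module.
    use super::*;

    #[test]
    fn extend_adds_whole_columns() {
        // Start from a 2×1 storage and append exactly one more column.
        let mut s = VecStorage::new(Dynamic::new(2), Dynamic::new(1), vec![1, 2]);
        s.extend(vec![3, 4]);
        assert_eq!(s.len(), 4);
        assert_eq!(s.shape().1.value(), 2);
    }

    #[test]
    fn reshape_reuses_the_buffer() {
        // Reshaping only reinterprets the dimensions; the column-major data
        // buffer is moved, not copied.
        let data: Vec<i32> = (0..12).collect();
        let s = VecStorage::new(Dynamic::new(2), Dynamic::new(6), data.clone());
        let r = s.reshape_generic(Dynamic::new(4), Dynamic::new(3));
        assert_eq!(r.shape().0.value(), 4);
        assert_eq!(r.as_vec(), &data);
    }
}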