//! A heap-allocated vector type that can store dynamically sized values
//! (`?Sized` elements such as slices and trait objects) directly, without
//! boxing each element individually.
#![feature(ptr_metadata)]
|
|
#![feature(layout_for_ptr)]
|
|
#![feature(coerce_unsized)]
|
|
|
|
#[cfg(test)]
|
|
mod test;
|
|
|
|
pub mod prelude;
|
|
|
|
use core::panic;
|
|
use std::{ptr::{NonNull, Pointee, self, drop_in_place, metadata}, marker::PhantomData, alloc::{alloc, Layout, dealloc}, mem::{size_of, size_of_val, align_of_val, self, size_of_val_raw}, slice, fmt::Debug, ops::{CoerceUnsized, Index, IndexMut}};
|
|
|
|
/// Alias for metadata of a pointer to `T`.
///
/// For `T: Sized` this is `()`; for slices it is the element count; for trait
/// objects it is the vtable pointer (see `std::ptr::Pointee`).
pub type Meta<T> = <T as Pointee>::Metadata;
|
|
|
|
/// Copy `size` bytes of memory from `src` to `dst`.
///
/// # Safety
///
/// `src` must be valid for reads of `size` bytes and `dst` valid for writes of
/// `size` bytes. The regions may overlap (`copy_from` has `memmove` semantics).
// TODO: inline me! i didn't realize it was available as `copy_from` until the code was mostly complete.
unsafe fn memcpy(src: *const u8, dst: *mut u8, size: usize) {
    dst.copy_from(src, size);
}
|
|
|
|
fn align_up<T: ?Sized>(ptr: *const T, align: usize) -> *const T {
|
|
let (mut data, meta) = ptr.to_raw_parts();
|
|
data = ((data as usize + align - 1) & !(align - 1)) as _;
|
|
ptr::from_raw_parts(data, meta)
|
|
}
|
|
|
|
fn align_up_mut<T: ?Sized>(ptr: *mut T, align: usize) -> *mut T {
|
|
align_up(ptr as _, align) as _
|
|
}
|
|
|
|
/// A heap allocated, dynamically sized collection of `?Sized` elements.
///
/// See [`::alloc::vec::Vec`] (the standard library `Vec` type) for more information.
pub struct Vec<T: ?Sized> {
    // Base of the single allocation; dangling while `capacity == 0`.
    ptr: NonNull<u8>,
    // Number of elements currently stored.
    len: usize,
    // Size of the allocation in bytes.
    capacity: usize,
    // One past the last byte of element data. Element bytes grow upward from
    // `ptr`; the per-element fat-pointer table grows downward from
    // `ptr + capacity` (see `get_ptr_to_ptr`).
    end_ptr: NonNull<u8>,
    // Marks logical ownership of `T` values for drop-check/variance purposes.
    _phantom: PhantomData<T>
}
|
|
|
|
impl<T: ?Sized + Debug> std::fmt::Debug for Vec<T> {
|
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
f.debug_list().entries(self.iter()).finish()
|
|
}
|
|
}
|
|
|
|
// Vec<T> == Vec<U>
|
|
impl<T: ?Sized + PartialEq<U>, U: ?Sized> PartialEq<Vec<U>> for Vec<T> {
|
|
fn eq(&self, other: &Vec<U>) -> bool {
|
|
if self.len != other.len { return false }
|
|
for (el, el2) in self.iter().zip(other.iter()) {
|
|
if el != el2 { return false }
|
|
}
|
|
true
|
|
}
|
|
}
|
|
|
|
// Equality is element-wise (see `PartialEq<Vec<U>> for Vec<T>` above), so it
// is a full equivalence relation whenever `T: Eq`.
impl<T: ?Sized + Eq> Eq for Vec<T> {}
|
|
|
|
// Vec<T> == &[U]
|
|
impl<T: PartialEq<U>, U> PartialEq<&[U]> for Vec<T> {
|
|
fn eq(&self, other: &&[U]) -> bool {
|
|
if self.len != other.len() { return false }
|
|
for (el, el2) in self.iter().zip(other.iter()) {
|
|
if el != el2 { return false }
|
|
}
|
|
true
|
|
}
|
|
}
|
|
|
|
// &[U] == Vec<T>
|
|
impl<T: PartialEq<U>, U> PartialEq<Vec<T>> for &[U] {
|
|
fn eq(&self, other: &Vec<T>) -> bool {
|
|
other == self
|
|
}
|
|
}
|
|
|
|
// Vec<T> == [U; N]
|
|
impl<T: PartialEq<U>, U, const N: usize> PartialEq<[U; N]> for Vec<T> {
|
|
fn eq(&self, other: &[U; N]) -> bool {
|
|
*self == &other[..]
|
|
}
|
|
}
|
|
|
|
// [U; N] == Vec<T>
|
|
impl<T: PartialEq<U>, U, const N: usize> PartialEq<Vec<T>> for [U; N] {
|
|
fn eq(&self, other: &Vec<T>) -> bool {
|
|
other == self
|
|
}
|
|
}
|
|
|
|
impl<T: ?Sized> Vec<T> {
|
|
/// Creates a new, empty `Vec`.
|
|
pub fn new() -> Self {
|
|
let ptr = NonNull::dangling();
|
|
Self {
|
|
ptr,
|
|
len: 0,
|
|
capacity: 0,
|
|
end_ptr: ptr,
|
|
_phantom: PhantomData
|
|
}
|
|
}
|
|
|
|
/// Appends an element to the end of the `Vec`.
|
|
pub fn push(&mut self, v: T) where T: Sized {
|
|
unsafe { self.push_raw(&v) }
|
|
mem::forget(v);
|
|
}
|
|
|
|
/// Appends an (possibly unsized) boxed element to the end of the `Vec`.
|
|
pub fn push_box(&mut self, v: Box<T>) {
|
|
let ptr = Box::into_raw(v);
|
|
let layout = unsafe { Layout::for_value_raw(ptr) };
|
|
unsafe {
|
|
self.push_raw(ptr);
|
|
dealloc(ptr.cast(), layout);
|
|
}
|
|
}
|
|
|
|
/// Appends a sized element of type `U` to the end of the `Vec`, given that it can be coerced to an unsized `T`.
|
|
pub fn push_unsize<U>(&mut self, v: U) where for<'a> &'a U: CoerceUnsized<&'a T> {
|
|
let v_unsized: &T = &v;
|
|
unsafe { self.push_raw(v_unsized) };
|
|
mem::forget(v);
|
|
}
|
|
|
|
unsafe fn push_raw(&mut self, v: *const T) {
|
|
let size = size_of_val(&*v);
|
|
|
|
if !self.will_fit(&*v) {
|
|
// oh no! allocation too small!
|
|
|
|
// make sure we have enough space for a new element, but also space for future elements
|
|
// this bit is tricky, we must make sure we have enough space for padding too, so its probably UB somehow
|
|
// FIXME: ^^^
|
|
let new_alloc_size = self.capacity * 2 + size * 2 + size_of::<*const T>();
|
|
self.realloc(new_alloc_size);
|
|
}
|
|
|
|
self.push_raw_unchecked(v);
|
|
}
|
|
|
|
/// Given an element, returns a pointer to where it would be written if it was pushed, assuming no reallocation is needed.
|
|
///
|
|
/// The pointer will be aligned, but writing to it may overwrite data belonging to the Vec.
|
|
/// To check for this, call `will_fit`.
|
|
pub fn get_next_elem_ptr(&self, v: &T) -> *mut u8 {
|
|
align_up_mut(self.end_ptr.as_ptr(), align_of_val(v))
|
|
}
|
|
|
|
/// Checks if a given element will fill in the `Vec` without reallocations.
|
|
pub fn will_fit(&self, v: &T) -> bool {
|
|
let remaining_space = self.get_ptr_to_ptr(self.len) as usize - self.end_ptr.as_ptr() as usize;
|
|
let needed_space = size_of_val(v) + size_of::<*const T>();
|
|
remaining_space >= needed_space
|
|
}
|
|
|
|
unsafe fn push_raw_unchecked(&mut self, v: *const T) {
|
|
let size = size_of_val(&*v);
|
|
let dest = self.get_next_elem_ptr(&*v); // this is mentioned by the `// SAFETY:` in `as_slice_flatten`
|
|
|
|
memcpy(v.cast(), dest, size);
|
|
|
|
let new_ptr = ptr::from_raw_parts::<T>(dest.cast(), metadata(v));
|
|
self.get_ptr_to_ptr(self.len + 1).write(new_ptr);
|
|
|
|
self.end_ptr = NonNull::new_unchecked(dest.wrapping_add(size));
|
|
self.len += 1;
|
|
}
|
|
|
|
unsafe fn realloc(&mut self, size: usize) {
|
|
let layout = Layout::from_size_align_unchecked(size, 8).pad_to_align();
|
|
if self.capacity == 0 {
|
|
// will panic if OOM
|
|
self.ptr = NonNull::new(alloc(layout)).unwrap();
|
|
|
|
self.end_ptr = self.ptr;
|
|
} else {
|
|
// cannot use realloc here
|
|
|
|
let new_alloc = NonNull::new(alloc(layout)).unwrap();
|
|
|
|
// data
|
|
let mut ptr = new_alloc.as_ptr();
|
|
for i in 0..self.len {
|
|
let v = self.get_unchecked(i);
|
|
|
|
let size = size_of_val(v);
|
|
ptr = align_up_mut(ptr, align_of_val(v));
|
|
memcpy(v as *const _ as _, ptr, size);
|
|
let meta = self.get_ptr(i).to_raw_parts().1;
|
|
self.get_ptr_to_ptr(i + 1).write(ptr::from_raw_parts(ptr.cast(), meta));
|
|
ptr = ptr.wrapping_add(size);
|
|
}
|
|
self.end_ptr = NonNull::new_unchecked(ptr);
|
|
|
|
// metadata
|
|
let meta_src = self.get_ptr_to_ptr(self.len);
|
|
let meta_dst = {
|
|
let current_alloc_end = self.ptr.as_ptr().wrapping_add(self.capacity);
|
|
let new_alloc_end = new_alloc.as_ptr().wrapping_add(layout.size());
|
|
let meta_len = current_alloc_end as usize - meta_src as usize;
|
|
new_alloc_end.wrapping_sub(meta_len)
|
|
};
|
|
let meta_size = self.len * size_of::<*const T>();
|
|
memcpy(meta_src.cast(), meta_dst, meta_size);
|
|
|
|
dealloc(self.ptr.as_ptr(), Layout::from_size_align_unchecked(self.capacity, 8));
|
|
|
|
self.ptr = new_alloc;
|
|
}
|
|
|
|
self.capacity = layout.size();
|
|
}
|
|
|
|
/// for internal use
|
|
///
|
|
/// NOTE: 1-indexed, to allow getting a pointer to the end of the alloc easily
|
|
fn get_ptr_to_ptr(&self, index: usize) -> *mut *const T {
|
|
self.ptr.as_ptr()
|
|
.wrapping_add(self.capacity)
|
|
.cast::<*const T>()
|
|
.wrapping_sub(index)
|
|
}
|
|
|
|
/// for internal use
|
|
unsafe fn get_ptr(&self, index: usize) -> *const T {
|
|
*self.get_ptr_to_ptr(index + 1)
|
|
}
|
|
|
|
pub fn get(&self, index: usize) -> Option<&T> {
|
|
if index < self.len {
|
|
Some(unsafe { self.get_unchecked(index) })
|
|
} else {
|
|
None
|
|
}
|
|
}
|
|
|
|
pub unsafe fn get_unchecked(&self, index: usize) -> &T {
|
|
&*self.get_ptr(index)
|
|
}
|
|
|
|
pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
|
|
if index < self.len {
|
|
Some(unsafe { self.get_unchecked_mut(index) })
|
|
} else {
|
|
None
|
|
}
|
|
}
|
|
|
|
pub unsafe fn get_unchecked_mut(&mut self, index: usize) -> &mut T {
|
|
&mut *(self.get_ptr(index) as *mut _)
|
|
}
|
|
|
|
pub fn len(&self) -> usize {
|
|
self.len
|
|
}
|
|
|
|
pub fn capacity(&self) -> usize {
|
|
self.capacity
|
|
}
|
|
|
|
pub fn as_ptr(&self) -> *const u8 {
|
|
self.ptr.as_ptr()
|
|
}
|
|
|
|
pub fn as_mut_ptr(&mut self) -> *mut u8 {
|
|
self.ptr.as_ptr()
|
|
}
|
|
|
|
pub fn iter(&self) -> Iter<T> {
|
|
Iter::new(self)
|
|
}
|
|
|
|
pub fn iter_mut(&mut self) -> IterMut<T> {
|
|
IterMut::new(self)
|
|
}
|
|
|
|
pub fn unsize<U: ?Sized>(self) -> Vec<U> where for<'a> &'a T: CoerceUnsized<&'a U> {
|
|
let new_vec = Vec::<U> {
|
|
ptr: self.ptr,
|
|
len: self.len,
|
|
capacity: self.capacity,
|
|
end_ptr: self.end_ptr,
|
|
_phantom: PhantomData,
|
|
};
|
|
|
|
println!("sizeof(*const U) = {}, sizeof(*const T) = {}", size_of::<*const U>(), size_of::<*const T>());
|
|
|
|
if size_of::<*const U>() > size_of::<*const T>() {
|
|
// new meta larger than old meta, must go from back to front
|
|
|
|
// 1 indexed moment
|
|
for i in (1..=self.len).rev() {
|
|
let current = unsafe { &*self.get_ptr_to_ptr(i).read() };
|
|
unsafe { new_vec.get_ptr_to_ptr(i).write(current as &U) }
|
|
}
|
|
} else {
|
|
// net meta smaller or same size as old meta, must go from front to back
|
|
|
|
// 1 indexed moment
|
|
for i in 1..=self.len {
|
|
let current = unsafe { &*self.get_ptr_to_ptr(i).read() };
|
|
unsafe { new_vec.get_ptr_to_ptr(i).write(current as &U) }
|
|
}
|
|
}
|
|
|
|
mem::forget(self);
|
|
new_vec
|
|
}
|
|
}
|
|
|
|
impl<T> Vec<[T]> {
|
|
pub fn as_slice_flatten(&self) -> &[T] {
|
|
assert!(self.len > 0);
|
|
|
|
// SAFETY: the slices should be contiguous by the logic of `push_raw_unchecked`
|
|
unsafe {
|
|
slice::from_raw_parts(self.get_ptr(0).to_raw_parts().0 as _, {
|
|
let start = self.get_ptr(0).to_raw_parts().0 as usize;
|
|
let end = self.end_ptr.as_ptr() as usize;
|
|
(end - start) / size_of::<T>() // integer division!
|
|
})
|
|
}
|
|
}
|
|
|
|
pub fn as_mut_slice_flatten(&mut self) -> &mut [T] {
|
|
assert!(self.len > 0);
|
|
|
|
// SAFETY: the slices should be contiguous by the logic of `push_raw_unchecked`
|
|
unsafe {
|
|
slice::from_raw_parts_mut(self.get_ptr(0).to_raw_parts().0 as _, {
|
|
let start = self.get_ptr(0).to_raw_parts().0 as usize;
|
|
let end = self.end_ptr.as_ptr() as usize;
|
|
(end - start) / size_of::<T>() // integer division!
|
|
})
|
|
}
|
|
}
|
|
}
|
|
|
|
impl<T: ?Sized> Drop for Vec<T> {
|
|
fn drop(&mut self) {
|
|
unsafe {
|
|
for i in 0..self.len {
|
|
drop_in_place(self.get_unchecked_mut(i));
|
|
}
|
|
|
|
dealloc(self.ptr.as_ptr(), Layout::from_size_align_unchecked(self.capacity, 8));
|
|
}
|
|
}
|
|
}
|
|
|
|
|
|
// Iteration
// Shared core of all three iterators: a pair of cursors over the
// fat-pointer table (which grows downward; element 0's slot is the highest).
struct BaseIter<T: ?Sized> {
    // Lowest remaining table slot (holds the pointer to the last element).
    ptr: *const *mut T,
    // One above the highest remaining slot; `next` pre-decrements this before
    // reading, so the two cursors meeting means exhaustion.
    ptr_end: *const *mut T
}
|
|
|
|
impl<T: ?Sized> BaseIter<T> {
    // Builds the cursor pair over `vec`'s table: `ptr` is the last element's
    // slot (`get_ptr_to_ptr(len)`, lowest address) and `ptr_end` is one above
    // the first element's slot (`get_ptr_to_ptr(0)`).
    fn new(vec: &Vec<T>) -> Self {
        Self { ptr: vec.get_ptr_to_ptr(vec.len).cast(), ptr_end: vec.get_ptr_to_ptr(0).cast() }
    }
}
|
|
|
|
impl<T: ?Sized> Iterator for BaseIter<T> {
    type Item = *mut T;

    // Front-to-back: element 0's pointer sits in the highest table slot, so
    // step `ptr_end` down one slot and read the slot it now points at.
    fn next(&mut self) -> Option<Self::Item> {
        if self.ptr == self.ptr_end {
            return None
        }

        self.ptr_end = self.ptr_end.wrapping_sub(1);
        Some(unsafe { self.ptr_end.read() })
    }
}
|
|
|
|
impl<T: ?Sized> DoubleEndedIterator for BaseIter<T> {
|
|
fn next_back(&mut self) -> Option<Self::Item> {
|
|
if self.ptr == self.ptr_end {
|
|
return None
|
|
}
|
|
|
|
let el = unsafe { self.ptr_end.read() };
|
|
self.ptr = self.ptr.wrapping_add(1);
|
|
Some(el)
|
|
}
|
|
}
|
|
|
|
|
|
// By-ref iteration
|
|
impl<'a, T: ?Sized> IntoIterator for &'a Vec<T> {
|
|
type Item = &'a T;
|
|
|
|
type IntoIter = Iter<'a, T>;
|
|
|
|
fn into_iter(self) -> Self::IntoIter {
|
|
self.iter()
|
|
}
|
|
}
|
|
|
|
/// Borrowing iterator over a [`Vec`], yielding `&T`.
pub struct Iter<'a, T: ?Sized> {
    base: BaseIter<T>,
    // Ties the yielded `&'a T` borrows to the lifetime of the source `Vec`.
    _phantom: PhantomData<&'a T>
}
|
|
|
|
impl<'a, T: ?Sized> Iter<'a, T> {
|
|
pub fn new(vec: &'a Vec<T>) -> Self {
|
|
Self { base: BaseIter::new(vec), _phantom: PhantomData }
|
|
}
|
|
}
|
|
|
|
impl<'a, T: ?Sized> Iterator for Iter<'a, T> {
|
|
type Item = &'a T;
|
|
|
|
fn next(&mut self) -> Option<Self::Item> {
|
|
unsafe { self.base.next().map(|v| &*v) }
|
|
}
|
|
}
|
|
|
|
impl<'a, T: ?Sized> DoubleEndedIterator for Iter<'a, T> {
|
|
fn next_back(&mut self) -> Option<Self::Item> {
|
|
unsafe { self.base.next_back().map(|v| &*v) }
|
|
}
|
|
}
|
|
|
|
|
|
// By-mut iteration
|
|
impl<'a, T: ?Sized> IntoIterator for &'a mut Vec<T> {
|
|
type Item = &'a mut T;
|
|
|
|
type IntoIter = IterMut<'a, T>;
|
|
|
|
fn into_iter(self) -> Self::IntoIter {
|
|
self.iter_mut()
|
|
}
|
|
}
|
|
|
|
/// Mutably borrowing iterator over a [`Vec`], yielding `&mut T`.
pub struct IterMut<'a, T: ?Sized> {
    base: BaseIter<T>,
    // Ties the yielded `&'a mut T` borrows to the exclusive borrow of the Vec.
    _phantom: PhantomData<&'a mut T>
}
|
|
|
|
impl<'a, T: ?Sized> IterMut<'a, T> {
|
|
pub fn new(vec: &'a mut Vec<T>) -> Self {
|
|
Self { base: BaseIter::new(vec), _phantom: PhantomData }
|
|
}
|
|
}
|
|
|
|
impl<'a, T: ?Sized> Iterator for IterMut<'a, T> {
|
|
type Item = &'a mut T;
|
|
|
|
fn next(&mut self) -> Option<Self::Item> {
|
|
unsafe { self.base.next().map(|v| &mut *v) }
|
|
}
|
|
}
|
|
|
|
impl<'a, T: ?Sized> DoubleEndedIterator for IterMut<'a, T> {
|
|
fn next_back(&mut self) -> Option<Self::Item> {
|
|
unsafe { self.base.next_back().map(|v| &mut *v) }
|
|
}
|
|
}
|
|
|
|
|
|
// By-value iteration
|
|
impl<T: ?Sized> IntoIterator for Vec<T> {
|
|
type Item = Box<T>;
|
|
|
|
type IntoIter = IntoIter<T>;
|
|
|
|
fn into_iter(self) -> Self::IntoIter {
|
|
IntoIter::new(self)
|
|
}
|
|
}
|
|
|
|
/// Owning iterator over a [`Vec`], yielding each element as a `Box<T>`.
pub struct IntoIter<T: ?Sized> {
    // Base and byte size of the allocation taken over from the `Vec`;
    // freed when the iterator is dropped.
    ptr: NonNull<u8>,
    capacity: usize,
    base: BaseIter<T>
}
|
|
|
|
impl<T: ?Sized> IntoIter<T> {
    /// Takes ownership of `vec`'s allocation.
    pub fn new(vec: Vec<T>) -> Self {
        let this = Self {
            ptr: vec.ptr,
            capacity: vec.capacity,
            base: BaseIter::new(&vec)
        };
        // prevent `vec`'s Drop from freeing the buffer (and dropping the
        // elements) that `this` now owns
        mem::forget(vec);
        this
    }
}
|
|
|
|
impl<T: ?Sized> Iterator for IntoIter<T> {
|
|
type Item = Box<T>;
|
|
|
|
fn next(&mut self) -> Option<Self::Item> {
|
|
let ptr = self.base.next()?;
|
|
unsafe {
|
|
let alloc = alloc(Layout::for_value_raw(ptr));
|
|
memcpy(ptr.cast(), alloc, size_of_val_raw(ptr));
|
|
Some(Box::from_raw(ptr::from_raw_parts_mut(alloc.cast(), metadata(ptr))))
|
|
}
|
|
}
|
|
}
|
|
|
|
impl<T: ?Sized> DoubleEndedIterator for IntoIter<T> {
|
|
fn next_back(&mut self) -> Option<Self::Item> {
|
|
let ptr = self.base.next_back()?;
|
|
unsafe {
|
|
let alloc = alloc(Layout::for_value_raw(ptr));
|
|
memcpy(ptr.cast(), alloc, size_of_val_raw(ptr));
|
|
Some(Box::from_raw(ptr::from_raw_parts_mut(alloc.cast(), metadata(ptr))))
|
|
}
|
|
}
|
|
}
|
|
|
|
impl<T: ?Sized> Drop for IntoIter<T> {
|
|
fn drop(&mut self) {
|
|
unsafe { dealloc(self.ptr.as_ptr(), Layout::from_size_align_unchecked(self.capacity, 8)) }
|
|
}
|
|
}
|
|
|
|
|
|
// // this implementation will collect *while unsizing*, and would conflict with the other
|
|
// impl<T: ?Sized, U> FromIterator<U> for Vec<T> where for<'a> &'a U: CoerceUnsized<&'a T> {
|
|
// fn from_iter<I: IntoIterator<Item = U>>(iter: I) -> Self {
|
|
// let mut vec = Vec::new();
|
|
|
|
// for item in iter.into_iter() {
|
|
// vec.push_unsize(item);
|
|
// }
|
|
|
|
// vec
|
|
// }
|
|
// }
|
|
|
|
impl<T> FromIterator<T> for Vec<T> {
|
|
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
|
|
let mut vec = Vec::new();
|
|
|
|
for item in iter.into_iter() {
|
|
vec.push(item);
|
|
}
|
|
|
|
vec
|
|
}
|
|
}
|
|
|
|
|
|
impl<T: ?Sized> Index<usize> for Vec<T> {
|
|
type Output = T;
|
|
|
|
#[track_caller]
|
|
fn index(&self, index: usize) -> &Self::Output {
|
|
match self.get(index) {
|
|
Some(v) => v,
|
|
None => panic!("index out of bounds: the len is {} but the index is {}", self.len, index),
|
|
}
|
|
}
|
|
}
|
|
|
|
impl<T: ?Sized> IndexMut<usize> for Vec<T> {
|
|
#[track_caller]
|
|
fn index_mut(&mut self, index: usize) -> &mut Self::Output {
|
|
let len = self.len;
|
|
match self.get_mut(index) {
|
|
Some(v) => v,
|
|
None => panic!("index out of bounds: the len is {} but the index is {}", len, index),
|
|
}
|
|
}
|
|
}
|
|
|
|
|
|
/// Creates a [`Vec`].
///
/// # Examples
///
/// ```
/// # use dyn_vec::prelude::{vec, Vec};
/// # use std::fmt::Debug;
/// let vec1: Vec<dyn Debug> = vec![1, 2, 3].unsize();
/// let vec2: Vec<dyn Debug> = vec![box:
///     Box::new(1) as _,
///     Box::new(String::from("foo")) as _,
///     Box::new(true) as _
/// ];
/// let vec3: Vec<dyn Debug> = vec![unsized: 1, String::from("foo"), true];
/// ```
#[macro_export]
macro_rules! vec {
    // NOTE: expression arms must not carry a trailing semicolon — the old
    // `$crate::Vec::new();` expansion triggered the
    // `semicolon_in_expressions_from_macros` future-compatibility lint.
    () => {
        $crate::Vec::new()
    };
    (box: $($elem:expr),+ $(,)?) => {{
        let mut vec = $crate::Vec::new();
        $(vec.push_box($elem);)+
        vec
    }};
    (unsized: $($elem:expr),+ $(,)?) => {{
        let mut vec = $crate::Vec::new();
        $(vec.push_unsize($elem);)+
        vec
    }};
    ($elem:expr; $n:expr) => {
        unimplemented!("vec![T; N] is currently not supported")
    };
    ($($elem:expr),+ $(,)?) => {{
        let mut vec = $crate::Vec::new();
        $(vec.push($elem);)+
        vec
    }};
}