this isn't working, committing this so I can come back to it later, undo in next commit

missing 2022-05-13 08:32:53 -05:00 committed by missing
parent d63fa123a5
commit 3c00857465


@@ -11,10 +11,10 @@ use core::panic;
#[cfg(feature = "only_store_meta")]
use std::{ptr::Pointee, mem::align_of_val_raw};
use std::{
ptr::{NonNull, self, drop_in_place, metadata},
ptr::{NonNull, self, drop_in_place, metadata, Pointee},
marker::PhantomData,
alloc::{alloc, Layout},
mem::{size_of, size_of_val, align_of_val, self, size_of_val_raw},
mem::{size_of, size_of_val, align_of_val, self, size_of_val_raw, align_of_val_raw},
slice,
fmt::Debug,
ops::{CoerceUnsized, Index, IndexMut}
@@ -44,67 +44,81 @@ fn align_up_mut<T: ?Sized>(ptr: *mut T, align: usize) -> *mut T {
align_up(ptr as _, align) as _
}
trait Strategy<T: ?Sized>: Sized {
type Metadata: Copy;
unsafe fn get_ptr(vec: &Vec<T, Self>, index: usize) -> *const T;
unsafe fn next(vec: &Vec<T, Self>, prev: *const T, index: usize) -> *const T;
fn create_meta_from_ptr(vec: &Vec<T, Self>, ptr: *const T) -> Self::Metadata;
}
struct StorePtr;
impl<T: ?Sized> Strategy<T> for StorePtr {
type Metadata = *const T;
unsafe fn get_ptr(vec: &Vec<T, Self>, index: usize) -> *const T {
vec.get_ptr_to_meta(index + 1).cast::<*const T>().read()
}
unsafe fn next(vec: &Vec<T, Self>, _prev: *const T, index: usize) -> *const T {
Self::get_ptr(vec, index)
}
fn create_meta_from_ptr(_vec: &Vec<T, Self>, ptr: *const T) -> Self::Metadata {
ptr
}
}
struct OnlyStoreMeta;
impl<T: ?Sized> Strategy<T> for OnlyStoreMeta {
type Metadata = <T as Pointee>::Metadata;
unsafe fn get_ptr(vec: &Vec<T, Self>, index: usize) -> *const T {
let meta = vec.get_ptr_to_meta(1).cast::<<T as Pointee>::Metadata>().read();
let fake = ptr::from_raw_parts::<T>(0 as _, meta);
let ptr = align_up(vec.ptr.as_ptr(), align_of_val_raw(fake));
let mut ptr = ptr::from_raw_parts(ptr.cast(), meta);
for index in 1..=index {
ptr = Self::next(vec, ptr, index);
}
ptr
}
unsafe fn next(vec: &Vec<T, Self>, prev: *const T, index: usize) -> *const T {
let ptr = prev.cast::<u8>().wrapping_add(size_of_val_raw(prev));
let meta = vec.get_ptr_to_meta(index + 1).cast::<<T as Pointee>::Metadata>().read();
let fake = ptr::from_raw_parts::<T>(0 as _, meta);
let ptr = align_up(ptr, align_of_val_raw(fake));
ptr::from_raw_parts(ptr.cast(), meta)
}
fn create_meta_from_ptr(_vec: &Vec<T, Self>, ptr: *const T) -> Self::Metadata {
metadata(ptr)
}
}
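The trait above merges what the crate previously selected with `#[cfg(feature = "only_store_meta")]`: `StorePtr` records a whole fat pointer per element, while `OnlyStoreMeta` records only the pointer metadata and recomputes each element's address by walking sizes and alignments from the start of the allocation. A minimal standalone sketch of that recomputation, not part of this diff, modelling only `[u64]` elements whose metadata is their length:

    use std::mem::{align_of, size_of};

    // Recover each element's byte offset from the stored lengths alone, the way
    // OnlyStoreMeta::next does: align up, then step past the value just visited.
    fn offsets_from_lengths(lengths: &[usize]) -> std::vec::Vec<usize> {
        let align = align_of::<u64>();
        let mut offset = 0usize;
        let mut offsets = std::vec::Vec::new();
        for &len in lengths {
            offset = (offset + align - 1) & !(align - 1); // align_up
            offsets.push(offset);
            offset += len * size_of::<u64>(); // size_of_val of a [u64] with this length
        }
        offsets
    }

    fn main() {
        // three [u64] values of lengths 1, 3 and 2, packed back to back
        assert_eq!(offsets_from_lengths(&[1, 3, 2]), [0, 8, 32]);
    }

This is the trade-off the two strategies encode: `StorePtr` pays one extra pointer per element for O(1) indexing, while `OnlyStoreMeta` stores less but its `get_ptr` becomes a linear walk.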
/// A heap-allocated, dynamically sized collection of `?Sized` elements.
///
/// See [`::alloc::vec::Vec`] (the standard library `Vec` type) for more information.
pub struct Vec<T: ?Sized> {
struct Vec<T: ?Sized, S: Strategy<T>> {
ptr: NonNull<u8>,
len: usize,
capacity: usize,
end_ptr: NonNull<u8>,
_phantom: PhantomData<T>
_phantom: PhantomData<(*mut S, T)>
}
impl<T: ?Sized + Debug> std::fmt::Debug for Vec<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_list().entries(self.iter()).finish()
}
}
// Vec<T> == Vec<U>
impl<T: ?Sized + PartialEq<U>, U: ?Sized> PartialEq<Vec<U>> for Vec<T> {
fn eq(&self, other: &Vec<U>) -> bool {
if self.len != other.len { return false }
for (el, el2) in self.iter().zip(other.iter()) {
if el != el2 { return false }
}
true
}
}
impl<T: ?Sized + Eq> Eq for Vec<T> {}
// Vec<T> == &[U]
impl<T: PartialEq<U>, U> PartialEq<&[U]> for Vec<T> {
fn eq(&self, other: &&[U]) -> bool {
if self.len != other.len() { return false }
for (el, el2) in self.iter().zip(other.iter()) {
if el != el2 { return false }
}
true
}
}
// &[U] == Vec<T>
impl<T: PartialEq<U>, U> PartialEq<Vec<T>> for &[U] {
fn eq(&self, other: &Vec<T>) -> bool {
other == self
}
}
// Vec<T> == [U; N]
impl<T: PartialEq<U>, U, const N: usize> PartialEq<[U; N]> for Vec<T> {
fn eq(&self, other: &[U; N]) -> bool {
*self == &other[..]
}
}
// [U; N] == Vec<T>
impl<T: PartialEq<U>, U, const N: usize> PartialEq<Vec<T>> for [U; N] {
fn eq(&self, other: &Vec<T>) -> bool {
other == self
}
}
// keeps this file cleaner
mod impls;
mod iter;
pub use iter::*;
/// The data stored as metadata at the end of the allocation.
#[cfg(feature = "only_store_meta")]
@@ -122,7 +136,7 @@ unsafe fn align_of_val_meta<T: ?Sized>(meta: Meta<T>) -> usize {
align_of_val_raw(ptr::from_raw_parts::<T>(0 as *const (), meta))
}
impl<T: ?Sized> Vec<T> {
impl<T: ?Sized, S: Strategy<T>> Vec<T, S> {
/// Creates a new, empty `Vec`.
pub fn new() -> Self {
let ptr = NonNull::dangling();
@@ -136,6 +150,8 @@ impl<T: ?Sized> Vec<T> {
}
/// Appends an element to the end of the `Vec`.
///
/// Only available if `T: Sized`.
pub fn push(&mut self, v: T) where T: Sized {
unsafe { self.push_raw(&v) }
mem::forget(v);
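The `mem::forget(v)` after `push_raw(&v)` is what prevents a double drop: the value's bytes are copied into the `Vec`'s own storage, so the original must not also run its destructor. A rough standalone illustration of the same move-by-memcpy pattern, with illustrative names, a plain byte buffer, and alignment ignored (the real `push_raw` aligns via `align_up_mut`):

    use std::{mem, ptr};

    // Copy `v` into `storage` byte-for-byte, then forget the original so its
    // destructor cannot run a second time; the copy now owns the value.
    fn push_by_copy<T>(storage: &mut std::vec::Vec<u8>, v: T) {
        let size = mem::size_of::<T>();
        storage.reserve(size);
        unsafe {
            let dst = storage.as_mut_ptr().add(storage.len());
            ptr::copy_nonoverlapping(&v as *const T as *const u8, dst, size);
            storage.set_len(storage.len() + size);
        }
        mem::forget(v);
    }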
@@ -166,7 +182,7 @@ impl<T: ?Sized> Vec<T> {
// make sure we have enough space for a new element, but also space for future elements
// this bit is tricky, we must make sure we have enough space for padding too, so it's probably UB somehow
// FIXME: ^^^
let new_alloc_size = self.capacity * 2 + size * 2 + size_of::<Meta<T>>();
let new_alloc_size = self.capacity * 2 + size * 2 + size_of::<S::Metadata>();
self.realloc(new_alloc_size);
}
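As a worked example of the growth formula above (numbers are illustrative): with `capacity == 32`, a 24-byte value, and 8-byte metadata entries, the next allocation is `32*2 + 24*2 + 8 = 120` bytes, enough for the new value and its metadata entry with slack left for later pushes. The FIXME stands because padding between differently aligned elements is still not accounted for.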
@@ -178,14 +194,14 @@ impl<T: ?Sized> Vec<T> {
/// The pointer will be aligned, but writing to it may overwrite data belonging to the Vec.
/// To check for this, call `will_fit`.
/// In addition, the metadata for the element must be set using `set_meta_from_ptr`.
pub fn get_next_elem_ptr(&self, v: &T) -> *mut u8 {
fn get_next_elem_ptr(&self, v: &T) -> *mut u8 {
align_up_mut(self.end_ptr.as_ptr(), align_of_val(v))
}
/// Checks if a given element will fit in the `Vec` without reallocating.
pub fn will_fit(&self, v: &T) -> bool {
let remaining_space = self.get_ptr_to_meta(self.len) as usize - self.end_ptr.as_ptr() as usize;
let needed_space = size_of_val(v) + size_of::<Meta<T>>();
let needed_space = size_of_val(v) + size_of::<S::Metadata>();
remaining_space >= needed_space
}
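Since `will_fit` is the guard referred to in the `get_next_elem_ptr` docs, here is a standalone model of the check (not the crate's code): elements grow from the front of the single allocation, metadata entries grow backwards from the end, and a push fits when the gap between the two regions can hold the value plus one more metadata entry. Like the original, it ignores alignment padding.

    struct Model {
        capacity: usize,  // total bytes in the allocation
        end: usize,       // offset just past the last element's bytes
        len: usize,       // number of elements == number of metadata entries
        meta_size: usize, // bytes per metadata entry
    }

    impl Model {
        // Mirrors will_fit: remaining gap vs. value size plus one metadata entry.
        fn will_fit(&self, value_size: usize) -> bool {
            let meta_start = self.capacity - self.len * self.meta_size;
            let remaining = meta_start - self.end;
            remaining >= value_size + self.meta_size
        }
    }

    fn main() {
        let m = Model { capacity: 64, end: 24, len: 1, meta_size: 8 };
        assert!(m.will_fit(16));  // 56 - 24 = 32 bytes free; need 16 + 8 = 24
        assert!(!m.will_fit(32)); // need 32 + 8 = 40, more than the 32 available
    }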
@@ -250,46 +266,24 @@ impl<T: ?Sized> Vec<T> {
/// for internal use
///
/// NOTE: 1-indexed, to allow getting a pointer to the end of the alloc easily
fn get_ptr_to_meta(&self, index: usize) -> *mut Meta<T> {
fn get_ptr_to_meta(&self, index: usize) -> *mut S::Metadata {
self.ptr.as_ptr()
.wrapping_add(self.capacity)
.cast::<Meta<T>>()
.cast::<S::Metadata>()
.wrapping_sub(index)
}
/// for internal use
unsafe fn get_meta(&self, index: usize) -> Meta<T> {
unsafe fn get_meta(&self, index: usize) -> S::Metadata {
*self.get_ptr_to_meta(index + 1)
}
unsafe fn set_meta_from_ptr(&self, index: usize, ptr: *const T) {
#[cfg(feature = "only_store_meta")]
self.get_ptr_to_meta(index + 1).write(metadata(ptr));
#[cfg(not(feature = "only_store_meta"))]
self.get_ptr_to_meta(index + 1).write(ptr);
self.get_ptr_to_meta(index + 1).write(S::create_meta_from_ptr(self, ptr));
}
unsafe fn get_ptr(&self, index: usize) -> *const T {
#[cfg(feature = "only_store_meta")]
{
let mut ptr = self.ptr.as_ptr();
for i in 0..index {
let meta = self.get_meta(i);
let align = align_of_val_meta::<T>(meta);
let size = size_of_val_meta::<T>(meta);
ptr = align_up_mut(ptr, align).wrapping_add(size);
}
let meta = self.get_meta(index);
let align = align_of_val_meta::<T>(meta);
ptr::from_raw_parts(align_up_mut(ptr, align).cast(), meta)
}
#[cfg(not(feature = "only_store_meta"))]
self.get_meta(index)
S::get_ptr(self, index)
}
pub fn get(&self, index: usize) -> Option<&T> {
@@ -347,7 +341,7 @@ impl<T: ?Sized> Vec<T> {
}
/// Converts a `Vec<T: Sized>` into a `Vec<U: ?Sized>`, given that `T` can be `CoerceUnsized` into `U`.
pub fn unsize<U: ?Sized>(self) -> Vec<U> where for<'a> &'a T: CoerceUnsized<&'a U> {
pub fn unsize<U: ?Sized>(self) -> Vec<U> where for<'a> &'a T: CoerceUnsized<&'a U>, S: Strategy<U> {
let new_vec = Vec::<U> {
ptr: self.ptr,
len: self.len,
@@ -356,7 +350,7 @@ impl<T: ?Sized> Vec<T> {
_phantom: PhantomData,
};
if size_of::<Meta<U>>() > size_of::<Meta<T>>() {
if size_of::<<S as Strategy<U>>::Metadata>() > size_of::<<S as Strategy<T>>::Metadata>() {
// new meta larger than old meta, must go from back to front
for i in (0..self.len).rev() {
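The reverse loop exists because `unsize` rewrites the stored metadata in place: when each new entry is wider than the old one, a front-to-back pass would overwrite entries that have not been read yet. A small standalone demonstration of the ordering, with plain integers standing in for metadata entries (not the crate's code):

    // Old entries are u32, new entries are u64, both packed from the start of buf.
    // Going in reverse, every write only covers old bytes that were already read;
    // a forward pass would clobber entry 1 while writing the widened entry 0.
    fn widen_in_place(buf: &mut [u8], count: usize) {
        for i in (0..count).rev() {
            let old = u32::from_ne_bytes(buf[i * 4..i * 4 + 4].try_into().unwrap());
            buf[i * 8..i * 8 + 8].copy_from_slice(&(old as u64).to_ne_bytes());
        }
    }

    fn main() {
        let mut buf = [0u8; 24];
        for (i, v) in [7u32, 8, 9].iter().enumerate() {
            buf[i * 4..i * 4 + 4].copy_from_slice(&v.to_ne_bytes());
        }
        widen_in_place(&mut buf, 3);
        let get = |i: usize| u64::from_ne_bytes(buf[i * 8..i * 8 + 8].try_into().unwrap());
        assert_eq!([get(0), get(1), get(2)], [7, 8, 9]);
    }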
@@ -417,231 +411,6 @@ impl<T: ?Sized> Drop for Vec<T> {
}
}
// Iteration
struct BaseIter<T: ?Sized> {
ptr: *const Meta<T>,
ptr_back: *const Meta<T>,
#[cfg(feature = "only_store_meta")]
running_ptr: *mut u8,
// #[cfg(feature = "only_store_meta")]
// running_ptr_back: *mut u8
}
impl<T: ?Sized> BaseIter<T> {
fn new(vec: &Vec<T>) -> Self {
Self {
ptr: vec.get_ptr_to_meta(vec.len),
ptr_back: vec.get_ptr_to_meta(0),
#[cfg(feature = "only_store_meta")]
running_ptr: vec.ptr.as_ptr(),
// #[cfg(feature = "only_store_meta")]
// running_ptr_back: vec.end_ptr.as_ptr()
}
}
}
impl<T: ?Sized> Iterator for BaseIter<T> {
type Item = *mut T;
fn next(&mut self) -> Option<Self::Item> {
if self.ptr == self.ptr_back {
return None
}
self.ptr_back = self.ptr_back.wrapping_sub(1);
#[cfg(not(feature = "only_store_meta"))]
{
Some(unsafe { self.ptr_back.read() as *mut T })
}
#[cfg(feature = "only_store_meta")]
unsafe {
let meta = self.ptr_back.read();
self.running_ptr = align_up_mut(self.running_ptr, align_of_val_meta::<T>(meta));
let ret = ptr::from_raw_parts_mut(self.running_ptr.cast(), meta);
self.running_ptr = self.running_ptr.wrapping_add(size_of_val_meta::<T>(meta));
Some(ret)
}
}
}
#[cfg(not(feature = "only_store_meta"))]
impl<T: ?Sized> DoubleEndedIterator for BaseIter<T> {
fn next_back(&mut self) -> Option<Self::Item> {
if self.ptr == self.ptr_back {
return None
}
let el = unsafe { self.ptr.read() };
self.ptr = self.ptr.wrapping_add(1);
Some(el as *mut T)
}
}
// By-ref iteration
impl<'a, T: ?Sized> IntoIterator for &'a Vec<T> {
type Item = &'a T;
type IntoIter = Iter<'a, T>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
pub struct Iter<'a, T: ?Sized> {
base: BaseIter<T>,
_phantom: PhantomData<&'a T>
}
impl<'a, T: ?Sized> Iter<'a, T> {
pub fn new(vec: &'a Vec<T>) -> Self {
Self { base: BaseIter::new(vec), _phantom: PhantomData }
}
}
impl<'a, T: ?Sized> Iterator for Iter<'a, T> {
type Item = &'a T;
fn next(&mut self) -> Option<Self::Item> {
unsafe { self.base.next().map(|v| &*v) }
}
}
#[cfg(not(feature = "only_store_meta"))]
impl<'a, T: ?Sized> DoubleEndedIterator for Iter<'a, T> {
fn next_back(&mut self) -> Option<Self::Item> {
unsafe { self.base.next_back().map(|v| &*v) }
}
}
// By-mut iteration
impl<'a, T: ?Sized> IntoIterator for &'a mut Vec<T> {
type Item = &'a mut T;
type IntoIter = IterMut<'a, T>;
fn into_iter(self) -> Self::IntoIter {
self.iter_mut()
}
}
pub struct IterMut<'a, T: ?Sized> {
base: BaseIter<T>,
_phantom: PhantomData<&'a mut T>
}
impl<'a, T: ?Sized> IterMut<'a, T> {
pub fn new(vec: &'a mut Vec<T>) -> Self {
Self { base: BaseIter::new(vec), _phantom: PhantomData }
}
}
impl<'a, T: ?Sized> Iterator for IterMut<'a, T> {
type Item = &'a mut T;
fn next(&mut self) -> Option<Self::Item> {
unsafe { self.base.next().map(|v| &mut *v) }
}
}
#[cfg(not(feature = "only_store_meta"))]
impl<'a, T: ?Sized> DoubleEndedIterator for IterMut<'a, T> {
fn next_back(&mut self) -> Option<Self::Item> {
unsafe { self.base.next_back().map(|v| &mut *v) }
}
}
// By-value iteration
impl<T: ?Sized> IntoIterator for Vec<T> {
type Item = Box<T>;
type IntoIter = IntoIter<T>;
fn into_iter(self) -> Self::IntoIter {
IntoIter::new(self)
}
}
pub struct IntoIter<T: ?Sized> {
ptr: NonNull<u8>,
capacity: usize,
base: BaseIter<T>
}
impl<T: ?Sized> IntoIter<T> {
pub fn new(vec: Vec<T>) -> Self {
let this = Self {
ptr: vec.ptr,
capacity: vec.capacity,
base: BaseIter::new(&vec)
};
mem::forget(vec);
this
}
}
impl<T: ?Sized> Iterator for IntoIter<T> {
type Item = Box<T>;
fn next(&mut self) -> Option<Self::Item> {
let ptr = self.base.next()?;
unsafe {
let alloc = alloc(Layout::for_value_raw(ptr));
memcpy(ptr.cast(), alloc, size_of_val_raw(ptr));
Some(Box::from_raw(ptr::from_raw_parts_mut(alloc.cast(), metadata(ptr))))
}
}
}
#[cfg(not(feature = "only_store_meta"))]
impl<T: ?Sized> DoubleEndedIterator for IntoIter<T> {
fn next_back(&mut self) -> Option<Self::Item> {
let ptr = self.base.next_back()?;
unsafe {
let alloc = alloc(Layout::for_value_raw(ptr));
memcpy(ptr.cast(), alloc, size_of_val_raw(ptr));
Some(Box::from_raw(ptr::from_raw_parts_mut(alloc.cast(), metadata(ptr))))
}
}
}
impl<T: ?Sized> Drop for IntoIter<T> {
fn drop(&mut self) {
unsafe { dealloc(self.ptr.as_ptr(), Layout::from_size_align_unchecked(self.capacity, 8)) }
}
}
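Each item yielded by the by-value iterator above is its own `Box<T>`: `next` allocates a block sized from the element's layout, copies the bytes across, and rebuilds a fat pointer from the stored metadata. A condensed standalone sketch of that step, assuming the same nightly `ptr_metadata` and `layout_for_ptr` features this file already uses (not part of the diff):

    #![feature(ptr_metadata, layout_for_ptr)]
    use std::{alloc::{alloc, Layout}, ptr};

    // Copy one (non-zero-sized) unsized value out of shared storage into its own
    // heap allocation, rebuilding the fat pointer from the value's metadata.
    unsafe fn box_from_raw_value<T: ?Sized>(src: *const T) -> Box<T> {
        let layout = Layout::for_value_raw(src);
        let dst = alloc(layout);
        ptr::copy_nonoverlapping(src.cast::<u8>(), dst, layout.size());
        Box::from_raw(ptr::from_raw_parts_mut(dst.cast(), ptr::metadata(src)))
    }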
// // this implementation will collect *while unsizing*, and would conflict with the other
// impl<T: ?Sized, U> FromIterator<U> for Vec<T> where for<'a> &'a U: CoerceUnsized<&'a T> {
// fn from_iter<I: IntoIterator<Item = U>>(iter: I) -> Self {
// let mut vec = Vec::new();
// for item in iter.into_iter() {
// vec.push_unsize(item);
// }
// vec
// }
// }
impl<T> FromIterator<T> for Vec<T> {
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
let mut vec = Vec::new();
for item in iter.into_iter() {
vec.push(item);
}
vec
}
}
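Putting these pieces together, end-to-end usage could look roughly like the following. This is a hedged sketch, not from the repository: the `dyn_vec` crate name is taken from the `compile_error!` message further down, the re-export path is assumed, and `collect`, `unsize`, and both iteration modes are as shown in this file.

    use dyn_vec::Vec; // assumed re-export of the `Vec` defined above

    fn main() {
        let v: Vec<i32> = (1..=3).collect();          // FromIterator<T>
        let v: Vec<dyn std::fmt::Debug> = v.unsize(); // coerce each element to dyn Debug
        for item in &v {
            println!("{:?}", item);  // by-ref iteration yields &dyn Debug
        }
        for boxed in v {
            println!("{:?}", boxed); // by-value iteration yields Box<dyn Debug>
        }
    }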
impl<T: ?Sized> Index<usize> for Vec<T> {
type Output = T;
@@ -665,7 +434,6 @@ impl<T: ?Sized> IndexMut<usize> for Vec<T> {
}
}
/// Creates a [`Vec`].
///
/// # Examples
@@ -697,7 +465,7 @@ macro_rules! vec {
vec
}};
($elem:expr; $n:expr) => {
unimplemented!("vec![T; N] is currently not supported");
compile_error!("dyn_vec::vec![T; N] is currently not supported");
};
($($elem:expr),+ $(,)?) => {{
let mut vec = $crate::Vec::new();