Unnamed repository; edit this file 'description' to name the repository.
Shrink `Path` to 16 bytes
Thanks to the observation (supported by counting) that the vast majority of paths have neither generics nor type anchors, and thanks to a new data structure `ThinVecWithHeader` that is essentially `(T, Box<[U]>)` but with the size of a single pointer, we are able to reach this feat. This (together with `ThinVecWithHeader`) opens up the possibility of shrinking `TypeRef`, because most types are paths.
Chayim Refael Friedman 2024-10-25
parent bccf006 · commit 061e5d7
-rw-r--r--crates/hir-def/src/body/lower.rs12
-rw-r--r--crates/hir-def/src/generics.rs21
-rw-r--r--crates/hir-def/src/hir.rs2
-rw-r--r--crates/hir-def/src/hir/type_ref.rs2
-rw-r--r--crates/hir-def/src/path.rs150
-rw-r--r--crates/hir-def/src/path/lower.rs22
-rw-r--r--crates/hir-def/src/resolver.rs6
-rw-r--r--crates/hir-ty/src/infer/expr.rs4
-rw-r--r--crates/hir-ty/src/infer/path.rs2
-rw-r--r--crates/hir-ty/src/mir/lower.rs7
-rw-r--r--crates/stdx/src/lib.rs1
-rw-r--r--crates/stdx/src/thin_vec.rs472
-rw-r--r--xtask/src/tidy.rs2
13 files changed, 596 insertions, 107 deletions
diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs
index 3ecb9576b7..a1b0b52145 100644
--- a/crates/hir-def/src/body/lower.rs
+++ b/crates/hir-def/src/body/lower.rs
@@ -720,12 +720,8 @@ impl ExprCollector<'_> {
fn collect_expr_path(&mut self, e: ast::PathExpr) -> Option<(Path, HygieneId)> {
e.path().and_then(|path| {
let path = self.parse_path(path)?;
- let Path::Normal { type_anchor, mod_path, generic_args } = &path else {
- panic!("path parsing produced a non-normal path");
- };
// Need to enable `mod_path.len() < 1` for `self`.
- let may_be_variable =
- type_anchor.is_none() && mod_path.len() <= 1 && generic_args.is_none();
+ let may_be_variable = matches!(&path, Path::BarePath(mod_path) if mod_path.len() <= 1);
let hygiene = if may_be_variable {
self.hygiene_id_for(e.syntax().text_range().start())
} else {
@@ -797,7 +793,7 @@ impl ExprCollector<'_> {
ast::Expr::PathExpr(e) => {
let (path, hygiene) = self
.collect_expr_path(e.clone())
- .map(|(path, hygiene)| (Pat::Path(Box::new(path)), hygiene))
+ .map(|(path, hygiene)| (Pat::Path(path), hygiene))
.unwrap_or((Pat::Missing, HygieneId::ROOT));
let pat_id = self.alloc_pat_from_expr(path, syntax_ptr);
if !hygiene.is_root() {
@@ -1059,7 +1055,7 @@ impl ExprCollector<'_> {
syntax_ptr,
);
let none_arm = MatchArm {
- pat: self.alloc_pat_desugared(Pat::Path(Box::new(option_none))),
+ pat: self.alloc_pat_desugared(Pat::Path(option_none)),
guard: None,
expr: self.alloc_expr(Expr::Break { expr: None, label: None }, syntax_ptr),
};
@@ -1561,7 +1557,7 @@ impl ExprCollector<'_> {
Pat::Ref { pat, mutability }
}
ast::Pat::PathPat(p) => {
- let path = p.path().and_then(|path| self.parse_path(path)).map(Box::new);
+ let path = p.path().and_then(|path| self.parse_path(path));
path.map(Pat::Path).unwrap_or(Pat::Missing)
}
ast::Pat::OrPat(p) => 'b: {
diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs
index 20a6e5cc2d..b27a97ab47 100644
--- a/crates/hir-def/src/generics.rs
+++ b/crates/hir-def/src/generics.rs
@@ -21,7 +21,7 @@ use crate::{
item_tree::{AttrOwner, FileItemTreeId, GenericModItem, GenericsItemTreeNode, ItemTree},
lower::LowerCtx,
nameres::{DefMap, MacroSubNs},
- path::{AssociatedTypeBinding, GenericArg, GenericArgs, Path},
+ path::{AssociatedTypeBinding, GenericArg, GenericArgs, NormalPath, Path},
type_ref::{ConstRef, LifetimeRef, TypeBound, TypeRef, TypeRefId, TypesMap, TypesSourceMap},
AdtId, ConstParamId, GenericDefId, HasModule, ItemTreeLoc, LifetimeParamId,
LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId,
@@ -788,19 +788,16 @@ fn copy_path(
to_source_map: &mut TypesSourceMap,
) -> Path {
match path {
- Path::Normal { type_anchor, mod_path, generic_args } => {
- let type_anchor = type_anchor
+ Path::BarePath(mod_path) => Path::BarePath(mod_path.clone()),
+ Path::Normal(path) => {
+ let type_anchor = path
+ .type_anchor()
.map(|type_ref| copy_type_ref(type_ref, from, from_source_map, to, to_source_map));
- let mod_path = mod_path.clone();
- let generic_args = generic_args.as_ref().map(|generic_args| {
- generic_args
- .iter()
- .map(|generic_args| {
- copy_generic_args(generic_args, from, from_source_map, to, to_source_map)
- })
- .collect()
+ let mod_path = path.mod_path().clone();
+ let generic_args = path.generic_args().iter().map(|generic_args| {
+ copy_generic_args(generic_args, from, from_source_map, to, to_source_map)
});
- Path::Normal { type_anchor, mod_path, generic_args }
+ Path::Normal(NormalPath::new(type_anchor, mod_path, generic_args))
}
Path::LangItem(lang_item, name) => Path::LangItem(*lang_item, name.clone()),
}
diff --git a/crates/hir-def/src/hir.rs b/crates/hir-def/src/hir.rs
index df1e103230..8596346943 100644
--- a/crates/hir-def/src/hir.rs
+++ b/crates/hir-def/src/hir.rs
@@ -583,7 +583,7 @@ pub enum Pat {
suffix: Box<[PatId]>,
},
/// This might refer to a variable if a single segment path (specifically, on destructuring assignment).
- Path(Box<Path>),
+ Path(Path),
Lit(ExprId),
Bind {
id: BindingId,
diff --git a/crates/hir-def/src/hir/type_ref.rs b/crates/hir-def/src/hir/type_ref.rs
index b9f47ffdd0..38d95084e7 100644
--- a/crates/hir-def/src/hir/type_ref.rs
+++ b/crates/hir-def/src/hir/type_ref.rs
@@ -201,7 +201,7 @@ pub enum TypeBound {
}
#[cfg(target_pointer_width = "64")]
-const _: [(); 48] = [(); ::std::mem::size_of::<TypeBound>()];
+const _: [(); 32] = [(); ::std::mem::size_of::<TypeBound>()];
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub enum UseArgRef {
diff --git a/crates/hir-def/src/path.rs b/crates/hir-def/src/path.rs
index 4f02a59f8d..dc6947c5b5 100644
--- a/crates/hir-def/src/path.rs
+++ b/crates/hir-def/src/path.rs
@@ -14,6 +14,7 @@ use crate::{
use hir_expand::name::Name;
use intern::Interned;
use span::Edition;
+use stdx::thin_vec::thin_vec_with_header_struct;
use syntax::ast;
pub use hir_expand::mod_path::{path, ModPath, PathKind};
@@ -47,20 +48,33 @@ impl Display for ImportAliasDisplay<'_> {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Path {
- /// A normal path
- Normal {
- /// Type based path like `<T>::foo`.
- /// Note that paths like `<Type as Trait>::foo` are desugared to `Trait::<Self=Type>::foo`.
- type_anchor: Option<TypeRefId>,
- mod_path: Interned<ModPath>,
- /// Invariant: the same len as `self.mod_path.segments` or `None` if all segments are `None`.
- generic_args: Option<Box<[Option<GenericArgs>]>>,
- },
+ /// `BarePath` is used when the path has neither generics nor type anchor, since the vast majority of paths
+ /// are in this category, and splitting `Path` this way allows it to be more thin. When the path has either generics
+ /// or type anchor, it is `Path::Normal` with the generics filled with `None` even if there are none (practically
+ /// this is not a problem since many more paths have generics than a type anchor).
+ BarePath(Interned<ModPath>),
+ /// `Path::Normal` may have empty generics and type anchor (but generic args will be filled with `None`).
+ Normal(NormalPath),
/// A link to a lang item. It is used in desugaring of things like `it?`. We can show these
/// links via a normal path since they might be private and not accessible in the usage place.
LangItem(LangItemTarget, Option<Name>),
}
+// This type is being used a lot, make sure it doesn't grow unintentionally.
+#[cfg(target_arch = "x86_64")]
+const _: () = {
+ assert!(size_of::<Path>() == 16);
+ assert!(size_of::<Option<Path>>() == 16);
+};
+
+thin_vec_with_header_struct! {
+ pub new(pub(crate)) struct NormalPath, NormalPathHeader {
+ pub generic_args: [Option<GenericArgs>],
+ pub type_anchor: Option<TypeRefId>,
+ pub mod_path: Interned<ModPath>; ref,
+ }
+}
+
/// Generic arguments to a path segment (e.g. the `i32` in `Option<i32>`). This
/// also includes bindings of associated types, like in `Iterator<Item = Foo>`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -112,50 +126,49 @@ impl Path {
}
/// Converts a known mod path to `Path`.
- pub fn from_known_path(
- path: ModPath,
- generic_args: impl Into<Box<[Option<GenericArgs>]>>,
- ) -> Path {
- let generic_args = generic_args.into();
- assert_eq!(path.len(), generic_args.len());
- Path::Normal {
- type_anchor: None,
- mod_path: Interned::new(path),
- generic_args: Some(generic_args),
- }
+ pub fn from_known_path(path: ModPath, generic_args: Vec<Option<GenericArgs>>) -> Path {
+ Path::Normal(NormalPath::new(None, Interned::new(path), generic_args))
}
/// Converts a known mod path to `Path`.
pub fn from_known_path_with_no_generic(path: ModPath) -> Path {
- Path::Normal { type_anchor: None, mod_path: Interned::new(path), generic_args: None }
+ Path::BarePath(Interned::new(path))
}
+ #[inline]
pub fn kind(&self) -> &PathKind {
match self {
- Path::Normal { mod_path, .. } => &mod_path.kind,
+ Path::BarePath(mod_path) => &mod_path.kind,
+ Path::Normal(path) => &path.mod_path().kind,
Path::LangItem(..) => &PathKind::Abs,
}
}
+ #[inline]
pub fn type_anchor(&self) -> Option<TypeRefId> {
match self {
- Path::Normal { type_anchor, .. } => *type_anchor,
- Path::LangItem(..) => None,
+ Path::Normal(path) => path.type_anchor(),
+ Path::LangItem(..) | Path::BarePath(_) => None,
+ }
+ }
+
+ #[inline]
+ pub fn generic_args(&self) -> Option<&[Option<GenericArgs>]> {
+ match self {
+ Path::Normal(path) => Some(path.generic_args()),
+ Path::LangItem(..) | Path::BarePath(_) => None,
}
}
pub fn segments(&self) -> PathSegments<'_> {
match self {
- Path::Normal { mod_path, generic_args, .. } => {
- let s = PathSegments {
- segments: mod_path.segments(),
- generic_args: generic_args.as_deref(),
- };
- if let Some(generic_args) = s.generic_args {
- assert_eq!(s.segments.len(), generic_args.len());
- }
- s
+ Path::BarePath(mod_path) => {
+ PathSegments { segments: mod_path.segments(), generic_args: None }
}
+ Path::Normal(path) => PathSegments {
+ segments: path.mod_path().segments(),
+ generic_args: Some(path.generic_args()),
+ },
Path::LangItem(_, seg) => PathSegments {
segments: seg.as_ref().map_or(&[], |seg| std::slice::from_ref(seg)),
generic_args: None,
@@ -165,34 +178,55 @@ impl Path {
pub fn mod_path(&self) -> Option<&ModPath> {
match self {
- Path::Normal { mod_path, .. } => Some(mod_path),
+ Path::BarePath(mod_path) => Some(mod_path),
+ Path::Normal(path) => Some(path.mod_path()),
Path::LangItem(..) => None,
}
}
pub fn qualifier(&self) -> Option<Path> {
- let Path::Normal { mod_path, generic_args, type_anchor } = self else {
- return None;
- };
- if mod_path.is_ident() {
- return None;
+ match self {
+ Path::BarePath(mod_path) => {
+ if mod_path.is_ident() {
+ return None;
+ }
+ Some(Path::BarePath(Interned::new(ModPath::from_segments(
+ mod_path.kind,
+ mod_path.segments()[..mod_path.segments().len() - 1].iter().cloned(),
+ ))))
+ }
+ Path::Normal(path) => {
+ let mod_path = path.mod_path();
+ if mod_path.is_ident() {
+ return None;
+ }
+ let type_anchor = path.type_anchor();
+ let generic_args = path.generic_args();
+ let qualifier_mod_path = Interned::new(ModPath::from_segments(
+ mod_path.kind,
+ mod_path.segments()[..mod_path.segments().len() - 1].iter().cloned(),
+ ));
+ let qualifier_generic_args = &generic_args[..generic_args.len() - 1];
+ Some(Path::Normal(NormalPath::new(
+ type_anchor,
+ qualifier_mod_path,
+ qualifier_generic_args.iter().cloned(),
+ )))
+ }
+ Path::LangItem(..) => None,
}
- let res = Path::Normal {
- type_anchor: *type_anchor,
- mod_path: Interned::new(ModPath::from_segments(
- mod_path.kind,
- mod_path.segments()[..mod_path.segments().len() - 1].iter().cloned(),
- )),
- generic_args: generic_args.as_ref().map(|it| it[..it.len() - 1].to_vec().into()),
- };
- Some(res)
}
pub fn is_self_type(&self) -> bool {
- let Path::Normal { mod_path, generic_args, type_anchor } = self else {
- return false;
- };
- type_anchor.is_none() && generic_args.as_deref().is_none() && mod_path.is_Self()
+ match self {
+ Path::BarePath(mod_path) => mod_path.is_Self(),
+ Path::Normal(path) => {
+ path.type_anchor().is_none()
+ && path.mod_path().is_Self()
+ && path.generic_args().iter().all(|args| args.is_none())
+ }
+ Path::LangItem(..) => false,
+ }
}
}
@@ -268,16 +302,6 @@ impl GenericArgs {
impl From<Name> for Path {
fn from(name: Name) -> Path {
- Path::Normal {
- type_anchor: None,
- mod_path: Interned::new(ModPath::from_segments(PathKind::Plain, iter::once(name))),
- generic_args: None,
- }
- }
-}
-
-impl From<Name> for Box<Path> {
- fn from(name: Name) -> Box<Path> {
- Box::new(Path::from(name))
+ Path::BarePath(Interned::new(ModPath::from_segments(PathKind::Plain, iter::once(name))))
}
}
diff --git a/crates/hir-def/src/path/lower.rs b/crates/hir-def/src/path/lower.rs
index 5472da59b5..df036ef3b6 100644
--- a/crates/hir-def/src/path/lower.rs
+++ b/crates/hir-def/src/path/lower.rs
@@ -2,7 +2,7 @@
use std::iter;
-use crate::{lower::LowerCtx, type_ref::ConstRef};
+use crate::{lower::LowerCtx, path::NormalPath, type_ref::ConstRef};
use hir_expand::{
mod_path::resolve_crate_root,
@@ -74,11 +74,9 @@ pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path
}
// <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo
Some(trait_ref) => {
- let Path::Normal { mod_path, generic_args: path_generic_args, .. } =
- Path::from_src(ctx, trait_ref.path()?)?
- else {
- return None;
- };
+ let path = Path::from_src(ctx, trait_ref.path()?)?;
+ let mod_path = path.mod_path()?;
+ let path_generic_args = path.generic_args();
let num_segments = mod_path.segments().len();
kind = mod_path.kind;
@@ -136,7 +134,7 @@ pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path
};
}
segments.reverse();
- if !generic_args.is_empty() {
+ if !generic_args.is_empty() || type_anchor.is_some() {
generic_args.resize(segments.len(), None);
generic_args.reverse();
}
@@ -165,11 +163,11 @@ pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path
}
let mod_path = Interned::new(ModPath::from_segments(kind, segments));
- return Some(Path::Normal {
- type_anchor,
- mod_path,
- generic_args: if generic_args.is_empty() { None } else { Some(generic_args.into()) },
- });
+ if type_anchor.is_none() && generic_args.is_empty() {
+ return Some(Path::BarePath(mod_path));
+ } else {
+ return Some(Path::Normal(NormalPath::new(type_anchor, mod_path, generic_args)));
+ }
fn qualifier(path: &ast::Path) -> Option<ast::Path> {
if let Some(q) = path.qualifier() {
diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs
index 0b49ee8051..42c7ea7c09 100644
--- a/crates/hir-def/src/resolver.rs
+++ b/crates/hir-def/src/resolver.rs
@@ -167,7 +167,8 @@ impl Resolver {
path: &Path,
) -> Option<(TypeNs, Option<usize>, Option<ImportOrExternCrate>)> {
let path = match path {
- Path::Normal { mod_path, .. } => mod_path,
+ Path::BarePath(mod_path) => mod_path,
+ Path::Normal(it) => it.mod_path(),
Path::LangItem(l, seg) => {
let type_ns = match *l {
LangItemTarget::Union(it) => TypeNs::AdtId(it.into()),
@@ -265,7 +266,8 @@ impl Resolver {
mut hygiene_id: HygieneId,
) -> Option<ResolveValueResult> {
let path = match path {
- Path::Normal { mod_path, .. } => mod_path,
+ Path::BarePath(mod_path) => mod_path,
+ Path::Normal(it) => it.mod_path(),
Path::LangItem(l, None) => {
return Some(ResolveValueResult::ValueNs(
match *l {
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index ee55dbe1c3..32b4ea2f28 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -198,7 +198,7 @@ impl InferenceContext<'_> {
match &self.body[expr] {
// Lang item paths cannot currently be local variables or statics.
Expr::Path(Path::LangItem(_, _)) => false,
- Expr::Path(Path::Normal { type_anchor: Some(_), .. }) => false,
+ Expr::Path(Path::Normal(path)) => path.type_anchor().is_none(),
Expr::Path(path) => self
.resolver
.resolve_path_in_value_ns_fully(
@@ -1214,7 +1214,7 @@ impl InferenceContext<'_> {
let ty = match self.infer_path(path, id) {
Some(ty) => ty,
None => {
- if matches!(path, Path::Normal { mod_path, .. } if mod_path.is_ident() || mod_path.is_self())
+ if path.mod_path().is_some_and(|mod_path| mod_path.is_ident() || mod_path.is_self())
{
self.push_diagnostic(InferenceDiagnostic::UnresolvedIdent { id });
}
diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs
index 0df44f5df2..442daa9f9e 100644
--- a/crates/hir-ty/src/infer/path.rs
+++ b/crates/hir-ty/src/infer/path.rs
@@ -222,7 +222,7 @@ impl InferenceContext<'_> {
let _d;
let (resolved_segment, remaining_segments) = match path {
- Path::Normal { .. } => {
+ Path::Normal { .. } | Path::BarePath(_) => {
assert!(remaining_index < path.segments().len());
(
path.segments().get(remaining_index - 1).unwrap(),
diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs
index 3339422760..c4e0640051 100644
--- a/crates/hir-ty/src/mir/lower.rs
+++ b/crates/hir-ty/src/mir/lower.rs
@@ -1373,12 +1373,11 @@ impl<'ctx> MirLowerCtx<'ctx> {
),
};
let edition = self.edition();
- let unresolved_name = || {
- MirLowerError::unresolved_path(self.db, c.as_ref(), edition, &self.body.types)
- };
+ let unresolved_name =
+ || MirLowerError::unresolved_path(self.db, c, edition, &self.body.types);
let pr = self
.resolver
- .resolve_path_in_value_ns(self.db.upcast(), c.as_ref(), HygieneId::ROOT)
+ .resolve_path_in_value_ns(self.db.upcast(), c, HygieneId::ROOT)
.ok_or_else(unresolved_name)?;
match pr {
ResolveValueResult::ValueNs(v, _) => {
diff --git a/crates/stdx/src/lib.rs b/crates/stdx/src/lib.rs
index 76dbd42ff6..3e36394182 100644
--- a/crates/stdx/src/lib.rs
+++ b/crates/stdx/src/lib.rs
@@ -10,6 +10,7 @@ pub mod non_empty_vec;
pub mod panic_context;
pub mod process;
pub mod rand;
+pub mod thin_vec;
pub mod thread;
pub use always_assert::{always, never};
diff --git a/crates/stdx/src/thin_vec.rs b/crates/stdx/src/thin_vec.rs
new file mode 100644
index 0000000000..700220e1d3
--- /dev/null
+++ b/crates/stdx/src/thin_vec.rs
@@ -0,0 +1,472 @@
+use std::alloc::{dealloc, handle_alloc_error, Layout};
+use std::fmt;
+use std::hash::{Hash, Hasher};
+use std::marker::PhantomData;
+use std::ops::{Deref, DerefMut};
+use std::ptr::{addr_of_mut, slice_from_raw_parts_mut, NonNull};
+
+/// A type that is functionally equivalent to `(Header, Box<[Item]>)`,
+/// but all data is stored in one heap allocation and the pointer is thin,
+/// so the whole thing's size is like a pointer.
+pub struct ThinVecWithHeader<Header, Item> {
+ /// INVARIANT: Points to a valid heap allocation that contains `ThinVecInner<Header>`,
+ /// followed by (suitably aligned) `len` `Item`s.
+ ptr: NonNull<ThinVecInner<Header>>,
+ _marker: PhantomData<(Header, Box<[Item]>)>,
+}
+
+// SAFETY: We essentially own both the header and the items.
+unsafe impl<Header: Send, Item: Send> Send for ThinVecWithHeader<Header, Item> {}
+unsafe impl<Header: Sync, Item: Sync> Sync for ThinVecWithHeader<Header, Item> {}
+
+#[derive(Clone)]
+struct ThinVecInner<Header> {
+ header: Header,
+ len: usize,
+}
+
+impl<Header, Item> ThinVecWithHeader<Header, Item> {
+ /// # Safety
+ ///
+ /// The iterator must produce `len` elements.
+ #[inline]
+ unsafe fn from_trusted_len_iter(
+ header: Header,
+ len: usize,
+ items: impl Iterator<Item = Item>,
+ ) -> Self {
+ let (ptr, layout, items_offset) = Self::allocate(len);
+
+ struct DeallocGuard(*mut u8, Layout);
+ impl Drop for DeallocGuard {
+ fn drop(&mut self) {
+ // SAFETY: We allocated this above.
+ unsafe {
+ dealloc(self.0, self.1);
+ }
+ }
+ }
+ let _dealloc_guard = DeallocGuard(ptr.as_ptr().cast::<u8>(), layout);
+
+ // INVARIANT: Between `0..1` there are only initialized items.
+ struct ItemsGuard<Item>(*mut Item, *mut Item);
+ impl<Item> Drop for ItemsGuard<Item> {
+ fn drop(&mut self) {
+ // SAFETY: Our invariant.
+ unsafe {
+ slice_from_raw_parts_mut(self.0, self.1.offset_from(self.0) as usize)
+ .drop_in_place();
+ }
+ }
+ }
+
+ // SAFETY: We allocated enough space.
+ let mut items_ptr = unsafe { ptr.as_ptr().byte_add(items_offset).cast::<Item>() };
+ // INVARIANT: There are zero elements in this range.
+ let mut items_guard = ItemsGuard(items_ptr, items_ptr);
+ items.for_each(|item| {
+ // SAFETY: Our precondition guarantee we won't get more than `len` items, and we allocated
+ // enough space for `len` items.
+ unsafe {
+ items_ptr.write(item);
+ items_ptr = items_ptr.add(1);
+ }
+ // INVARIANT: We just initialized this item.
+ items_guard.1 = items_ptr;
+ });
+
+ // SAFETY: We allocated enough space.
+ unsafe {
+ ptr.write(ThinVecInner { header, len });
+ }
+
+ std::mem::forget(items_guard);
+
+ std::mem::forget(_dealloc_guard);
+
+ // INVARIANT: We allocated and initialized all fields correctly.
+ Self { ptr, _marker: PhantomData }
+ }
+
+ #[inline]
+ fn allocate(len: usize) -> (NonNull<ThinVecInner<Header>>, Layout, usize) {
+ let (layout, items_offset) = Self::layout(len);
+ // SAFETY: We always have `len`, so our allocation cannot be zero-sized.
+ let ptr = unsafe { std::alloc::alloc(layout).cast::<ThinVecInner<Header>>() };
+ let Some(ptr) = NonNull::<ThinVecInner<Header>>::new(ptr) else {
+ handle_alloc_error(layout);
+ };
+ (ptr, layout, items_offset)
+ }
+
+ #[inline]
+ #[allow(clippy::should_implement_trait)]
+ pub fn from_iter<I>(header: Header, items: I) -> Self
+ where
+ I: IntoIterator,
+ I::IntoIter: TrustedLen<Item = Item>,
+ {
+ let items = items.into_iter();
+ // SAFETY: `TrustedLen` guarantees the iterator length is exact.
+ unsafe { Self::from_trusted_len_iter(header, items.len(), items) }
+ }
+
+ #[inline]
+ fn items_offset(&self) -> usize {
+ // SAFETY: We `pad_to_align()` in `layout()`, so at most where accessing past the end of the allocation,
+ // which is allowed.
+ unsafe {
+ Layout::new::<ThinVecInner<Header>>().extend(Layout::new::<Item>()).unwrap_unchecked().1
+ }
+ }
+
+ #[inline]
+ fn header_and_len(&self) -> &ThinVecInner<Header> {
+ // SAFETY: By `ptr`'s invariant, it is correctly allocated and initialized.
+ unsafe { &*self.ptr.as_ptr() }
+ }
+
+ #[inline]
+ fn items_ptr(&self) -> *mut [Item] {
+ let len = self.header_and_len().len;
+ // SAFETY: `items_offset()` returns the correct offset of the items, where they are allocated.
+ let ptr = unsafe { self.ptr.as_ptr().byte_add(self.items_offset()).cast::<Item>() };
+ slice_from_raw_parts_mut(ptr, len)
+ }
+
+ #[inline]
+ pub fn header(&self) -> &Header {
+ &self.header_and_len().header
+ }
+
+ #[inline]
+ pub fn header_mut(&mut self) -> &mut Header {
+ // SAFETY: By `ptr`'s invariant, it is correctly allocated and initialized.
+ unsafe { &mut *addr_of_mut!((*self.ptr.as_ptr()).header) }
+ }
+
+ #[inline]
+ pub fn items(&self) -> &[Item] {
+ // SAFETY: `items_ptr()` gives a valid pointer.
+ unsafe { &*self.items_ptr() }
+ }
+
+ #[inline]
+ pub fn items_mut(&mut self) -> &mut [Item] {
+ // SAFETY: `items_ptr()` gives a valid pointer.
+ unsafe { &mut *self.items_ptr() }
+ }
+
+ #[inline]
+ pub fn len(&self) -> usize {
+ self.header_and_len().len
+ }
+
+ #[inline]
+ fn layout(len: usize) -> (Layout, usize) {
+ let (layout, items_offset) = Layout::new::<ThinVecInner<Header>>()
+ .extend(Layout::array::<Item>(len).expect("too big `ThinVec` requested"))
+ .expect("too big `ThinVec` requested");
+ let layout = layout.pad_to_align();
+ (layout, items_offset)
+ }
+}
+
+/// # Safety
+///
+/// The length reported must be exactly the number of items yielded.
+pub unsafe trait TrustedLen: ExactSizeIterator {}
+
+unsafe impl<T> TrustedLen for std::vec::IntoIter<T> {}
+unsafe impl<T> TrustedLen for std::slice::Iter<'_, T> {}
+unsafe impl<'a, T: Clone + 'a, I: TrustedLen<Item = &'a T>> TrustedLen for std::iter::Cloned<I> {}
+unsafe impl<T, I: TrustedLen, F: FnMut(I::Item) -> T> TrustedLen for std::iter::Map<I, F> {}
+unsafe impl<T> TrustedLen for std::vec::Drain<'_, T> {}
+unsafe impl<T, const N: usize> TrustedLen for std::array::IntoIter<T, N> {}
+
+impl<Header: Clone, Item: Clone> Clone for ThinVecWithHeader<Header, Item> {
+ #[inline]
+ fn clone(&self) -> Self {
+ Self::from_iter(self.header().clone(), self.items().iter().cloned())
+ }
+}
+
+impl<Header, Item> Drop for ThinVecWithHeader<Header, Item> {
+ #[inline]
+ fn drop(&mut self) {
+ // This must come before we drop `header`, because after that we cannot make a reference to it in `len()`.
+ let len = self.len();
+
+ // SAFETY: The contents are allocated and initialized.
+ unsafe {
+ addr_of_mut!((*self.ptr.as_ptr()).header).drop_in_place();
+ self.items_ptr().drop_in_place();
+ }
+
+ let (layout, _) = Self::layout(len);
+ // SAFETY: This was allocated in `new()` with the same layout calculation.
+ unsafe {
+ dealloc(self.ptr.as_ptr().cast::<u8>(), layout);
+ }
+ }
+}
+
+impl<Header: fmt::Debug, Item: fmt::Debug> fmt::Debug for ThinVecWithHeader<Header, Item> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("ThinVecWithHeader")
+ .field("header", self.header())
+ .field("items", &self.items())
+ .finish()
+ }
+}
+
+impl<Header: PartialEq, Item: PartialEq> PartialEq for ThinVecWithHeader<Header, Item> {
+ #[inline]
+ fn eq(&self, other: &Self) -> bool {
+ self.header() == other.header() && self.items() == other.items()
+ }
+}
+
+impl<Header: Eq, Item: Eq> Eq for ThinVecWithHeader<Header, Item> {}
+
+impl<Header: Hash, Item: Hash> Hash for ThinVecWithHeader<Header, Item> {
+ #[inline]
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.header().hash(state);
+ self.items().hash(state);
+ }
+}
+
+#[derive(Clone, PartialEq, Eq, Hash)]
+pub struct ThinVec<T>(ThinVecWithHeader<(), T>);
+
+impl<T> ThinVec<T> {
+ #[inline]
+ #[allow(clippy::should_implement_trait)]
+ pub fn from_iter<I>(values: I) -> Self
+ where
+ I: IntoIterator,
+ I::IntoIter: TrustedLen<Item = T>,
+ {
+ Self(ThinVecWithHeader::from_iter((), values))
+ }
+
+ #[inline]
+ pub fn len(&self) -> usize {
+ self.0.len()
+ }
+
+ #[inline]
+ pub fn iter(&self) -> std::slice::Iter<'_, T> {
+ (**self).iter()
+ }
+
+ #[inline]
+ pub fn iter_mut(&mut self) -> std::slice::IterMut<'_, T> {
+ (**self).iter_mut()
+ }
+}
+
+impl<T> Deref for ThinVec<T> {
+ type Target = [T];
+
+ #[inline]
+ fn deref(&self) -> &Self::Target {
+ self.0.items()
+ }
+}
+
+impl<T> DerefMut for ThinVec<T> {
+ #[inline]
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ self.0.items_mut()
+ }
+}
+
+impl<'a, T> IntoIterator for &'a ThinVec<T> {
+ type IntoIter = std::slice::Iter<'a, T>;
+ type Item = &'a T;
+
+ #[inline]
+ fn into_iter(self) -> Self::IntoIter {
+ self.iter()
+ }
+}
+
+impl<'a, T> IntoIterator for &'a mut ThinVec<T> {
+ type IntoIter = std::slice::IterMut<'a, T>;
+ type Item = &'a mut T;
+
+ #[inline]
+ fn into_iter(self) -> Self::IntoIter {
+ self.iter_mut()
+ }
+}
+
+impl<T: fmt::Debug> fmt::Debug for ThinVec<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(&**self).finish()
+ }
+}
+
+/// A [`ThinVec`] that requires no allocation for the empty case.
+#[derive(Clone, PartialEq, Eq, Hash)]
+pub struct EmptyOptimizedThinVec<T>(Option<ThinVec<T>>);
+
+impl<T> EmptyOptimizedThinVec<T> {
+ #[inline]
+ #[allow(clippy::should_implement_trait)]
+ pub fn from_iter<I>(values: I) -> Self
+ where
+ I: IntoIterator,
+ I::IntoIter: TrustedLen<Item = T>,
+ {
+ let values = values.into_iter();
+ if values.len() == 0 {
+ Self::empty()
+ } else {
+ Self(Some(ThinVec::from_iter(values)))
+ }
+ }
+
+ #[inline]
+ pub fn empty() -> Self {
+ Self(None)
+ }
+
+ #[inline]
+ pub fn len(&self) -> usize {
+ self.0.as_ref().map_or(0, ThinVec::len)
+ }
+
+ #[inline]
+ pub fn iter(&self) -> std::slice::Iter<'_, T> {
+ (**self).iter()
+ }
+
+ #[inline]
+ pub fn iter_mut(&mut self) -> std::slice::IterMut<'_, T> {
+ (**self).iter_mut()
+ }
+}
+
+impl<T> Default for EmptyOptimizedThinVec<T> {
+ #[inline]
+ fn default() -> Self {
+ Self::empty()
+ }
+}
+
+impl<T> Deref for EmptyOptimizedThinVec<T> {
+ type Target = [T];
+
+ #[inline]
+ fn deref(&self) -> &Self::Target {
+ self.0.as_deref().unwrap_or_default()
+ }
+}
+
+impl<T> DerefMut for EmptyOptimizedThinVec<T> {
+ #[inline]
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ self.0.as_deref_mut().unwrap_or_default()
+ }
+}
+
+impl<'a, T> IntoIterator for &'a EmptyOptimizedThinVec<T> {
+ type IntoIter = std::slice::Iter<'a, T>;
+ type Item = &'a T;
+
+ #[inline]
+ fn into_iter(self) -> Self::IntoIter {
+ self.iter()
+ }
+}
+
+impl<'a, T> IntoIterator for &'a mut EmptyOptimizedThinVec<T> {
+ type IntoIter = std::slice::IterMut<'a, T>;
+ type Item = &'a mut T;
+
+ #[inline]
+ fn into_iter(self) -> Self::IntoIter {
+ self.iter_mut()
+ }
+}
+
+impl<T: fmt::Debug> fmt::Debug for EmptyOptimizedThinVec<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(&**self).finish()
+ }
+}
+
+/// Syntax:
+///
+/// ```ignore
+/// thin_vec_with_header_struct! {
+/// pub new(pub(crate)) struct MyCoolStruct, MyCoolStructHeader {
+/// pub(crate) variable_length: [Ty],
+/// pub field1: CopyTy,
+/// pub field2: NonCopyTy; ref,
+/// }
+/// }
+/// ```
+#[doc(hidden)]
+#[macro_export]
+macro_rules! thin_vec_with_header_struct_ {
+ (@maybe_ref (ref) $($t:tt)*) => { &$($t)* };
+ (@maybe_ref () $($t:tt)*) => { $($t)* };
+ (
+ $vis:vis new($new_vis:vis) struct $struct:ident, $header:ident {
+ $items_vis:vis $items:ident : [$items_ty:ty],
+ $( $header_var_vis:vis $header_var:ident : $header_var_ty:ty $(; $ref:ident)?, )+
+ }
+ ) => {
+ #[derive(Debug, Clone, Eq, PartialEq, Hash)]
+ struct $header {
+ $( $header_var : $header_var_ty, )+
+ }
+
+ #[derive(Clone, Eq, PartialEq, Hash)]
+ $vis struct $struct($crate::thin_vec::ThinVecWithHeader<$header, $items_ty>);
+
+ impl $struct {
+ #[inline]
+ #[allow(unused)]
+ $new_vis fn new<I>(
+ $( $header_var: $header_var_ty, )+
+ $items: I,
+ ) -> Self
+ where
+ I: ::std::iter::IntoIterator,
+ I::IntoIter: $crate::thin_vec::TrustedLen<Item = $items_ty>,
+ {
+ Self($crate::thin_vec::ThinVecWithHeader::from_iter(
+ $header { $( $header_var, )+ },
+ $items,
+ ))
+ }
+
+ #[inline]
+ $items_vis fn $items(&self) -> &[$items_ty] {
+ self.0.items()
+ }
+
+ $(
+ #[inline]
+ $header_var_vis fn $header_var(&self) -> $crate::thin_vec_with_header_struct_!(@maybe_ref ($($ref)?) $header_var_ty) {
+ $crate::thin_vec_with_header_struct_!(@maybe_ref ($($ref)?) self.0.header().$header_var)
+ }
+ )+
+ }
+
+ impl ::std::fmt::Debug for $struct {
+ fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
+ f.debug_struct(stringify!($struct))
+ $( .field(stringify!($header_var), &self.$header_var()) )*
+ .field(stringify!($items), &self.$items())
+ .finish()
+ }
+ }
+ };
+}
+pub use crate::thin_vec_with_header_struct_ as thin_vec_with_header_struct;
diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs
index 0268e2473c..c3d531344a 100644
--- a/xtask/src/tidy.rs
+++ b/xtask/src/tidy.rs
@@ -223,7 +223,7 @@ struct TidyDocs {
impl TidyDocs {
fn visit(&mut self, path: &Path, text: &str) {
// Tests and diagnostic fixes don't need module level comments.
- if is_exclude_dir(path, &["tests", "test_data", "fixes", "grammar", "ra-salsa"]) {
+ if is_exclude_dir(path, &["tests", "test_data", "fixes", "grammar", "ra-salsa", "stdx"]) {
return;
}