Unnamed repository; edit this file 'description' to name the repository.
Convert some of mir/eval to next-solver types
jackh726 8 months ago
parent 9912b80 · commit 17b94c4
-rw-r--r--crates/hir-ty/src/consteval/tests.rs2
-rw-r--r--crates/hir-ty/src/display.rs17
-rw-r--r--crates/hir-ty/src/lib.rs41
-rw-r--r--crates/hir-ty/src/mir.rs2
-rw-r--r--crates/hir-ty/src/mir/eval.rs282
-rw-r--r--crates/hir-ty/src/mir/eval/shim.rs25
-rw-r--r--crates/hir-ty/src/mir/eval/tests.rs2
-rw-r--r--crates/hir-ty/src/next_solver/consts.rs12
-rw-r--r--crates/hir-ty/src/next_solver/mapping.rs7
9 files changed, 258 insertions, 132 deletions
diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs
index 22b152fe03..299b73a7d6 100644
--- a/crates/hir-ty/src/consteval/tests.rs
+++ b/crates/hir-ty/src/consteval/tests.rs
@@ -76,7 +76,7 @@ fn check_str(#[rust_analyzer::rust_fixture] ra_fixture: &str, answer: &str) {
#[track_caller]
fn check_answer(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
- check: impl FnOnce(&[u8], &MemoryMap),
+ check: impl FnOnce(&[u8], &MemoryMap<'_>),
) {
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
let file_id = *file_ids.last().unwrap();
diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs
index 81bc48eecf..5adbea75a6 100644
--- a/crates/hir-ty/src/display.rs
+++ b/crates/hir-ty/src/display.rs
@@ -744,20 +744,20 @@ impl HirDisplay for Const {
fn render_const_scalar(
f: &mut HirFormatter<'_>,
b: &[u8],
- memory_map: &MemoryMap,
+ memory_map: &MemoryMap<'_>,
ty: &Ty,
) -> Result<(), HirDisplayError> {
let trait_env = TraitEnvironment::empty(f.krate());
let interner = DbInterner::new_with(f.db, Some(trait_env.krate), trait_env.block);
let ty = normalize(f.db, trait_env.clone(), ty.clone());
let ty = ty.to_nextsolver(interner);
- render_const_scalar_inner(f, b, memory_map, ty, trait_env, interner)
+ render_const_scalar_inner(f, b, memory_map, ty, trait_env)
}
fn render_const_scalar_ns(
f: &mut HirFormatter<'_>,
b: &[u8],
- memory_map: &MemoryMap,
+ memory_map: &MemoryMap<'_>,
ty: crate::next_solver::Ty<'_>,
) -> Result<(), HirDisplayError> {
let trait_env = TraitEnvironment::empty(f.krate());
@@ -767,16 +767,15 @@ fn render_const_scalar_ns(
trait_env.env.to_nextsolver(interner),
ty,
);
- render_const_scalar_inner(f, b, memory_map, ty, trait_env, interner)
+ render_const_scalar_inner(f, b, memory_map, ty, trait_env)
}
fn render_const_scalar_inner(
f: &mut HirFormatter<'_>,
b: &[u8],
- memory_map: &MemoryMap,
+ memory_map: &MemoryMap<'_>,
ty: crate::next_solver::Ty<'_>,
trait_env: Arc<TraitEnvironment>,
- interner: DbInterner<'_>,
) -> Result<(), HirDisplayError> {
use rustc_type_ir::TyKind;
match ty.kind() {
@@ -875,7 +874,7 @@ fn render_const_scalar_inner(
let Ok(t) = memory_map.vtable_ty(ty_id) else {
return f.write_str("<ty-missing-in-vtable-map>");
};
- let Ok(layout) = f.db.layout_of_ty(t.clone(), trait_env) else {
+ let Ok(layout) = f.db.layout_of_ty_ns(t, trait_env) else {
return f.write_str("<layout-error>");
};
let size = layout.size.bytes_usize();
@@ -883,7 +882,7 @@ fn render_const_scalar_inner(
return f.write_str("<ref-data-not-available>");
};
f.write_str("&")?;
- render_const_scalar_ns(f, bytes, memory_map, t.to_nextsolver(interner))
+ render_const_scalar_ns(f, bytes, memory_map, t)
}
TyKind::Adt(adt, _) if b.len() == 2 * size_of::<usize>() => match adt.def_id() {
SolverDefId::AdtId(hir_def::AdtId::StructId(s)) => {
@@ -1052,7 +1051,7 @@ fn render_variant_after_name(
layout: &Layout,
args: GenericArgs<'_>,
b: &[u8],
- memory_map: &MemoryMap,
+ memory_map: &MemoryMap<'_>,
) -> Result<(), HirDisplayError> {
let interner = DbInterner::new_with(f.db, Some(trait_env.krate), trait_env.block);
match data.shape {
diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs
index 81894830b5..8ce0aeb553 100644
--- a/crates/hir-ty/src/lib.rs
+++ b/crates/hir-ty/src/lib.rs
@@ -212,20 +212,20 @@ pub(crate) type ProgramClause = chalk_ir::ProgramClause<Interner>;
/// the necessary bits of memory of the const eval session to keep the constant
/// meaningful.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
-pub enum MemoryMap {
+pub enum MemoryMap<'db> {
#[default]
Empty,
Simple(Box<[u8]>),
- Complex(Box<ComplexMemoryMap>),
+ Complex(Box<ComplexMemoryMap<'db>>),
}
#[derive(Debug, Default, Clone, PartialEq, Eq)]
-pub struct ComplexMemoryMap {
+pub struct ComplexMemoryMap<'db> {
memory: IndexMap<usize, Box<[u8]>, FxBuildHasher>,
- vtable: VTableMap,
+ vtable: VTableMap<'db>,
}
-impl ComplexMemoryMap {
+impl ComplexMemoryMap<'_> {
fn insert(&mut self, addr: usize, val: Box<[u8]>) {
match self.memory.entry(addr) {
Entry::Occupied(mut e) => {
@@ -240,8 +240,8 @@ impl ComplexMemoryMap {
}
}
-impl MemoryMap {
- pub fn vtable_ty(&self, id: usize) -> Result<&Ty, MirEvalError> {
+impl<'db> MemoryMap<'db> {
+ pub fn vtable_ty(&self, id: usize) -> Result<crate::next_solver::Ty<'db>, MirEvalError> {
match self {
MemoryMap::Empty | MemoryMap::Simple(_) => Err(MirEvalError::InvalidVTableId(id)),
MemoryMap::Complex(cm) => cm.vtable.ty(id),
@@ -291,10 +291,11 @@ impl MemoryMap {
}
}
+// FIXME(next-solver):
/// A concrete constant value
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ConstScalar {
- Bytes(Box<[u8]>, MemoryMap),
+ Bytes(Box<[u8]>, MemoryMap<'static>),
// FIXME: this is a hack to get around chalk not being able to represent unevaluatable
// constants
UnevaluatedConst(GeneralConstId, Substitution),
@@ -315,6 +316,30 @@ impl Hash for ConstScalar {
}
}
+/// A concrete constant value
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ConstScalarNs<'db> {
+ Bytes(Box<[u8]>, MemoryMap<'db>),
+ // FIXME: this is a hack to get around chalk not being able to represent unevaluatable
+ // constants
+ UnevaluatedConst(GeneralConstId, Substitution),
+ /// Case of an unknown value that rustc might know but we don't
+ // FIXME: this is a hack to get around chalk not being able to represent unevaluatable
+ // constants
+ // https://github.com/rust-lang/rust-analyzer/pull/8813#issuecomment-840679177
+ // https://rust-lang.zulipchat.com/#narrow/stream/144729-wg-traits/topic/Handling.20non.20evaluatable.20constants'.20equality/near/238386348
+ Unknown,
+}
+
+impl Hash for ConstScalarNs<'_> {
+ fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+ core::mem::discriminant(self).hash(state);
+ if let ConstScalarNs::Bytes(b, _) = self {
+ b.hash(state)
+ }
+ }
+}
+
/// Return an index of a parameter in the generic type parameter list by it's id.
pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<usize> {
generics::generics(db, id.parent).type_or_const_param_idx(id)
diff --git a/crates/hir-ty/src/mir.rs b/crates/hir-ty/src/mir.rs
index 482b420279..8c48a16537 100644
--- a/crates/hir-ty/src/mir.rs
+++ b/crates/hir-ty/src/mir.rs
@@ -107,7 +107,7 @@ pub enum OperandKind {
}
impl Operand {
- fn from_concrete_const(data: Box<[u8]>, memory_map: MemoryMap, ty: Ty) -> Self {
+ fn from_concrete_const(data: Box<[u8]>, memory_map: MemoryMap<'static>, ty: Ty) -> Self {
Operand {
kind: OperandKind::Constant(intern_const_scalar(
ConstScalar::Bytes(data, memory_map),
diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs
index dfb8ae704b..d0ae92961e 100644
--- a/crates/hir-ty/src/mir/eval.rs
+++ b/crates/hir-ty/src/mir/eval.rs
@@ -25,21 +25,26 @@ use rustc_apfloat::{
ieee::{Half as f16, Quad as f128},
};
use rustc_hash::{FxHashMap, FxHashSet};
+use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike, Ty as _};
use span::FileId;
use stdx::never;
use syntax::{SyntaxNodePtr, TextRange};
use triomphe::Arc;
use crate::{
- AliasTy, CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstData, ConstScalar, FnDefId,
- Interner, MemoryMap, Substitution, ToChalk, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
+ AliasTy, CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstData, ConstScalar, Interner,
+ MemoryMap, Substitution, ToChalk, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
consteval::{ConstEvalError, intern_const_scalar, try_const_usize},
+ consteval_nextsolver,
db::{HirDatabase, InternedClosure},
display::{ClosureStyle, DisplayTarget, HirDisplay},
infer::PointerCast,
layout::{Layout, LayoutError, RustcEnumVariantIdx},
- mapping::from_chalk,
method_resolution::{is_dyn_method, lookup_impl_const},
+ next_solver::{
+ Ctor, DbInterner, SolverDefId,
+ mapping::{ChalkToNextSolver, convert_args_for_result, convert_ty_for_result},
+ },
static_lifetime,
traits::FnTrait,
utils::{ClosureSubst, detect_variant_from_bytes},
@@ -78,31 +83,31 @@ macro_rules! not_supported {
}
#[derive(Debug, Default, Clone, PartialEq, Eq)]
-pub struct VTableMap {
- ty_to_id: FxHashMap<Ty, usize>,
- id_to_ty: Vec<Ty>,
+pub struct VTableMap<'db> {
+ ty_to_id: FxHashMap<crate::next_solver::Ty<'db>, usize>,
+ id_to_ty: Vec<crate::next_solver::Ty<'db>>,
}
-impl VTableMap {
+impl<'db> VTableMap<'db> {
const OFFSET: usize = 1000; // We should add some offset to ids to make 0 (null) an invalid id.
- fn id(&mut self, ty: Ty) -> usize {
+ fn id(&mut self, ty: crate::next_solver::Ty<'db>) -> usize {
if let Some(it) = self.ty_to_id.get(&ty) {
return *it;
}
let id = self.id_to_ty.len() + VTableMap::OFFSET;
- self.id_to_ty.push(ty.clone());
+ self.id_to_ty.push(ty);
self.ty_to_id.insert(ty, id);
id
}
- pub(crate) fn ty(&self, id: usize) -> Result<&Ty> {
+ pub(crate) fn ty(&self, id: usize) -> Result<crate::next_solver::Ty<'db>> {
id.checked_sub(VTableMap::OFFSET)
- .and_then(|id| self.id_to_ty.get(id))
+ .and_then(|id| self.id_to_ty.get(id).copied())
.ok_or(MirEvalError::InvalidVTableId(id))
}
- fn ty_of_bytes(&self, bytes: &[u8]) -> Result<&Ty> {
+ fn ty_of_bytes(&self, bytes: &[u8]) -> Result<crate::next_solver::Ty<'db>> {
let id = from_bytes!(usize, bytes);
self.ty(id)
}
@@ -170,12 +175,12 @@ pub struct Evaluator<'a> {
/// We don't really have function pointers, i.e. pointers to some assembly instructions that we can run. Instead, we
/// store the type as an interned id in place of function and vtable pointers, and we recover back the type at the
/// time of use.
- vtable_map: VTableMap,
+ vtable_map: VTableMap<'a>,
thread_local_storage: TlsData,
random_state: oorandom::Rand64,
stdout: Vec<u8>,
stderr: Vec<u8>,
- layout_cache: RefCell<FxHashMap<Ty, Arc<Layout>>>,
+ layout_cache: RefCell<FxHashMap<crate::next_solver::Ty<'a>, Arc<Layout>>>,
projected_ty_cache: RefCell<FxHashMap<(Ty, PlaceElem), Ty>>,
not_special_fn_cache: RefCell<FxHashSet<FunctionId>>,
mir_or_dyn_index_cache: RefCell<FxHashMap<(FunctionId, Substitution), MirOrDynIndex>>,
@@ -224,7 +229,7 @@ impl Interval {
Self { addr, size }
}
- fn get<'a>(&self, memory: &'a Evaluator<'a>) -> Result<&'a [u8]> {
+ fn get<'a, 'db>(&self, memory: &'a Evaluator<'db>) -> Result<&'a [u8]> {
memory.read_memory(self.addr, self.size)
}
@@ -242,7 +247,7 @@ impl Interval {
}
impl IntervalAndTy {
- fn get<'a>(&self, memory: &'a Evaluator<'a>) -> Result<&'a [u8]> {
+ fn get<'a, 'db>(&self, memory: &'a Evaluator<'db>) -> Result<&'a [u8]> {
memory.read_memory(self.interval.addr, self.interval.size)
}
@@ -269,7 +274,7 @@ impl From<Interval> for IntervalOrOwned {
}
impl IntervalOrOwned {
- fn get<'a>(&'a self, memory: &'a Evaluator<'a>) -> Result<&'a [u8]> {
+ fn get<'a, 'db>(&'a self, memory: &'a Evaluator<'db>) -> Result<&'a [u8]> {
Ok(match self {
IntervalOrOwned::Owned(o) => o,
IntervalOrOwned::Borrowed(b) => b.get(memory)?,
@@ -608,7 +613,13 @@ pub fn interpret_mir(
memory_map.vtable.shrink_to_fit();
MemoryMap::Complex(Box::new(memory_map))
};
- Ok(intern_const_scalar(ConstScalar::Bytes(bytes, memory_map), ty))
+ // SAFETY: will never use this without a db
+ Ok(intern_const_scalar(
+ ConstScalar::Bytes(bytes, unsafe {
+ std::mem::transmute::<MemoryMap<'_>, MemoryMap<'static>>(memory_map)
+ }),
+ ty,
+ ))
})();
Ok((it, MirOutput { stdout: evaluator.stdout, stderr: evaluator.stderr }))
}
@@ -618,7 +629,7 @@ const EXECUTION_LIMIT: usize = 100_000;
#[cfg(not(test))]
const EXECUTION_LIMIT: usize = 10_000_000;
-impl Evaluator<'_> {
+impl<'db> Evaluator<'db> {
pub fn new(
db: &dyn HirDatabase,
owner: DefWithBodyId,
@@ -719,6 +730,7 @@ impl Evaluator<'_> {
p: &Place,
locals: &'a Locals,
) -> Result<(Address, Ty, Option<IntervalOrOwned>)> {
+ let interner = DbInterner::new_with(self.db, None, None);
let mut addr = locals.ptr[p.local].addr;
let mut ty: Ty = locals.body.locals[p.local].ty.clone();
let mut metadata: Option<IntervalOrOwned> = None; // locals are always sized
@@ -791,19 +803,19 @@ impl Evaluator<'_> {
addr = addr.offset(ty_size * (from as usize));
}
&ProjectionElem::ClosureField(f) => {
- let layout = self.layout(&prev_ty)?;
+ let layout = self.layout(prev_ty.to_nextsolver(interner))?;
let offset = layout.fields.offset(f).bytes_usize();
addr = addr.offset(offset);
metadata = None;
}
ProjectionElem::Field(Either::Right(f)) => {
- let layout = self.layout(&prev_ty)?;
+ let layout = self.layout(prev_ty.to_nextsolver(interner))?;
let offset = layout.fields.offset(f.index as usize).bytes_usize();
addr = addr.offset(offset);
metadata = None; // tuple field is always sized FIXME: This is wrong, the tail can be unsized
}
ProjectionElem::Field(Either::Left(f)) => {
- let layout = self.layout(&prev_ty)?;
+ let layout = self.layout(prev_ty.to_nextsolver(interner))?;
let variant_layout = match &layout.variants {
Variants::Single { .. } | Variants::Empty => &layout,
Variants::Multiple { variants, .. } => {
@@ -835,20 +847,28 @@ impl Evaluator<'_> {
Ok((addr, ty, metadata))
}
- fn layout(&self, ty: &Ty) -> Result<Arc<Layout>> {
- if let Some(x) = self.layout_cache.borrow().get(ty) {
+ fn layout(&self, ty: crate::next_solver::Ty<'db>) -> Result<Arc<Layout>> {
+ if let Some(x) = self.layout_cache.borrow().get(&ty) {
return Ok(x.clone());
}
+ let interner = DbInterner::new_with(self.db, None, None);
let r = self
.db
- .layout_of_ty(ty.clone(), self.trait_env.clone())
- .map_err(|e| MirEvalError::LayoutError(e, ty.clone()))?;
- self.layout_cache.borrow_mut().insert(ty.clone(), r.clone());
+ .layout_of_ty_ns(ty, self.trait_env.clone())
+ .map_err(|e| MirEvalError::LayoutError(e, convert_ty_for_result(interner, ty)))?;
+ self.layout_cache.borrow_mut().insert(ty, r.clone());
Ok(r)
}
fn layout_adt(&self, adt: AdtId, subst: Substitution) -> Result<Arc<Layout>> {
- self.layout(&TyKind::Adt(chalk_ir::AdtId(adt), subst).intern(Interner))
+ let interner = DbInterner::new_with(self.db, None, None);
+ self.layout(crate::next_solver::Ty::new(
+ interner,
+ rustc_type_ir::TyKind::Adt(
+ crate::next_solver::AdtDef::new(adt, interner),
+ subst.to_nextsolver(interner),
+ ),
+ ))
}
fn place_ty<'a>(&'a self, p: &Place, locals: &'a Locals) -> Result<Ty> {
@@ -952,7 +972,7 @@ impl Evaluator<'_> {
)?
}
TyKind::FnDef(def, generic_args) => self.exec_fn_def(
- *def,
+ CallableDefId::from_chalk(self.db, *def),
generic_args,
destination_interval,
&args,
@@ -1113,6 +1133,7 @@ impl Evaluator<'_> {
}
fn eval_rvalue(&mut self, r: &Rvalue, locals: &mut Locals) -> Result<IntervalOrOwned> {
+ let interner = DbInterner::new_with(self.db, None, None);
use IntervalOrOwned::*;
Ok(match r {
Rvalue::Use(it) => Borrowed(self.eval_operand(it, locals)?),
@@ -1436,7 +1457,7 @@ impl Evaluator<'_> {
Owned(r)
}
AggregateKind::Tuple(ty) => {
- let layout = self.layout(ty)?;
+ let layout = self.layout(ty.to_nextsolver(interner))?;
Owned(self.construct_with_layout(
layout.size.bytes_usize(),
&layout,
@@ -1467,7 +1488,7 @@ impl Evaluator<'_> {
)?)
}
AggregateKind::Closure(ty) => {
- let layout = self.layout(ty)?;
+ let layout = self.layout(ty.to_nextsolver(interner))?;
Owned(self.construct_with_layout(
layout.size.bytes_usize(),
&layout,
@@ -1484,6 +1505,8 @@ impl Evaluator<'_> {
if let TyKind::FnDef(_, _) | TyKind::Closure(_, _) =
&current_ty.kind(Interner)
{
+ let interner = DbInterner::new_with(self.db, None, None);
+ let current_ty = current_ty.to_nextsolver(interner);
let id = self.vtable_map.id(current_ty);
let ptr_size = self.ptr_size();
Owned(id.to_le_bytes()[0..ptr_size].to_vec())
@@ -1623,7 +1646,8 @@ impl Evaluator<'_> {
}
fn compute_discriminant(&self, ty: Ty, bytes: &[u8]) -> Result<i128> {
- let layout = self.layout(&ty)?;
+ let interner = DbInterner::new_with(self.db, None, None);
+ let layout = self.layout(ty.to_nextsolver(interner))?;
let &TyKind::Adt(chalk_ir::AdtId(AdtId::EnumId(e)), _) = ty.kind(Interner) else {
return Ok(0);
};
@@ -1732,6 +1756,8 @@ impl Evaluator<'_> {
}
},
TyKind::Dyn(_) => {
+ let interner = DbInterner::new_with(self.db, None, None);
+ let current_ty = current_ty.to_nextsolver(interner);
let vtable = self.vtable_map.id(current_ty);
let mut r = Vec::with_capacity(16);
let addr = addr.get(self)?;
@@ -1777,6 +1803,7 @@ impl Evaluator<'_> {
subst: Substitution,
locals: &Locals,
) -> Result<(usize, Arc<Layout>, Option<(usize, usize, i128)>)> {
+ let interner = DbInterner::new_with(self.db, None, None);
let adt = it.adt_id(self.db);
if let DefWithBodyId::VariantId(f) = locals.body.owner
&& let VariantId::EnumVariantId(it) = it
@@ -1786,7 +1813,11 @@ impl Evaluator<'_> {
// Computing the exact size of enums require resolving the enum discriminants. In order to prevent loops (and
// infinite sized type errors) we use a dummy layout
let i = self.const_eval_discriminant(it)?;
- return Ok((16, self.layout(&TyBuilder::unit())?, Some((0, 16, i))));
+ return Ok((
+ 16,
+ self.layout(crate::next_solver::Ty::new_tup(interner, &[]))?,
+ Some((0, 16, i)),
+ ));
}
let layout = self.layout_adt(adt, subst)?;
Ok(match &layout.variants {
@@ -1885,6 +1916,7 @@ impl Evaluator<'_> {
#[allow(clippy::double_parens)]
fn allocate_const_in_heap(&mut self, locals: &Locals, konst: &Const) -> Result<Interval> {
+ let interner = DbInterner::new_with(self.db, None, None);
let ConstData { ty, value: chalk_ir::ConstValue::Concrete(c) } = &konst.data(Interner)
else {
not_supported!("evaluating non concrete constant");
@@ -1945,7 +1977,7 @@ impl Evaluator<'_> {
MemoryMap::Complex(cm) => cm.vtable.ty_of_bytes(bytes),
},
addr,
- ty,
+ ty.to_nextsolver(interner),
locals,
)?;
Ok(Interval::new(addr, size))
@@ -2048,7 +2080,8 @@ impl Evaluator<'_> {
}
fn size_align_of(&self, ty: &Ty, locals: &Locals) -> Result<Option<(usize, usize)>> {
- if let Some(layout) = self.layout_cache.borrow().get(ty) {
+ let interner = DbInterner::new_with(self.db, None, None);
+ if let Some(layout) = self.layout_cache.borrow().get(&ty.to_nextsolver(interner)) {
return Ok(layout
.is_sized()
.then(|| (layout.size.bytes_usize(), layout.align.abi.bytes() as usize)));
@@ -2061,7 +2094,7 @@ impl Evaluator<'_> {
// infinite sized type errors) we use a dummy size
return Ok(Some((16, 16)));
}
- let layout = self.layout(ty);
+ let layout = self.layout(ty.to_nextsolver(interner));
if self.assert_placeholder_ty_is_unused
&& matches!(layout, Err(MirEvalError::LayoutError(LayoutError::HasPlaceholder, _)))
{
@@ -2129,15 +2162,16 @@ impl Evaluator<'_> {
bytes: &[u8],
ty: &Ty,
locals: &Locals,
- ) -> Result<ComplexMemoryMap> {
- fn rec(
- this: &Evaluator<'_>,
+ ) -> Result<ComplexMemoryMap<'db>> {
+ fn rec<'db>(
+ this: &Evaluator<'db>,
bytes: &[u8],
ty: &Ty,
locals: &Locals,
- mm: &mut ComplexMemoryMap,
+ mm: &mut ComplexMemoryMap<'db>,
stack_depth_limit: usize,
) -> Result<()> {
+ let interner = DbInterner::new_with(this.db, None, None);
if stack_depth_limit.checked_sub(1).is_none() {
return Err(MirEvalError::StackOverflow);
}
@@ -2158,13 +2192,14 @@ impl Evaluator<'_> {
let element_size = match t.kind(Interner) {
TyKind::Str => 1,
TyKind::Slice(t) => {
- check_inner = Some(t);
+ check_inner = Some(t.clone());
this.size_of_sized(t, locals, "slice inner type")?
}
TyKind::Dyn(_) => {
let t = this.vtable_map.ty_of_bytes(meta)?;
- check_inner = Some(t);
- this.size_of_sized(t, locals, "dyn concrete type")?
+ let t = convert_ty_for_result(interner, t);
+ check_inner = Some(t.clone());
+ this.size_of_sized(&t, locals, "dyn concrete type")?
}
_ => return Ok(()),
};
@@ -2176,7 +2211,7 @@ impl Evaluator<'_> {
let addr = Address::from_bytes(addr)?;
let b = this.read_memory(addr, size)?;
mm.insert(addr.to_usize(), b.into());
- if let Some(ty) = check_inner {
+ if let Some(ty) = &check_inner {
for i in 0..count {
let offset = element_size * i;
rec(
@@ -2211,11 +2246,11 @@ impl Evaluator<'_> {
}
}
TyKind::Tuple(_, subst) => {
- let layout = this.layout(ty)?;
+ let layout = this.layout(ty.to_nextsolver(interner))?;
for (id, ty) in subst.iter(Interner).enumerate() {
let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument
let offset = layout.fields.offset(id).bytes_usize();
- let size = this.layout(ty)?.size.bytes_usize();
+ let size = this.layout(ty.to_nextsolver(interner))?.size.bytes_usize();
rec(
this,
&bytes[offset..offset + size],
@@ -2229,7 +2264,7 @@ impl Evaluator<'_> {
TyKind::Adt(adt, subst) => match adt.0 {
AdtId::StructId(s) => {
let data = s.fields(this.db);
- let layout = this.layout(ty)?;
+ let layout = this.layout(ty.to_nextsolver(interner))?;
let field_types = this.db.field_types(s.into());
for (f, _) in data.fields().iter() {
let offset = layout
@@ -2237,7 +2272,7 @@ impl Evaluator<'_> {
.offset(u32::from(f.into_raw()) as usize)
.bytes_usize();
let ty = &field_types[f].clone().substitute(Interner, subst);
- let size = this.layout(ty)?.size.bytes_usize();
+ let size = this.layout(ty.to_nextsolver(interner))?.size.bytes_usize();
rec(
this,
&bytes[offset..offset + size],
@@ -2249,7 +2284,7 @@ impl Evaluator<'_> {
}
}
AdtId::EnumId(e) => {
- let layout = this.layout(ty)?;
+ let layout = this.layout(ty.to_nextsolver(interner))?;
if let Some((v, l)) = detect_variant_from_bytes(
&layout,
this.db,
@@ -2263,7 +2298,8 @@ impl Evaluator<'_> {
let offset =
l.fields.offset(u32::from(f.into_raw()) as usize).bytes_usize();
let ty = &field_types[f].clone().substitute(Interner, subst);
- let size = this.layout(ty)?.size.bytes_usize();
+ let size =
+ this.layout(ty.to_nextsolver(interner))?.size.bytes_usize();
rec(
this,
&bytes[offset..offset + size],
@@ -2290,20 +2326,26 @@ impl Evaluator<'_> {
Ok(mm)
}
- fn patch_addresses<'vtable>(
+ fn patch_addresses(
&mut self,
patch_map: &FxHashMap<usize, usize>,
- ty_of_bytes: impl Fn(&[u8]) -> Result<&'vtable Ty> + Copy,
+ ty_of_bytes: impl Fn(&[u8]) -> Result<crate::next_solver::Ty<'db>> + Copy,
addr: Address,
- ty: &Ty,
+ ty: crate::next_solver::Ty<'db>,
locals: &Locals,
) -> Result<()> {
+ let interner = DbInterner::new_with(self.db, None, None);
// FIXME: support indirect references
let layout = self.layout(ty)?;
- let my_size = self.size_of_sized(ty, locals, "value to patch address")?;
- match ty.kind(Interner) {
- TyKind::Ref(_, _, t) => {
- let size = self.size_align_of(t, locals)?;
+ let my_size = self.size_of_sized(
+ &convert_ty_for_result(interner, ty),
+ locals,
+ "value to patch address",
+ )?;
+ use rustc_type_ir::TyKind;
+ match ty.kind() {
+ TyKind::Ref(_, t, _) => {
+ let size = self.size_align_of(&convert_ty_for_result(interner, t), locals)?;
match size {
Some(_) => {
let current = from_bytes!(usize, self.read_memory(addr, my_size)?);
@@ -2319,27 +2361,27 @@ impl Evaluator<'_> {
}
}
}
- TyKind::Function(_) => {
- let ty = ty_of_bytes(self.read_memory(addr, my_size)?)?.clone();
+ TyKind::FnPtr(_, _) => {
+ let ty = ty_of_bytes(self.read_memory(addr, my_size)?)?;
let new_id = self.vtable_map.id(ty);
self.write_memory(addr, &new_id.to_le_bytes())?;
}
- TyKind::Adt(id, subst) => match id.0 {
- AdtId::StructId(s) => {
- for (i, (_, ty)) in self.db.field_types(s.into()).iter().enumerate() {
+ TyKind::Adt(id, args) => match id.def_id() {
+ SolverDefId::AdtId(AdtId::StructId(s)) => {
+ for (i, (_, ty)) in self.db.field_types_ns(s.into()).iter().enumerate() {
let offset = layout.fields.offset(i).bytes_usize();
- let ty = ty.clone().substitute(Interner, subst);
+ let ty = ty.instantiate(interner, args);
self.patch_addresses(
patch_map,
ty_of_bytes,
addr.offset(offset),
- &ty,
+ ty,
locals,
)?;
}
}
- AdtId::UnionId(_) => (),
- AdtId::EnumId(e) => {
+ SolverDefId::AdtId(AdtId::UnionId(_)) => (),
+ SolverDefId::AdtId(AdtId::EnumId(e)) => {
if let Some((ev, layout)) = detect_variant_from_bytes(
&layout,
self.db,
@@ -2347,33 +2389,37 @@ impl Evaluator<'_> {
self.read_memory(addr, layout.size.bytes_usize())?,
e,
) {
- for (i, (_, ty)) in self.db.field_types(ev.into()).iter().enumerate() {
+ for (i, (_, ty)) in self.db.field_types_ns(ev.into()).iter().enumerate() {
let offset = layout.fields.offset(i).bytes_usize();
- let ty = ty.clone().substitute(Interner, subst);
+ let ty = ty.instantiate(interner, args);
self.patch_addresses(
patch_map,
ty_of_bytes,
addr.offset(offset),
- &ty,
+ ty,
locals,
)?;
}
}
}
+ _ => unreachable!(),
},
- TyKind::Tuple(_, subst) => {
- for (id, ty) in subst.iter(Interner).enumerate() {
- let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument
+ TyKind::Tuple(tys) => {
+ for (id, ty) in tys.iter().enumerate() {
let offset = layout.fields.offset(id).bytes_usize();
self.patch_addresses(patch_map, ty_of_bytes, addr.offset(offset), ty, locals)?;
}
}
TyKind::Array(inner, len) => {
- let len = match try_const_usize(self.db, len) {
+ let len = match consteval_nextsolver::try_const_usize(self.db, len) {
Some(it) => it as usize,
None => not_supported!("non evaluatable array len in patching addresses"),
};
- let size = self.size_of_sized(inner, locals, "inner of array")?;
+ let size = self.size_of_sized(
+ &convert_ty_for_result(interner, inner),
+ locals,
+ "inner of array",
+ )?;
for i in 0..len {
self.patch_addresses(
patch_map,
@@ -2384,11 +2430,13 @@ impl Evaluator<'_> {
)?;
}
}
- TyKind::AssociatedType(_, _)
- | TyKind::Scalar(_)
+ TyKind::Bool
+ | TyKind::Char
+ | TyKind::Int(_)
+ | TyKind::Uint(_)
+ | TyKind::Float(_)
| TyKind::Slice(_)
- | TyKind::Raw(_, _)
- | TyKind::OpaqueType(_, _)
+ | TyKind::RawPtr(_, _)
| TyKind::FnDef(_, _)
| TyKind::Str
| TyKind::Never
@@ -2396,12 +2444,16 @@ impl Evaluator<'_> {
| TyKind::Coroutine(_, _)
| TyKind::CoroutineWitness(_, _)
| TyKind::Foreign(_)
- | TyKind::Error
+ | TyKind::Error(_)
| TyKind::Placeholder(_)
- | TyKind::Dyn(_)
- | TyKind::Alias(_)
- | TyKind::BoundVar(_)
- | TyKind::InferenceVar(_, _) => (),
+ | TyKind::Dynamic(_, _, _)
+ | TyKind::Alias(_, _)
+ | TyKind::Bound(_, _)
+ | TyKind::Infer(_)
+ | TyKind::Pat(_, _)
+ | TyKind::Param(_)
+ | TyKind::UnsafeBinder(_)
+ | TyKind::CoroutineClosure(_, _) => (),
}
Ok(())
}
@@ -2416,13 +2468,41 @@ impl Evaluator<'_> {
span: MirSpan,
) -> Result<Option<StackFrame>> {
let id = from_bytes!(usize, bytes.get(self)?);
- let next_ty = self.vtable_map.ty(id)?.clone();
- match next_ty.kind(Interner) {
+ let next_ty = self.vtable_map.ty(id)?;
+ let interner = DbInterner::new_with(self.db, None, None);
+ use rustc_type_ir::TyKind;
+ match next_ty.kind() {
TyKind::FnDef(def, generic_args) => {
- self.exec_fn_def(*def, generic_args, destination, args, locals, target_bb, span)
+ let def = match def {
+ SolverDefId::FunctionId(id) => CallableDefId::FunctionId(id),
+ SolverDefId::Ctor(Ctor::Struct(s)) => CallableDefId::StructId(s),
+ SolverDefId::Ctor(Ctor::Enum(e)) => CallableDefId::EnumVariantId(e),
+ _ => unreachable!(),
+ };
+ self.exec_fn_def(
+ def,
+ &convert_args_for_result(interner, generic_args.as_slice()),
+ destination,
+ args,
+ locals,
+ target_bb,
+ span,
+ )
}
- TyKind::Closure(id, subst) => {
- self.exec_closure(*id, bytes.slice(0..0), subst, destination, args, locals, span)
+ TyKind::Closure(id, generic_args) => {
+ let id = match id {
+ SolverDefId::InternedClosureId(id) => id,
+ _ => unreachable!(),
+ };
+ self.exec_closure(
+ id.into(),
+ bytes.slice(0..0),
+ &convert_args_for_result(interner, generic_args.as_slice()),
+ destination,
+ args,
+ locals,
+ span,
+ )
}
_ => Err(MirEvalError::InternalError("function pointer to non function".into())),
}
@@ -2469,7 +2549,7 @@ impl Evaluator<'_> {
fn exec_fn_def(
&mut self,
- def: FnDefId,
+ def: CallableDefId,
generic_args: &Substitution,
destination: Interval,
args: &[IntervalAndTy],
@@ -2477,7 +2557,6 @@ impl Evaluator<'_> {
target_bb: Option<BasicBlockId>,
span: MirSpan,
) -> Result<Option<StackFrame>> {
- let def: CallableDefId = from_chalk(self.db, def);
let generic_args = generic_args.clone();
match def {
CallableDefId::FunctionId(def) => {
@@ -2574,6 +2653,7 @@ impl Evaluator<'_> {
target_bb: Option<BasicBlockId>,
span: MirSpan,
) -> Result<Option<StackFrame>> {
+ let interner = DbInterner::new_with(self.db, None, None);
if self.detect_and_exec_special_function(
def,
args,
@@ -2600,6 +2680,7 @@ impl Evaluator<'_> {
.vtable_map
.ty_of_bytes(&first_arg[self.ptr_size()..self.ptr_size() * 2])?;
let mut args_for_target = args.to_vec();
+ let ty = convert_ty_for_result(interner, ty);
args_for_target[0] = IntervalAndTy {
interval: args_for_target[0].interval.slice(0..self.ptr_size()),
ty: ty.clone(),
@@ -2672,6 +2753,7 @@ impl Evaluator<'_> {
target_bb: Option<BasicBlockId>,
span: MirSpan,
) -> Result<Option<StackFrame>> {
+ let interner = DbInterner::new_with(self.db, None, None);
let func = args
.first()
.ok_or_else(|| MirEvalError::InternalError("fn trait with no arg".into()))?;
@@ -2683,15 +2765,21 @@ impl Evaluator<'_> {
let id =
from_bytes!(usize, &func_data.get(self)?[self.ptr_size()..self.ptr_size() * 2]);
func_data = func_data.slice(0..self.ptr_size());
- func_ty = self.vtable_map.ty(id)?.clone();
+ func_ty = convert_ty_for_result(interner, self.vtable_map.ty(id)?);
}
let size = self.size_of_sized(&func_ty, locals, "self type of fn trait")?;
func_data = Interval { addr: Address::from_bytes(func_data.get(self)?)?, size };
}
match &func_ty.kind(Interner) {
- TyKind::FnDef(def, subst) => {
- self.exec_fn_def(*def, subst, destination, &args[1..], locals, target_bb, span)
- }
+ TyKind::FnDef(def, subst) => self.exec_fn_def(
+ CallableDefId::from_chalk(self.db, *def),
+ subst,
+ destination,
+ &args[1..],
+ locals,
+ target_bb,
+ span,
+ ),
TyKind::Function(_) => {
self.exec_fn_pointer(func_data, destination, &args[1..], locals, target_bb, span)
}
@@ -2714,7 +2802,7 @@ impl Evaluator<'_> {
Substitution::from_iter(Interner, args.iter().map(|it| it.ty.clone())),
)
.intern(Interner);
- let layout = self.layout(&ty)?;
+ let layout = self.layout(ty.to_nextsolver(interner))?;
let result = self.construct_with_layout(
layout.size.bytes_usize(),
&layout,
@@ -2901,6 +2989,7 @@ pub fn render_const_using_debug_impl(
owner: DefWithBodyId,
c: &Const,
) -> Result<String> {
+ let interner = DbInterner::new_with(db, None, None);
let mut evaluator = Evaluator::new(db, owner, false, None)?;
let locals = &Locals {
ptr: ArenaMap::new(),
@@ -2933,7 +3022,8 @@ pub fn render_const_using_debug_impl(
CallableDefId::FunctionId(debug_fmt_fn).to_chalk(db),
Substitution::from1(Interner, c.data(Interner).ty.clone()),
)
- .intern(Interner));
+ .intern(Interner)
+ .to_nextsolver(interner));
evaluator.write_memory(a2.offset(evaluator.ptr_size()), &debug_fmt_fn_ptr.to_le_bytes())?;
// a3 = ::core::fmt::Arguments::new_v1(a1, a2)
// FIXME: similarly, we should call function here, not directly working with memory.
diff --git a/crates/hir-ty/src/mir/eval/shim.rs b/crates/hir-ty/src/mir/eval/shim.rs
index bb4c963a8a..e27d334d2a 100644
--- a/crates/hir-ty/src/mir/eval/shim.rs
+++ b/crates/hir-ty/src/mir/eval/shim.rs
@@ -23,6 +23,10 @@ use crate::{
LangItem, Layout, Locals, Lookup, MirEvalError, MirSpan, Mutability, Result, Substitution,
Ty, TyBuilder, TyExt, pad16,
},
+ next_solver::{
+ DbInterner,
+ mapping::{ChalkToNextSolver, convert_ty_for_result},
+ },
};
mod simd;
@@ -171,6 +175,7 @@ impl Evaluator<'_> {
destination: Interval,
span: MirSpan,
) -> Result<()> {
+ let interner = DbInterner::new_with(self.db, None, None);
match self_ty.kind(Interner) {
TyKind::Function(_) => {
let [arg] = args else {
@@ -188,7 +193,7 @@ impl Evaluator<'_> {
let InternedClosure(closure_owner, _) = self.db.lookup_intern_closure((*id).into());
let infer = self.db.infer(closure_owner);
let (captures, _) = infer.closure_info(id);
- let layout = self.layout(&self_ty)?;
+ let layout = self.layout(self_ty.to_nextsolver(interner))?;
let ty_iter = captures.iter().map(|c| c.ty(subst));
self.exec_clone_for_fields(ty_iter, layout, addr, def, locals, destination, span)?;
}
@@ -197,7 +202,7 @@ impl Evaluator<'_> {
not_supported!("wrong arg count for clone");
};
let addr = Address::from_bytes(arg.get(self)?)?;
- let layout = self.layout(&self_ty)?;
+ let layout = self.layout(self_ty.to_nextsolver(interner))?;
let ty_iter = subst.iter(Interner).map(|ga| ga.assert_ty_ref(Interner).clone());
self.exec_clone_for_fields(ty_iter, layout, addr, def, locals, destination, span)?;
}
@@ -226,8 +231,9 @@ impl Evaluator<'_> {
destination: Interval,
span: MirSpan,
) -> Result<()> {
+ let interner = DbInterner::new_with(self.db, None, None);
for (i, ty) in ty_iter.enumerate() {
- let size = self.layout(&ty)?.size.bytes_usize();
+ let size = self.layout(ty.to_nextsolver(interner))?.size.bytes_usize();
let tmp = self.heap_allocate(self.ptr_size(), self.ptr_size())?;
let arg = IntervalAndTy {
interval: Interval { addr: tmp, size: self.ptr_size() },
@@ -592,6 +598,7 @@ impl Evaluator<'_> {
span: MirSpan,
needs_override: bool,
) -> Result<bool> {
+ let interner = DbInterner::new_with(self.db, None, None);
if let Some(name) = name.strip_prefix("atomic_") {
return self
.exec_atomic_intrinsic(name, args, generic_args, destination, locals, span)
@@ -769,7 +776,7 @@ impl Evaluator<'_> {
"align_of generic arg is not provided".into(),
));
};
- let align = self.layout(ty)?.align.abi.bytes();
+ let align = self.layout(ty.to_nextsolver(interner))?.align.abi.bytes();
destination.write_from_bytes(self, &align.to_le_bytes()[0..destination.size])
}
"size_of_val" => {
@@ -1025,7 +1032,7 @@ impl Evaluator<'_> {
let is_overflow = u128overflow
|| ans.to_le_bytes()[op_size..].iter().any(|&it| it != 0 && it != 255);
let is_overflow = vec![u8::from(is_overflow)];
- let layout = self.layout(&result_ty)?;
+ let layout = self.layout(result_ty.to_nextsolver(interner))?;
let result = self.construct_with_layout(
layout.size.bytes_usize(),
&layout,
@@ -1249,7 +1256,7 @@ impl Evaluator<'_> {
"const_eval_select arg[0] is not a tuple".into(),
));
};
- let layout = self.layout(&tuple.ty)?;
+ let layout = self.layout(tuple.ty.to_nextsolver(interner))?;
for (i, field) in fields.iter(Interner).enumerate() {
let field = field.assert_ty_ref(Interner).clone();
let offset = layout.fields.offset(i).bytes_usize();
@@ -1408,6 +1415,7 @@ impl Evaluator<'_> {
metadata: Interval,
locals: &Locals,
) -> Result<(usize, usize)> {
+ let interner = DbInterner::new_with(self.db, None, None);
Ok(match ty.kind(Interner) {
TyKind::Str => (from_bytes!(usize, metadata.get(self)?), 1),
TyKind::Slice(inner) => {
@@ -1416,7 +1424,7 @@ impl Evaluator<'_> {
(size * len, align)
}
TyKind::Dyn(_) => self.size_align_of_sized(
- self.vtable_map.ty_of_bytes(metadata.get(self)?)?,
+ &convert_ty_for_result(interner, self.vtable_map.ty_of_bytes(metadata.get(self)?)?),
locals,
"dyn concrete type",
)?,
@@ -1463,6 +1471,7 @@ impl Evaluator<'_> {
locals: &Locals,
_span: MirSpan,
) -> Result<()> {
+ let interner = DbInterner::new_with(self.db, None, None);
// We are a single threaded runtime with no UB checking and no optimization, so
// we can implement atomic intrinsics as normal functions.
@@ -1560,7 +1569,7 @@ impl Evaluator<'_> {
Substitution::from_iter(Interner, [ty.clone(), TyBuilder::bool()]),
)
.intern(Interner);
- let layout = self.layout(&result_ty)?;
+ let layout = self.layout(result_ty.to_nextsolver(interner))?;
let result = self.construct_with_layout(
layout.size.bytes_usize(),
&layout,
diff --git a/crates/hir-ty/src/mir/eval/tests.rs b/crates/hir-ty/src/mir/eval/tests.rs
index eb5af58f2e..5a56d99fba 100644
--- a/crates/hir-ty/src/mir/eval/tests.rs
+++ b/crates/hir-ty/src/mir/eval/tests.rs
@@ -37,7 +37,7 @@ fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String),
)
.map_err(|e| MirEvalError::MirLowerError(func_id, e))?;
- let (result, output) = interpret_mir(db, body, false, None)?;
+ let (result, output) = salsa::attach(db, || interpret_mir(db, body, false, None))?;
result?;
Ok((output.stdout().into_owned(), output.stderr().into_owned()))
}
diff --git a/crates/hir-ty/src/next_solver/consts.rs b/crates/hir-ty/src/next_solver/consts.rs
index 5698ff290f..ce581cfad4 100644
--- a/crates/hir-ty/src/next_solver/consts.rs
+++ b/crates/hir-ty/src/next_solver/consts.rs
@@ -103,7 +103,7 @@ pub struct ValueConst<'db> {
}
impl<'db> ValueConst<'db> {
- pub fn new(ty: Ty<'db>, bytes: ConstBytes) -> Self {
+ pub fn new(ty: Ty<'db>, bytes: ConstBytes<'db>) -> Self {
let value = Valtree::new(bytes);
ValueConst { ty, value }
}
@@ -141,9 +141,9 @@ impl<'db> rustc_type_ir::TypeFoldable<DbInterner<'db>> for ValueConst<'db> {
}
#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct ConstBytes(pub Box<[u8]>, pub MemoryMap);
+pub struct ConstBytes<'db>(pub Box<[u8]>, pub MemoryMap<'db>);
-impl Hash for ConstBytes {
+impl Hash for ConstBytes<'_> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.0.hash(state)
}
@@ -152,11 +152,11 @@ impl Hash for ConstBytes {
#[salsa::interned(constructor = new_, debug)]
pub struct Valtree<'db> {
#[returns(ref)]
- bytes_: ConstBytes,
+ bytes_: ConstBytes<'db>,
}
impl<'db> Valtree<'db> {
- pub fn new(bytes: ConstBytes) -> Self {
+ pub fn new(bytes: ConstBytes<'db>) -> Self {
salsa::with_attached_database(|db| unsafe {
// SAFETY: ¯\_(ツ)_/¯
std::mem::transmute(Valtree::new_(db, bytes))
@@ -164,7 +164,7 @@ impl<'db> Valtree<'db> {
.unwrap()
}
- pub fn inner(&self) -> &ConstBytes {
+ pub fn inner(&self) -> &ConstBytes<'db> {
salsa::with_attached_database(|db| {
let inner = self.bytes_(db);
// SAFETY: The caller already has access to a `Valtree<'db>`, so borrowchecking will
diff --git a/crates/hir-ty/src/next_solver/mapping.rs b/crates/hir-ty/src/next_solver/mapping.rs
index b50fccb832..5fefb04a5e 100644
--- a/crates/hir-ty/src/next_solver/mapping.rs
+++ b/crates/hir-ty/src/next_solver/mapping.rs
@@ -21,7 +21,7 @@ use rustc_type_ir::{
use salsa::plumbing::AsId;
use crate::{
- ConcreteConst, ConstScalar, ImplTraitId, Interner,
+ ConcreteConst, ConstScalar, ImplTraitId, Interner, MemoryMap,
db::{
HirDatabase, InternedClosureId, InternedCoroutineId, InternedOpaqueTyId,
InternedTypeOrConstParamId,
@@ -1328,7 +1328,10 @@ pub fn convert_const_for_result<'db>(
rustc_type_ir::ConstKind::Value(value_const) => {
let bytes = value_const.value.inner();
let value = chalk_ir::ConstValue::Concrete(chalk_ir::ConcreteConst {
- interned: ConstScalar::Bytes(bytes.0.clone(), bytes.1.clone()),
+            // SAFETY: erasing `'db` from `MemoryMap` here is claimed sound because the resulting chalk `ConstScalar` is only ever read while a database is attached (NOTE(review): verify no caller stores it past the db borrow)
+ interned: ConstScalar::Bytes(bytes.0.clone(), unsafe {
+ std::mem::transmute::<MemoryMap<'db>, MemoryMap<'static>>(bytes.1.clone())
+ }),
});
return chalk_ir::ConstData {
ty: convert_ty_for_result(interner, value_const.ty),