in checker/src/block_visitor.rs [2438:2810]
/// Produces an abstract value for the given constant `val`, whose literal (rustc) type is `lty`.
///
/// Handles, in order:
/// 1. `ConstKind::Unevaluated` constants: resolved to a path (promoted constant or static),
///    imported if MIR is available, special-cased for the known
///    `AllocRawVecMinNonZeroCap` constant, or looked up in the persistent summary cache.
///    Falls back to forcing evaluation via `val.eval` when the path lookup yields nothing.
/// 2. Function-like types (closures, fn defs, generators, opaque types): represented by a
///    function-reference constant rather than by parsing serialized bytes.
/// 3. Fully evaluated values (`ConstKind::Value`): by-ref allocations, scalar ints,
///    scalar pointers into global allocations, and slices — each deserialized into the
///    abstract heap/environment as appropriate.
///
/// Anything not covered returns `ConstantDomain::Unimplemented` (after debug logging).
fn visit_const_kind(&mut self, mut val: ConstKind<'tcx>, lty: Ty<'tcx>) -> Rc<AbstractValue> {
    if let rustc_middle::ty::ConstKind::Unevaluated(unevaluated) = &val {
        let def_ty = unevaluated.def;
        if def_ty.const_param_did.is_some() {
            // A const generic parameter with a concrete default/argument: just let rustc
            // evaluate it; the evaluated value is handled by the ConstKind::Value arms below.
            val = val.eval(self.bv.tcx, self.type_visitor().get_param_env());
        } else {
            let mut def_id = def_ty.def_id_for_type_of();
            // Specialize the substitutions with the current generic argument map so that
            // resolution and layout queries below see concrete types where possible.
            let substs = self.type_visitor().specialize_substs(
                unevaluated.substs,
                &self.type_visitor().generic_argument_map,
            );
            // Record the (possibly updated) substs for this def_id so later summary
            // lookups/specializations use the specialized arguments.
            self.bv.cv.substs_cache.insert(def_id, substs);
            let path = match unevaluated.promoted {
                Some(promoted) => {
                    // A promoted constant lives in the current body's promoted MIR table;
                    // reference it by ordinal rather than by a static path.
                    let index = promoted.index();
                    Rc::new(PathEnum::PromotedConstant { ordinal: index }.into())
                }
                None => {
                    if !substs.is_empty() {
                        // Generic constant: try to devirtualize/monomorphize it to the
                        // concrete instance so we import the right MIR body.
                        let param_env = rustc_middle::ty::ParamEnv::reveal_all();
                        trace!("devirtualize resolving def_id {:?}: {:?}", def_id, def_ty);
                        trace!("substs {:?}", substs);
                        if let Ok(Some(instance)) = rustc_middle::ty::Instance::resolve(
                            self.bv.tcx,
                            param_env,
                            def_id,
                            substs,
                        ) {
                            def_id = instance.def.def_id();
                            trace!("resolved it to {:?}", def_id);
                        }
                    }
                    if self.bv.tcx.is_mir_available(def_id) {
                        // MIR is available: import the static's body so its value is
                        // reflected in the current environment at this path.
                        self.bv.import_static(Path::new_static(self.bv.tcx, def_id))
                    } else if self.bv.cv.known_names_cache.get(self.bv.tcx, def_id)
                        == KnownNames::AllocRawVecMinNonZeroCap
                    {
                        // Special-case alloc::raw_vec's MIN_NON_ZERO_CAP, whose MIR is not
                        // available: recompute its value from the element type's layout.
                        if let Ok(ty_and_layout) =
                            self.type_visitor().layout_of(substs.type_at(0))
                        {
                            if !ty_and_layout.is_unsized() {
                                let size_of_t = ty_and_layout.layout.size().bytes();
                                // Mirrors the thresholds in the standard library's
                                // RawVec::MIN_NON_ZERO_CAP definition.
                                let min_non_zero_cap: u128 = if size_of_t == 1 {
                                    8
                                } else if size_of_t <= 1024 {
                                    4
                                } else {
                                    1
                                };
                                return Rc::new((min_non_zero_cap).into());
                            }
                        }
                        // Unsized or layout failure: fall back to an opaque static path.
                        Path::new_static(self.bv.tcx, def_id)
                    } else {
                        // No MIR: see if a previously computed persistent summary exists
                        // for this constant.
                        let cache_key = utils::summary_key_str(self.bv.tcx, def_id);
                        let summary = self
                            .bv
                            .cv
                            .summary_cache
                            .get_persistent_summary_for(&cache_key);
                        if summary.is_computed {
                            // Import the summarized static and return its refined value.
                            let path =
                                self.bv.import_static(Path::new_static(self.bv.tcx, def_id));
                            self.type_visitor_mut()
                                .set_path_rustc_type(path.clone(), lty);
                            return self.bv.lookup_path_and_refine_result(path, lty);
                        } else {
                            Path::new_static(self.bv.tcx, def_id)
                        }
                    }
                }
            };
            // Associate the literal type with the path before looking it up so refinement
            // can use the correct type.
            self.type_visitor_mut()
                .set_path_rustc_type(path.clone(), lty);
            let val_at_path = self.bv.lookup_path_and_refine_result(path, lty);
            if let Expression::Variable { .. } = &val_at_path.expression {
                // Seems like there is nothing at the path, but...
                if self.bv.tcx.is_mir_available(def_id) {
                    // The MIR body should have computed something. If that something is
                    // a structure, the value of the path will be unknown (only leaf paths have
                    // known values).
                    return val_at_path;
                }
                // Seems like a lazily serialized constant. Force evaluation.
                val = val.eval(
                    self.bv.tcx,
                    self.type_visitor()
                        .get_param_env()
                        .with_reveal_all_normalized(self.bv.cv.tcx),
                );
                if let rustc_middle::ty::ConstKind::Unevaluated(..) = &val {
                    // val.eval did not manage to evaluate this, go with unknown.
                    debug!(
                        "static def_id with no MIR {:?} {:?}",
                        def_id,
                        utils::summary_key_str(self.bv.tcx, def_id)
                    );
                    debug!(
                        "type key {:?}",
                        utils::argument_types_key_str(self.bv.tcx, Some(substs))
                    );
                    return val_at_path;
                }
                // Otherwise: eval succeeded, so fall through and handle the now-evaluated
                // `val` in the ConstKind::Value arms below.
            } else {
                return val_at_path;
            }
        }
    }
    // Don't try to parse serialized functions, just use their types
    // todo: serialized closures and generators do not store enough information in their types.
    match lty.kind() {
        TyKind::Closure(def_id, substs)
        | TyKind::FnDef(def_id, substs)
        | TyKind::Generator(def_id, substs, ..)
        | TyKind::Opaque(def_id, substs) => {
            // Represent the constant by a function-reference constant derived from the
            // specialized type and substitutions.
            let specialized_ty = self.type_visitor().specialize_generic_argument_type(
                lty,
                &self.type_visitor().generic_argument_map,
            );
            let substs = self
                .type_visitor()
                .specialize_substs(substs, &self.type_visitor().generic_argument_map);
            return Rc::new(
                self.visit_function_reference(*def_id, specialized_ty, Some(substs))
                    .clone()
                    .into(),
            );
        }
        TyKind::FnPtr(..) => {
            //todo: figure out how function pointers are serialized
            debug!("span: {:?}", self.bv.current_span);
            debug!("type kind {:?}", lty.kind());
            debug!("unimplemented constant {:?}", val);
            return Rc::new(ConstantDomain::Unimplemented.into());
        }
        _ => {}
    }
    match &val {
        rustc_middle::ty::ConstKind::Param(ParamConst { index, .. }) => {
            // A reference to a const generic parameter of the current function: fetch the
            // actual argument from the visitor's generic arguments by index.
            if let Some(gen_args) = self.type_visitor().generic_arguments {
                if let Some(arg_val) = gen_args.as_ref().get(*index as usize) {
                    return self.visit_const(&arg_val.expect_const());
                }
            }
            // Well-typed MIR should always supply a matching argument.
            assume_unreachable!(
                "reference to unmatched generic constant argument {:?} {:?}",
                val,
                self.bv.current_span
            );
        }
        rustc_middle::ty::ConstKind::Value(ConstValue::ByRef { alloc, offset }) => {
            // A constant stored by reference in a compile-time allocation: copy its raw
            // bytes into a fresh heap block and deserialize them according to `lty`.
            let alloc_len = alloc.inner().len();
            let offset_bytes = offset.bytes() as usize;
            // The Rust compiler should ensure this.
            assume!(alloc_len > offset_bytes);
            let num_bytes = alloc_len - offset_bytes;
            let bytes = alloc
                .inner()
                .inspect_with_uninit_and_ptr_outside_interpreter(offset_bytes..alloc_len);
            let (heap_val, target_path) = self.bv.get_new_heap_block(
                Rc::new((num_bytes as u128).into()),
                Rc::new(1u128.into()),
                false,
                lty,
            );
            let bytes_left_to_deserialize =
                self.deserialize_constant_bytes(target_path, bytes, lty);
            if !bytes_left_to_deserialize.is_empty() {
                // Leftover bytes indicate the type/serialization didn't line up; log it
                // but still return the (partially populated) heap block.
                debug!("span: {:?}", self.bv.current_span);
                debug!("type kind {:?}", lty.kind());
                debug!("constant value did not serialize correctly {:?}", val);
            }
            return heap_val;
        }
        rustc_middle::ty::ConstKind::Value(ConstValue::Scalar(Scalar::Int(scalar_int))) => {
            // A scalar constant: extract its bits (ZSTs have none) into native-endian bytes.
            let size = scalar_int.size().bytes() as usize;
            let data: u128 = if *scalar_int == ScalarInt::ZST {
                0
            } else {
                scalar_int.to_bits(scalar_int.size()).unwrap()
            };
            let byte_array = data.to_ne_bytes();
            let bytes: &[u8] = &byte_array[0..size];
            match lty.kind() {
                TyKind::Adt(adt_def, _) if adt_def.is_enum() => {
                    // The scalar is an enum discriminant (plus possible payload bits).
                    return self.get_enum_variant_as_constant(&val, lty);
                }
                TyKind::Adt(..) | TyKind::Tuple(..) => {
                    if *scalar_int == ScalarInt::ZST {
                        // Zero-sized struct/tuple constant.
                        return Rc::new(ConstantDomain::Unit.into());
                    }
                    // A small struct/tuple packed into a scalar: deserialize its fields
                    // into a fresh heap block.
                    let (heap_val, target_path) = self.bv.get_new_heap_block(
                        Rc::new((size as u128).into()),
                        Rc::new(1u128.into()),
                        false,
                        lty,
                    );
                    let bytes_left_to_deserialize =
                        self.deserialize_constant_bytes(target_path, bytes, lty);
                    if !bytes_left_to_deserialize.is_empty() {
                        debug!("span: {:?}", self.bv.current_span);
                        debug!("type kind {:?}", lty.kind());
                        debug!("constant value did not serialize correctly {:?}", val);
                    }
                    debug!("env {:?}", self.bv.current_environment);
                    return heap_val;
                }
                TyKind::Array(elem_type, length) => {
                    // A (small) array packed into a scalar.
                    let length = self.bv.get_array_length(length);
                    let (array_value, array_path) = self.get_heap_array_and_path(lty, size);
                    self.deserialize_constant_array(array_path, bytes, length, *elem_type);
                    return array_value;
                }
                _ => {
                    // Plain primitive scalar (integer, bool, char, float bits, ...).
                    return Rc::new(
                        self.get_constant_from_scalar(lty.kind(), *scalar_int)
                            .clone()
                            .into(),
                    );
                }
            }
        }
        rustc_middle::ty::ConstKind::Value(ConstValue::Scalar(Scalar::Ptr(ptr, _))) => {
            // A pointer-valued scalar: resolve its provenance to the global allocation
            // it points into.
            match self.bv.tcx.get_global_alloc(ptr.provenance) {
                Some(GlobalAlloc::Memory(alloc)) => {
                    let alloc_len = alloc.inner().len() as u64;
                    let offset_bytes = ptr.into_parts().1.bytes();
                    // The Rust compiler should ensure this.
                    assume!(alloc_len > offset_bytes);
                    let size = alloc_len - offset_bytes;
                    let bytes = alloc
                        .inner()
                        .get_bytes(
                            &self.bv.tcx,
                            alloc_range(
                                ptr.into_parts().1,
                                rustc_target::abi::Size::from_bytes(size),
                            ),
                        )
                        .unwrap();
                    match lty.kind() {
                        TyKind::Array(elem_type, length) => {
                            // The pointee is an array: deserialize it and return the
                            // array value itself.
                            let length = self.bv.get_array_length(length);
                            let (array_value, array_path) =
                                self.get_heap_array_and_path(lty, size as usize);
                            self.deserialize_constant_array(
                                array_path, bytes, length, *elem_type,
                            );
                            return array_value;
                        }
                        TyKind::Ref(_, t, _) => {
                            if let TyKind::Array(elem_type, length) = t.kind() {
                                // A reference to an array: deserialize the array and
                                // return a reference to its path.
                                let length = self.bv.get_array_length(length);
                                let (_, array_path) =
                                    self.get_heap_array_and_path(lty, size as usize);
                                self.deserialize_constant_array(
                                    array_path.clone(),
                                    bytes,
                                    length,
                                    *elem_type,
                                );
                                return AbstractValue::make_reference(array_path);
                            }
                            // Other referent types fall through to Unimplemented below.
                        }
                        _ => {}
                    }
                }
                Some(GlobalAlloc::Function(instance)) => {
                    // A pointer to a function: build a function constant and store it in
                    // field 0 of a small heap block (modelling the fn pointer's storage).
                    let def_id = instance.def.def_id();
                    let substs = self.type_visitor().specialize_substs(
                        instance.substs,
                        &self.type_visitor().generic_argument_map,
                    );
                    let fn_ty = self.bv.tcx.type_of(def_id);
                    self.bv.cv.substs_cache.insert(def_id, substs);
                    let fun_val = Rc::new(
                        self.bv
                            .cv
                            .constant_value_cache
                            .get_function_constant_for(
                                def_id,
                                fn_ty,
                                Some(substs),
                                self.bv.tcx,
                                &mut self.bv.cv.known_names_cache,
                                &mut self.bv.cv.summary_cache,
                            )
                            .clone()
                            .into(),
                    );
                    let (heap_val, heap_path) = self.bv.get_new_heap_block(
                        Rc::new((8u128).into()),
                        Rc::new(1u128.into()),
                        false,
                        lty,
                    );
                    let field_0 = Path::new_field(heap_path, 0);
                    self.bv
                        .current_environment
                        .strong_update_value_at(field_0, fun_val);
                    return heap_val;
                }
                Some(GlobalAlloc::Static(def_id)) => {
                    // A pointer to a static: import it and return a reference to its path.
                    return AbstractValue::make_reference(
                        self.bv.import_static(Path::new_static(self.bv.tcx, def_id)),
                    );
                }
                None => unreachable!("missing allocation {:?}", ptr.provenance),
            };
        }
        rustc_middle::ty::ConstKind::Value(ConstValue::Slice { data, start, end }) => {
            // A slice constant: `data` is the backing allocation, [start, end) the range.
            assume!(*end > *start); // The Rust compiler should ensure this.
            let size = *end - *start;
            let bytes = data
                .inner()
                .get_bytes(
                    &self.bv.tcx,
                    alloc_range(
                        rustc_target::abi::Size::from_bytes(*start as u64),
                        rustc_target::abi::Size::from_bytes(size as u64),
                    ),
                )
                .unwrap();
            // NOTE(review): `bytes` already starts at `start`, yet `slice` indexes it by
            // [start..end] again; the two are used for different arms below — confirm the
            // intended indexing for the str arm vs. the array/slice arms.
            let slice = &bytes[*start..*end];
            match lty.kind() {
                TyKind::Array(elem_type, length) => {
                    let length = self.bv.get_array_length(length);
                    let (array_value, array_path) = self.get_heap_array_and_path(lty, size);
                    self.deserialize_constant_array(array_path, bytes, length, *elem_type);
                    return array_value;
                }
                TyKind::Ref(_, t, _) if matches!(t.kind(), TyKind::Slice(..)) => {
                    // A &[T] constant: compute the element count from the byte size and
                    // element size, then deserialize into a heap array.
                    let elem_type = self.type_visitor().get_element_type(*t);
                    let bytes_per_elem = self.type_visitor().get_type_size(elem_type) as usize;
                    let length = size / bytes_per_elem;
                    let (_, array_path) = self.get_heap_array_and_path(*t, size);
                    self.deserialize_constant_array(
                        array_path.clone(),
                        bytes,
                        length,
                        elem_type,
                    );
                    return AbstractValue::make_reference(array_path);
                }
                TyKind::Ref(_, t, _) if matches!(t.kind(), TyKind::Str) => {
                    // A &str constant: intern the string and model it as a computed path
                    // holding the string value plus an explicit length path.
                    let s = std::str::from_utf8(slice).expect("non utf8 str");
                    let string_const = &mut self.bv.cv.constant_value_cache.get_string_for(s);
                    let string_val: Rc<AbstractValue> = Rc::new(string_const.clone().into());
                    let len_val: Rc<AbstractValue> =
                        Rc::new(ConstantDomain::U128(s.len() as u128).into());
                    let str_path = Path::new_computed(string_val.clone());
                    self.bv.update_value_at(str_path.clone(), string_val);
                    let len_path = Path::new_length(str_path.clone());
                    self.bv.update_value_at(len_path, len_val);
                    return AbstractValue::make_reference(str_path);
                }
                _ => {}
            }
        }
        _ => {}
    }
    // Anything not handled above is reported (at debug level) and modelled as Unimplemented.
    debug!("span: {:?}", self.bv.current_span);
    debug!("type kind {:?}", lty.kind());
    debug!("unimplemented constant {:?}", val);
    Rc::new(ConstantDomain::Unimplemented.into())
}