in checker/src/block_visitor.rs [539:719]
fn visit_call(
&mut self,
func: &mir::Operand<'tcx>,
args: &[mir::Operand<'tcx>],
destination: &Option<(mir::Place<'tcx>, mir::BasicBlock)>,
cleanup: Option<mir::BasicBlock>,
from_hir_call: bool,
fn_span: &rustc_span::Span,
) {
// This offset is used to distinguish any local variables that leak out of the called function
// from the local variables of the calling function.
// This situation arises when a structured value stored in a local variable is assigned to
// a field reachable from a mutable parameter.
// We assume that any program MIRAI can analyze without running out of memory has fewer
// than a million local variables.
self.bv.fresh_variable_offset += 1_000_000;
trace!("source location {:?}", fn_span);
trace!("call stack {:?}", self.bv.active_calls_map);
trace!("visit_call {:?} {:?}", func, args);
trace!(
"self.generic_argument_map {:?}",
self.type_visitor().generic_argument_map
);
trace!(
"actual_argument_types {:?}",
self.type_visitor().actual_argument_types
);
trace!("env {:?}", self.bv.current_environment);
let func_to_call = self.visit_operand(func);
let func_ref = self.get_func_ref(&func_to_call);
let func_ref_to_call = if let Some(fr) = func_ref {
fr
} else {
if self.might_be_reachable().unwrap_or(true)
&& self
.bv
.already_reported_errors_for_call_to
.insert(func_to_call)
{
self.report_missing_summary();
}
return;
};
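// Identify the callee and specialize its cached generic substitutions with the
// generic argument map of the current call context.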
let callee_def_id = func_ref_to_call
.def_id
.expect("callee obtained via operand should have def id");
let substs = self
.bv
.cv
.substs_cache
.get(&callee_def_id)
.expect("MIR should ensure this");
let mut callee_generic_arguments = self
.type_visitor()
.specialize_substs(substs, &self.type_visitor().generic_argument_map);
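// Evaluate the actual arguments, pairing each argument's path with its abstract value.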
let actual_args: Vec<(Rc<Path>, Rc<AbstractValue>)> = args
.iter()
.map(|arg| (self.get_operand_path(arg), self.visit_operand(arg)))
.collect();
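// Compute the Rust type of each actual argument, specializing generic argument types
// where possible and otherwise falling back to the type recorded for the argument's path.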
let actual_argument_types: Vec<Ty<'tcx>> = args
.iter()
.map(|arg| {
let arg_ty = self.get_operand_rustc_type(arg);
if utils::is_concrete(arg_ty.kind()) {
arg_ty
} else {
let specialized_ty = self.type_visitor().specialize_generic_argument_type(
arg_ty,
&self.type_visitor().generic_argument_map,
);
if utils::is_concrete(specialized_ty.kind()) {
specialized_ty
} else {
let path = self.get_operand_path(arg);
self.type_visitor()
.get_path_rustc_type(&path, self.bv.current_span)
}
}
})
.collect();
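// Map the callee's generic parameters to the specialized generic arguments and the
// actual argument types.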
let callee_generic_argument_map = self.type_visitor().get_generic_arguments_map(
callee_def_id,
callee_generic_arguments,
&actual_argument_types,
);
// If the generic arguments include a Self parameter, fix it up with the dereferenced type
// of the actual self argument (this information was not available when the constant defining
// the called function was visited, so the cached specialized generic argument list created
// there might not be specialized enough).
if !actual_argument_types.is_empty() && !utils::are_concrete(callee_generic_arguments) {
let fty = self.bv.tcx.type_of(callee_def_id);
if let TyKind::FnDef(_, substs) = fty.kind() {
for (i, generic_ty_arg) in substs.types().enumerate() {
if let TyKind::Param(t_par) = generic_ty_arg.kind() {
if t_par.name.as_str() == "Self" {
let mut gen_args: Vec<GenericArg<'_>> =
callee_generic_arguments.iter().collect();
gen_args[i] = self
.type_visitor()
.get_dereferenced_type(actual_argument_types[0])
.into();
callee_generic_arguments = self.bv.tcx.intern_substs(&gen_args);
break;
}
}
}
}
}
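// Remember whether the dereferenced self argument is a function pointer; this is used
// below to avoid reporting a missing summary when such a value is cloned.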
let self_ty_is_fn_ptr = if let Some(ty) = actual_argument_types.get(0) {
let self_ty = self.type_visitor().get_dereferenced_type(*ty);
matches!(self_ty.kind(), TyKind::FnPtr(..))
} else {
false
};
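// Collect type information for ADT-valued arguments from the current environment;
// this becomes the call visitor's initial type cache.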
let adt_map = self
.type_visitor()
.get_adt_map(&actual_args, &self.bv.current_environment);
let known_name = func_ref_to_call.known_name;
let func_const = ConstantDomain::Function(func_ref_to_call);
let func_const_args =
&self.get_function_constant_args(&actual_args, &actual_argument_types);
let current_location = self.bv.current_location;
self.bv
.block_to_call
.insert(current_location, callee_def_id);
let mut call_visitor = CallVisitor::new(
self,
callee_def_id,
Some(callee_generic_arguments),
callee_generic_argument_map.clone(),
self.bv.current_environment.clone(),
func_const,
);
call_visitor.actual_args = actual_args;
call_visitor.actual_argument_types = actual_argument_types;
call_visitor.cleanup = cleanup;
call_visitor.destination = *destination;
call_visitor.callee_fun_val = func_to_call;
call_visitor.function_constant_args = func_const_args;
call_visitor.initial_type_cache = adt_map;
trace!("calling func {:?}", call_visitor.callee_func_ref);
if call_visitor.handled_as_special_function_call() {
return;
}
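// Otherwise, obtain a summary for the callee and report a diagnostic the first time a
// missing or incomplete summary is encountered for it (cloning a function pointer is
// exempted from the missing-summary report).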
let function_summary = call_visitor.get_function_summary().unwrap_or_default();
if !function_summary.is_computed {
if (known_name != KnownNames::StdCloneClone || !self_ty_is_fn_ptr)
&& call_visitor
.block_visitor
.bv
.already_reported_errors_for_call_to
.insert(call_visitor.callee_fun_val.clone())
{
call_visitor.block_visitor.report_missing_summary();
if known_name != KnownNames::StdCloneClone
&& !call_visitor.block_visitor.bv.analysis_is_incomplete
{
return;
}
}
} else if function_summary.is_incomplete
&& call_visitor
.block_visitor
.bv
.already_reported_errors_for_call_to
.insert(call_visitor.callee_fun_val.clone())
{
call_visitor.report_incomplete_summary();
}
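// Apply the callee's effects to the current state: calls to Clone::clone get dedicated
// handling, all other calls transfer and refine the summary into the current environment.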
if known_name == KnownNames::StdCloneClone {
call_visitor.handle_clone(&function_summary);
} else {
call_visitor.transfer_and_refine_into_current_environment(&function_summary);
}
}