in runtime/vm/object.cc [701:1265]
// Bootstraps the VM isolate's object graph. Allocates and registers the core
// VM-internal classes, the canonical empty/sentinel objects, and the shared
// error objects that all isolate groups rely on. Must run exactly once, for
// the VM isolate only, before any other object allocation takes place.
// Ordering within this function is load-bearing: several objects are
// hand-allocated because the machinery that would normally allocate them
// (class table lookups, handles, symbols) is itself being set up here.
void Object::Init(IsolateGroup* isolate_group) {
  // Should only be run by the vm isolate.
  ASSERT(isolate_group == Dart::vm_isolate_group());
  Heap* heap = isolate_group->heap();
  Thread* thread = Thread::Current();
  ASSERT(thread != nullptr);
  // Ensure lock checks in setters are happy.
  SafepointWriteRwLocker ml(thread, isolate_group->program_lock());

  // Populate builtin_vtables_ so that handle initialization below can install
  // the correct vtable for each class id.
  InitVtables();

// Allocate the read only object handles here.
#define INITIALIZE_SHARED_READONLY_HANDLE(Type, name)                          \
  name##_ = Type::ReadOnlyHandle();
  SHARED_READONLY_HANDLES_LIST(INITIALIZE_SHARED_READONLY_HANDLE)
#undef INITIALIZE_SHARED_READONLY_HANDLE

  // Pre-fill the read-only handles with null so they are in a defined state
  // before the real objects exist; the real values are installed further down.
  *null_object_ = Object::null();
  *null_class_ = Class::null();
  *null_array_ = Array::null();
  *null_string_ = String::null();
  *null_instance_ = Instance::null();
  *null_function_ = Function::null();
  *null_function_type_ = FunctionType::null();
  *null_type_arguments_ = TypeArguments::null();
  *empty_type_arguments_ = TypeArguments::null();
  *null_abstract_type_ = AbstractType::null();
  *null_compressed_stackmaps_ = CompressedStackMaps::null();
  *bool_true_ = true_;
  *bool_false_ = false_;

  // Initialize the empty and zero array handles to null_ in order to be able to
  // check if the empty and zero arrays were allocated (RAW_NULL is not
  // available).
  *empty_array_ = Array::null();
  *zero_array_ = Array::null();

  Class& cls = Class::Handle();

  // Allocate and initialize the class class.
  {
    intptr_t size = Class::InstanceSize();
    uword address = heap->Allocate(size, Heap::kOld);
    class_class_ = static_cast<ClassPtr>(address + kHeapObjectTag);
    InitializeObject(address, Class::kClassId, size,
                     Class::ContainsCompressedPointers());

    // 'fake' exists only to obtain the C++ vtable expected for kClassCid;
    // it is never used as an object.
    Class fake;
    // Initialization from Class::New<Class>.
    // Directly set ptr_ to break a circular dependency: SetRaw will attempt
    // to lookup class class in the class table where it is not registered yet.
    cls.ptr_ = class_class_;
    ASSERT(builtin_vtables_[kClassCid] == fake.vtable());
    cls.set_instance_size(
        Class::InstanceSize(),
        compiler::target::RoundedAllocationSize(RTN::Class::InstanceSize()));
    const intptr_t host_next_field_offset = Class::NextFieldOffset();
    const intptr_t target_next_field_offset = RTN::Class::NextFieldOffset();
    cls.set_next_field_offset(host_next_field_offset, target_next_field_offset);
    cls.set_id(Class::kClassId);
    cls.set_state_bits(0);
    cls.set_is_allocate_finalized();
    cls.set_is_declaration_loaded();
    cls.set_is_type_finalized();
    cls.set_type_arguments_field_offset_in_words(Class::kNoTypeArguments,
                                                 RTN::Class::kNoTypeArguments);
    cls.set_num_type_arguments_unsafe(0);
    cls.set_num_native_fields(0);
    cls.InitEmptyFields();
    // From here on Class::New<...> works: the class class is registered.
    isolate_group->class_table()->Register(cls);
  }

  // Allocate and initialize the null class.
  cls = Class::New<Instance, RTN::Instance>(kNullCid, isolate_group);
  cls.set_num_type_arguments_unsafe(0);
  isolate_group->object_store()->set_null_class(cls);

  // Allocate and initialize Never class.
  cls = Class::New<Instance, RTN::Instance>(kNeverCid, isolate_group);
  cls.set_num_type_arguments_unsafe(0);
  cls.set_is_allocate_finalized();
  cls.set_is_declaration_loaded();
  cls.set_is_type_finalized();
  isolate_group->object_store()->set_never_class(cls);

  // Allocate and initialize the free list element class.
  cls = Class::New<FreeListElement::FakeInstance,
                   RTN::FreeListElement::FakeInstance>(kFreeListElement,
                                                       isolate_group);
  cls.set_num_type_arguments_unsafe(0);
  cls.set_is_allocate_finalized();
  cls.set_is_declaration_loaded();
  cls.set_is_type_finalized();

  // Allocate and initialize the forwarding corpse class.
  cls = Class::New<ForwardingCorpse::FakeInstance,
                   RTN::ForwardingCorpse::FakeInstance>(kForwardingCorpse,
                                                        isolate_group);
  cls.set_num_type_arguments_unsafe(0);
  cls.set_is_allocate_finalized();
  cls.set_is_declaration_loaded();
  cls.set_is_type_finalized();

  // Allocate and initialize Sentinel class.
  cls = Class::New<Sentinel, RTN::Sentinel>(isolate_group);
  sentinel_class_ = cls.ptr();

  // Allocate and initialize the sentinel values.
  {
    *sentinel_ ^= Sentinel::New();
    *transition_sentinel_ ^= Sentinel::New();
  }

  // Allocate and initialize optimizing compiler constants.
  {
    *unknown_constant_ ^= Sentinel::New();
    *non_constant_ ^= Sentinel::New();
  }

  // Allocate the remaining VM internal classes.
  // Each pointer below is stashed in an Object static so the VM can refer to
  // these classes without class-table lookups.
  cls = Class::New<TypeParameters, RTN::TypeParameters>(isolate_group);
  type_parameters_class_ = cls.ptr();

  cls = Class::New<TypeArguments, RTN::TypeArguments>(isolate_group);
  type_arguments_class_ = cls.ptr();

  cls = Class::New<PatchClass, RTN::PatchClass>(isolate_group);
  patch_class_class_ = cls.ptr();

  cls = Class::New<Function, RTN::Function>(isolate_group);
  function_class_ = cls.ptr();

  cls = Class::New<ClosureData, RTN::ClosureData>(isolate_group);
  closure_data_class_ = cls.ptr();

  cls = Class::New<FfiTrampolineData, RTN::FfiTrampolineData>(isolate_group);
  ffi_trampoline_data_class_ = cls.ptr();

  cls = Class::New<Field, RTN::Field>(isolate_group);
  field_class_ = cls.ptr();

  cls = Class::New<Script, RTN::Script>(isolate_group);
  script_class_ = cls.ptr();

  cls = Class::New<Library, RTN::Library>(isolate_group);
  library_class_ = cls.ptr();

  cls = Class::New<Namespace, RTN::Namespace>(isolate_group);
  namespace_class_ = cls.ptr();

  cls = Class::New<KernelProgramInfo, RTN::KernelProgramInfo>(isolate_group);
  kernel_program_info_class_ = cls.ptr();

  cls = Class::New<Code, RTN::Code>(isolate_group);
  code_class_ = cls.ptr();

  cls = Class::New<Instructions, RTN::Instructions>(isolate_group);
  instructions_class_ = cls.ptr();

  cls =
      Class::New<InstructionsSection, RTN::InstructionsSection>(isolate_group);
  instructions_section_class_ = cls.ptr();

  cls = Class::New<InstructionsTable, RTN::InstructionsTable>(isolate_group);
  instructions_table_class_ = cls.ptr();

  cls = Class::New<ObjectPool, RTN::ObjectPool>(isolate_group);
  object_pool_class_ = cls.ptr();

  cls = Class::New<PcDescriptors, RTN::PcDescriptors>(isolate_group);
  pc_descriptors_class_ = cls.ptr();

  cls = Class::New<CodeSourceMap, RTN::CodeSourceMap>(isolate_group);
  code_source_map_class_ = cls.ptr();

  cls =
      Class::New<CompressedStackMaps, RTN::CompressedStackMaps>(isolate_group);
  compressed_stackmaps_class_ = cls.ptr();

  cls =
      Class::New<LocalVarDescriptors, RTN::LocalVarDescriptors>(isolate_group);
  var_descriptors_class_ = cls.ptr();

  cls = Class::New<ExceptionHandlers, RTN::ExceptionHandlers>(isolate_group);
  exception_handlers_class_ = cls.ptr();

  cls = Class::New<Context, RTN::Context>(isolate_group);
  context_class_ = cls.ptr();

  cls = Class::New<ContextScope, RTN::ContextScope>(isolate_group);
  context_scope_class_ = cls.ptr();

  cls = Class::New<SingleTargetCache, RTN::SingleTargetCache>(isolate_group);
  singletargetcache_class_ = cls.ptr();

  cls = Class::New<UnlinkedCall, RTN::UnlinkedCall>(isolate_group);
  unlinkedcall_class_ = cls.ptr();

  cls = Class::New<MonomorphicSmiableCall, RTN::MonomorphicSmiableCall>(
      isolate_group);
  monomorphicsmiablecall_class_ = cls.ptr();

  cls = Class::New<ICData, RTN::ICData>(isolate_group);
  icdata_class_ = cls.ptr();

  cls = Class::New<MegamorphicCache, RTN::MegamorphicCache>(isolate_group);
  megamorphic_cache_class_ = cls.ptr();

  cls = Class::New<SubtypeTestCache, RTN::SubtypeTestCache>(isolate_group);
  subtypetestcache_class_ = cls.ptr();

  cls = Class::New<LoadingUnit, RTN::LoadingUnit>(isolate_group);
  loadingunit_class_ = cls.ptr();

  cls = Class::New<ApiError, RTN::ApiError>(isolate_group);
  api_error_class_ = cls.ptr();

  cls = Class::New<LanguageError, RTN::LanguageError>(isolate_group);
  language_error_class_ = cls.ptr();

  cls = Class::New<UnhandledException, RTN::UnhandledException>(isolate_group);
  unhandled_exception_class_ = cls.ptr();

  cls = Class::New<UnwindError, RTN::UnwindError>(isolate_group);
  unwind_error_class_ = cls.ptr();

  cls = Class::New<WeakSerializationReference, RTN::WeakSerializationReference>(
      isolate_group);
  weak_serialization_reference_class_ = cls.ptr();

  ASSERT(class_class() != null_);

  // Pre-allocate classes in the vm isolate so that we can for example create a
  // symbol table and populate it with some frequently used strings as symbols.
  cls = Class::New<Array, RTN::Array>(isolate_group);
  isolate_group->object_store()->set_array_class(cls);
  cls.set_type_arguments_field_offset(Array::type_arguments_offset(),
                                      RTN::Array::type_arguments_offset());
  cls.set_num_type_arguments_unsafe(1);
  cls = Class::New<Array, RTN::Array>(kImmutableArrayCid, isolate_group);
  isolate_group->object_store()->set_immutable_array_class(cls);
  cls.set_type_arguments_field_offset(Array::type_arguments_offset(),
                                      RTN::Array::type_arguments_offset());
  cls.set_num_type_arguments_unsafe(1);
  cls =
      Class::New<GrowableObjectArray, RTN::GrowableObjectArray>(isolate_group);
  isolate_group->object_store()->set_growable_object_array_class(cls);
  cls.set_type_arguments_field_offset(
      GrowableObjectArray::type_arguments_offset(),
      RTN::GrowableObjectArray::type_arguments_offset());
  cls.set_num_type_arguments_unsafe(1);
  cls = Class::NewStringClass(kOneByteStringCid, isolate_group);
  isolate_group->object_store()->set_one_byte_string_class(cls);
  cls = Class::NewStringClass(kTwoByteStringCid, isolate_group);
  isolate_group->object_store()->set_two_byte_string_class(cls);
  cls = Class::New<Mint, RTN::Mint>(isolate_group);
  isolate_group->object_store()->set_mint_class(cls);
  cls = Class::New<Double, RTN::Double>(isolate_group);
  isolate_group->object_store()->set_double_class(cls);
  cls = Class::New<Float32x4, RTN::Float32x4>(isolate_group);
  isolate_group->object_store()->set_float32x4_class(cls);
  cls = Class::New<Float64x2, RTN::Float64x2>(isolate_group);
  isolate_group->object_store()->set_float64x2_class(cls);
  cls = Class::New<Int32x4, RTN::Int32x4>(isolate_group);
  isolate_group->object_store()->set_int32x4_class(cls);

  // Ensure that class kExternalTypedDataUint8ArrayCid is registered as we
  // need it when reading in the token stream of bootstrap classes in the VM
  // isolate.
  Class::NewExternalTypedDataClass(kExternalTypedDataUint8ArrayCid,
                                   isolate_group);

  // Needed for object pools of VM isolate stubs.
  Class::NewTypedDataClass(kTypedDataInt8ArrayCid, isolate_group);

  // Allocate and initialize the empty_array instance.
  {
    uword address = heap->Allocate(Array::InstanceSize(0), Heap::kOld);
    InitializeObject(address, kImmutableArrayCid, Array::InstanceSize(0),
                     Array::ContainsCompressedPointers());
    Array::initializeHandle(empty_array_,
                            static_cast<ArrayPtr>(address + kHeapObjectTag));
    empty_array_->untag()->set_length(Smi::New(0));
    empty_array_->SetCanonical();
  }

  Smi& smi = Smi::Handle();
  // Allocate and initialize the zero_array instance.
  {
    uword address = heap->Allocate(Array::InstanceSize(1), Heap::kOld);
    InitializeObject(address, kImmutableArrayCid, Array::InstanceSize(1),
                     Array::ContainsCompressedPointers());
    Array::initializeHandle(zero_array_,
                            static_cast<ArrayPtr>(address + kHeapObjectTag));
    zero_array_->untag()->set_length(Smi::New(1));
    smi = Smi::New(0);
    zero_array_->SetAt(0, smi);
    zero_array_->SetCanonical();
  }

  // Allocate and initialize the canonical empty context scope object.
  {
    uword address = heap->Allocate(ContextScope::InstanceSize(0), Heap::kOld);
    InitializeObject(address, kContextScopeCid, ContextScope::InstanceSize(0),
                     ContextScope::ContainsCompressedPointers());
    ContextScope::initializeHandle(
        empty_context_scope_,
        static_cast<ContextScopePtr>(address + kHeapObjectTag));
    empty_context_scope_->StoreNonPointer(
        &empty_context_scope_->untag()->num_variables_, 0);
    empty_context_scope_->StoreNonPointer(
        &empty_context_scope_->untag()->is_implicit_, true);
    empty_context_scope_->SetCanonical();
  }

  // Allocate and initialize the canonical empty object pool object.
  {
    uword address = heap->Allocate(ObjectPool::InstanceSize(0), Heap::kOld);
    InitializeObject(address, kObjectPoolCid, ObjectPool::InstanceSize(0),
                     ObjectPool::ContainsCompressedPointers());
    ObjectPool::initializeHandle(
        empty_object_pool_,
        static_cast<ObjectPoolPtr>(address + kHeapObjectTag));
    empty_object_pool_->StoreNonPointer(&empty_object_pool_->untag()->length_,
                                        0);
    empty_object_pool_->SetCanonical();
  }

  // Allocate and initialize the empty_compressed_stackmaps instance.
  {
    const intptr_t instance_size = CompressedStackMaps::InstanceSize(0);
    uword address = heap->Allocate(instance_size, Heap::kOld);
    InitializeObject(address, kCompressedStackMapsCid, instance_size,
                     CompressedStackMaps::ContainsCompressedPointers());
    CompressedStackMaps::initializeHandle(
        empty_compressed_stackmaps_,
        static_cast<CompressedStackMapsPtr>(address + kHeapObjectTag));
    empty_compressed_stackmaps_->untag()->payload()->set_flags_and_size(0);
    empty_compressed_stackmaps_->SetCanonical();
  }

  // Allocate and initialize the empty_descriptors instance.
  {
    uword address = heap->Allocate(PcDescriptors::InstanceSize(0), Heap::kOld);
    InitializeObject(address, kPcDescriptorsCid, PcDescriptors::InstanceSize(0),
                     PcDescriptors::ContainsCompressedPointers());
    PcDescriptors::initializeHandle(
        empty_descriptors_,
        static_cast<PcDescriptorsPtr>(address + kHeapObjectTag));
    empty_descriptors_->StoreNonPointer(&empty_descriptors_->untag()->length_,
                                        0);
    empty_descriptors_->SetCanonical();
  }

  // Allocate and initialize the canonical empty variable descriptor object.
  {
    uword address =
        heap->Allocate(LocalVarDescriptors::InstanceSize(0), Heap::kOld);
    InitializeObject(address, kLocalVarDescriptorsCid,
                     LocalVarDescriptors::InstanceSize(0),
                     LocalVarDescriptors::ContainsCompressedPointers());
    LocalVarDescriptors::initializeHandle(
        empty_var_descriptors_,
        static_cast<LocalVarDescriptorsPtr>(address + kHeapObjectTag));
    empty_var_descriptors_->StoreNonPointer(
        &empty_var_descriptors_->untag()->num_entries_, 0);
    empty_var_descriptors_->SetCanonical();
  }

  // Allocate and initialize the canonical empty exception handler info object.
  // The vast majority of all functions do not contain an exception handler
  // and can share this canonical descriptor.
  {
    uword address =
        heap->Allocate(ExceptionHandlers::InstanceSize(0), Heap::kOld);
    InitializeObject(address, kExceptionHandlersCid,
                     ExceptionHandlers::InstanceSize(0),
                     ExceptionHandlers::ContainsCompressedPointers());
    ExceptionHandlers::initializeHandle(
        empty_exception_handlers_,
        static_cast<ExceptionHandlersPtr>(address + kHeapObjectTag));
    empty_exception_handlers_->StoreNonPointer(
        &empty_exception_handlers_->untag()->num_entries_, 0);
    empty_exception_handlers_->SetCanonical();
  }

  // Allocate and initialize the canonical empty type arguments object.
  {
    uword address = heap->Allocate(TypeArguments::InstanceSize(0), Heap::kOld);
    InitializeObject(address, kTypeArgumentsCid, TypeArguments::InstanceSize(0),
                     TypeArguments::ContainsCompressedPointers());
    TypeArguments::initializeHandle(
        empty_type_arguments_,
        static_cast<TypeArgumentsPtr>(address + kHeapObjectTag));
    empty_type_arguments_->untag()->set_length(Smi::New(0));
    empty_type_arguments_->untag()->set_hash(Smi::New(0));
    empty_type_arguments_->ComputeHash();
    empty_type_arguments_->SetCanonical();
  }

  // The VM isolate snapshot object table is initialized to an empty array
  // as we do not have any VM isolate snapshot at this time.
  *vm_isolate_snapshot_object_table_ = Object::empty_array().ptr();

  cls = Class::New<Instance, RTN::Instance>(kDynamicCid, isolate_group);
  cls.set_is_abstract();
  cls.set_num_type_arguments_unsafe(0);
  cls.set_is_allocate_finalized();
  cls.set_is_declaration_loaded();
  cls.set_is_type_finalized();
  dynamic_class_ = cls.ptr();

  cls = Class::New<Instance, RTN::Instance>(kVoidCid, isolate_group);
  cls.set_num_type_arguments_unsafe(0);
  cls.set_is_allocate_finalized();
  cls.set_is_declaration_loaded();
  cls.set_is_type_finalized();
  void_class_ = cls.ptr();

  cls = Class::New<Type, RTN::Type>(isolate_group);
  cls.set_is_allocate_finalized();
  cls.set_is_declaration_loaded();
  cls.set_is_type_finalized();

  cls = Class::New<FunctionType, RTN::FunctionType>(isolate_group);
  cls.set_is_allocate_finalized();
  cls.set_is_declaration_loaded();
  cls.set_is_type_finalized();

  // Create the canonical finalized 'dynamic' and 'void' types now that the
  // Type class exists.
  cls = dynamic_class_;
  *dynamic_type_ =
      Type::New(cls, Object::null_type_arguments(), Nullability::kNullable);
  dynamic_type_->SetIsFinalized();
  dynamic_type_->ComputeHash();
  dynamic_type_->SetCanonical();

  cls = void_class_;
  *void_type_ =
      Type::New(cls, Object::null_type_arguments(), Nullability::kNullable);
  void_type_->SetIsFinalized();
  void_type_->ComputeHash();
  void_type_->SetCanonical();

  // Since TypeArguments objects are passed as function arguments, make them
  // behave as Dart instances, although they are just VM objects.
  // Note that we cannot set the super type to ObjectType, which does not live
  // in the vm isolate. See special handling in Class::SuperClass().
  cls = type_arguments_class_;
  cls.set_interfaces(Object::empty_array());
  cls.SetFields(Object::empty_array());
  cls.SetFunctions(Object::empty_array());

  cls = Class::New<Bool, RTN::Bool>(isolate_group);
  isolate_group->object_store()->set_bool_class(cls);

  // Canonical Smi constants shared across the VM.
  *smi_illegal_cid_ = Smi::New(kIllegalCid);
  *smi_zero_ = Smi::New(0);

  // Pre-allocate the shared error objects; these are raised from contexts
  // where allocating a fresh error may not be possible or desirable.
  String& error_str = String::Handle();
  error_str = String::New(
      "Callbacks into the Dart VM are currently prohibited. Either there are "
      "outstanding pointers from Dart_TypedDataAcquireData that have not been "
      "released with Dart_TypedDataReleaseData, or a finalizer is running.",
      Heap::kOld);
  *no_callbacks_error_ = ApiError::New(error_str, Heap::kOld);
  error_str = String::New(
      "No api calls are allowed while unwind is in progress", Heap::kOld);
  *unwind_in_progress_error_ = UnwindError::New(error_str, Heap::kOld);
  error_str = String::New("SnapshotWriter Error", Heap::kOld);
  *snapshot_writer_error_ =
      LanguageError::New(error_str, Report::kError, Heap::kOld);
  error_str = String::New("Branch offset overflow", Heap::kOld);
  *branch_offset_error_ =
      LanguageError::New(error_str, Report::kBailout, Heap::kOld);
  error_str = String::New("Speculative inlining failed", Heap::kOld);
  *speculative_inlining_error_ =
      LanguageError::New(error_str, Report::kBailout, Heap::kOld);
  error_str = String::New("Background Compilation Failed", Heap::kOld);
  *background_compilation_error_ =
      LanguageError::New(error_str, Report::kBailout, Heap::kOld);
  error_str = String::New("Out of memory", Heap::kOld);
  *out_of_memory_error_ =
      LanguageError::New(error_str, Report::kError, Heap::kOld);

  // Allocate the parameter arrays for method extractor types and names.
  *extractor_parameter_types_ = Array::New(1, Heap::kOld);
  extractor_parameter_types_->SetAt(0, Object::dynamic_type());
  *extractor_parameter_names_ = Array::New(1, Heap::kOld);
  // Fill in extractor_parameter_names_ later, after symbols are initialized
  // (in Object::FinalizeVMIsolate). extractor_parameter_names_ object
  // needs to be created earlier as VM isolate snapshot reader references it
  // before Object::FinalizeVMIsolate.

  // Some thread fields need to be reinitialized as null constants have not been
  // initialized until now.
  thread->ClearStickyError();

  // Sanity-check that every read-only handle holds the expected kind of
  // object (and in particular is a real heap object, not a Smi).
  ASSERT(!null_object_->IsSmi());
  ASSERT(!null_class_->IsSmi());
  ASSERT(null_class_->IsClass());
  ASSERT(!null_array_->IsSmi());
  ASSERT(null_array_->IsArray());
  ASSERT(!null_string_->IsSmi());
  ASSERT(null_string_->IsString());
  ASSERT(!null_instance_->IsSmi());
  ASSERT(null_instance_->IsInstance());
  ASSERT(!null_function_->IsSmi());
  ASSERT(null_function_->IsFunction());
  ASSERT(!null_function_type_->IsSmi());
  ASSERT(null_function_type_->IsFunctionType());
  ASSERT(!null_type_arguments_->IsSmi());
  ASSERT(null_type_arguments_->IsTypeArguments());
  ASSERT(!null_compressed_stackmaps_->IsSmi());
  ASSERT(null_compressed_stackmaps_->IsCompressedStackMaps());
  ASSERT(!empty_array_->IsSmi());
  ASSERT(empty_array_->IsArray());
  ASSERT(!zero_array_->IsSmi());
  ASSERT(zero_array_->IsArray());
  ASSERT(!empty_type_arguments_->IsSmi());
  ASSERT(empty_type_arguments_->IsTypeArguments());
  ASSERT(!empty_context_scope_->IsSmi());
  ASSERT(empty_context_scope_->IsContextScope());
  ASSERT(!empty_compressed_stackmaps_->IsSmi());
  ASSERT(empty_compressed_stackmaps_->IsCompressedStackMaps());
  ASSERT(!empty_descriptors_->IsSmi());
  ASSERT(empty_descriptors_->IsPcDescriptors());
  ASSERT(!empty_var_descriptors_->IsSmi());
  ASSERT(empty_var_descriptors_->IsLocalVarDescriptors());
  ASSERT(!empty_exception_handlers_->IsSmi());
  ASSERT(empty_exception_handlers_->IsExceptionHandlers());
  ASSERT(!sentinel_->IsSmi());
  ASSERT(sentinel_->IsSentinel());
  ASSERT(!transition_sentinel_->IsSmi());
  ASSERT(transition_sentinel_->IsSentinel());
  ASSERT(!unknown_constant_->IsSmi());
  ASSERT(unknown_constant_->IsSentinel());
  ASSERT(!non_constant_->IsSmi());
  ASSERT(non_constant_->IsSentinel());
  ASSERT(!bool_true_->IsSmi());
  ASSERT(bool_true_->IsBool());
  ASSERT(!bool_false_->IsSmi());
  ASSERT(bool_false_->IsBool());
  ASSERT(smi_illegal_cid_->IsSmi());
  ASSERT(smi_zero_->IsSmi());
  ASSERT(!no_callbacks_error_->IsSmi());
  ASSERT(no_callbacks_error_->IsApiError());
  ASSERT(!unwind_in_progress_error_->IsSmi());
  ASSERT(unwind_in_progress_error_->IsUnwindError());
  ASSERT(!snapshot_writer_error_->IsSmi());
  ASSERT(snapshot_writer_error_->IsLanguageError());
  ASSERT(!branch_offset_error_->IsSmi());
  ASSERT(branch_offset_error_->IsLanguageError());
  ASSERT(!speculative_inlining_error_->IsSmi());
  ASSERT(speculative_inlining_error_->IsLanguageError());
  ASSERT(!background_compilation_error_->IsSmi());
  ASSERT(background_compilation_error_->IsLanguageError());
  ASSERT(!out_of_memory_error_->IsSmi());
  ASSERT(out_of_memory_error_->IsLanguageError());
  ASSERT(!vm_isolate_snapshot_object_table_->IsSmi());
  ASSERT(vm_isolate_snapshot_object_table_->IsArray());
  ASSERT(!extractor_parameter_types_->IsSmi());
  ASSERT(extractor_parameter_types_->IsArray());
  ASSERT(!extractor_parameter_names_->IsSmi());
  ASSERT(extractor_parameter_names_->IsArray());
}