in toolkit/components/protobuf/src/google/protobuf/generated_message_reflection.cc [937:3165]
void Reflection::Swap(Message* message1, Message* message2) const {
if (message1 == message2) return;
// TODO(kenton): Other Reflection methods should probably check this too.
GOOGLE_CHECK_EQ(message1->GetReflection(), this)
<< "First argument to Swap() (of type \""
<< message1->GetDescriptor()->full_name()
<< "\") is not compatible with this reflection object (which is for type "
"\""
<< descriptor_->full_name()
<< "\"). Note that the exact same class is required; not just the same "
"descriptor.";
GOOGLE_CHECK_EQ(message2->GetReflection(), this)
<< "Second argument to Swap() (of type \""
<< message2->GetDescriptor()->full_name()
<< "\") is not compatible with this reflection object (which is for type "
"\""
<< descriptor_->full_name()
<< "\"). Note that the exact same class is required; not just the same "
"descriptor.";
// Check that both messages are in the same arena (or both on the heap). We
// need to copy all data if not, due to ownership semantics.
#ifdef PROTOBUF_FORCE_COPY_IN_SWAP
if (message1->GetOwningArena() == nullptr ||
message1->GetOwningArena() != message2->GetOwningArena()) {
#else // PROTOBUF_FORCE_COPY_IN_SWAP
if (message1->GetOwningArena() != message2->GetOwningArena()) {
#endif // !PROTOBUF_FORCE_COPY_IN_SWAP
// One of the two is guaranteed to have an arena. Switch things around
// to guarantee that message1 has an arena.
Arena* arena = message1->GetOwningArena();
if (arena == nullptr) {
arena = message2->GetOwningArena();
std::swap(message1, message2); // Swapping names for pointers!
}
Message* temp = message1->New(arena);
temp->MergeFrom(*message2);
message2->CopyFrom(*message1);
#ifdef PROTOBUF_FORCE_COPY_IN_SWAP
message1->CopyFrom(*temp);
if (arena == nullptr) delete temp;
#else // PROTOBUF_FORCE_COPY_IN_SWAP
Swap(message1, temp);
#endif // !PROTOBUF_FORCE_COPY_IN_SWAP
return;
}
GOOGLE_DCHECK_EQ(message1->GetOwningArena(), message2->GetOwningArena());
UnsafeArenaSwap(message1, message2);
}
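// Usage sketch for Reflection::Swap(); the message type and field name are
// illustrative, not taken from this file:
//
//   MyMessage a, b;                        // hypothetical generated type
//   a.set_name("hello");
//   const Reflection* reflection = a.GetReflection();
//   reflection->Swap(&a, &b);              // b now holds "hello"
//
// When the two messages are in different ownership domains (different arenas,
// or arena vs. heap), the copy path above is taken instead of a pointer swap.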
template <bool unsafe_shallow_swap>
void Reflection::SwapFieldsImpl(
Message* message1, Message* message2,
const std::vector<const FieldDescriptor*>& fields) const {
if (message1 == message2) return;
// TODO(kenton): Other Reflection methods should probably check this too.
GOOGLE_CHECK_EQ(message1->GetReflection(), this)
<< "First argument to SwapFields() (of type \""
<< message1->GetDescriptor()->full_name()
<< "\") is not compatible with this reflection object (which is for type "
"\""
<< descriptor_->full_name()
<< "\"). Note that the exact same class is required; not just the same "
"descriptor.";
GOOGLE_CHECK_EQ(message2->GetReflection(), this)
<< "Second argument to SwapFields() (of type \""
<< message2->GetDescriptor()->full_name()
<< "\") is not compatible with this reflection object (which is for type "
"\""
<< descriptor_->full_name()
<< "\"). Note that the exact same class is required; not just the same "
"descriptor.";
std::set<int> swapped_oneof;
GOOGLE_DCHECK(!unsafe_shallow_swap || message1->GetArenaForAllocation() ==
message2->GetArenaForAllocation());
const Message* prototype =
message_factory_->GetPrototype(message1->GetDescriptor());
for (const auto* field : fields) {
CheckInvalidAccess(schema_, field);
if (field->is_extension()) {
if (unsafe_shallow_swap) {
MutableExtensionSet(message1)->UnsafeShallowSwapExtension(
MutableExtensionSet(message2), field->number());
} else {
MutableExtensionSet(message1)->SwapExtension(
prototype, MutableExtensionSet(message2), field->number());
}
} else {
if (schema_.InRealOneof(field)) {
int oneof_index = field->containing_oneof()->index();
// Only swap the oneof field once.
if (swapped_oneof.find(oneof_index) != swapped_oneof.end()) {
continue;
}
swapped_oneof.insert(oneof_index);
SwapOneofField<unsafe_shallow_swap>(message1, message2,
field->containing_oneof());
} else {
// Swap field.
if (unsafe_shallow_swap) {
UnsafeShallowSwapField(message1, message2, field);
} else {
SwapField(message1, message2, field);
}
// Swap the has bit for non-repeated fields. Oneof fields have already
// been handled above. This has to be done after SwapField, because
// SwapField may depend on the information in the has bits.
if (!field->is_repeated()) {
SwapBit(message1, message2, field);
if (field->options().ctype() == FieldOptions::STRING &&
IsInlined(field)) {
GOOGLE_DCHECK(!unsafe_shallow_swap ||
message1->GetArenaForAllocation() ==
message2->GetArenaForAllocation());
SwapInlinedStringDonated(message1, message2, field);
}
}
}
}
}
}
void Reflection::SwapFields(
Message* message1, Message* message2,
const std::vector<const FieldDescriptor*>& fields) const {
SwapFieldsImpl<false>(message1, message2, fields);
}
void Reflection::UnsafeShallowSwapFields(
Message* message1, Message* message2,
const std::vector<const FieldDescriptor*>& fields) const {
SwapFieldsImpl<true>(message1, message2, fields);
}
void Reflection::UnsafeArenaSwapFields(
Message* lhs, Message* rhs,
const std::vector<const FieldDescriptor*>& fields) const {
GOOGLE_DCHECK_EQ(lhs->GetArenaForAllocation(), rhs->GetArenaForAllocation());
UnsafeShallowSwapFields(lhs, rhs, fields);
}
// -------------------------------------------------------------------
bool Reflection::HasField(const Message& message,
const FieldDescriptor* field) const {
USAGE_CHECK_MESSAGE_TYPE(HasField);
USAGE_CHECK_SINGULAR(HasField);
CheckInvalidAccess(schema_, field);
if (field->is_extension()) {
return GetExtensionSet(message).Has(field->number());
} else {
if (schema_.InRealOneof(field)) {
return HasOneofField(message, field);
} else {
return HasBit(message, field);
}
}
}
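// Usage sketch for Reflection::HasField(), which is only valid for singular
// (non-repeated) fields. The field name "id" is illustrative:
//
//   const FieldDescriptor* field = msg.GetDescriptor()->FindFieldByName("id");
//   if (field != nullptr && !field->is_repeated() &&
//       msg.GetReflection()->HasField(msg, field)) {
//     // "id" is present on `msg`.
//   }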
void Reflection::UnsafeArenaSwap(Message* lhs, Message* rhs) const {
if (lhs == rhs) return;
MutableInternalMetadata(lhs)->InternalSwap(MutableInternalMetadata(rhs));
for (int i = 0; i <= last_non_weak_field_index_; i++) {
const FieldDescriptor* field = descriptor_->field(i);
if (schema_.InRealOneof(field)) continue;
if (schema_.IsFieldStripped(field)) continue;
UnsafeShallowSwapField(lhs, rhs, field);
}
const int oneof_decl_count = descriptor_->oneof_decl_count();
for (int i = 0; i < oneof_decl_count; i++) {
const OneofDescriptor* oneof = descriptor_->oneof_decl(i);
if (!oneof->is_synthetic()) {
SwapOneofField<true>(lhs, rhs, oneof);
}
}
// Swapping bits needs to happen after swapping fields, because the latter may
// depend on the has bit information.
if (schema_.HasHasbits()) {
uint32_t* lhs_has_bits = MutableHasBits(lhs);
uint32_t* rhs_has_bits = MutableHasBits(rhs);
int fields_with_has_bits = 0;
for (int i = 0; i < descriptor_->field_count(); i++) {
const FieldDescriptor* field = descriptor_->field(i);
if (field->is_repeated() || schema_.InRealOneof(field)) {
continue;
}
fields_with_has_bits++;
}
int has_bits_size = (fields_with_has_bits + 31) / 32;
for (int i = 0; i < has_bits_size; i++) {
std::swap(lhs_has_bits[i], rhs_has_bits[i]);
}
}
if (schema_.HasInlinedString()) {
uint32_t* lhs_donated_array = MutableInlinedStringDonatedArray(lhs);
uint32_t* rhs_donated_array = MutableInlinedStringDonatedArray(rhs);
int inlined_string_count = 0;
for (int i = 0; i < descriptor_->field_count(); i++) {
const FieldDescriptor* field = descriptor_->field(i);
if (field->is_extension() || field->is_repeated() ||
schema_.InRealOneof(field) ||
field->options().ctype() != FieldOptions::STRING ||
!IsInlined(field)) {
continue;
}
inlined_string_count++;
}
int donated_array_size = inlined_string_count == 0
? 0
// One extra bit for the arena dtor tracking.
: (inlined_string_count + 1 + 31) / 32;
GOOGLE_CHECK_EQ((lhs_donated_array[0] & 0x1u) == 0,
(rhs_donated_array[0] & 0x1u) == 0);
for (int i = 0; i < donated_array_size; i++) {
std::swap(lhs_donated_array[i], rhs_donated_array[i]);
}
}
if (schema_.HasExtensionSet()) {
MutableExtensionSet(lhs)->InternalSwap(MutableExtensionSet(rhs));
}
}
int Reflection::FieldSize(const Message& message,
const FieldDescriptor* field) const {
USAGE_CHECK_MESSAGE_TYPE(FieldSize);
USAGE_CHECK_REPEATED(FieldSize);
CheckInvalidAccess(schema_, field);
if (field->is_extension()) {
return GetExtensionSet(message).ExtensionSize(field->number());
} else {
switch (field->cpp_type()) {
#define HANDLE_TYPE(UPPERCASE, LOWERCASE) \
case FieldDescriptor::CPPTYPE_##UPPERCASE: \
return GetRaw<RepeatedField<LOWERCASE> >(message, field).size()
HANDLE_TYPE(INT32, int32_t);
HANDLE_TYPE(INT64, int64_t);
HANDLE_TYPE(UINT32, uint32_t);
HANDLE_TYPE(UINT64, uint64_t);
HANDLE_TYPE(DOUBLE, double);
HANDLE_TYPE(FLOAT, float);
HANDLE_TYPE(BOOL, bool);
HANDLE_TYPE(ENUM, int);
#undef HANDLE_TYPE
case FieldDescriptor::CPPTYPE_STRING:
case FieldDescriptor::CPPTYPE_MESSAGE:
if (IsMapFieldInApi(field)) {
const internal::MapFieldBase& map =
GetRaw<MapFieldBase>(message, field);
if (map.IsRepeatedFieldValid()) {
return map.GetRepeatedField().size();
} else {
// No need to materialize the repeated field if it is out of sync:
// its size will be the same as the map's size.
return map.size();
}
} else {
return GetRaw<RepeatedPtrFieldBase>(message, field).size();
}
}
GOOGLE_LOG(FATAL) << "Can't get here.";
return 0;
}
}
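// Usage sketch for Reflection::FieldSize(), which is only valid for repeated
// fields; the repeated int32 field name "values" is illustrative:
//
//   const FieldDescriptor* field =
//       msg.GetDescriptor()->FindFieldByName("values");
//   const Reflection* reflection = msg.GetReflection();
//   for (int i = 0; i < reflection->FieldSize(msg, field); ++i) {
//     int32_t v = reflection->GetRepeatedInt32(msg, field, i);
//   }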
void Reflection::ClearField(Message* message,
const FieldDescriptor* field) const {
USAGE_CHECK_MESSAGE_TYPE(ClearField);
CheckInvalidAccess(schema_, field);
if (field->is_extension()) {
MutableExtensionSet(message)->ClearExtension(field->number());
} else if (!field->is_repeated()) {
if (schema_.InRealOneof(field)) {
ClearOneofField(message, field);
return;
}
if (HasBit(*message, field)) {
ClearBit(message, field);
// We need to set the field back to its default value.
switch (field->cpp_type()) {
#define CLEAR_TYPE(CPPTYPE, TYPE) \
case FieldDescriptor::CPPTYPE_##CPPTYPE: \
*MutableRaw<TYPE>(message, field) = field->default_value_##TYPE(); \
break;
CLEAR_TYPE(INT32, int32_t);
CLEAR_TYPE(INT64, int64_t);
CLEAR_TYPE(UINT32, uint32_t);
CLEAR_TYPE(UINT64, uint64_t);
CLEAR_TYPE(FLOAT, float);
CLEAR_TYPE(DOUBLE, double);
CLEAR_TYPE(BOOL, bool);
#undef CLEAR_TYPE
case FieldDescriptor::CPPTYPE_ENUM:
*MutableRaw<int>(message, field) =
field->default_value_enum()->number();
break;
case FieldDescriptor::CPPTYPE_STRING: {
switch (field->options().ctype()) {
default: // TODO(kenton): Support other string reps.
case FieldOptions::STRING:
if (IsInlined(field)) {
// Currently, a string field with a default value can't be inlined, so we
// don't have to handle the default value here.
MutableRaw<InlinedStringField>(message, field)->ClearToEmpty();
} else {
auto* str = MutableRaw<ArenaStringPtr>(message, field);
str->Destroy();
str->InitDefault();
}
break;
}
break;
}
case FieldDescriptor::CPPTYPE_MESSAGE:
if (schema_.HasBitIndex(field) == static_cast<uint32_t>(-1)) {
// Proto3 does not have has-bits, so we need to set a message field
// to nullptr in order to indicate that it is not present.
if (message->GetArenaForAllocation() == nullptr) {
delete *MutableRaw<Message*>(message, field);
}
*MutableRaw<Message*>(message, field) = nullptr;
} else {
(*MutableRaw<Message*>(message, field))->Clear();
}
break;
}
}
} else {
switch (field->cpp_type()) {
#define HANDLE_TYPE(UPPERCASE, LOWERCASE) \
case FieldDescriptor::CPPTYPE_##UPPERCASE: \
MutableRaw<RepeatedField<LOWERCASE> >(message, field)->Clear(); \
break
HANDLE_TYPE(INT32, int32_t);
HANDLE_TYPE(INT64, int64_t);
HANDLE_TYPE(UINT32, uint32_t);
HANDLE_TYPE(UINT64, uint64_t);
HANDLE_TYPE(DOUBLE, double);
HANDLE_TYPE(FLOAT, float);
HANDLE_TYPE(BOOL, bool);
HANDLE_TYPE(ENUM, int);
#undef HANDLE_TYPE
case FieldDescriptor::CPPTYPE_STRING: {
switch (field->options().ctype()) {
default: // TODO(kenton): Support other string reps.
case FieldOptions::STRING:
MutableRaw<RepeatedPtrField<std::string> >(message, field)->Clear();
break;
}
break;
}
case FieldDescriptor::CPPTYPE_MESSAGE: {
if (IsMapFieldInApi(field)) {
MutableRaw<MapFieldBase>(message, field)->Clear();
} else {
// We don't know which subclass of RepeatedPtrFieldBase the type is,
// so we use RepeatedPtrFieldBase directly.
MutableRaw<RepeatedPtrFieldBase>(message, field)
->Clear<GenericTypeHandler<Message> >();
}
break;
}
}
}
}
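// Usage sketch for Reflection::ClearField(): clearing every field that is
// currently set on a message, using reflection only.
//
//   std::vector<const FieldDescriptor*> fields;
//   const Reflection* reflection = msg.GetReflection();
//   reflection->ListFields(msg, &fields);
//   for (const FieldDescriptor* field : fields) {
//     reflection->ClearField(&msg, field);
//   }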
void Reflection::RemoveLast(Message* message,
const FieldDescriptor* field) const {
USAGE_CHECK_MESSAGE_TYPE(RemoveLast);
USAGE_CHECK_REPEATED(RemoveLast);
CheckInvalidAccess(schema_, field);
if (field->is_extension()) {
MutableExtensionSet(message)->RemoveLast(field->number());
} else {
switch (field->cpp_type()) {
#define HANDLE_TYPE(UPPERCASE, LOWERCASE) \
case FieldDescriptor::CPPTYPE_##UPPERCASE: \
MutableRaw<RepeatedField<LOWERCASE> >(message, field)->RemoveLast(); \
break
HANDLE_TYPE(INT32, int32_t);
HANDLE_TYPE(INT64, int64_t);
HANDLE_TYPE(UINT32, uint32_t);
HANDLE_TYPE(UINT64, uint64_t);
HANDLE_TYPE(DOUBLE, double);
HANDLE_TYPE(FLOAT, float);
HANDLE_TYPE(BOOL, bool);
HANDLE_TYPE(ENUM, int);
#undef HANDLE_TYPE
case FieldDescriptor::CPPTYPE_STRING:
switch (field->options().ctype()) {
default: // TODO(kenton): Support other string reps.
case FieldOptions::STRING:
MutableRaw<RepeatedPtrField<std::string> >(message, field)
->RemoveLast();
break;
}
break;
case FieldDescriptor::CPPTYPE_MESSAGE:
if (IsMapFieldInApi(field)) {
MutableRaw<MapFieldBase>(message, field)
->MutableRepeatedField()
->RemoveLast<GenericTypeHandler<Message> >();
} else {
MutableRaw<RepeatedPtrFieldBase>(message, field)
->RemoveLast<GenericTypeHandler<Message> >();
}
break;
}
}
}
Message* Reflection::ReleaseLast(Message* message,
const FieldDescriptor* field) const {
USAGE_CHECK_ALL(ReleaseLast, REPEATED, MESSAGE);
CheckInvalidAccess(schema_, field);
Message* released;
if (field->is_extension()) {
released = static_cast<Message*>(
MutableExtensionSet(message)->ReleaseLast(field->number()));
} else {
if (IsMapFieldInApi(field)) {
released = MutableRaw<MapFieldBase>(message, field)
->MutableRepeatedField()
->ReleaseLast<GenericTypeHandler<Message>>();
} else {
released = MutableRaw<RepeatedPtrFieldBase>(message, field)
->ReleaseLast<GenericTypeHandler<Message>>();
}
}
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
return MaybeForceCopy(message->GetArenaForAllocation(), released);
#else // PROTOBUF_FORCE_COPY_IN_RELEASE
return released;
#endif // !PROTOBUF_FORCE_COPY_IN_RELEASE
}
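// Usage sketch for Reflection::ReleaseLast(); assumes a repeated message
// field named "items" (name illustrative). The caller takes ownership of the
// returned element; when the field lives on an arena, a heap-allocated copy
// is returned.
//
//   const FieldDescriptor* field =
//       msg.GetDescriptor()->FindFieldByName("items");
//   std::unique_ptr<Message> last(
//       msg.GetReflection()->ReleaseLast(&msg, field));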
Message* Reflection::UnsafeArenaReleaseLast(
Message* message, const FieldDescriptor* field) const {
USAGE_CHECK_ALL(UnsafeArenaReleaseLast, REPEATED, MESSAGE);
CheckInvalidAccess(schema_, field);
if (field->is_extension()) {
return static_cast<Message*>(
MutableExtensionSet(message)->UnsafeArenaReleaseLast(field->number()));
} else {
if (IsMapFieldInApi(field)) {
return MutableRaw<MapFieldBase>(message, field)
->MutableRepeatedField()
->UnsafeArenaReleaseLast<GenericTypeHandler<Message>>();
} else {
return MutableRaw<RepeatedPtrFieldBase>(message, field)
->UnsafeArenaReleaseLast<GenericTypeHandler<Message>>();
}
}
}
void Reflection::SwapElements(Message* message, const FieldDescriptor* field,
int index1, int index2) const {
USAGE_CHECK_MESSAGE_TYPE(Swap);
USAGE_CHECK_REPEATED(Swap);
CheckInvalidAccess(schema_, field);
if (field->is_extension()) {
MutableExtensionSet(message)->SwapElements(field->number(), index1, index2);
} else {
switch (field->cpp_type()) {
#define HANDLE_TYPE(UPPERCASE, LOWERCASE) \
case FieldDescriptor::CPPTYPE_##UPPERCASE: \
MutableRaw<RepeatedField<LOWERCASE> >(message, field) \
->SwapElements(index1, index2); \
break
HANDLE_TYPE(INT32, int32_t);
HANDLE_TYPE(INT64, int64_t);
HANDLE_TYPE(UINT32, uint32_t);
HANDLE_TYPE(UINT64, uint64_t);
HANDLE_TYPE(DOUBLE, double);
HANDLE_TYPE(FLOAT, float);
HANDLE_TYPE(BOOL, bool);
HANDLE_TYPE(ENUM, int);
#undef HANDLE_TYPE
case FieldDescriptor::CPPTYPE_STRING:
case FieldDescriptor::CPPTYPE_MESSAGE:
if (IsMapFieldInApi(field)) {
MutableRaw<MapFieldBase>(message, field)
->MutableRepeatedField()
->SwapElements(index1, index2);
} else {
MutableRaw<RepeatedPtrFieldBase>(message, field)
->SwapElements(index1, index2);
}
break;
}
}
}
namespace {
// Comparison functor for sorting FieldDescriptors by field number.
struct FieldNumberSorter {
bool operator()(const FieldDescriptor* left,
const FieldDescriptor* right) const {
return left->number() < right->number();
}
};
bool IsIndexInHasBitSet(const uint32_t* has_bit_set, uint32_t has_bit_index) {
GOOGLE_DCHECK_NE(has_bit_index, ~0u);
return ((has_bit_set[has_bit_index / 32] >> (has_bit_index % 32)) &
static_cast<uint32_t>(1)) != 0;
}
bool CreateUnknownEnumValues(const FileDescriptor* file) {
return file->syntax() == FileDescriptor::SYNTAX_PROTO3;
}
} // namespace
namespace internal {
bool CreateUnknownEnumValues(const FieldDescriptor* field) {
bool open_enum = false;
return field->file()->syntax() == FileDescriptor::SYNTAX_PROTO3 || open_enum;
}
} // namespace internal
using internal::CreateUnknownEnumValues;
void Reflection::ListFieldsMayFailOnStripped(
const Message& message, bool should_fail,
std::vector<const FieldDescriptor*>* output) const {
output->clear();
// Optimization: The default instance never has any fields set.
if (schema_.IsDefaultInstance(message)) return;
// Optimization: Avoid calling GetHasBits() and HasOneofField() many times
// within the field loop. We allow this violation of ReflectionSchema
// encapsulation because this function takes a noticeable amount of CPU
// fleetwide and properly allowing this optimization through public interfaces
// seems more trouble than it is worth.
const uint32_t* const has_bits =
schema_.HasHasbits() ? GetHasBits(message) : nullptr;
const uint32_t* const has_bits_indices = schema_.has_bit_indices_;
output->reserve(descriptor_->field_count());
const int last_non_weak_field_index = last_non_weak_field_index_;
for (int i = 0; i <= last_non_weak_field_index; i++) {
const FieldDescriptor* field = descriptor_->field(i);
if (!should_fail && schema_.IsFieldStripped(field)) {
continue;
}
if (field->is_repeated()) {
if (FieldSize(message, field) > 0) {
output->push_back(field);
}
} else {
const OneofDescriptor* containing_oneof = field->containing_oneof();
if (schema_.InRealOneof(field)) {
const uint32_t* const oneof_case_array =
GetConstPointerAtOffset<uint32_t>(&message,
schema_.oneof_case_offset_);
// Equivalent to: HasOneofField(message, field)
if (static_cast<int64_t>(oneof_case_array[containing_oneof->index()]) ==
field->number()) {
output->push_back(field);
}
} else if (has_bits && has_bits_indices[i] != static_cast<uint32_t>(-1)) {
CheckInvalidAccess(schema_, field);
// Equivalent to: HasBit(message, field)
if (IsIndexInHasBitSet(has_bits, has_bits_indices[i])) {
output->push_back(field);
}
} else if (HasBit(message, field)) { // Fall back on proto3-style HasBit.
output->push_back(field);
}
}
}
if (schema_.HasExtensionSet()) {
GetExtensionSet(message).AppendToList(descriptor_, descriptor_pool_,
output);
}
// ListFields() must sort output by field number.
std::sort(output->begin(), output->end(), FieldNumberSorter());
}
void Reflection::ListFields(const Message& message,
std::vector<const FieldDescriptor*>* output) const {
ListFieldsMayFailOnStripped(message, true, output);
}
void Reflection::ListFieldsOmitStripped(
const Message& message, std::vector<const FieldDescriptor*>* output) const {
ListFieldsMayFailOnStripped(message, false, output);
}
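// Usage sketch for Reflection::ListFields(): the output contains singular
// fields that are set and repeated fields that are non-empty (including
// extensions), sorted by field number.
//
//   std::vector<const FieldDescriptor*> fields;
//   msg.GetReflection()->ListFields(msg, &fields);
//   for (const FieldDescriptor* field : fields) {
//     GOOGLE_LOG(INFO) << field->full_name();
//   }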
// -------------------------------------------------------------------
#undef DEFINE_PRIMITIVE_ACCESSORS
#define DEFINE_PRIMITIVE_ACCESSORS(TYPENAME, TYPE, PASSTYPE, CPPTYPE) \
PASSTYPE Reflection::Get##TYPENAME(const Message& message, \
const FieldDescriptor* field) const { \
USAGE_CHECK_ALL(Get##TYPENAME, SINGULAR, CPPTYPE); \
if (field->is_extension()) { \
return GetExtensionSet(message).Get##TYPENAME( \
field->number(), field->default_value_##PASSTYPE()); \
} else if (schema_.InRealOneof(field) && !HasOneofField(message, field)) { \
return field->default_value_##PASSTYPE(); \
} else { \
return GetField<TYPE>(message, field); \
} \
} \
\
void Reflection::Set##TYPENAME( \
Message* message, const FieldDescriptor* field, PASSTYPE value) const { \
USAGE_CHECK_ALL(Set##TYPENAME, SINGULAR, CPPTYPE); \
if (field->is_extension()) { \
return MutableExtensionSet(message)->Set##TYPENAME( \
field->number(), field->type(), value, field); \
} else { \
SetField<TYPE>(message, field, value); \
} \
} \
\
PASSTYPE Reflection::GetRepeated##TYPENAME( \
const Message& message, const FieldDescriptor* field, int index) const { \
USAGE_CHECK_ALL(GetRepeated##TYPENAME, REPEATED, CPPTYPE); \
if (field->is_extension()) { \
return GetExtensionSet(message).GetRepeated##TYPENAME(field->number(), \
index); \
} else { \
return GetRepeatedField<TYPE>(message, field, index); \
} \
} \
\
void Reflection::SetRepeated##TYPENAME(Message* message, \
const FieldDescriptor* field, \
int index, PASSTYPE value) const { \
USAGE_CHECK_ALL(SetRepeated##TYPENAME, REPEATED, CPPTYPE); \
if (field->is_extension()) { \
MutableExtensionSet(message)->SetRepeated##TYPENAME(field->number(), \
index, value); \
} else { \
SetRepeatedField<TYPE>(message, field, index, value); \
} \
} \
\
void Reflection::Add##TYPENAME( \
Message* message, const FieldDescriptor* field, PASSTYPE value) const { \
USAGE_CHECK_ALL(Add##TYPENAME, REPEATED, CPPTYPE); \
if (field->is_extension()) { \
MutableExtensionSet(message)->Add##TYPENAME( \
field->number(), field->type(), field->options().packed(), value, \
field); \
} else { \
AddField<TYPE>(message, field, value); \
} \
}
DEFINE_PRIMITIVE_ACCESSORS(Int32, int32_t, int32_t, INT32)
DEFINE_PRIMITIVE_ACCESSORS(Int64, int64_t, int64_t, INT64)
DEFINE_PRIMITIVE_ACCESSORS(UInt32, uint32_t, uint32_t, UINT32)
DEFINE_PRIMITIVE_ACCESSORS(UInt64, uint64_t, uint64_t, UINT64)
DEFINE_PRIMITIVE_ACCESSORS(Float, float, float, FLOAT)
DEFINE_PRIMITIVE_ACCESSORS(Double, double, double, DOUBLE)
DEFINE_PRIMITIVE_ACCESSORS(Bool, bool, bool, BOOL)
#undef DEFINE_PRIMITIVE_ACCESSORS
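// Usage sketch for the primitive accessors defined above; assumes a singular
// int32 field named "count" (name illustrative):
//
//   const FieldDescriptor* field =
//       msg.GetDescriptor()->FindFieldByName("count");
//   const Reflection* reflection = msg.GetReflection();
//   reflection->SetInt32(&msg, field, 42);
//   int32_t value = reflection->GetInt32(msg, field);  // value == 42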
// -------------------------------------------------------------------
std::string Reflection::GetString(const Message& message,
const FieldDescriptor* field) const {
USAGE_CHECK_ALL(GetString, SINGULAR, STRING);
if (field->is_extension()) {
return GetExtensionSet(message).GetString(field->number(),
field->default_value_string());
} else {
if (schema_.InRealOneof(field) && !HasOneofField(message, field)) {
return field->default_value_string();
}
switch (field->options().ctype()) {
default: // TODO(kenton): Support other string reps.
case FieldOptions::STRING:
if (IsInlined(field)) {
return GetField<InlinedStringField>(message, field).GetNoArena();
} else {
const auto& str = GetField<ArenaStringPtr>(message, field);
return str.IsDefault() ? field->default_value_string() : str.Get();
}
}
}
}
const std::string& Reflection::GetStringReference(const Message& message,
const FieldDescriptor* field,
std::string* scratch) const {
(void)scratch; // Parameter is used by Google-internal code.
USAGE_CHECK_ALL(GetStringReference, SINGULAR, STRING);
if (field->is_extension()) {
return GetExtensionSet(message).GetString(field->number(),
field->default_value_string());
} else {
if (schema_.InRealOneof(field) && !HasOneofField(message, field)) {
return field->default_value_string();
}
switch (field->options().ctype()) {
default: // TODO(kenton): Support other string reps.
case FieldOptions::STRING:
if (IsInlined(field)) {
return GetField<InlinedStringField>(message, field).GetNoArena();
} else {
const auto& str = GetField<ArenaStringPtr>(message, field);
return str.IsDefault() ? field->default_value_string() : str.Get();
}
}
}
}
void Reflection::SetString(Message* message, const FieldDescriptor* field,
std::string value) const {
USAGE_CHECK_ALL(SetString, SINGULAR, STRING);
if (field->is_extension()) {
return MutableExtensionSet(message)->SetString(
field->number(), field->type(), std::move(value), field);
} else {
switch (field->options().ctype()) {
default: // TODO(kenton): Support other string reps.
case FieldOptions::STRING: {
if (IsInlined(field)) {
const uint32_t index = schema_.InlinedStringIndex(field);
GOOGLE_DCHECK_GT(index, 0);
uint32_t* states =
&MutableInlinedStringDonatedArray(message)[index / 32];
uint32_t mask = ~(static_cast<uint32_t>(1) << (index % 32));
MutableField<InlinedStringField>(message, field)
->Set(value, message->GetArenaForAllocation(),
IsInlinedStringDonated(*message, field), states, mask,
message);
break;
}
// Oneof string fields are never set as a default instance.
// We just need to pass some arbitrary default string to make it work.
// This allows us to not have the real default accessible from
// reflection.
if (schema_.InRealOneof(field) && !HasOneofField(*message, field)) {
ClearOneof(message, field->containing_oneof());
MutableField<ArenaStringPtr>(message, field)->InitDefault();
}
MutableField<ArenaStringPtr>(message, field)
->Set(std::move(value), message->GetArenaForAllocation());
break;
}
}
}
}
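// Usage sketch for the singular string accessors; assumes a string field
// named "name" (name illustrative):
//
//   const FieldDescriptor* field =
//       msg.GetDescriptor()->FindFieldByName("name");
//   const Reflection* reflection = msg.GetReflection();
//   reflection->SetString(&msg, field, "hello");
//   std::string scratch;
//   const std::string& value =
//       reflection->GetStringReference(msg, field, &scratch);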
std::string Reflection::GetRepeatedString(const Message& message,
const FieldDescriptor* field,
int index) const {
USAGE_CHECK_ALL(GetRepeatedString, REPEATED, STRING);
if (field->is_extension()) {
return GetExtensionSet(message).GetRepeatedString(field->number(), index);
} else {
switch (field->options().ctype()) {
default: // TODO(kenton): Support other string reps.
case FieldOptions::STRING:
return GetRepeatedPtrField<std::string>(message, field, index);
}
}
}
const std::string& Reflection::GetRepeatedStringReference(
const Message& message, const FieldDescriptor* field, int index,
std::string* scratch) const {
(void)scratch; // Parameter is used by Google-internal code.
USAGE_CHECK_ALL(GetRepeatedStringReference, REPEATED, STRING);
if (field->is_extension()) {
return GetExtensionSet(message).GetRepeatedString(field->number(), index);
} else {
switch (field->options().ctype()) {
default: // TODO(kenton): Support other string reps.
case FieldOptions::STRING:
return GetRepeatedPtrField<std::string>(message, field, index);
}
}
}
void Reflection::SetRepeatedString(Message* message,
const FieldDescriptor* field, int index,
std::string value) const {
USAGE_CHECK_ALL(SetRepeatedString, REPEATED, STRING);
if (field->is_extension()) {
MutableExtensionSet(message)->SetRepeatedString(field->number(), index,
std::move(value));
} else {
switch (field->options().ctype()) {
default: // TODO(kenton): Support other string reps.
case FieldOptions::STRING:
MutableRepeatedField<std::string>(message, field, index)
->assign(std::move(value));
break;
}
}
}
void Reflection::AddString(Message* message, const FieldDescriptor* field,
std::string value) const {
USAGE_CHECK_ALL(AddString, REPEATED, STRING);
if (field->is_extension()) {
MutableExtensionSet(message)->AddString(field->number(), field->type(),
std::move(value), field);
} else {
switch (field->options().ctype()) {
default: // TODO(kenton): Support other string reps.
case FieldOptions::STRING:
AddField<std::string>(message, field)->assign(std::move(value));
break;
}
}
}
// -------------------------------------------------------------------
const EnumValueDescriptor* Reflection::GetEnum(
const Message& message, const FieldDescriptor* field) const {
// Usage checked by GetEnumValue.
int value = GetEnumValue(message, field);
return field->enum_type()->FindValueByNumberCreatingIfUnknown(value);
}
int Reflection::GetEnumValue(const Message& message,
const FieldDescriptor* field) const {
USAGE_CHECK_ALL(GetEnumValue, SINGULAR, ENUM);
int32_t value;
if (field->is_extension()) {
value = GetExtensionSet(message).GetEnum(
field->number(), field->default_value_enum()->number());
} else if (schema_.InRealOneof(field) && !HasOneofField(message, field)) {
value = field->default_value_enum()->number();
} else {
value = GetField<int>(message, field);
}
return value;
}
void Reflection::SetEnum(Message* message, const FieldDescriptor* field,
const EnumValueDescriptor* value) const {
// Usage checked by SetEnumValue.
USAGE_CHECK_ENUM_VALUE(SetEnum);
SetEnumValueInternal(message, field, value->number());
}
void Reflection::SetEnumValue(Message* message, const FieldDescriptor* field,
int value) const {
USAGE_CHECK_ALL(SetEnumValue, SINGULAR, ENUM);
if (!CreateUnknownEnumValues(field)) {
// Check that the value is valid if we don't support direct storage of
// unknown enum values.
const EnumValueDescriptor* value_desc =
field->enum_type()->FindValueByNumber(value);
if (value_desc == nullptr) {
MutableUnknownFields(message)->AddVarint(field->number(), value);
return;
}
}
SetEnumValueInternal(message, field, value);
}
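// Usage sketch for Reflection::SetEnumValue(); assumes an enum field named
// "state" (name illustrative). For closed (proto2) enums an unrecognized
// numeric value goes to the unknown field set instead of the field itself;
// for open (proto3) enums it is stored directly.
//
//   const FieldDescriptor* field =
//       msg.GetDescriptor()->FindFieldByName("state");
//   msg.GetReflection()->SetEnumValue(&msg, field, 1234);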
void Reflection::SetEnumValueInternal(Message* message,
const FieldDescriptor* field,
int value) const {
if (field->is_extension()) {
MutableExtensionSet(message)->SetEnum(field->number(), field->type(), value,
field);
} else {
SetField<int>(message, field, value);
}
}
const EnumValueDescriptor* Reflection::GetRepeatedEnum(
const Message& message, const FieldDescriptor* field, int index) const {
// Usage checked by GetRepeatedEnumValue.
int value = GetRepeatedEnumValue(message, field, index);
return field->enum_type()->FindValueByNumberCreatingIfUnknown(value);
}
int Reflection::GetRepeatedEnumValue(const Message& message,
const FieldDescriptor* field,
int index) const {
USAGE_CHECK_ALL(GetRepeatedEnumValue, REPEATED, ENUM);
int value;
if (field->is_extension()) {
value = GetExtensionSet(message).GetRepeatedEnum(field->number(), index);
} else {
value = GetRepeatedField<int>(message, field, index);
}
return value;
}
void Reflection::SetRepeatedEnum(Message* message, const FieldDescriptor* field,
int index,
const EnumValueDescriptor* value) const {
// Usage checked by SetRepeatedEnumValue.
USAGE_CHECK_ENUM_VALUE(SetRepeatedEnum);
SetRepeatedEnumValueInternal(message, field, index, value->number());
}
void Reflection::SetRepeatedEnumValue(Message* message,
const FieldDescriptor* field, int index,
int value) const {
USAGE_CHECK_ALL(SetRepeatedEnum, REPEATED, ENUM);
if (!CreateUnknownEnumValues(field)) {
// Check that the value is valid if we don't support direct storage of
// unknown enum values.
const EnumValueDescriptor* value_desc =
field->enum_type()->FindValueByNumber(value);
if (value_desc == nullptr) {
MutableUnknownFields(message)->AddVarint(field->number(), value);
return;
}
}
SetRepeatedEnumValueInternal(message, field, index, value);
}
void Reflection::SetRepeatedEnumValueInternal(Message* message,
const FieldDescriptor* field,
int index, int value) const {
if (field->is_extension()) {
MutableExtensionSet(message)->SetRepeatedEnum(field->number(), index,
value);
} else {
SetRepeatedField<int>(message, field, index, value);
}
}
void Reflection::AddEnum(Message* message, const FieldDescriptor* field,
const EnumValueDescriptor* value) const {
// Usage checked by AddEnumValue.
USAGE_CHECK_ENUM_VALUE(AddEnum);
AddEnumValueInternal(message, field, value->number());
}
void Reflection::AddEnumValue(Message* message, const FieldDescriptor* field,
int value) const {
USAGE_CHECK_ALL(AddEnum, REPEATED, ENUM);
if (!CreateUnknownEnumValues(field)) {
// Check that the value is valid if we don't support direct storage of
// unknown enum values.
const EnumValueDescriptor* value_desc =
field->enum_type()->FindValueByNumber(value);
if (value_desc == nullptr) {
MutableUnknownFields(message)->AddVarint(field->number(), value);
return;
}
}
AddEnumValueInternal(message, field, value);
}
void Reflection::AddEnumValueInternal(Message* message,
const FieldDescriptor* field,
int value) const {
if (field->is_extension()) {
MutableExtensionSet(message)->AddEnum(field->number(), field->type(),
field->options().packed(), value,
field);
} else {
AddField<int>(message, field, value);
}
}
// -------------------------------------------------------------------
const Message* Reflection::GetDefaultMessageInstance(
const FieldDescriptor* field) const {
// If we are using the generated factory, we cache the prototype in the field
// descriptor for faster access.
// The default instances of generated messages are not cross-linked, which
// means they contain null pointers on their message fields and can't be used
// to get the default of submessages.
if (message_factory_ == MessageFactory::generated_factory()) {
auto& ptr = field->default_generated_instance_;
auto* res = ptr.load(std::memory_order_acquire);
if (res == nullptr) {
// First time asking for this field's default. Load it and cache it.
res = message_factory_->GetPrototype(field->message_type());
ptr.store(res, std::memory_order_release);
}
return res;
}
// For other factories, we try the default's object field.
// In particular, the DynamicMessageFactory will cross-link the default
// instances to allow for this. But only do this for real fields.
// This is an optimization to avoid going to GetPrototype() below, as that
// requires a lock and a map lookup.
if (!field->is_extension() && !field->options().weak() &&
!IsLazyField(field) && !schema_.InRealOneof(field)) {
auto* res = DefaultRaw<const Message*>(field);
if (res != nullptr) {
return res;
}
}
// Otherwise, just go to the factory.
return message_factory_->GetPrototype(field->message_type());
}
const Message& Reflection::GetMessage(const Message& message,
const FieldDescriptor* field,
MessageFactory* factory) const {
USAGE_CHECK_ALL(GetMessage, SINGULAR, MESSAGE);
CheckInvalidAccess(schema_, field);
if (factory == nullptr) factory = message_factory_;
if (field->is_extension()) {
return static_cast<const Message&>(GetExtensionSet(message).GetMessage(
field->number(), field->message_type(), factory));
} else {
if (schema_.InRealOneof(field) && !HasOneofField(message, field)) {
return *GetDefaultMessageInstance(field);
}
const Message* result = GetRaw<const Message*>(message, field);
if (result == nullptr) {
result = GetDefaultMessageInstance(field);
}
return *result;
}
}
Message* Reflection::MutableMessage(Message* message,
const FieldDescriptor* field,
MessageFactory* factory) const {
USAGE_CHECK_ALL(MutableMessage, SINGULAR, MESSAGE);
CheckInvalidAccess(schema_, field);
if (factory == nullptr) factory = message_factory_;
if (field->is_extension()) {
return static_cast<Message*>(
MutableExtensionSet(message)->MutableMessage(field, factory));
} else {
Message* result;
Message** result_holder = MutableRaw<Message*>(message, field);
if (schema_.InRealOneof(field)) {
if (!HasOneofField(*message, field)) {
ClearOneof(message, field->containing_oneof());
result_holder = MutableField<Message*>(message, field);
const Message* default_message = GetDefaultMessageInstance(field);
*result_holder = default_message->New(message->GetArenaForAllocation());
}
} else {
SetBit(message, field);
}
if (*result_holder == nullptr) {
const Message* default_message = GetDefaultMessageInstance(field);
*result_holder = default_message->New(message->GetArenaForAllocation());
}
result = *result_holder;
return result;
}
}
void Reflection::UnsafeArenaSetAllocatedMessage(
Message* message, Message* sub_message,
const FieldDescriptor* field) const {
USAGE_CHECK_ALL(SetAllocatedMessage, SINGULAR, MESSAGE);
CheckInvalidAccess(schema_, field);
if (field->is_extension()) {
MutableExtensionSet(message)->UnsafeArenaSetAllocatedMessage(
field->number(), field->type(), field, sub_message);
} else {
if (schema_.InRealOneof(field)) {
if (sub_message == nullptr) {
ClearOneof(message, field->containing_oneof());
return;
}
ClearOneof(message, field->containing_oneof());
*MutableRaw<Message*>(message, field) = sub_message;
SetOneofCase(message, field);
return;
}
if (sub_message == nullptr) {
ClearBit(message, field);
} else {
SetBit(message, field);
}
Message** sub_message_holder = MutableRaw<Message*>(message, field);
if (message->GetArenaForAllocation() == nullptr) {
delete *sub_message_holder;
}
*sub_message_holder = sub_message;
}
}
void Reflection::SetAllocatedMessage(Message* message, Message* sub_message,
const FieldDescriptor* field) const {
GOOGLE_DCHECK(sub_message == nullptr || sub_message->GetOwningArena() == nullptr ||
sub_message->GetOwningArena() == message->GetArenaForAllocation());
CheckInvalidAccess(schema_, field);
// If message and sub-message are in different memory ownership domains
// (different arenas, or one is on heap and one is not), then we may need to
// do a copy.
if (sub_message != nullptr &&
sub_message->GetOwningArena() != message->GetArenaForAllocation()) {
if (sub_message->GetOwningArena() == nullptr &&
message->GetArenaForAllocation() != nullptr) {
// Case 1: parent is on an arena and child is heap-allocated. We can add
// the child to the arena's Own() list to free on arena destruction, then
// set our pointer.
message->GetArenaForAllocation()->Own(sub_message);
UnsafeArenaSetAllocatedMessage(message, sub_message, field);
} else {
// Case 2: all other cases. We need to make a copy. MutableMessage() will
// either get the existing message object, or instantiate a new one as
// appropriate w.r.t. our arena.
Message* sub_message_copy = MutableMessage(message, field);
sub_message_copy->CopyFrom(*sub_message);
}
} else {
// Same memory ownership domains.
UnsafeArenaSetAllocatedMessage(message, sub_message, field);
}
}
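// Usage sketch for Reflection::SetAllocatedMessage(); assumes a singular
// message field named "child" (names illustrative). Ownership of the
// heap-allocated submessage passes to `parent`; if `parent` lives on an
// arena, the submessage is registered with the arena's Own() list.
//
//   const FieldDescriptor* field =
//       parent.GetDescriptor()->FindFieldByName("child");
//   parent.GetReflection()->SetAllocatedMessage(&parent, new ChildMessage,
//                                               field);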
Message* Reflection::UnsafeArenaReleaseMessage(Message* message,
const FieldDescriptor* field,
MessageFactory* factory) const {
USAGE_CHECK_ALL(ReleaseMessage, SINGULAR, MESSAGE);
CheckInvalidAccess(schema_, field);
if (factory == nullptr) factory = message_factory_;
if (field->is_extension()) {
return static_cast<Message*>(
MutableExtensionSet(message)->UnsafeArenaReleaseMessage(field,
factory));
} else {
if (!(field->is_repeated() || schema_.InRealOneof(field))) {
ClearBit(message, field);
}
if (schema_.InRealOneof(field)) {
if (HasOneofField(*message, field)) {
*MutableOneofCase(message, field->containing_oneof()) = 0;
} else {
return nullptr;
}
}
Message** result = MutableRaw<Message*>(message, field);
Message* ret = *result;
*result = nullptr;
return ret;
}
}
Message* Reflection::ReleaseMessage(Message* message,
const FieldDescriptor* field,
MessageFactory* factory) const {
CheckInvalidAccess(schema_, field);
Message* released = UnsafeArenaReleaseMessage(message, field, factory);
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
released = MaybeForceCopy(message->GetArenaForAllocation(), released);
#endif // PROTOBUF_FORCE_COPY_IN_RELEASE
if (message->GetArenaForAllocation() != nullptr && released != nullptr) {
Message* copy_from_arena = released->New();
copy_from_arena->CopyFrom(*released);
released = copy_from_arena;
}
return released;
}
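// Usage sketch for Reflection::ReleaseMessage(); the field name "child" is
// illustrative. The returned submessage is always caller-owned: when `parent`
// lives on an arena, a heap copy is returned and the arena-backed original
// remains alive until the arena is destroyed.
//
//   const FieldDescriptor* field =
//       parent.GetDescriptor()->FindFieldByName("child");
//   std::unique_ptr<Message> child(
//       parent.GetReflection()->ReleaseMessage(&parent, field));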
const Message& Reflection::GetRepeatedMessage(const Message& message,
const FieldDescriptor* field,
int index) const {
USAGE_CHECK_ALL(GetRepeatedMessage, REPEATED, MESSAGE);
CheckInvalidAccess(schema_, field);
if (field->is_extension()) {
return static_cast<const Message&>(
GetExtensionSet(message).GetRepeatedMessage(field->number(), index));
} else {
if (IsMapFieldInApi(field)) {
return GetRaw<MapFieldBase>(message, field)
.GetRepeatedField()
.Get<GenericTypeHandler<Message> >(index);
} else {
return GetRaw<RepeatedPtrFieldBase>(message, field)
.Get<GenericTypeHandler<Message> >(index);
}
}
}
Message* Reflection::MutableRepeatedMessage(Message* message,
const FieldDescriptor* field,
int index) const {
USAGE_CHECK_ALL(MutableRepeatedMessage, REPEATED, MESSAGE);
CheckInvalidAccess(schema_, field);
if (field->is_extension()) {
return static_cast<Message*>(
MutableExtensionSet(message)->MutableRepeatedMessage(field->number(),
index));
} else {
if (IsMapFieldInApi(field)) {
return MutableRaw<MapFieldBase>(message, field)
->MutableRepeatedField()
->Mutable<GenericTypeHandler<Message> >(index);
} else {
return MutableRaw<RepeatedPtrFieldBase>(message, field)
->Mutable<GenericTypeHandler<Message> >(index);
}
}
}
Message* Reflection::AddMessage(Message* message, const FieldDescriptor* field,
MessageFactory* factory) const {
USAGE_CHECK_ALL(AddMessage, REPEATED, MESSAGE);
CheckInvalidAccess(schema_, field);
if (factory == nullptr) factory = message_factory_;
if (field->is_extension()) {
return static_cast<Message*>(
MutableExtensionSet(message)->AddMessage(field, factory));
} else {
Message* result = nullptr;
// We can't use AddField<Message>() because RepeatedPtrFieldBase doesn't
// know how to allocate one.
RepeatedPtrFieldBase* repeated = nullptr;
if (IsMapFieldInApi(field)) {
repeated =
MutableRaw<MapFieldBase>(message, field)->MutableRepeatedField();
} else {
repeated = MutableRaw<RepeatedPtrFieldBase>(message, field);
}
result = repeated->AddFromCleared<GenericTypeHandler<Message> >();
if (result == nullptr) {
// We must allocate a new object.
const Message* prototype;
if (repeated->size() == 0) {
prototype = factory->GetPrototype(field->message_type());
} else {
prototype = &repeated->Get<GenericTypeHandler<Message> >(0);
}
result = prototype->New(message->GetArenaForAllocation());
// We can guarantee here that repeated and result are either both heap
// allocated or arena owned. So it is safe to call the unsafe version
// of AddAllocated.
repeated->UnsafeArenaAddAllocated<GenericTypeHandler<Message> >(result);
}
return result;
}
}
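// Usage sketch for Reflection::AddMessage(); assumes a repeated message field
// named "items" (name illustrative). The returned element is appended to the
// repeated field and allocated on the parent's arena when there is one.
//
//   const FieldDescriptor* field =
//       msg.GetDescriptor()->FindFieldByName("items");
//   Message* element = msg.GetReflection()->AddMessage(&msg, field);
//   // Fill in `element` through its own descriptor/reflection.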
void Reflection::AddAllocatedMessage(Message* message,
const FieldDescriptor* field,
Message* new_entry) const {
USAGE_CHECK_ALL(AddAllocatedMessage, REPEATED, MESSAGE);
CheckInvalidAccess(schema_, field);
if (field->is_extension()) {
MutableExtensionSet(message)->AddAllocatedMessage(field, new_entry);
} else {
RepeatedPtrFieldBase* repeated = nullptr;
if (IsMapFieldInApi(field)) {
repeated =
MutableRaw<MapFieldBase>(message, field)->MutableRepeatedField();
} else {
repeated = MutableRaw<RepeatedPtrFieldBase>(message, field);
}
repeated->AddAllocated<GenericTypeHandler<Message> >(new_entry);
}
}
void Reflection::UnsafeArenaAddAllocatedMessage(Message* message,
const FieldDescriptor* field,
Message* new_entry) const {
USAGE_CHECK_ALL(UnsafeArenaAddAllocatedMessage, REPEATED, MESSAGE);
CheckInvalidAccess(schema_, field);
if (field->is_extension()) {
MutableExtensionSet(message)->UnsafeArenaAddAllocatedMessage(field,
new_entry);
} else {
RepeatedPtrFieldBase* repeated = nullptr;
if (IsMapFieldInApi(field)) {
repeated =
MutableRaw<MapFieldBase>(message, field)->MutableRepeatedField();
} else {
repeated = MutableRaw<RepeatedPtrFieldBase>(message, field);
}
repeated->UnsafeArenaAddAllocated<GenericTypeHandler<Message>>(new_entry);
}
}
void* Reflection::MutableRawRepeatedField(Message* message,
const FieldDescriptor* field,
FieldDescriptor::CppType cpptype,
int ctype,
const Descriptor* desc) const {
(void)ctype; // Parameter is used by Google-internal code.
USAGE_CHECK_REPEATED("MutableRawRepeatedField");
CheckInvalidAccess(schema_, field);
if (field->cpp_type() != cpptype &&
(field->cpp_type() != FieldDescriptor::CPPTYPE_ENUM ||
cpptype != FieldDescriptor::CPPTYPE_INT32))
ReportReflectionUsageTypeError(descriptor_, field,
"MutableRawRepeatedField", cpptype);
if (desc != nullptr)
GOOGLE_CHECK_EQ(field->message_type(), desc) << "wrong submessage type";
if (field->is_extension()) {
return MutableExtensionSet(message)->MutableRawRepeatedField(
field->number(), field->type(), field->is_packed(), field);
} else {
// Trigger transform for MapField
if (IsMapFieldInApi(field)) {
return MutableRawNonOneof<MapFieldBase>(message, field)
->MutableRepeatedField();
}
return MutableRawNonOneof<void>(message, field);
}
}
const void* Reflection::GetRawRepeatedField(const Message& message,
const FieldDescriptor* field,
FieldDescriptor::CppType cpptype,
int ctype,
const Descriptor* desc) const {
USAGE_CHECK_REPEATED("GetRawRepeatedField");
if (field->cpp_type() != cpptype)
ReportReflectionUsageTypeError(descriptor_, field, "GetRawRepeatedField",
cpptype);
if (ctype >= 0)
GOOGLE_CHECK_EQ(field->options().ctype(), ctype) << "subtype mismatch";
if (desc != nullptr)
GOOGLE_CHECK_EQ(field->message_type(), desc) << "wrong submessage type";
if (field->is_extension()) {
// Should use extension_set::GetRawRepeatedField. However, the required
// parameter "default repeated value" is not very easy to get here.
// Since maps are not supported in extensions, it is acceptable to use
// extension_set::MutableRawRepeatedField, which does not change the message.
return MutableExtensionSet(const_cast<Message*>(&message))
->MutableRawRepeatedField(field->number(), field->type(),
field->is_packed(), field);
} else {
// Trigger transform for MapField
if (IsMapFieldInApi(field)) {
return &(GetRawNonOneof<MapFieldBase>(message, field).GetRepeatedField());
}
return &GetRawNonOneof<char>(message, field);
}
}
const FieldDescriptor* Reflection::GetOneofFieldDescriptor(
const Message& message, const OneofDescriptor* oneof_descriptor) const {
if (oneof_descriptor->is_synthetic()) {
const FieldDescriptor* field = oneof_descriptor->field(0);
return HasField(message, field) ? field : nullptr;
}
uint32_t field_number = GetOneofCase(message, oneof_descriptor);
if (field_number == 0) {
return nullptr;
}
return descriptor_->FindFieldByNumber(field_number);
}
bool Reflection::ContainsMapKey(const Message& message,
const FieldDescriptor* field,
const MapKey& key) const {
USAGE_CHECK(IsMapFieldInApi(field), "LookupMapValue",
"Field is not a map field.");
return GetRaw<MapFieldBase>(message, field).ContainsMapKey(key);
}
bool Reflection::InsertOrLookupMapValue(Message* message,
const FieldDescriptor* field,
const MapKey& key,
MapValueRef* val) const {
USAGE_CHECK(IsMapFieldInApi(field), "InsertOrLookupMapValue",
"Field is not a map field.");
val->SetType(field->message_type()->map_value()->cpp_type());
return MutableRaw<MapFieldBase>(message, field)
->InsertOrLookupMapValue(key, val);
}
bool Reflection::LookupMapValue(const Message& message,
const FieldDescriptor* field, const MapKey& key,
MapValueConstRef* val) const {
USAGE_CHECK(IsMapFieldInApi(field), "LookupMapValue",
"Field is not a map field.");
val->SetType(field->message_type()->map_value()->cpp_type());
return GetRaw<MapFieldBase>(message, field).LookupMapValue(key, val);
}
bool Reflection::DeleteMapValue(Message* message, const FieldDescriptor* field,
const MapKey& key) const {
USAGE_CHECK(IsMapFieldInApi(field), "DeleteMapValue",
"Field is not a map field.");
return MutableRaw<MapFieldBase>(message, field)->DeleteMapValue(key);
}
MapIterator Reflection::MapBegin(Message* message,
const FieldDescriptor* field) const {
USAGE_CHECK(IsMapFieldInApi(field), "MapBegin", "Field is not a map field.");
MapIterator iter(message, field);
GetRaw<MapFieldBase>(*message, field).MapBegin(&iter);
return iter;
}
MapIterator Reflection::MapEnd(Message* message,
const FieldDescriptor* field) const {
USAGE_CHECK(IsMapFieldInApi(field), "MapEnd", "Field is not a map field.");
MapIterator iter(message, field);
GetRaw<MapFieldBase>(*message, field).MapEnd(&iter);
return iter;
}
int Reflection::MapSize(const Message& message,
const FieldDescriptor* field) const {
USAGE_CHECK(IsMapFieldInApi(field), "MapSize", "Field is not a map field.");
return GetRaw<MapFieldBase>(message, field).size();
}
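// Usage sketch for the map accessors above; assumes a map<string, int32>
// field named "counters" (name illustrative):
//
//   const FieldDescriptor* field =
//       msg.GetDescriptor()->FindFieldByName("counters");
//   const Reflection* reflection = msg.GetReflection();
//   MapKey key;
//   key.SetStringValue("hits");
//   MapValueConstRef value;
//   if (reflection->LookupMapValue(msg, field, key, &value)) {
//     int32_t hits = value.GetInt32Value();
//   }
//   int entries = reflection->MapSize(msg, field);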
// -----------------------------------------------------------------------------
const FieldDescriptor* Reflection::FindKnownExtensionByName(
const std::string& name) const {
if (!schema_.HasExtensionSet()) return nullptr;
return descriptor_pool_->FindExtensionByPrintableName(descriptor_, name);
}
const FieldDescriptor* Reflection::FindKnownExtensionByNumber(
int number) const {
if (!schema_.HasExtensionSet()) return nullptr;
return descriptor_pool_->FindExtensionByNumber(descriptor_, number);
}
bool Reflection::SupportsUnknownEnumValues() const {
return CreateUnknownEnumValues(descriptor_->file());
}
// ===================================================================
// Some private helpers.
// These simple template accessors obtain pointers (or references) to
// the given field.
template <class Type>
const Type& Reflection::GetRawNonOneof(const Message& message,
const FieldDescriptor* field) const {
return GetConstRefAtOffset<Type>(message,
schema_.GetFieldOffsetNonOneof(field));
}
template <class Type>
Type* Reflection::MutableRawNonOneof(Message* message,
const FieldDescriptor* field) const {
return GetPointerAtOffset<Type>(message,
schema_.GetFieldOffsetNonOneof(field));
}
template <typename Type>
Type* Reflection::MutableRaw(Message* message,
const FieldDescriptor* field) const {
return GetPointerAtOffset<Type>(message, schema_.GetFieldOffset(field));
}
const uint32_t* Reflection::GetHasBits(const Message& message) const {
GOOGLE_DCHECK(schema_.HasHasbits());
return &GetConstRefAtOffset<uint32_t>(message, schema_.HasBitsOffset());
}
uint32_t* Reflection::MutableHasBits(Message* message) const {
GOOGLE_DCHECK(schema_.HasHasbits());
return GetPointerAtOffset<uint32_t>(message, schema_.HasBitsOffset());
}
uint32_t* Reflection::MutableOneofCase(
Message* message, const OneofDescriptor* oneof_descriptor) const {
GOOGLE_DCHECK(!oneof_descriptor->is_synthetic());
return GetPointerAtOffset<uint32_t>(
message, schema_.GetOneofCaseOffset(oneof_descriptor));
}
const ExtensionSet& Reflection::GetExtensionSet(const Message& message) const {
return GetConstRefAtOffset<ExtensionSet>(message,
schema_.GetExtensionSetOffset());
}
ExtensionSet* Reflection::MutableExtensionSet(Message* message) const {
return GetPointerAtOffset<ExtensionSet>(message,
schema_.GetExtensionSetOffset());
}
const InternalMetadata& Reflection::GetInternalMetadata(
const Message& message) const {
return GetConstRefAtOffset<InternalMetadata>(message,
schema_.GetMetadataOffset());
}
InternalMetadata* Reflection::MutableInternalMetadata(Message* message) const {
return GetPointerAtOffset<InternalMetadata>(message,
schema_.GetMetadataOffset());
}
const uint32_t* Reflection::GetInlinedStringDonatedArray(
const Message& message) const {
GOOGLE_DCHECK(schema_.HasInlinedString());
return &GetConstRefAtOffset<uint32_t>(message,
schema_.InlinedStringDonatedOffset());
}
uint32_t* Reflection::MutableInlinedStringDonatedArray(Message* message) const {
GOOGLE_DCHECK(schema_.HasInlinedString());
return GetPointerAtOffset<uint32_t>(message,
schema_.InlinedStringDonatedOffset());
}
// Simple accessors for manipulating _inlined_string_donated_.
bool Reflection::IsInlinedStringDonated(const Message& message,
const FieldDescriptor* field) const {
uint32_t index = schema_.InlinedStringIndex(field);
GOOGLE_DCHECK_GT(index, 0);
return IsIndexInHasBitSet(GetInlinedStringDonatedArray(message), index);
}
inline void SetInlinedStringDonated(uint32_t index, uint32_t* array) {
array[index / 32] |= (static_cast<uint32_t>(1) << (index % 32));
}
inline void ClearInlinedStringDonated(uint32_t index, uint32_t* array) {
array[index / 32] &= ~(static_cast<uint32_t>(1) << (index % 32));
}
void Reflection::SwapInlinedStringDonated(Message* lhs, Message* rhs,
const FieldDescriptor* field) const {
Arena* lhs_arena = lhs->GetArenaForAllocation();
Arena* rhs_arena = rhs->GetArenaForAllocation();
// If arenas differ, inlined string fields are swapped by copying values.
// Donation status should not be swapped.
if (lhs_arena != rhs_arena) {
return;
}
bool lhs_donated = IsInlinedStringDonated(*lhs, field);
bool rhs_donated = IsInlinedStringDonated(*rhs, field);
if (lhs_donated == rhs_donated) {
return;
}
// If one is undonated, both must have already registered ArenaDtor.
uint32_t* lhs_array = MutableInlinedStringDonatedArray(lhs);
uint32_t* rhs_array = MutableInlinedStringDonatedArray(rhs);
GOOGLE_CHECK_EQ(lhs_array[0] & 0x1u, 0u);
GOOGLE_CHECK_EQ(rhs_array[0] & 0x1u, 0u);
// Swap donation status bit.
uint32_t index = schema_.InlinedStringIndex(field);
GOOGLE_DCHECK_GT(index, 0);
if (rhs_donated) {
SetInlinedStringDonated(index, lhs_array);
ClearInlinedStringDonated(index, rhs_array);
} else { // lhs_donated
ClearInlinedStringDonated(index, lhs_array);
SetInlinedStringDonated(index, rhs_array);
}
}
// Simple accessors for manipulating has_bits_.
bool Reflection::HasBit(const Message& message,
const FieldDescriptor* field) const {
GOOGLE_DCHECK(!field->options().weak());
if (schema_.HasBitIndex(field) != static_cast<uint32_t>(-1)) {
return IsIndexInHasBitSet(GetHasBits(message), schema_.HasBitIndex(field));
}
// Intentionally check here because HasBitIndex(field) != -1 means valid.
CheckInvalidAccess(schema_, field);
// proto3: no has-bits. All fields present except messages, which are
// present only if their message-field pointer is non-null.
if (field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
return !schema_.IsDefaultInstance(message) &&
GetRaw<const Message*>(message, field) != nullptr;
} else {
// Non-message field (and non-oneof, since that was handled in HasField()
// before calling us), and singular (again, checked in HasField). So, this
// field must be a scalar.
// Scalar primitive (numeric or string/bytes) fields are present if
// their value is non-zero (numeric) or non-empty (string/bytes). N.B.:
// we must use this definition here, rather than the "scalar fields
// always present" in the proto3 docs, because MergeFrom() semantics
// require presence as "present on wire", and reflection-based merge
// (which uses HasField()) needs to be consistent with this.
switch (field->cpp_type()) {
case FieldDescriptor::CPPTYPE_STRING:
switch (field->options().ctype()) {
default: {
if (IsInlined(field)) {
return !GetField<InlinedStringField>(message, field)
.GetNoArena()
.empty();
}
return GetField<ArenaStringPtr>(message, field).Get().size() > 0;
}
}
return false;
case FieldDescriptor::CPPTYPE_BOOL:
return GetRaw<bool>(message, field) != false;
case FieldDescriptor::CPPTYPE_INT32:
return GetRaw<int32_t>(message, field) != 0;
case FieldDescriptor::CPPTYPE_INT64:
return GetRaw<int64_t>(message, field) != 0;
case FieldDescriptor::CPPTYPE_UINT32:
return GetRaw<uint32_t>(message, field) != 0;
case FieldDescriptor::CPPTYPE_UINT64:
return GetRaw<uint64_t>(message, field) != 0;
case FieldDescriptor::CPPTYPE_FLOAT:
static_assert(sizeof(uint32_t) == sizeof(float),
"Code assumes uint32_t and float are the same size.");
return GetRaw<uint32_t>(message, field) != 0;
case FieldDescriptor::CPPTYPE_DOUBLE:
static_assert(sizeof(uint64_t) == sizeof(double),
"Code assumes uint64_t and double are the same size.");
return GetRaw<uint64_t>(message, field) != 0;
case FieldDescriptor::CPPTYPE_ENUM:
return GetRaw<int>(message, field) != 0;
case FieldDescriptor::CPPTYPE_MESSAGE:
// handled above; avoid warning
break;
}
GOOGLE_LOG(FATAL) << "Reached impossible case in HasBit().";
return false;
}
}
void Reflection::SetBit(Message* message, const FieldDescriptor* field) const {
GOOGLE_DCHECK(!field->options().weak());
const uint32_t index = schema_.HasBitIndex(field);
if (index == static_cast<uint32_t>(-1)) return;
MutableHasBits(message)[index / 32] |=
(static_cast<uint32_t>(1) << (index % 32));
}
void Reflection::ClearBit(Message* message,
const FieldDescriptor* field) const {
GOOGLE_DCHECK(!field->options().weak());
const uint32_t index = schema_.HasBitIndex(field);
if (index == static_cast<uint32_t>(-1)) return;
MutableHasBits(message)[index / 32] &=
~(static_cast<uint32_t>(1) << (index % 32));
}
void Reflection::SwapBit(Message* message1, Message* message2,
const FieldDescriptor* field) const {
GOOGLE_DCHECK(!field->options().weak());
if (!schema_.HasHasbits()) {
return;
}
bool temp_has_bit = HasBit(*message1, field);
if (HasBit(*message2, field)) {
SetBit(message1, field);
} else {
ClearBit(message1, field);
}
if (temp_has_bit) {
SetBit(message2, field);
} else {
ClearBit(message2, field);
}
}
bool Reflection::HasOneof(const Message& message,
const OneofDescriptor* oneof_descriptor) const {
if (oneof_descriptor->is_synthetic()) {
return HasField(message, oneof_descriptor->field(0));
}
return (GetOneofCase(message, oneof_descriptor) > 0);
}
void Reflection::SetOneofCase(Message* message,
const FieldDescriptor* field) const {
*MutableOneofCase(message, field->containing_oneof()) = field->number();
}
void Reflection::ClearOneofField(Message* message,
const FieldDescriptor* field) const {
if (HasOneofField(*message, field)) {
ClearOneof(message, field->containing_oneof());
}
}
void Reflection::ClearOneof(Message* message,
const OneofDescriptor* oneof_descriptor) const {
if (oneof_descriptor->is_synthetic()) {
ClearField(message, oneof_descriptor->field(0));
return;
}
// TODO(jieluo): Consider caching the unused object instead of deleting
// it. It will be much faster if an application switches back and forth
// between a few oneof fields. Time/space tradeoff.
uint32_t oneof_case = GetOneofCase(*message, oneof_descriptor);
if (oneof_case > 0) {
const FieldDescriptor* field = descriptor_->FindFieldByNumber(oneof_case);
if (message->GetArenaForAllocation() == nullptr) {
switch (field->cpp_type()) {
case FieldDescriptor::CPPTYPE_STRING: {
switch (field->options().ctype()) {
default: // TODO(kenton): Support other string reps.
case FieldOptions::STRING: {
// Oneof string fields are never set as a default instance.
// We just need to pass some arbitrary default string to make it
// work. This allows us to not have the real default accessible
// from reflection.
MutableField<ArenaStringPtr>(message, field)->Destroy();
break;
}
}
break;
}
case FieldDescriptor::CPPTYPE_MESSAGE:
delete *MutableRaw<Message*>(message, field);
break;
default:
break;
}
}
*MutableOneofCase(message, oneof_descriptor) = 0;
}
}
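// Illustrative effect of ClearOneof() through the public reflection API
// (the message type, oneof, and field names are hypothetical):
//
//   // message M { oneof kind { string name = 1; Sub sub = 2; } }
//   reflection->SetString(&msg, name_field, "x");
//   reflection->ClearOneof(&msg, kind_oneof);
//   assert(!reflection->HasOneof(msg, kind_oneof));
//   assert(reflection->GetOneofFieldDescriptor(msg, kind_oneof) == nullptr);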
#define HANDLE_TYPE(TYPE, CPPTYPE, CTYPE) \
template <> \
const RepeatedField<TYPE>& Reflection::GetRepeatedFieldInternal<TYPE>( \
const Message& message, const FieldDescriptor* field) const { \
return *static_cast<RepeatedField<TYPE>*>(MutableRawRepeatedField( \
const_cast<Message*>(&message), field, CPPTYPE, CTYPE, nullptr)); \
} \
\
template <> \
RepeatedField<TYPE>* Reflection::MutableRepeatedFieldInternal<TYPE>( \
Message * message, const FieldDescriptor* field) const { \
return static_cast<RepeatedField<TYPE>*>( \
MutableRawRepeatedField(message, field, CPPTYPE, CTYPE, nullptr)); \
}
HANDLE_TYPE(int32_t, FieldDescriptor::CPPTYPE_INT32, -1);
HANDLE_TYPE(int64_t, FieldDescriptor::CPPTYPE_INT64, -1);
HANDLE_TYPE(uint32_t, FieldDescriptor::CPPTYPE_UINT32, -1);
HANDLE_TYPE(uint64_t, FieldDescriptor::CPPTYPE_UINT64, -1);
HANDLE_TYPE(float, FieldDescriptor::CPPTYPE_FLOAT, -1);
HANDLE_TYPE(double, FieldDescriptor::CPPTYPE_DOUBLE, -1);
HANDLE_TYPE(bool, FieldDescriptor::CPPTYPE_BOOL, -1);
#undef HANDLE_TYPE
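// These specializations back Reflection::GetRepeatedFieldRef<T>() and
// GetMutableRepeatedFieldRef<T>() for the scalar types listed above.  A usage
// sketch (the message and field are hypothetical):
//
//   MutableRepeatedFieldRef<double> weights =
//       reflection->GetMutableRepeatedFieldRef<double>(&msg, weights_field);
//   weights.Add(0.5);
//   weights.Set(0, 1.0);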
void* Reflection::MutableRawRepeatedString(Message* message,
const FieldDescriptor* field,
bool is_string) const {
(void)is_string; // Parameter is used by Google-internal code.
return MutableRawRepeatedField(message, field,
FieldDescriptor::CPPTYPE_STRING,
FieldOptions::STRING, nullptr);
}
// Template implementations of basic accessors. Inline because each
// template instance is only called from one location. These are
// used for all types except messages.
template <typename Type>
const Type& Reflection::GetField(const Message& message,
const FieldDescriptor* field) const {
return GetRaw<Type>(message, field);
}
template <typename Type>
void Reflection::SetField(Message* message, const FieldDescriptor* field,
const Type& value) const {
bool real_oneof = schema_.InRealOneof(field);
if (real_oneof && !HasOneofField(*message, field)) {
ClearOneof(message, field->containing_oneof());
}
*MutableRaw<Type>(message, field) = value;
real_oneof ? SetOneofCase(message, field) : SetBit(message, field);
}
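// For a member of a real (non-synthetic) oneof, SetField() above first clears
// whichever other member is currently active, then records the new active
// field in the oneof case word; for ordinary singular fields it just sets the
// has-bit (when the field has one).  Sketch (the oneof members "a" and "b"
// are hypothetical):
//
//   SetField<int32_t>(&msg, field_a, 1);  // oneof case becomes a's number
//   SetField<int64_t>(&msg, field_b, 2);  // clears "a", case becomes b's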
template <typename Type>
Type* Reflection::MutableField(Message* message,
const FieldDescriptor* field) const {
schema_.InRealOneof(field) ? SetOneofCase(message, field)
: SetBit(message, field);
return MutableRaw<Type>(message, field);
}
template <typename Type>
const Type& Reflection::GetRepeatedField(const Message& message,
const FieldDescriptor* field,
int index) const {
return GetRaw<RepeatedField<Type> >(message, field).Get(index);
}
template <typename Type>
const Type& Reflection::GetRepeatedPtrField(const Message& message,
const FieldDescriptor* field,
int index) const {
return GetRaw<RepeatedPtrField<Type> >(message, field).Get(index);
}
template <typename Type>
void Reflection::SetRepeatedField(Message* message,
const FieldDescriptor* field, int index,
Type value) const {
MutableRaw<RepeatedField<Type> >(message, field)->Set(index, value);
}
template <typename Type>
Type* Reflection::MutableRepeatedField(Message* message,
const FieldDescriptor* field,
int index) const {
RepeatedPtrField<Type>* repeated =
MutableRaw<RepeatedPtrField<Type> >(message, field);
return repeated->Mutable(index);
}
template <typename Type>
void Reflection::AddField(Message* message, const FieldDescriptor* field,
const Type& value) const {
MutableRaw<RepeatedField<Type> >(message, field)->Add(value);
}
template <typename Type>
Type* Reflection::AddField(Message* message,
const FieldDescriptor* field) const {
RepeatedPtrField<Type>* repeated =
MutableRaw<RepeatedPtrField<Type> >(message, field);
return repeated->Add();
}
MessageFactory* Reflection::GetMessageFactory() const {
return message_factory_;
}
void* Reflection::RepeatedFieldData(Message* message,
const FieldDescriptor* field,
FieldDescriptor::CppType cpp_type,
const Descriptor* message_type) const {
GOOGLE_CHECK(field->is_repeated());
GOOGLE_CHECK(field->cpp_type() == cpp_type ||
(field->cpp_type() == FieldDescriptor::CPPTYPE_ENUM &&
cpp_type == FieldDescriptor::CPPTYPE_INT32))
<< "The type parameter T in RepeatedFieldRef<T> API doesn't match "
<< "the actual field type (for enums T should be the generated enum "
<< "type or int32_t).";
if (message_type != nullptr) {
GOOGLE_CHECK_EQ(message_type, field->message_type());
}
if (field->is_extension()) {
return MutableExtensionSet(message)->MutableRawRepeatedField(
field->number(), field->type(), field->is_packed(), field);
} else {
return MutableRawNonOneof<char>(message, field);
}
}
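// As the check above allows, a repeated enum field may also be accessed
// through the int32_t instantiation of the RepeatedFieldRef API.  Sketch (the
// field name is hypothetical):
//
//   const FieldDescriptor* codes = descriptor->FindFieldByName("codes");
//   // codes->cpp_type() == FieldDescriptor::CPPTYPE_ENUM
//   RepeatedFieldRef<int32_t> ref =
//       reflection->GetRepeatedFieldRef<int32_t>(msg, codes);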
MapFieldBase* Reflection::MutableMapData(Message* message,
const FieldDescriptor* field) const {
USAGE_CHECK(IsMapFieldInApi(field), "GetMapData",
"Field is not a map field.");
return MutableRaw<MapFieldBase>(message, field);
}
const MapFieldBase* Reflection::GetMapData(const Message& message,
const FieldDescriptor* field) const {
USAGE_CHECK(IsMapFieldInApi(field), "GetMapData",
"Field is not a map field.");
return &(GetRaw<MapFieldBase>(message, field));
}
namespace {
// Helper function to transform migration schema into reflection schema.
ReflectionSchema MigrationToReflectionSchema(
const Message* const* default_instance, const uint32_t* offsets,
MigrationSchema migration_schema) {
ReflectionSchema result;
result.default_instance_ = *default_instance;
  // The first 6 offsets are the offsets of the special fields (has-bits,
  // metadata, extensions, oneof cases, weak field map, inlined string donated
  // bits); the offsets of the proto fields follow.  See the layout sketch
  // below.
  result.offsets_ = offsets + migration_schema.offsets_index + 6;
result.has_bit_indices_ = offsets + migration_schema.has_bit_indices_index;
result.has_bits_offset_ = offsets[migration_schema.offsets_index + 0];
result.metadata_offset_ = offsets[migration_schema.offsets_index + 1];
result.extensions_offset_ = offsets[migration_schema.offsets_index + 2];
result.oneof_case_offset_ = offsets[migration_schema.offsets_index + 3];
result.object_size_ = migration_schema.object_size;
result.weak_field_map_offset_ = offsets[migration_schema.offsets_index + 4];
result.inlined_string_donated_offset_ =
offsets[migration_schema.offsets_index + 5];
result.inlined_string_indices_ =
offsets + migration_schema.inlined_string_indices_index;
return result;
}
} // namespace
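// Illustrative layout of one message's slice of the generated offsets array,
// as consumed by MigrationToReflectionSchema() above (the ordering follows the
// assignments in that function):
//
//   offsets[offsets_index + 0]  has-bits offset
//   offsets[offsets_index + 1]  internal metadata offset
//   offsets[offsets_index + 2]  extension set offset
//   offsets[offsets_index + 3]  oneof case offset
//   offsets[offsets_index + 4]  weak field map offset
//   offsets[offsets_index + 5]  inlined-string-donated offset
//   offsets[offsets_index + 6]  first proto field offset, then one per field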
class AssignDescriptorsHelper {
public:
AssignDescriptorsHelper(MessageFactory* factory,
Metadata* file_level_metadata,
const EnumDescriptor** file_level_enum_descriptors,
const MigrationSchema* schemas,
const Message* const* default_instance_data,
const uint32_t* offsets)
: factory_(factory),
file_level_metadata_(file_level_metadata),
file_level_enum_descriptors_(file_level_enum_descriptors),
schemas_(schemas),
default_instance_data_(default_instance_data),
offsets_(offsets) {}
void AssignMessageDescriptor(const Descriptor* descriptor) {
for (int i = 0; i < descriptor->nested_type_count(); i++) {
AssignMessageDescriptor(descriptor->nested_type(i));
}
file_level_metadata_->descriptor = descriptor;
file_level_metadata_->reflection =
new Reflection(descriptor,
MigrationToReflectionSchema(default_instance_data_,
offsets_, *schemas_),
DescriptorPool::internal_generated_pool(), factory_);
for (int i = 0; i < descriptor->enum_type_count(); i++) {
AssignEnumDescriptor(descriptor->enum_type(i));
}
schemas_++;
default_instance_data_++;
file_level_metadata_++;
}
void AssignEnumDescriptor(const EnumDescriptor* descriptor) {
*file_level_enum_descriptors_ = descriptor;
file_level_enum_descriptors_++;
}
const Metadata* GetCurrentMetadataPtr() const { return file_level_metadata_; }
private:
MessageFactory* factory_;
Metadata* file_level_metadata_;
const EnumDescriptor** file_level_enum_descriptors_;
const MigrationSchema* schemas_;
const Message* const* default_instance_data_;
const uint32_t* offsets_;
};
namespace {
// The routines that assign descriptors and build reflection register the
// Reflection objects they allocate with MetadataOwner, which owns all of the
// allocated reflection instances and deletes them automatically at shutdown.
struct MetadataOwner {
~MetadataOwner() {
for (auto range : metadata_arrays_) {
for (const Metadata* m = range.first; m < range.second; m++) {
delete m->reflection;
}
}
}
void AddArray(const Metadata* begin, const Metadata* end) {
mu_.Lock();
metadata_arrays_.push_back(std::make_pair(begin, end));
mu_.Unlock();
}
static MetadataOwner* Instance() {
static MetadataOwner* res = OnShutdownDelete(new MetadataOwner);
return res;
}
private:
MetadataOwner() = default; // private because singleton
WrappedMutex mu_;
std::vector<std::pair<const Metadata*, const Metadata*> > metadata_arrays_;
};
void AddDescriptors(const DescriptorTable* table);
void AssignDescriptorsImpl(const DescriptorTable* table, bool eager) {
// Ensure the file descriptor is added to the pool.
{
// This only happens once per proto file. So a global mutex to serialize
// calls to AddDescriptors.
static WrappedMutex mu{GOOGLE_PROTOBUF_LINKER_INITIALIZED};
mu.Lock();
AddDescriptors(table);
mu.Unlock();
}
if (eager) {
    // Normally we do not want to eagerly build the descriptors of our
    // dependencies.  However, this proto may be optimized for code size (i.e.
    // it uses reflection) and have a message extending a custom option of a
    // descriptor, with that message also being optimized for code size.  In
    // that case building the descriptors in this file requires parsing the
    // serialized file descriptor, which requires parsing the message
    // extension, which in turn may require building the descriptor of the
    // message extending one of the options, all while we are already updating
    // the descriptor pool under a lock.  To prevent that deadlock, the
    // compiler statically detects this case and we simply build the
    // descriptors of all our dependencies first.
int num_deps = table->num_deps;
for (int i = 0; i < num_deps; i++) {
// In case of weak fields deps[i] could be null.
if (table->deps[i]) AssignDescriptors(table->deps[i], true);
}
}
// Fill the arrays with pointers to descriptors and reflection classes.
const FileDescriptor* file =
DescriptorPool::internal_generated_pool()->FindFileByName(
table->filename);
GOOGLE_CHECK(file != nullptr);
MessageFactory* factory = MessageFactory::generated_factory();
AssignDescriptorsHelper helper(
factory, table->file_level_metadata, table->file_level_enum_descriptors,
table->schemas, table->default_instances, table->offsets);
for (int i = 0; i < file->message_type_count(); i++) {
helper.AssignMessageDescriptor(file->message_type(i));
}
for (int i = 0; i < file->enum_type_count(); i++) {
helper.AssignEnumDescriptor(file->enum_type(i));
}
if (file->options().cc_generic_services()) {
for (int i = 0; i < file->service_count(); i++) {
table->file_level_service_descriptors[i] = file->service(i);
}
}
MetadataOwner::Instance()->AddArray(table->file_level_metadata,
helper.GetCurrentMetadataPtr());
}
void AddDescriptorsImpl(const DescriptorTable* table) {
// Reflection refers to the default fields so make sure they are initialized.
internal::InitProtobufDefaults();
// Ensure all dependent descriptors are registered to the generated descriptor
// pool and message factory.
int num_deps = table->num_deps;
for (int i = 0; i < num_deps; i++) {
// In case of weak fields deps[i] could be null.
if (table->deps[i]) AddDescriptors(table->deps[i]);
}
// Register the descriptor of this file.
DescriptorPool::InternalAddGeneratedFile(table->descriptor, table->size);
MessageFactory::InternalRegisterGeneratedFile(table);
}
void AddDescriptors(const DescriptorTable* table) {
  // AddDescriptors is not thread safe; callers must ensure calls are properly
  // serialized.  This function is only reached either pre-main from global
  // descriptor initializers, where single-threaded access can be assumed, or
  // from AssignDescriptorsImpl, which serializes calls with a mutex.
if (table->is_initialized) return;
table->is_initialized = true;
AddDescriptorsImpl(table);
}
} // namespace
// Separate function because it needs to be a friend of
// Reflection
void RegisterAllTypesInternal(const Metadata* file_level_metadata, int size) {
for (int i = 0; i < size; i++) {
const Reflection* reflection = file_level_metadata[i].reflection;
MessageFactory::InternalRegisterGeneratedMessage(
file_level_metadata[i].descriptor,
reflection->schema_.default_instance_);
}
}
namespace internal {
Metadata AssignDescriptors(const DescriptorTable* (*table)(),
internal::once_flag* once,
const Metadata& metadata) {
call_once(*once, [=] {
auto* t = table();
AssignDescriptorsImpl(t, t->is_eager);
});
return metadata;
}
void AssignDescriptors(const DescriptorTable* table, bool eager) {
if (!eager) eager = table->is_eager;
call_once(*table->once, AssignDescriptorsImpl, table, eager);
}
AddDescriptorsRunner::AddDescriptorsRunner(const DescriptorTable* table) {
AddDescriptors(table);
}
void RegisterFileLevelMetadata(const DescriptorTable* table) {
AssignDescriptors(table);
RegisterAllTypesInternal(table->file_level_metadata, table->num_messages);
}
void UnknownFieldSetSerializer(const uint8_t* base, uint32_t offset,
uint32_t /*tag*/, uint32_t /*has_offset*/,
io::CodedOutputStream* output) {
const void* ptr = base + offset;
const InternalMetadata* metadata = static_cast<const InternalMetadata*>(ptr);
if (metadata->have_unknown_fields()) {
metadata->unknown_fields<UnknownFieldSet>(UnknownFieldSet::default_instance)
.SerializeToCodedStream(output);
}
}
bool IsDescendant(Message& root, const Message& message) {
const Reflection* reflection = root.GetReflection();
std::vector<const FieldDescriptor*> fields;
reflection->ListFieldsOmitStripped(root, &fields);
for (const auto* field : fields) {
// Skip non-message fields.
if (field->cpp_type() != FieldDescriptor::CPPTYPE_MESSAGE) continue;
    // Singular (non-repeated) message fields.
if (!field->is_repeated()) {
Message* sub_message = reflection->MutableMessage(&root, field);
if (sub_message == &message || IsDescendant(*sub_message, message)) {
return true;
}
continue;
}
// Repeated messages.
if (!IsMapFieldInApi(field)) {
int count = reflection->FieldSize(root, field);
for (int i = 0; i < count; i++) {
Message* sub_message =
reflection->MutableRepeatedMessage(&root, field, i);
if (sub_message == &message || IsDescendant(*sub_message, message)) {
return true;
}
}
continue;
}
    // Map field: when accessed as a repeated field the value messages are
    // *copied*, so pointer matching would not work; the map must be accessed
    // directly.
constexpr int kValIdx = 1;
const FieldDescriptor* val_field = field->message_type()->field(kValIdx);
// Skip map fields whose value type is not message.
if (val_field->cpp_type() != FieldDescriptor::CPPTYPE_MESSAGE) continue;
MapIterator end = reflection->MapEnd(&root, field);
for (auto iter = reflection->MapBegin(&root, field); iter != end; ++iter) {
Message* sub_message = iter.MutableValueRef()->MutableMessageValue();
if (sub_message == &message || IsDescendant(*sub_message, message)) {
return true;
}
}
}
return false;
}
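// Usage sketch for IsDescendant() (the message type and field name are
// hypothetical):
//
//   ParentMsg root;
//   Message* child = root.GetReflection()->MutableMessage(
//       &root, root.GetDescriptor()->FindFieldByName("child"));
//   assert(IsDescendant(root, *child));   // true: child is owned by root
//   ParentMsg other;
//   assert(!IsDescendant(root, other));   // false: unrelated message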
} // namespace internal
} // namespace protobuf