inline void RepeatedPtrFieldBase::Swap(RepeatedPtrFieldBase* other)

in cdk/extra/protobuf/protobuf-3.19.6/src/google/protobuf/repeated_ptr_field.h [817:1226]


template <typename TypeHandler>
inline void RepeatedPtrFieldBase::Swap(RepeatedPtrFieldBase* other) {
#ifdef PROTOBUF_FORCE_COPY_IN_SWAP
  if (GetArena() != nullptr && GetArena() == other->GetArena()) {
#else   // PROTOBUF_FORCE_COPY_IN_SWAP
  if (GetArena() == other->GetArena()) {
#endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
    InternalSwap(other);
  } else {
    SwapFallback<TypeHandler>(other);
  }
}
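
// Illustrative usage sketch (not part of the upstream header), assuming a
// heap-backed RepeatedPtrField<std::string>: when both fields agree on the
// arena (here, both nullptr), Swap() takes the InternalSwap() branch, a
// constant-time exchange of the underlying Rep pointers; only a cross-arena
// swap pays for element copies via SwapFallback().
//
//   RepeatedPtrField<std::string> a, b;  // GetArena() == nullptr on both
//   *a.Add() = "x";
//   *b.Add() = "y";
//   a.Swap(&b);                          // pointer swap, no element copies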

template <typename TypeHandler>
void RepeatedPtrFieldBase::SwapFallback(RepeatedPtrFieldBase* other) {
#ifdef PROTOBUF_FORCE_COPY_IN_SWAP
  GOOGLE_DCHECK(GetArena() == nullptr || other->GetArena() != GetArena());
#else   // PROTOBUF_FORCE_COPY_IN_SWAP
  GOOGLE_DCHECK(other->GetArena() != GetArena());
#endif  // !PROTOBUF_FORCE_COPY_IN_SWAP

  // Copy semantics in this case. We try to improve efficiency by placing the
  // temporary on |other|'s arena so that messages are copied twice rather than
  // three times.
  RepeatedPtrFieldBase temp(other->GetArena());
  temp.MergeFrom<TypeHandler>(*this);
  this->Clear<TypeHandler>();
  this->MergeFrom<TypeHandler>(*other);
  other->InternalSwap(&temp);
  temp.Destroy<TypeHandler>();  // Frees rep_ if `other` had no arena.
}
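
// Copy-count sketch (added for clarity, not upstream code): with |temp| placed
// on |other|'s arena there are only two copy passes in total --
//   temp.MergeFrom(*this)    copies this field's elements onto other's arena,
//   this->MergeFrom(*other)  copies other's elements onto this side,
// and other->InternalSwap(&temp) then installs the first set by pointer swap.
// A |temp| built on this side instead would force a third pass to move its
// contents onto other's arena before handing them over.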

inline bool RepeatedPtrFieldBase::empty() const { return current_size_ == 0; }

inline int RepeatedPtrFieldBase::size() const { return current_size_; }

template <typename TypeHandler>
inline const typename TypeHandler::Type& RepeatedPtrFieldBase::Get(
    int index) const {
  GOOGLE_DCHECK_GE(index, 0);
  GOOGLE_DCHECK_LT(index, current_size_);
  return *cast<TypeHandler>(rep_->elements[index]);
}

template <typename TypeHandler>
inline const typename TypeHandler::Type& RepeatedPtrFieldBase::at(
    int index) const {
  GOOGLE_CHECK_GE(index, 0);
  GOOGLE_CHECK_LT(index, current_size_);
  return *cast<TypeHandler>(rep_->elements[index]);
}

template <typename TypeHandler>
inline typename TypeHandler::Type& RepeatedPtrFieldBase::at(int index) {
  GOOGLE_CHECK_GE(index, 0);
  GOOGLE_CHECK_LT(index, current_size_);
  return *cast<TypeHandler>(rep_->elements[index]);
}

template <typename TypeHandler>
inline typename TypeHandler::Type* RepeatedPtrFieldBase::Mutable(int index) {
  GOOGLE_DCHECK_GE(index, 0);
  GOOGLE_DCHECK_LT(index, current_size_);
  return cast<TypeHandler>(rep_->elements[index]);
}
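
// Illustrative usage sketch (not part of the upstream header), assuming a
// RepeatedPtrField<std::string>: Get() and Mutable() bounds-check with
// GOOGLE_DCHECK (debug builds only), while at() uses GOOGLE_CHECK and aborts
// on an out-of-range index in every build.
//
//   RepeatedPtrField<std::string> f;
//   *f.Add() = "hello";
//   const std::string& s = f.Get(0);  // read-only access
//   f.Mutable(0)->append(" world");   // in-place mutation
//   f.at(0);                          // same element, checked in all builds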

template <typename TypeHandler>
inline void RepeatedPtrFieldBase::Delete(int index) {
  GOOGLE_DCHECK_GE(index, 0);
  GOOGLE_DCHECK_LT(index, current_size_);
  TypeHandler::Delete(cast<TypeHandler>(rep_->elements[index]), arena_);
}

template <typename TypeHandler>
inline typename TypeHandler::Type* RepeatedPtrFieldBase::Add(
    typename TypeHandler::Type* prototype) {
  if (rep_ != nullptr && current_size_ < rep_->allocated_size) {
    return cast<TypeHandler>(rep_->elements[current_size_++]);
  }
  typename TypeHandler::Type* result =
      TypeHandler::NewFromPrototype(prototype, arena_);
  return reinterpret_cast<typename TypeHandler::Type*>(
      AddOutOfLineHelper(result));
}

template <typename TypeHandler,
          typename std::enable_if<TypeHandler::Movable::value>::type*>
inline void RepeatedPtrFieldBase::Add(typename TypeHandler::Type&& value) {
  if (rep_ != nullptr && current_size_ < rep_->allocated_size) {
    *cast<TypeHandler>(rep_->elements[current_size_++]) = std::move(value);
    return;
  }
  if (!rep_ || rep_->allocated_size == total_size_) {
    Reserve(total_size_ + 1);
  }
  ++rep_->allocated_size;
  typename TypeHandler::Type* result =
      TypeHandler::New(arena_, std::move(value));
  rep_->elements[current_size_++] = result;
}
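
// Illustrative usage sketch (not part of the upstream header): Add() first
// reuses an already-allocated (cleared) element sitting past current_size_;
// only when none is available does it allocate a new element, growing the
// pointer array via Reserve() as needed.
//
//   RepeatedPtrField<std::string> f;
//   f.Add(std::string("moved in"));   // rvalue overload shown above
//   *f.Add() = "assigned";            // default-construct, then assign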

template <typename TypeHandler>
inline void RepeatedPtrFieldBase::RemoveLast() {
  GOOGLE_DCHECK_GT(current_size_, 0);
  TypeHandler::Clear(cast<TypeHandler>(rep_->elements[--current_size_]));
}

template <typename TypeHandler>
void RepeatedPtrFieldBase::Clear() {
  const int n = current_size_;
  GOOGLE_DCHECK_GE(n, 0);
  if (n > 0) {
    void* const* elements = rep_->elements;
    int i = 0;
    do {
      TypeHandler::Clear(cast<TypeHandler>(elements[i++]));
    } while (i < n);
    current_size_ = 0;
  }
}
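
// Illustrative usage sketch (not part of the upstream header), heap-backed
// field: Clear() does not free elements; it calls TypeHandler::Clear() on each
// and parks them past current_size_ so later Add() calls can reuse them.
//
//   RepeatedPtrField<std::string> f;
//   *f.Add() = "abc";
//   f.Clear();                     // string emptied, element retained
//   // f.size() == 0, f.ClearedCount() == 1
//   *f.Add() = "reused";           // no new element allocation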

// To avoid unnecessary code duplication and reduce binary size, we use a
// layered approach to implementing MergeFrom(). The toplevel method is
// templated, so we get a small thunk per concrete message type in the binary.
// This calls a shared implementation with most of the logic, passing a function
// pointer to another type-specific piece of code that calls the object-allocate
// and merge handlers.
template <typename TypeHandler>
inline void RepeatedPtrFieldBase::MergeFrom(const RepeatedPtrFieldBase& other) {
  GOOGLE_DCHECK_NE(&other, this);
  if (other.current_size_ == 0) return;
  MergeFromInternal(other,
                    &RepeatedPtrFieldBase::MergeFromInnerLoop<TypeHandler>);
}

inline void RepeatedPtrFieldBase::MergeFromInternal(
    const RepeatedPtrFieldBase& other,
    void (RepeatedPtrFieldBase::*inner_loop)(void**, void**, int, int)) {
  // Note: wrapper has already guaranteed that other.rep_ != nullptr here.
  int other_size = other.current_size_;
  void** other_elements = other.rep_->elements;
  void** new_elements = InternalExtend(other_size);
  int allocated_elems = rep_->allocated_size - current_size_;
  (this->*inner_loop)(new_elements, other_elements, other_size,
                      allocated_elems);
  current_size_ += other_size;
  if (rep_->allocated_size < current_size_) {
    rep_->allocated_size = current_size_;
  }
}

// Merges other_elems to our_elems.
template <typename TypeHandler>
void RepeatedPtrFieldBase::MergeFromInnerLoop(void** our_elems,
                                              void** other_elems, int length,
                                              int already_allocated) {
  if (already_allocated < length) {
    Arena* arena = GetArena();
    typename TypeHandler::Type* elem_prototype =
        reinterpret_cast<typename TypeHandler::Type*>(other_elems[0]);
    for (int i = already_allocated; i < length; i++) {
      // Allocate a new empty element that we'll merge into below
      typename TypeHandler::Type* new_elem =
          TypeHandler::NewFromPrototype(elem_prototype, arena);
      our_elems[i] = new_elem;
    }
  }
  // Main loop that does the actual merging
  for (int i = 0; i < length; i++) {
    // Already allocated: use existing element.
    typename TypeHandler::Type* other_elem =
        reinterpret_cast<typename TypeHandler::Type*>(other_elems[i]);
    typename TypeHandler::Type* new_elem =
        reinterpret_cast<typename TypeHandler::Type*>(our_elems[i]);
    TypeHandler::Merge(*other_elem, new_elem);
  }
}

template <typename TypeHandler>
inline void RepeatedPtrFieldBase::CopyFrom(const RepeatedPtrFieldBase& other) {
  if (&other == this) return;
  RepeatedPtrFieldBase::Clear<TypeHandler>();
  RepeatedPtrFieldBase::MergeFrom<TypeHandler>(other);
}
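
// Illustrative usage sketch (not part of the upstream header): MergeFrom()
// appends deep copies of the source elements, reusing any cleared elements at
// the end of the destination before allocating new ones; CopyFrom() is simply
// Clear() followed by MergeFrom().
//
//   RepeatedPtrField<std::string> src, dst;
//   *src.Add() = "a";
//   *dst.Add() = "b";
//   dst.MergeFrom(src);   // dst: "b", "a"
//   dst.CopyFrom(src);    // dst: "a"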

inline int RepeatedPtrFieldBase::Capacity() const { return total_size_; }

inline void* const* RepeatedPtrFieldBase::raw_data() const {
  return rep_ ? rep_->elements : nullptr;
}

inline void** RepeatedPtrFieldBase::raw_mutable_data() const {
  return rep_ ? const_cast<void**>(rep_->elements) : nullptr;
}

template <typename TypeHandler>
inline typename TypeHandler::Type** RepeatedPtrFieldBase::mutable_data() {
  // TODO(kenton):  Breaks C++ aliasing rules.  We should probably remove this
  //   method entirely.
  return reinterpret_cast<typename TypeHandler::Type**>(raw_mutable_data());
}

template <typename TypeHandler>
inline const typename TypeHandler::Type* const* RepeatedPtrFieldBase::data()
    const {
  // TODO(kenton):  Breaks C++ aliasing rules.  We should probably remove this
  //   method entirely.
  return reinterpret_cast<const typename TypeHandler::Type* const*>(raw_data());
}

inline void RepeatedPtrFieldBase::SwapElements(int index1, int index2) {
  using std::swap;  // enable ADL with fallback
  swap(rep_->elements[index1], rep_->elements[index2]);
}

template <typename TypeHandler>
inline size_t RepeatedPtrFieldBase::SpaceUsedExcludingSelfLong() const {
  size_t allocated_bytes = static_cast<size_t>(total_size_) * sizeof(void*);
  if (rep_ != nullptr) {
    for (int i = 0; i < rep_->allocated_size; ++i) {
      allocated_bytes +=
          TypeHandler::SpaceUsedLong(*cast<TypeHandler>(rep_->elements[i]));
    }
    allocated_bytes += kRepHeaderSize;
  }
  return allocated_bytes;
}
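
// Accounting sketch (added for clarity, not upstream code): the total is the
// pointer array plus the Rep header plus the deep size of every *allocated*
// element, including cleared ones beyond current_size_. For a heap-backed
// field with total_size_ == 4 and two allocated elements:
//
//   bytes = 4 * sizeof(void*) + kRepHeaderSize
//           + TypeHandler::SpaceUsedLong(elem0)
//           + TypeHandler::SpaceUsedLong(elem1)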

template <typename TypeHandler>
inline typename TypeHandler::Type* RepeatedPtrFieldBase::AddFromCleared() {
  if (rep_ != nullptr && current_size_ < rep_->allocated_size) {
    return cast<TypeHandler>(rep_->elements[current_size_++]);
  } else {
    return nullptr;
  }
}

// AddAllocated version that implements arena-safe copying behavior.
template <typename TypeHandler>
void RepeatedPtrFieldBase::AddAllocatedInternal(
    typename TypeHandler::Type* value, std::true_type) {
  Arena* element_arena =
      reinterpret_cast<Arena*>(TypeHandler::GetOwningArena(value));
  Arena* arena = GetArena();
  if (arena == element_arena && rep_ && rep_->allocated_size < total_size_) {
    // Fast path: underlying arena representation (tagged pointer) is equal to
    // our arena pointer, and we can add to array without resizing it (at least
    // one slot that is not allocated).
    void** elems = rep_->elements;
    if (current_size_ < rep_->allocated_size) {
      // Make space at [current] by moving first allocated element to end of
      // allocated list.
      elems[rep_->allocated_size] = elems[current_size_];
    }
    elems[current_size_] = value;
    current_size_ = current_size_ + 1;
    rep_->allocated_size = rep_->allocated_size + 1;
  } else {
    AddAllocatedSlowWithCopy<TypeHandler>(value, element_arena, arena);
  }
}

// Slowpath handles all cases, copying if necessary.
template <typename TypeHandler>
void RepeatedPtrFieldBase::AddAllocatedSlowWithCopy(
    // Pass value_arena and my_arena to avoid duplicate virtual call (value) or
    // load (mine).
    typename TypeHandler::Type* value, Arena* value_arena, Arena* my_arena) {
  // Ensure that either the value is in the same arena, or if not, we do the
  // appropriate thing: Own() it (if it's on heap and we're in an arena) or copy
  // it to our arena/heap (otherwise).
  if (my_arena != nullptr && value_arena == nullptr) {
    my_arena->Own(value);
  } else if (my_arena != value_arena) {
    typename TypeHandler::Type* new_value =
        TypeHandler::NewFromPrototype(value, my_arena);
    TypeHandler::Merge(*value, new_value);
    TypeHandler::Delete(value, value_arena);
    value = new_value;
  }

  UnsafeArenaAddAllocated<TypeHandler>(value);
}

// AddAllocated version that does not implement arena-safe copying behavior.
template <typename TypeHandler>
void RepeatedPtrFieldBase::AddAllocatedInternal(
    typename TypeHandler::Type* value, std::false_type) {
  if (rep_ && rep_->allocated_size < total_size_) {
    // Fast path: we can add to the array without resizing it (at least one
    // slot is not yet allocated). Unlike the arena-safe overload above, no
    // arena comparison is needed here.
    void** elems = rep_->elements;
    if (current_size_ < rep_->allocated_size) {
      // Make space at [current] by moving first allocated element to end of
      // allocated list.
      elems[rep_->allocated_size] = elems[current_size_];
    }
    elems[current_size_] = value;
    current_size_ = current_size_ + 1;
    ++rep_->allocated_size;
  } else {
    UnsafeArenaAddAllocated<TypeHandler>(value);
  }
}
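
// Illustrative usage sketch (not part of the upstream header): this is the
// machinery behind the public AddAllocated(). When the field and the passed
// element agree on ownership (same arena, or both heap) the pointer is adopted
// directly; otherwise the slow path either Own()s a heap element into the
// field's arena or deep-copies it across arenas.
//
//   RepeatedPtrField<std::string> f;             // heap-backed
//   f.AddAllocated(new std::string("adopted"));  // ownership transfer, no copy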

template <typename TypeHandler>
void RepeatedPtrFieldBase::UnsafeArenaAddAllocated(
    typename TypeHandler::Type* value) {
  // Make room for the new pointer.
  if (!rep_ || current_size_ == total_size_) {
    // The array is completely full with no cleared objects, so grow it.
    Reserve(total_size_ + 1);
    ++rep_->allocated_size;
  } else if (rep_->allocated_size == total_size_) {
    // There is no more space in the pointer array because it contains some
    // cleared objects awaiting reuse.  We don't want to grow the array in this
    // case because otherwise a loop calling AddAllocated() followed by Clear()
    // would leak memory.
    TypeHandler::Delete(cast<TypeHandler>(rep_->elements[current_size_]),
                        arena_);
  } else if (current_size_ < rep_->allocated_size) {
    // We have some cleared objects.  We don't care about their order, so we
    // can just move the first one to the end to make space.
    rep_->elements[rep_->allocated_size] = rep_->elements[current_size_];
    ++rep_->allocated_size;
  } else {
    // There are no cleared objects.
    ++rep_->allocated_size;
  }

  rep_->elements[current_size_++] = value;
}

// ReleaseLast() for types that implement merge/copy behavior.
template <typename TypeHandler>
inline typename TypeHandler::Type* RepeatedPtrFieldBase::ReleaseLastInternal(
    std::true_type) {
  // First, release an element.
  typename TypeHandler::Type* result = UnsafeArenaReleaseLast<TypeHandler>();
  // Now perform a copy if we're on an arena.
  Arena* arena = GetArena();

  typename TypeHandler::Type* new_result;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  new_result = copy<TypeHandler>(result);
  if (arena == nullptr) delete result;
#else   // PROTOBUF_FORCE_COPY_IN_RELEASE
  new_result = (arena == nullptr) ? result : copy<TypeHandler>(result);
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return new_result;
}

// ReleaseLast() for types that *do not* implement merge/copy behavior -- this
// is the same as UnsafeArenaReleaseLast(). Note that we GOOGLE_DCHECK-fail if we're on
// an arena, since the user really should implement the copy operation in this
// case.
template <typename TypeHandler>
inline typename TypeHandler::Type* RepeatedPtrFieldBase::ReleaseLastInternal(
    std::false_type) {
  GOOGLE_DCHECK(GetArena() == nullptr)
      << "ReleaseLast() called on a RepeatedPtrField that is on an arena, "
      << "with a type that does not implement MergeFrom. This is unsafe; "
      << "please implement MergeFrom for your type.";
  return UnsafeArenaReleaseLast<TypeHandler>();
}

template <typename TypeHandler>
inline typename TypeHandler::Type*
RepeatedPtrFieldBase::UnsafeArenaReleaseLast() {
  GOOGLE_DCHECK_GT(current_size_, 0);
  typename TypeHandler::Type* result =
      cast<TypeHandler>(rep_->elements[--current_size_]);
  --rep_->allocated_size;
  if (current_size_ < rep_->allocated_size) {
    // There are cleared elements on the end; replace the removed element
    // with the last allocated element.
    rep_->elements[current_size_] = rep_->elements[rep_->allocated_size];
  }
  return result;
}
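
// Illustrative usage sketch (not part of the upstream header), heap-backed
// field: ReleaseLast() hands the caller the actual last element together with
// ownership; on an arena (for types with merge behavior) the caller receives a
// fresh heap copy instead, since arena-owned objects must not be deleted
// individually.
//
//   RepeatedPtrField<std::string> f;   // GetArena() == nullptr
//   *f.Add() = "take me";
//   std::string* s = f.ReleaseLast();  // caller now owns *s
//   delete s;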

inline int RepeatedPtrFieldBase::ClearedCount() const {
  return rep_ ? (rep_->allocated_size - current_size_) : 0;
}

template <typename TypeHandler>
inline void RepeatedPtrFieldBase::AddCleared(
    typename TypeHandler::Type* value) {
  GOOGLE_DCHECK(GetArena() == nullptr)
      << "AddCleared() can only be used on a RepeatedPtrField not on an arena.";
  GOOGLE_DCHECK(TypeHandler::GetOwningArena(value) == nullptr)
      << "AddCleared() can only accept values not on an arena.";
  if (!rep_ || rep_->allocated_size == total_size_) {
    Reserve(total_size_ + 1);
  }
  rep_->elements[rep_->allocated_size++] = value;
}

template <typename TypeHandler>
inline typename TypeHandler::Type* RepeatedPtrFieldBase::ReleaseCleared() {
  GOOGLE_DCHECK(GetArena() == nullptr)
      << "ReleaseCleared() can only be used on a RepeatedPtrField not on "
      << "an arena.";
  GOOGLE_DCHECK(rep_ != nullptr);
  GOOGLE_DCHECK_GT(rep_->allocated_size, current_size_);
  return cast<TypeHandler>(rep_->elements[--rep_->allocated_size]);
}
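
// Illustrative usage sketch (not part of the upstream header), heap-backed
// field: the cleared-element pool lets callers park heap-allocated elements on
// a field for later reuse and take them back out; both directions are
// heap-only, hence the GOOGLE_DCHECKs on GetArena().
//
//   RepeatedPtrField<std::string> f;   // GetArena() == nullptr
//   f.AddCleared(new std::string);     // parked; size() stays 0
//   std::string* s = f.ReleaseCleared();
//   delete s;                          // caller owns it again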

}  // namespace internal