inline bool enqueue(U&& element)

in include/ylt/util/concurrentqueue.h [2169:2292]


    template <AllocationMode allocMode, typename U>
    inline bool enqueue(U&& element) {
      index_t currentTailIndex =
          this->tailIndex.load(std::memory_order_relaxed);
      index_t newTailIndex = 1 + currentTailIndex;
      if ((currentTailIndex & static_cast<index_t>(QUEUE_BLOCK_SIZE - 1)) ==
          0) {
        // We reached the end of a block, start a new one
        auto startBlock = this->tailBlock;
        auto originalBlockIndexSlotsUsed = pr_blockIndexSlotsUsed;
        if (this->tailBlock != nullptr &&
            this->tailBlock->next->ConcurrentQueue::Block::template is_empty<
                explicit_context>()) {
          // We can re-use the block ahead of us, it's empty!
          this->tailBlock = this->tailBlock->next;
          this->tailBlock->ConcurrentQueue::Block::template reset_empty<
              explicit_context>();

          // We'll put the block on the block index (guaranteed to be room since
          // we're conceptually removing the last block from it first -- except
          // instead of removing then adding, we can just overwrite). Note that
          // there must be a valid block index here, since even if allocation
          // failed in the ctor, it would have been re-attempted when adding the
          // first block to the queue; since there is such a block, a block
          // index must have been successfully allocated.
        }
        else {
          // Whatever head value we see here is >= the last value we saw here
          // (relatively), and <= its current value. Since we have the most
          // recent tail, the head must be <= it.
          auto head = this->headIndex.load(std::memory_order_relaxed);
          assert(!details::circular_less_than<index_t>(currentTailIndex, head));
          if (!details::circular_less_than<index_t>(
                  head, currentTailIndex + QUEUE_BLOCK_SIZE) ||
              (MAX_SUBQUEUE_SIZE != details::const_numeric_max<size_t>::value &&
               (MAX_SUBQUEUE_SIZE == 0 || MAX_SUBQUEUE_SIZE - QUEUE_BLOCK_SIZE <
                                              currentTailIndex - head))) {
            // We can't enqueue in another block because there's not enough
            // leeway -- the tail could surpass the head by the time the block
            // fills up! (Or we'll exceed the size limit, if the second part of
            // the condition was true.)
            return false;
          }
          // We're going to need a new block; check that the block index has
          // room
          if (pr_blockIndexRaw == nullptr ||
              pr_blockIndexSlotsUsed == pr_blockIndexSize) {
            // Hmm, the circular block index is already full -- we'll need
            // to allocate a new index. Note pr_blockIndexRaw can only be
            // nullptr if the initial allocation failed in the constructor.

            MOODYCAMEL_CONSTEXPR_IF(allocMode == CannotAlloc) { return false; }
            else if (!new_block_index(pr_blockIndexSlotsUsed)) {
              return false;
            }
          }

          // Insert a new block in the circular linked list
          auto newBlock =
              this->parent
                  ->ConcurrentQueue::template requisition_block<allocMode>();
          if (newBlock == nullptr) {
            return false;
          }
#ifdef MCDBGQ_TRACKMEM
          newBlock->owner = this;
#endif
          newBlock->ConcurrentQueue::Block::template reset_empty<
              explicit_context>();
          if (this->tailBlock == nullptr) {
            newBlock->next = newBlock;
          }
          else {
            newBlock->next = this->tailBlock->next;
            this->tailBlock->next = newBlock;
          }
          this->tailBlock = newBlock;
          ++pr_blockIndexSlotsUsed;
        }

        MOODYCAMEL_CONSTEXPR_IF(!MOODYCAMEL_NOEXCEPT_CTOR(
            T, U, new (static_cast<T*>(nullptr)) T(std::forward<U>(element)))) {
          // The constructor may throw. We want the element not to appear in the
          // queue in that case (without corrupting the queue):
          MOODYCAMEL_TRY {
            new ((*this->tailBlock)[currentTailIndex])
                T(std::forward<U>(element));
          }
          MOODYCAMEL_CATCH(...) {
            // Revert change to the current block, but leave the new block
            // available for next time
            pr_blockIndexSlotsUsed = originalBlockIndexSlotsUsed;
            this->tailBlock =
                startBlock == nullptr ? this->tailBlock : startBlock;
            MOODYCAMEL_RETHROW;
          }
        }
        else {
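          // The constructor is noexcept, so no rollback is needed; these
          // casts just silence unused-variable warnings for the saved state.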
          (void)startBlock;
          (void)originalBlockIndexSlotsUsed;
        }

        // Add block to block index
        auto& entry = blockIndex.load(std::memory_order_relaxed)
                          ->entries[pr_blockIndexFront];
        entry.base = currentTailIndex;
        entry.block = this->tailBlock;
        blockIndex.load(std::memory_order_relaxed)
            ->front.store(pr_blockIndexFront, std::memory_order_release);
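        // pr_blockIndexSize is a power of two, so this mask wraps the
        // circular front index.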
        pr_blockIndexFront = (pr_blockIndexFront + 1) & (pr_blockIndexSize - 1);

        MOODYCAMEL_CONSTEXPR_IF(!MOODYCAMEL_NOEXCEPT_CTOR(
            T, U, new (static_cast<T*>(nullptr)) T(std::forward<U>(element)))) {
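          // The element was already constructed inside the try block above,
          // so just publish the new tail index and finish.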
          this->tailIndex.store(newTailIndex, std::memory_order_release);
          return true;
        }
      }

      // Enqueue
      new ((*this->tailBlock)[currentTailIndex]) T(std::forward<U>(element));

      this->tailIndex.store(newTailIndex, std::memory_order_release);
      return true;
    }
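
A minimal usage sketch follows. It assumes the vendored header keeps the upstream moodycamel namespace and token-based public API (ConcurrentQueue, ProducerToken); adjust names if the ylt copy wraps them differently. Enqueueing through a ProducerToken routes to this explicit-producer enqueue() and exercises the block-boundary logic above; the call only returns false when a fresh block is needed and cannot be obtained (allocation failure or CannotAlloc mode), or when MAX_SUBQUEUE_SIZE would be exceeded.

    #include <cassert>
    #include "ylt/util/concurrentqueue.h"

    int main() {
      // Assumption: the vendored copy keeps the upstream `moodycamel` namespace.
      moodycamel::ConcurrentQueue<int> q;
      moodycamel::ProducerToken ptok(q);  // binds an explicit producer to q

      // Each token-based enqueue appends at this producer's tail; a new block
      // is started whenever the tail index crosses a QUEUE_BLOCK_SIZE boundary.
      for (int i = 0; i < 1000; ++i) {
        bool ok = q.enqueue(ptok, i);
        assert(ok);  // only fails on allocation failure or size-limit overflow
      }

      int out;
      int drained = 0;
      while (q.try_dequeue(out)) {
        ++drained;
      }
      assert(drained == 1000);
    }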