static MemStats getFor(ConcurrentQueue* q)

in include/ylt/util/concurrentqueue.h [3712:3806]


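Builds an approximate memory-usage snapshot for a queue by walking its free list, each producer's owned blocks and block indices, and the untouched portion of the initial block pool. (In upstream moodycamel this helper is debug-only, compiled in under MOODYCAMEL_QUEUE_INTERNAL_DEBUG.)
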
    static MemStats getFor(ConcurrentQueue* q) {
      MemStats stats = {0};

      stats.elementsEnqueued = q->size_approx();

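      // Blocks on the global free list are allocated but hold no elements.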
      auto block = q->freeList.head_unsafe();
      while (block != nullptr) {
        ++stats.allocatedBlocks;
        ++stats.freeBlocks;
        block = block->freeListNext.load(std::memory_order_relaxed);
      }

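      // Producers are only ever added, never removed, so iterating from
      // producerListTail visits every producer the queue has created.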
      for (auto ptr = q->producerListTail.load(std::memory_order_acquire);
           ptr != nullptr; ptr = ptr->next_prod()) {
        bool implicit = dynamic_cast<ImplicitProducer*>(ptr) != nullptr;
        stats.implicitProducers += implicit ? 1 : 0;
        stats.explicitProducers += implicit ? 0 : 1;

        if (implicit) {
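          // Implicit producers don't own blocks directly; they reference
          // them through a hash-based block index.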
          auto prod = static_cast<ImplicitProducer*>(ptr);
          stats.queueClassBytes += sizeof(ImplicitProducer);
          auto head = prod->headIndex.load(std::memory_order_relaxed);
          auto tail = prod->tailIndex.load(std::memory_order_relaxed);
          auto hash = prod->blockIndex.load(std::memory_order_relaxed);
          if (hash != nullptr) {
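            // Each occupied slot (valid key, non-null block pointer) in the
            // newest index corresponds to one allocated block.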
            for (size_t i = 0; i != hash->capacity; ++i) {
              if (hash->index[i]->key.load(std::memory_order_relaxed) !=
                      ImplicitProducer::INVALID_BLOCK_BASE &&
                  hash->index[i]->value.load(std::memory_order_relaxed) !=
                      nullptr) {
                ++stats.allocatedBlocks;
                ++stats.ownedBlocksImplicit;
              }
            }
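            // Account for the index storage itself: the entry array of the
            // newest index, plus each header and its pointer array down the
            // prev chain.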
            stats.implicitBlockIndexBytes +=
                hash->capacity *
                sizeof(typename ImplicitProducer::BlockIndexEntry);
            for (; hash != nullptr; hash = hash->prev) {
              stats.implicitBlockIndexBytes +=
                  sizeof(typename ImplicitProducer::BlockIndexHeader) +
                  hash->capacity *
                      sizeof(typename ImplicitProducer::BlockIndexEntry*);
            }
          }
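          // Count the blocks spanned by the live range [head, tail),
          // stepping one block (QUEUE_BLOCK_SIZE indices) at a time.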
          for (; details::circular_less_than<index_t>(head, tail);
               head += QUEUE_BLOCK_SIZE) {
            // auto block = prod->get_block_index_entry_for_index(head);
            ++stats.usedBlocks;
          }
        }
        else {
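          // Explicit producers own their blocks directly, chained in a
          // circular list rooted at tailBlock.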
          auto prod = static_cast<ExplicitProducer*>(ptr);
          stats.queueClassBytes += sizeof(ExplicitProducer);
          auto tailBlock = prod->tailBlock;
          bool wasNonEmpty = false;
          if (tailBlock != nullptr) {
            auto block = tailBlock;
            do {
              ++stats.allocatedBlocks;
              if (!block->ConcurrentQueue::Block::template is_empty<
                      explicit_context>() ||
                  wasNonEmpty) {
                ++stats.usedBlocks;
                wasNonEmpty = wasNonEmpty || block != tailBlock;
              }
              ++stats.ownedBlocksExplicit;
              block = block->next;
            } while (block != tailBlock);
          }
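          // Sum the footprint of every block index version still reachable
          // through the prev chain.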
          auto index = prod->blockIndex.load(std::memory_order_relaxed);
          while (index != nullptr) {
            stats.explicitBlockIndexBytes +=
                sizeof(typename ExplicitProducer::BlockIndexHeader) +
                index->size *
                    sizeof(typename ExplicitProducer::BlockIndexEntry);
            index = static_cast<typename ExplicitProducer::BlockIndexHeader*>(
                index->prev);
          }
        }
      }

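      // Blocks never handed out from the pre-allocated initial pool are
      // allocated but free.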
      auto freeOnInitialPool =
          q->initialBlockPoolIndex.load(std::memory_order_relaxed) >=
                  q->initialBlockPoolSize
              ? 0
              : q->initialBlockPoolSize -
                    q->initialBlockPoolIndex.load(std::memory_order_relaxed);
      stats.allocatedBlocks += freeOnInitialPool;
      stats.freeBlocks += freeOnInitialPool;

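      // Convert block counts to byte totals and include the queue object
      // itself.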
      stats.blockClassBytes = sizeof(Block) * stats.allocatedBlocks;
      stats.queueClassBytes += sizeof(ConcurrentQueue);

      return stats;
    }
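
A minimal usage sketch. Assumptions to verify against the vendored copy: the header keeps upstream moodycamel's `moodycamel` namespace, and the debug-only MemStats machinery is gated behind MOODYCAMEL_QUEUE_INTERNAL_DEBUG with `getMemStats()` as its public accessor.

    #define MOODYCAMEL_QUEUE_INTERNAL_DEBUG  // must be defined before the include
    #include <cstdio>

    #include "ylt/util/concurrentqueue.h"

    int main() {
      moodycamel::ConcurrentQueue<int> q;  // namespace assumed from upstream
      for (int i = 0; i < 1000; ++i) {
        q.enqueue(i);
      }

      // getMemStats() simply forwards to MemStats::getFor(this).
      auto stats = q.getMemStats();
      std::printf("enqueued=%zu allocated=%zu free=%zu used=%zu blockBytes=%zu\n",
                  stats.elementsEnqueued, stats.allocatedBlocks,
                  stats.freeBlocks, stats.usedBlocks, stats.blockClassBytes);
      return 0;
    }

Like size_approx(), the snapshot is not synchronized with concurrent producers and consumers (most loads are relaxed), so treat the numbers as approximate.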