# compute_stats() — extracted from meta_sizing.py

def compute_stats(page_size, max_size, wal_entries):
    """Compute a sizing breakdown of metadata vs. data pages.

    Args:
        page_size: size of one page in bytes (must be > 0).
        max_size: maximum file size in bytes.
        wal_entries: number of WAL entries to account for.

    Returns:
        dict mapping stat names to page counts, byte counts, and
        percentages of the total page budget.

    Raises:
        ValueError: if page_size is not positive or no whole page fits
            in max_size (the percentage math would divide by zero).
    """
    if page_size <= 0:
        raise ValueError("page_size must be positive, got %r" % (page_size,))

    # Integer division: only whole pages are usable, and downstream page
    # counts (freelist, meta, data) must stay integral.
    max_pages = max_size // page_size
    if max_pages <= 0:
        raise ValueError("max_size %r holds no whole page of size %r"
                         % (max_size, page_size))

    stats = {
        "pagesize": page_size,
        "max_size": max_size,
        "max_pages": max_pages,
        "wal_entries": wal_entries,
    }

    # Multiply by 2, as the next transaction might require the same
    # amount of pages again.
    wal_meta = wal_mapping_pages(page_size, wal_entries)
    stats['wal_meta'] = 2 * wal_meta
    stats['wal_meta_bytes'] = 2 * wal_meta * page_size
    stats['wal_meta_bytes_io_per_tx'] = wal_meta * page_size

    # Same doubling rationale as for the WAL mapping pages.
    freelist = freelist_pages(page_size, max_pages)
    stats['freelist_pages'] = 2 * freelist
    stats['freelist_bytes'] = 2 * freelist * page_size
    stats['freelist_bytes_io_per_tx'] = freelist * page_size

    file_header = 2
    # NOTE(review): key deliberately kept as 'file header' (with a space)
    # for backward compatibility, although it is inconsistent with the
    # underscore style of the other keys.
    stats['file header'] = file_header

    # NOTE(review): this sums the *undoubled* wal_meta while
    # stats['wal_meta'] stores the doubled value — confirm whether the
    # minimum should include the 2x headroom as well.
    count = wal_meta + wal_entries + 2 * freelist + file_header
    stats['min_meta_pages'] = count

    # The meta allocator grows in powers of 2, so round up and report
    # the rounding loss as internal fragmentation.
    meta_pages = next_power_of_2(count)
    internal_frag = meta_pages - count
    data_pages = max_pages - meta_pages

    stats['meta_pages'] = meta_pages
    stats['data_pages'] = data_pages
    stats['meta_bytes'] = meta_pages * page_size
    stats['data_bytes'] = data_pages * page_size
    stats['internal_fragmentation'] = internal_frag
    stats['meta_percentage'] = 100.0 * float(meta_pages) / float(max_pages)
    stats['data_percentage'] = 100.0 * float(data_pages) / float(max_pages)
    stats['frag_percentage'] = 100.0 * float(internal_frag) / float(max_pages)

    return stats