in analytics/duplicates_analyze.py [0:0]
def print_sizes(libname, depth: int = 2) -> None:
    # Breadth-first walk of the shared-library dependency tree, up to `depth`
    # levels, collecting the defined symbols (and their sizes) of every
    # library that can be resolved next to its parent.
    libs = [libname]
    symbols = {os.path.basename(libname): get_defined_symbols(libname, verbose=True)}
    for _ in range(depth):
        for lib in libs:
            dirname = os.path.dirname(lib)
            for dep in get_deps(lib):
                path = os.path.join(dirname, dep)
                if not os.path.exists(path):
                    continue
                if path not in libs:
                    libs.append(path)
                    symbols[dep] = get_defined_symbols(path, verbose=True)

    # For each library, report its total symbol size and, for every resolved
    # dependency, the combined size of symbols defined in both (the duplication).
    # Dependencies are keyed by their dep name, which equals the basename of the
    # path they were resolved to, so the basename lookup below works for both
    # the root library and its dependencies.
    for lib in libs:
        lib_symbols = symbols[os.path.basename(lib)]
        lib_keys = set(lib_symbols.keys())
        rc = f"{lib} symbols size {humansize(sum(lib_symbols.values()))}"
        for dep in get_deps(lib):
            if dep not in symbols:
                continue
            dep_overlap = lib_keys.intersection(set(symbols[dep].keys()))
            overlap_size = sum(lib_symbols[k] for k in dep_overlap)
            if overlap_size > 0:
                rc += f" {dep} overlap is {humansize(overlap_size)}"
        print(rc)
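
The function relies on three helpers defined elsewhere in this module (`get_defined_symbols`, `get_deps`, `humansize`) plus a module-level `import os`. The sketch below shows what such helpers could look like on a Linux/ELF toolchain, parsing `nm` and `readelf` output; the flag choices and output parsing here are assumptions for illustration, not the module's actual implementation.

# Hypothetical sketches of the helpers the snippet assumes; the real module's
# versions may differ (e.g. use llvm-nm, otool on macOS, or cache results).
import subprocess
from typing import Dict, List

def get_defined_symbols(fname: str, verbose: bool = False) -> Dict[str, int]:
    # Map each defined symbol to its size by parsing `nm --defined-only --print-size`.
    if verbose:
        print(f"Processing {fname}...")
    out = subprocess.check_output(
        ["nm", "--defined-only", "--print-size", fname], encoding="utf-8"
    )
    rc: Dict[str, int] = {}
    for line in out.splitlines():
        fields = line.split()
        # Sized entries look like: <addr> <size> <type> <name>
        if len(fields) == 4:
            rc[fields[3]] = int(fields[1], 16)
    return rc

def get_deps(fname: str) -> List[str]:
    # List DT_NEEDED shared-library names from `readelf -d` output.
    out = subprocess.check_output(["readelf", "-d", fname], encoding="utf-8")
    deps = []
    for line in out.splitlines():
        if "(NEEDED)" in line and "[" in line:
            # e.g. " 0x0000000000000001 (NEEDED)  Shared library: [libc.so.6]"
            deps.append(line.split("[", 1)[1].split("]", 1)[0])
    return deps

def humansize(nbytes: float) -> str:
    # Render a byte count with binary units.
    for unit in ("B", "KiB", "MiB", "GiB"):
        if abs(nbytes) < 1024:
            return f"{nbytes:.1f} {unit}"
        nbytes /= 1024
    return f"{nbytes:.1f} TiB"

With helpers along these lines, print_sizes emits one line per resolved library: its total defined-symbol size followed by an "overlap" figure for each dependency that defines the same symbols, which is the duplication this analysis is meant to surface.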