in curator/indexlist.py [0:0]
def get_index_stats(self):
"""
Populate ``index_info`` with index ``size_in_bytes``,
``primary_size_in_bytes`` and doc count information for each index.
"""
self.loggit.debug('Getting index stats -- BEGIN')
self.empty_list_check()
fields = ['size_in_bytes', 'docs', 'primary_size_in_bytes']
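    # ``fields`` lists the ``index_info`` keys that ``needs_data`` is assumed
    # to check below when deciding whether an index still needs a stats call.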
    # This ensures that the index state is populated
    self.get_index_state()
    # Don't build working_list until after get_index_state() has run, as it
    # can and will remove missing indices
    working_list = self.working_list()
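    # Iterate over a fresh copy (working_list() presumably returns a new list
    # on each call) so that removing closed indices from working_list does not
    # skip entries mid-iteration.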
    for index in self.working_list():
        if self.index_info[index]['state'] == 'close':
            working_list.remove(index)
    if working_list:
        index_lists = chunk_index_list(working_list)
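        # chunk_index_list is assumed to split a very long list of index names
        # into smaller chunks so each stats request stays a manageable size.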
        for lst in index_lists:
            # Only poll the cluster for data when we must
            needful = self.needs_data(lst, fields)
            if not needful:
                # All indices in this chunk already have data, so we can skip
                # data collection
                continue
            # Now we only need to run on the 'needful'
            for sii, wli, index in self.data_getter(
                needful, self._get_indices_stats
            ):
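                # ``sii`` is assumed to be the ``index_info`` entry being
                # updated, and ``wli`` the per-index stats block whose shape is
                # implied by the keys read below, roughly:
                #   {'total': {'store': {...}, 'docs': {...}},
                #    'primaries': {'store': {...}}}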
                try:
                    size = wli['total']['store']['size_in_bytes']
                    docs = wli['total']['docs']['count']
                    primary_size = wli['primaries']['store']['size_in_bytes']
                    msg = (
                        f'Index: {index} Size: {byte_size(size)} Docs: {docs} '
                        f'PrimarySize: {byte_size(primary_size)}'
                    )
                    self.loggit.debug(msg)
                    sii['size_in_bytes'] = size
                    sii['docs'] = docs
                    sii['primary_size_in_bytes'] = primary_size
                except KeyError:
                    msg = f'Index stats missing for "{index}" -- might be closed'
                    self.loggit.warning(msg)
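

# A minimal usage sketch, separate from the method above (not part of the
# original file). It assumes ``IndexList`` is constructed from an Elasticsearch
# client and exposes ``index_info`` as a dict keyed by index name; the import
# paths and client setup are illustrative and may vary by Curator and
# elasticsearch-py version.
from elasticsearch import Elasticsearch

from curator import IndexList

client = Elasticsearch('http://localhost:9200')
ilo = IndexList(client)
ilo.get_index_stats()
for name, info in ilo.index_info.items():
    # Closed indices are skipped by get_index_stats, so the stats keys may be
    # absent for them.
    print(
        name,
        info.get('size_in_bytes'),
        info.get('docs'),
        info.get('primary_size_in_bytes'),
    )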