in ambari-metrics-host-monitoring/src/main/python/core/host_info.py [0:0]
def get_disk_io_counters_per_disk(self):
    """Return a flat dict of per-disk I/O counters, keyed by normalized disk name."""
    disk_io_counters = psutil.disk_io_counters(perdisk=True)
    per_disk_io_counters = {}
    disk_counter = 0
    if disk_io_counters:
        # Sort disks lexically, best chance for similar disk topologies getting
        # aggregated correctly
        for disk, sdiskio in sorted(disk_io_counters.items(), key=operator.itemgetter(0)):
            disk_counter += 1
            logger.debug('Adding disk counters for %s', disk)
            prefix = 'sdisk_{0}_'.format(disk)
            # Core counters; default to 0 where a platform omits an attribute
            counter_dict = {
                prefix + 'read_count': getattr(sdiskio, 'read_count', 0),
                prefix + 'write_count': getattr(sdiskio, 'write_count', 0),
                prefix + 'read_bytes': getattr(sdiskio, 'read_bytes', 0),
                prefix + 'write_bytes': getattr(sdiskio, 'write_bytes', 0),
                prefix + 'read_time': getattr(sdiskio, 'read_time', 0),
                prefix + 'write_time': getattr(sdiskio, 'write_time', 0)
            }
            # Optional platform-specific attributes, included only when present
            for attr in ('busy_time', 'read_merged_count', 'write_merged_count'):
                if hasattr(sdiskio, attr):
                    counter_dict[prefix + attr] = getattr(sdiskio, attr)
            per_disk_io_counters.update(counter_dict)
    # Send total disk count as a metric
    per_disk_io_counters['disk_num'] = disk_counter
    return per_disk_io_counters
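
For reference, here is a minimal standalone sketch of the same flattening pattern, assuming only that psutil is installed. The function name and the main block are illustrative and not part of the module; the class context, logger wiring, and optional attributes above are omitted for brevity.

```python
# Standalone sketch of the per-disk flattening pattern; names here are
# hypothetical, not from ambari-metrics-host-monitoring.
import operator
import psutil

def flatten_disk_io_counters():
    per_disk = {}
    # psutil returns {disk_name: sdiskio namedtuple} when perdisk=True
    counters = psutil.disk_io_counters(perdisk=True)
    for disk, sdiskio in sorted(counters.items(), key=operator.itemgetter(0)):
        prefix = 'sdisk_{0}_'.format(disk)
        # getattr with a default plays the same role as the guards above
        for field in ('read_count', 'write_count', 'read_bytes', 'write_bytes'):
            per_disk[prefix + field] = getattr(sdiskio, field, 0)
    per_disk['disk_num'] = len(counters)
    return per_disk

if __name__ == '__main__':
    for key, value in sorted(flatten_disk_io_counters().items()):
        print('{0} = {1}'.format(key, value))
```

Flattening into prefixed scalar keys (rather than nesting per disk) keeps every counter addressable as an individual metric name, which is what a metrics sink typically expects.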