in mlebench/competitions/3d-object-detection-for-autonomous-vehicles/mAP_evaluation.py [0:0]
def __init__(self, **kwargs):
    sample_token = kwargs["sample_token"]
    translation = kwargs["translation"]
    size = kwargs["size"]
    rotation = kwargs["rotation"]
    name = kwargs["name"]
    score = kwargs.get("score", -1)

    # Validate inputs.
    if not isinstance(sample_token, str):
        raise TypeError("Sample_token must be a string!")

    if not len(translation) == 3:
        raise ValueError("Translation must have 3 elements!")

    if np.any(np.isnan(translation)):
        raise ValueError("Translation may not be NaN!")

    if not len(size) == 3:
        raise ValueError("Size must have 3 elements!")

    if np.any(np.isnan(size)):
        raise ValueError("Size may not be NaN!")

    if not len(rotation) == 4:
        raise ValueError("Rotation must have 4 elements!")

    if np.any(np.isnan(rotation)):
        raise ValueError("Rotation may not be NaN!")

    if name is None:
        raise ValueError("Name cannot be empty!")

    # Assign.
    self.sample_token = sample_token
    self.translation = translation
    self.size = size
    self.volume = np.prod(self.size)
    self.score = score

    # All box dimensions must be strictly positive.
    assert np.all([x > 0 for x in size])

    self.rotation = rotation
    self.name = name
    self.quaternion = Quaternion(self.rotation)

    self.width, self.length, self.height = size
    self.center_x, self.center_y, self.center_z = self.translation

    # Vertical extent of the box along the z axis.
    self.min_z = self.center_z - self.height / 2
    self.max_z = self.center_z + self.height / 2

    # Ground-plane footprint; initialised to None, then filled in by
    # get_ground_bbox_coords() (defined elsewhere in this class).
    self.ground_bbox_coords = None
    self.ground_bbox_coords = self.get_ground_bbox_coords()
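
# Usage sketch (illustrative, not part of the original file): assuming this
# __init__ belongs to the Box3D class in this module, and that numpy (as np)
# and pyquaternion.Quaternion are imported at the top of the file, a ground
# truth or predicted box could be constructed from keyword arguments like:
#
#     box = Box3D(
#         sample_token="0a1b2c",                # hypothetical sample token
#         translation=[2680.0, 698.0, -18.0],   # center x, y, z
#         size=[2.0, 5.0, 1.8],                 # width, length, height (all > 0)
#         rotation=[1.0, 0.0, 0.0, 0.0],        # quaternion (w, x, y, z)
#         name="car",
#         score=0.9,                            # omit for ground truth (defaults to -1)
#     )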