in downstream/votenet_det_new/lib/datasets/sunrgbd/sunrgbd_data.py [0:0]
def extract_sunrgbd_data(idx_filename, split, output_folder, num_point=20000,
                         type_whitelist=DEFAULT_TYPE_WHITELIST,
                         save_votes=False, use_v1=False, skip_empty_scene=True):
    """Extract scene point clouds and
    bounding boxes (centroids, box sizes, heading angles, semantic classes).
    Dumped point clouds and boxes are in upright depth coord.

    Args:
        idx_filename: a TXT file where each line is an int number (index)
        split: training or testing
        output_folder: directory the .npz/.npy files are written to (created if missing)
        num_point: number of points to subsample from each scene's depth cloud
        type_whitelist: class names to keep; other objects are ignored
        save_votes: whether to compute and save ground-truth votes.
        use_v1: use the SUN RGB-D V1 data
        skip_empty_scene: if True, skip scenes that contain no object in the whitelist

    Dumps:
        <id>_pc.npz of (N,6) where N is for number of subsampled points and 6 is
            for XYZ and RGB (in 0~1) in upright depth coord
        <id>_bbox.npy of (K,8) where K is the number of objects, 8 is for
            centroids (cx,cy,cz), dimension (l,w,h), heading_angle and semantic_class
        <id>_votes.npz of (N,10) with 0/1 indicating whether the point belongs to an object,
            then three sets of GT votes for up to three objects. If the point is only in one
            object's OBB, then the three GT votes are the same.
    """
    dataset = sunrgbd_object('./sunrgbd_trainval', split, use_v1=use_v1)
    # Read scene indices; 'with' closes the file instead of leaking the handle.
    with open(idx_filename) as f:
        data_idx_list = [int(line.rstrip()) for line in f]
    # exist_ok avoids the check-then-create race of os.path.exists + os.mkdir.
    os.makedirs(output_folder, exist_ok=True)

    for data_idx in data_idx_list:
        print('------------- ', data_idx)
        objects = dataset.get_label_objects(data_idx)

        # Skip scenes with 0 whitelisted objects
        if skip_empty_scene and (len(objects) == 0 or
                len([obj for obj in objects if obj.classname in type_whitelist]) == 0):
            continue

        # Collect one (cx,cy,cz,l,w,h,heading,class) row per whitelisted object.
        object_list = []
        for obj in objects:
            if obj.classname not in type_whitelist:
                continue
            obb = np.zeros(8)
            obb[0:3] = obj.centroid
            # Note that compared with that in data_viz, we do not multiply l,w,h by 2,
            # nor do we flip the heading angle.
            obb[3:6] = np.array([obj.l, obj.w, obj.h])
            obb[6] = obj.heading_angle
            obb[7] = sunrgbd_utils.type2class[obj.classname]
            object_list.append(obb)
        if len(object_list) == 0:
            obbs = np.zeros((0, 8))
        else:
            obbs = np.vstack(object_list)  # (K,8)

        pc_upright_depth = dataset.get_depth(data_idx)
        pc_upright_depth_subsampled = pc_util.random_sampling(pc_upright_depth, num_point)

        np.savez_compressed(os.path.join(output_folder, '%06d_pc.npz' % (data_idx)),
                            pc=pc_upright_depth_subsampled)
        np.save(os.path.join(output_folder, '%06d_bbox.npy' % (data_idx)), obbs)

        if save_votes:
            N = pc_upright_depth_subsampled.shape[0]
            point_votes = np.zeros((N, 10))  # 3 votes and 1 vote mask
            point_vote_idx = np.zeros((N)).astype(np.int32)  # in the range of [0,2]
            indices = np.arange(N)
            for obj in objects:
                if obj.classname not in type_whitelist:
                    continue
                try:
                    # Find all points in this object's OBB
                    box3d_pts_3d = sunrgbd_utils.my_compute_box_3d(
                        obj.centroid, np.array([obj.l, obj.w, obj.h]), obj.heading_angle)
                    pc_in_box3d, inds = sunrgbd_utils.extract_pc_in_box3d(
                        pc_upright_depth_subsampled, box3d_pts_3d)
                    # Assign first dimension to indicate it is in an object box
                    point_votes[inds, 0] = 1
                    # Add the votes (all 0 if the point is not in any object's OBB)
                    votes = np.expand_dims(obj.centroid, 0) - pc_in_box3d[:, 0:3]
                    sparse_inds = indices[inds]  # turn dense True/False inds to sparse number-wise inds
                    for i in range(len(sparse_inds)):
                        j = sparse_inds[i]
                        point_votes[j, int(point_vote_idx[j] * 3 + 1):int((point_vote_idx[j] + 1) * 3 + 1)] = votes[i, :]
                        # Populate votes with the first vote
                        if point_vote_idx[j] == 0:
                            point_votes[j, 4:7] = votes[i, :]
                            point_votes[j, 7:10] = votes[i, :]
                    point_vote_idx[inds] = np.minimum(2, point_vote_idx[inds] + 1)
                except Exception:
                    # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
                    # still propagate; a failing box is logged and skipped, keeping
                    # the original best-effort behavior for degenerate annotations.
                    print('ERROR ----', data_idx, obj.classname)
            np.savez_compressed(os.path.join(output_folder, '%06d_votes.npz' % (data_idx)),
                                point_votes=point_votes)