Skip to content

Commit dc69933

Browse files
committed
add od3 dialect
1 parent 1355d5f commit dc69933

2 files changed

Lines changed: 14 additions & 4 deletions

File tree

GEMstack/onboard/perception/fusion.py

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,19 @@ def fusion_callback(self, image: Image, lidar_pc2_msg: PointCloud2):
5050

5151
# Project the transformed points into the image plane.
5252
projected_pts = project_points(lidar_in_camera, self.K)
53+
54+
# Convert numpy array to Open3D point cloud
55+
transformed_points = projected_pts
56+
pcd = o3d.geometry.PointCloud()
57+
pcd.points = o3d.utility.Vector3dVector(transformed_points)
58+
59+
# Apply voxel grid downsampling
60+
voxel_size = 0.1 # Adjust for desired resolution
61+
downsampled_pcd = pcd.voxel_down_sample(voxel_size=voxel_size)
62+
63+
# Convert back to numpy array
64+
transformed_points = np.asarray(downsampled_pcd.points)
65+
print(f"after :{transformed_points.shape}")
5366

5467
# Process bboxes
5568
self.last_person_boxes = []

GEMstack/onboard/perception/fusion_utils.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -8,10 +8,7 @@
88

99
def convert_pointcloud2_to_xyz(lidar_pc2_msg: PointCloud2):
1010
""" Convert 1D PointCloud2 data to x, y, z coords """
11-
lidar_points = []
12-
for point in pc2.read_points(lidar_pc2_msg, field_names=("x", "y", "z", "intensity"), skip_nans=True):
13-
lidar_points.append([point[0], point[1], point[2]])
14-
return np.array(lidar_points)
11+
return np.array(list(pc2.read_points(lidar_pc2_msg, skip_nans=True)), dtype=np.float32)[:, :3]
1512

1613

1714
# Credits: The following lines of code (17 to 159, excluding lines 80 to 115) are adapted from the Calibration Team B

0 commit comments

Comments (0)