Skip to content

Commit d319eaa

Browse files
committed
extract 3D pc2 and visualize
1 parent ced4804 commit d319eaa

3 files changed

Lines changed: 58 additions & 11 deletions

File tree

GEMstack/onboard/perception/fusion.py

Lines changed: 17 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,7 @@ def __init__(self):
3434
self.tf_listener = tf.TransformListener()
3535

3636
# Publishers
37+
self.pub_pedestrians_pc2 = rospy.Publisher("/point_cloud/pedestrians", PointCloud2, queue_size=10)
3738
if(self.visualization):
3839
self.pub_image = rospy.Publisher("/camera/image_detection", Image, queue_size=1)
3940

@@ -80,14 +81,26 @@ def fusion_callback(self, rgb_image_msg: Image, lidar_pc2_msg: PointCloud2):
8081
]
8182

8283
all_extracted_pts = all_extracted_pts + list(extracted_pts)
83-
84+
8485
# Used for visualization
8586
if(self.visualization):
8687
cv_image = vis_2d_bbox(cv_image, xywh, box)
8788

88-
# Draw projected LiDAR points on the image.
89-
for pt in all_extracted_pts:
90-
cv2.circle(cv_image, pt, 2, (0, 0, 255), -1)
89+
if len(all_extracted_pts) > 0:
90+
# Extract 2D points
91+
extracted_2d_pts = list(np.array(all_extracted_pts)[:, :2].astype(int))
92+
93+
# Draw projected 2D LiDAR points on the image.
94+
for pt in extracted_2d_pts:
95+
cv2.circle(cv_image, pt, 2, (0, 0, 255), -1)
96+
97+
# Extract 3D points
98+
extracted_3d_pts = list(np.array(all_extracted_pts)[:, -3:])
99+
100+
# Create point cloud from extracted 3D points
101+
ros_extracted_pedestrian_pc2 = create_point_cloud(extracted_3d_pts)
102+
self.pub_pedestrians_pc2.publish(ros_extracted_pedestrian_pc2)
103+
91104

92105
# Used for visualization
93106
if(self.visualization):

GEMstack/onboard/perception/fusion_utils.py

Lines changed: 38 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,12 @@
1-
from sensor_msgs.msg import PointCloud2
1+
from sensor_msgs.msg import PointCloud2, PointField
22
import numpy as np
33
import sensor_msgs.point_cloud2 as pc2
44
import open3d as o3d
55
import cv2
66
import json
7+
import rospy
8+
import numpy as np
9+
import struct
710

811

912
def convert_pointcloud2_to_xyz(lidar_pc2_msg: PointCloud2):
@@ -85,7 +88,7 @@ def project_points(points_3d, K):
8588
if pt[2] > 0: # only project points in front of the camera
8689
u = K[0, 0] * (pt[0] / pt[2]) + K[0, 2]
8790
v = K[1, 1] * (pt[1] / pt[2]) + K[1, 2]
88-
proj_points.append((int(u), int(v)))
91+
proj_points.append((int(u), int(v), pt[0], pt[1], pt[2]))
8992
return proj_points
9093

9194

@@ -134,10 +137,16 @@ def visualize_point_cloud(points):
134137
Args:
135138
points (np.ndarray): Nx3 array of point cloud coordinates.
136139
"""
140+
# Create a visualization window
141+
vis = o3d.visualization.Visualizer()
142+
vis.create_window()
143+
137144
pc = o3d.geometry.PointCloud()
138145
pc.points = o3d.utility.Vector3dVector(points)
139-
pc.paint_uniform_color([0.1, 0.7, 0.9]) # Light blue color
140-
o3d.visualization.draw_geometries([pc])
146+
pc.paint_uniform_color([0.1, 0.7, 0.9])
147+
148+
vis.add_geometry(pc)
149+
vis.run()
141150

142151

143152
def visualize_plane(inlier_cloud, outlier_cloud, bounding_box_2d_points):
@@ -168,4 +177,28 @@ def visualize_plane(inlier_cloud, outlier_cloud, bounding_box_2d_points):
168177
bounding_box_lines.paint_uniform_color([0, 1, 0]) # Green for bounding box edges
169178

170179
# Visualize
171-
o3d.visualization.draw_geometries([inlier_cloud, outlier_cloud, bounding_box_pcd, bounding_box_lines])
180+
o3d.visualization.draw_geometries([inlier_cloud, outlier_cloud, bounding_box_pcd, bounding_box_lines])
181+
182+
183+
def create_point_cloud(points, color=(255, 0, 0), frame_id="map"):
    """
    Convert an iterable of (x, y, z) points into a colored PointCloud2 message.

    Args:
        points: iterable of (x, y, z) coordinates (floats or numpy scalars).
        color: (r, g, b) tuple with components in 0-255; applied uniformly
            to every point in the cloud.
        frame_id: TF frame the cloud is expressed in. Defaults to "map",
            matching the previously hard-coded value, so existing callers
            are unaffected.

    Returns:
        sensor_msgs.msg.PointCloud2 with float32 x/y/z/rgb fields, stamped
        with the current ROS time.
    """
    header = rospy.Header()
    header.stamp = rospy.Time.now()
    header.frame_id = frame_id

    fields = [
        PointField(name="x", offset=0, datatype=PointField.FLOAT32, count=1),
        PointField(name="y", offset=4, datatype=PointField.FLOAT32, count=1),
        PointField(name="z", offset=8, datatype=PointField.FLOAT32, count=1),
        PointField(name="rgb", offset=12, datatype=PointField.FLOAT32, count=1),
    ]

    # RViz expects a single float32 "rgb" field whose bit pattern holds the
    # packed 24-bit color, so reinterpret the uint32 0xRRGGBB as a float.
    r, g, b = color
    packed_color = struct.unpack('f', struct.pack('I', (r << 16) | (g << 8) | b))[0]

    point_cloud_data = [(x, y, z, packed_color) for x, y, z in points]

    return pc2.create_cloud(header, fields, point_cloud_data)

GEMstack/onboard/perception/transform.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
import rospy
22
import tf
3+
import numpy as np
34

45
def publish_tf():
56
rospy.init_node('pointcloud_tf_broadcaster')
@@ -8,8 +9,8 @@ def publish_tf():
89

910
while not rospy.is_shutdown():
1011
br.sendTransform(
11-
(0, 0, 1), # (x, y, z) translation
12-
tf.transformations.quaternion_from_euler(0, 0, 0), # (roll, pitch, yaw)
12+
(0, 1.5, 7), # (x, y, z) translation
13+
tf.transformations.quaternion_from_euler(0.5* np.pi, 0, 0), # (roll, pitch, yaw)
1314
rospy.Time.now(),
1415
"os_sensor", # Child frame (sensor)
1516
"map" # Parent frame (world)

0 commit comments

Comments
 (0)