Skip to content

Commit 27f612e

Browse files
committed
Cleaned up bounding box creation process for YOLO node and moved to another file. Added rest of publishing code to combined_detection.py
1 parent da235ed commit 27f612e

3 files changed

Lines changed: 54 additions & 44 deletions

File tree

GEMstack/onboard/perception/combined_detection.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -195,6 +195,8 @@ def _fuse_bounding_boxes(self,
195195

196196
for i, box in enumerate(fused_boxes_list):
197197
try:
198+
fuxed_bb_array.boxes.append(box)
199+
198200
# Cur vehicle frame
199201
pos_x = box.pose.position.x; pos_y = box.pose.position.y; pos_z = box.pose.position.z
200202
quat_x = box.pose.orientation.x; quat_y = box.pose.orientation.y; quat_z = box.pose.orientation.z; quat_w = box.pose.orientation.w
Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,36 @@
1+
from jsk_recognition_msgs.msg import BoundingBox
2+
from scipy.spatial.transform import Rotation as R
3+
4+
# Separated Sensor Fusion specific utilities into its own file because package imports are required
5+
# (to minimize integration problems)
6+
7+
# Adds a new bounding box to a BoundingBoxArray
def add_bounding_box(boxes, frame_id, stamp, x, y, z, l, w, h, yaw, conf_score, label):
    """Append one BoundingBox message to *boxes* and return the array.

    Args:
        boxes: a jsk_recognition_msgs BoundingBoxArray (mutated in place).
        frame_id: TF frame the pose is expressed in.
        stamp: ROS timestamp for the box header.
        x, y, z: box center position.
        l, w, h: box extents (length, width, height).
        yaw: heading about the z-axis, radians.
        conf_score: detection confidence stored in ``BoundingBox.value``.
        label: class label stored in ``BoundingBox.label``.

    Returns:
        The same *boxes* array, for call chaining.
    """
    msg = BoundingBox()
    msg.header.frame_id = frame_id
    msg.header.stamp = stamp

    # Position comes straight from the inputs.
    msg.pose.position.x = float(x)
    msg.pose.position.y = float(y)
    msg.pose.position.z = float(z)

    # Orientation: a yaw-only rotation about z, converted to a quaternion
    # (scipy returns it in (x, y, z, w) order).
    qx, qy, qz, qw = R.from_euler('z', yaw).as_quat()
    msg.pose.orientation.x = float(qx)
    msg.pose.orientation.y = float(qy)
    msg.pose.orientation.z = float(qz)
    msg.pose.orientation.w = float(qw)

    # Extents along the box's local axes.
    msg.dimensions.x = float(l)  # length
    msg.dimensions.y = float(w)  # width
    msg.dimensions.z = float(h)  # height

    # Detection metadata.
    msg.value = float(conf_score)
    msg.label = label

    boxes.boxes.append(msg)
    return boxes

GEMstack/onboard/perception/yolo_node.py

Lines changed: 16 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
from pedestrian_utils import *
2+
from combined_detection_utils import add_bounding_box
23
from ultralytics import YOLO
34
import cv2
45
from scipy.spatial.transform import Rotation as R
@@ -9,8 +10,8 @@
910
import time
1011

1112
# Publisher imports:
12-
from jsk_recognition_msgs.msg import BoundingBox, BoundingBoxArray
13-
from geometry_msgs.msg import Pose, Vector3
13+
from jsk_recognition_msgs.msg import BoundingBoxArray
14+
# from geometry_msgs.msg import Pose, Vector3
1415

1516

1617
class YoloNode():
@@ -220,49 +221,20 @@ def synchronized_callback(self, image_msg, lidar_msg):
220221
yaw = np.arctan2(R_vehicle[1, 0], R_vehicle[0, 0])
221222

222223
refined_center = refined_center_vehicle
223-
224-
# Create a ROS BoundingBox message
225-
box_msg = BoundingBox()
226-
box_msg.header.frame_id = 'currentVehicleFrame'
227-
box_msg.header.stamp = lidar_msg.header.stamp
228-
229-
# Set the pose
230-
box_msg.pose.position.x = float(refined_center_vehicle[0])
231-
box_msg.pose.position.y = float(refined_center_vehicle[1])
232-
box_msg.pose.position.z = float(refined_center_vehicle[2])
233-
234-
# Convert yaw to quaternion
235-
quat = R.from_euler('z', yaw).as_quat()
236-
box_msg.pose.orientation.x = float(quat[0])
237-
box_msg.pose.orientation.y = float(quat[1])
238-
box_msg.pose.orientation.z = float(quat[2])
239-
box_msg.pose.orientation.w = float(quat[3])
240-
241-
# Set the dimensions
242-
# Swapped dims[2] and dims[0]
243-
box_msg.dimensions.x = float(dims[2]) # length
244-
box_msg.dimensions.y = float(dims[1]) # width
245-
box_msg.dimensions.z = float(dims[0]) # height
246224

247-
# if self.debug:
248-
# print("X")
249-
# print(refined_center_vehicle[0])
250-
# print("L")
251-
# print(dims[0])
252-
# print("Y")
253-
# print(refined_center_vehicle[1])
254-
# print("W")
255-
# print(dims[1])
256-
# print("Z")
257-
# print(refined_center_vehicle[2])
258-
# print("H")
259-
# print(dims[2])
260-
261-
# Add confidence score and label
262-
box_msg.value = float(conf_scores[i])
263-
box_msg.label = 0 # person/pedestrian class
264-
265-
boxes.boxes.append(box_msg)
225+
boxes = add_bounding_box(boxes=boxes,
226+
frame_id='currentVehicleFrame',
227+
stamp=lidar_msg.header.stamp,
228+
x=refined_center_vehicle[0],
229+
y=refined_center_vehicle[1],
230+
z=refined_center_vehicle[2],
231+
l=dims[2], # length
232+
w=dims[1], # width
233+
h=dims[0], # height
234+
yaw=yaw,
235+
conf_score=float(conf_scores[i]),
236+
label=0 # person/pedestrian class
237+
)
266238

267239
rospy.loginfo(f"Person detected at ({refined_center_vehicle[0]:.2f}, {refined_center_vehicle[1]:.2f}, {refined_center_vehicle[2]:.2f}) with score {conf_scores[i]:.2f}")
268240

0 commit comments

Comments
 (0)