Skip to content

Commit 0ff469a

Browse files
all testing
1 parent d289ae0 commit 0ff469a

34 files changed

Lines changed: 1765 additions & 15 deletions

.catkin_workspace

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
# This file currently only serves to mark the location of a catkin workspace for tool integration
21.5 MB
Binary file not shown.

GEMstack/knowledge/vehicle/gem_e4_dynamics.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ max_accelerator_power: #Watts. Power at max accelerator pedal, by gear
1212
- 10000.0
1313
max_accelerator_power_reverse: 10000.0 #Watts. Power (backwards) in reverse gear
1414

15-
acceleration_model : kris_v1
15+
acceleration_model : hang_v1
1616
accelerator_active_range : [0.32, 1.0] #range of accelerator pedal where output acceleration is not flat
1717
brake_active_range : [0,1] #range of brake pedal where output deceleration is not flat
1818

Lines changed: 137 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,137 @@
1+
import cv2
2+
import numpy as np
3+
4+
# Initialize global variables
5+
points = []
6+
7+
def order_points(pts):
    """Arrange four 2D points as top-left, top-right, bottom-right, bottom-left.

    The two smallest-x points form the left edge and the two largest-x
    points form the right edge; within each edge the smaller y value is
    "top" (image coordinates, where y grows downward).
    """
    # Split the quad into its left and right pairs by x coordinate.
    by_x = pts[np.argsort(pts[:, 0]), :]
    left_pair = by_x[:2, :]
    right_pair = by_x[2:, :]

    # Within each pair, the smaller y is the top corner.
    tl, bl = left_pair[np.argsort(left_pair[:, 1]), :]
    tr, br = right_pair[np.argsort(right_pair[:, 1]), :]

    # Fixed winding expected by the perspective-transform destination.
    return np.array([tl, tr, br, bl], dtype="float32")
31+
32+
def mouse_callback(event, x, y, flags, param):
    """OpenCV mouse handler: left-click collects up to 4 points, right-click resets.

    Selections accumulate in the module-level ``points`` list. The working
    (annotated) image lives in ``param['image']`` and a pristine copy in
    ``param['original_image']``.
    """
    global points

    if event == cv2.EVENT_RBUTTONDOWN:
        # Start over: drop all selections and restore the clean image.
        points = []
        print("Points cleared. Please select 4 points again.")
        param['image'] = param['original_image'].copy()
        cv2.imshow("Select 4 Points", param['image'])
        return

    if event != cv2.EVENT_LBUTTONDOWN or len(points) >= 4:
        return

    # Record the click and mark it on the displayed image.
    points.append((x, y))
    print(f"Point {len(points)} selected: ({x}, {y})")
    cv2.circle(param['image'], (x, y), 5, (0, 255, 0), -1)
    cv2.imshow("Select 4 Points", param['image'])

    if len(points) == 4:
        # Block inside the callback until the user acknowledges, then
        # tear down the selection window (mirrors the original flow).
        print("All points selected. Press any key to proceed.")
        cv2.waitKey(0)
        cv2.destroyAllWindows()
49+
50+
def transform_birdseye(image, src_points, dst_size, camera_in, dist_coeffs=None):
    """Warp an image to a top-down (bird's-eye) view of a ground region.

    Parameters
    ----------
    image : np.ndarray
        Input image (as loaded by ``cv2.imread``).
    src_points : sequence of 4 (x, y) pairs
        Corners of the ground region in any order; they are sorted into
        TL, TR, BR, BL by ``order_points`` to match the destination quad.
    dst_size : (width, height)
        Output view size in pixels.
    camera_in : np.ndarray
        3x3 camera intrinsic matrix passed to ``cv2.undistort``.
    dist_coeffs : np.ndarray or None, optional
        Lens distortion coefficients for ``cv2.undistort``. The default
        ``None`` preserves the previous behavior, which applied NO
        distortion correction (undistort with zero coefficients is a
        pass-through) — supply the real coefficients to actually correct
        the lens.

    Returns
    -------
    (warped, M)
        The warped image and the 3x3 perspective matrix, or
        ``(None, None)`` on any failure (best-effort contract kept from
        the original implementation).
    """
    try:
        if image is None or image.size == 0:
            raise ValueError("Invalid image data or failed to load image.")

        # Canonical corner order so src and dst quads correspond.
        src_pts = order_points(np.array(src_points, dtype="float32"))

        # Destination rectangle spanning the full output frame.
        W, H = dst_size
        dst_pts = np.array([
            [0, 0],
            [W - 1, 0],
            [W - 1, H - 1],
            [0, H - 1]
        ], dtype=np.float32)

        # With dist_coeffs=None this is effectively a no-op; see docstring.
        undistorted_image = cv2.undistort(image, camera_in, dist_coeffs)

        M = cv2.getPerspectiveTransform(src_pts, dst_pts)
        print("Perspective Transform Matrix:\n", M)

        warped = cv2.warpPerspective(undistorted_image, M, (W, H))
        return warped, M

    except Exception as e:
        # Preserve the original best-effort contract: report and signal
        # failure to the caller instead of raising.
        print(f"Error during transformation: {e}")
        return None, None
81+
82+
def main():
    """Interactively pick four ground points, then preview the bird's-eye warp."""
    global points
    points = []  # fresh selection for this run

    # Real intrinsics, kept for reference:
    # camera_in = np.array([
    #     [684.83331299, 0.0, 573.37109375],
    #     [0.0, 684.60968018, 363.70092773],
    #     [0.0, 0.0, 1.0]
    # ], dtype=np.float32)

    # Identity intrinsics make the undistortion step a pass-through.
    camera_in = np.eye(3, dtype=np.float32)

    # Alternative test images, kept for reference:
    # image_path = "/home/aadarshhegde/Documents/GEMstack/data/parking_data-20250312T213928Z-001/parking_data/front_cam97.png"
    image_path = "/home/aadarshhegde/Documents/GEMstack/data/parking_data-20250312T213928Z-001/parking_data/camera_fl95.png"
    # image_path = '/home/aadarshhegde/Documents/GEMstack/data/parking_others/parking_data-20250312T213928Z-001/parking_data/camera_fr109.png'

    image = cv2.imread(image_path)
    if image is None:
        print("Could not load image. Check the path.")
        return

    # Annotated copy for drawing selection markers.
    display_image = image.copy()

    print("Select 4 points for the bird's-eye view. Press 'Esc' to finish.")
    cv2.imshow("Select 4 Points", display_image)
    cv2.setMouseCallback(
        "Select 4 Points",
        mouse_callback,
        {'image': display_image, 'original_image': image.copy()},
    )
    cv2.waitKey(0)

    if len(points) != 4:
        print("Error: Please select exactly 4 points.")
        return

    # Output view size in pixels (adjust to your needs).
    out_width, out_height = 800, 600

    warped, M = transform_birdseye(image, points, (out_width, out_height), camera_in)
    if warped is None:
        return

    # Show the original alongside the computed bird's-eye view.
    cv2.imshow("Original", image)
    cv2.imshow("Bird's-Eye View", warped)
    cv2.waitKey(0)
    cv2.destroyAllWindows()


if __name__ == "__main__":
    main()

GEMstack/onboard/perception/cone_detection.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -505,10 +505,10 @@ def update(self, vehicle: VehicleState) -> Dict[str, Obstacle]:
505505

506506
# === Wider Mouth Parking Spot ===
507507
cones_wide = {
508-
'cone4': (32.0, 13.0, 0.5, 0.5), # Front Left (wide)
509-
'cone5': (36.0, 13.0, 0.5, 0.5), # Back Left
510-
'cone6': (29.0, 8.0, 0.5, 0.5), # Front Right (wide)
511-
'cone7': (33.0, 8.0, 0.5, 0.5) # Back Right
508+
'cone4': (82.0, 15.0, 0.5, 0.5), # Front Left (wide)
509+
'cone5': (86.0, 15.0, 0.5, 0.5), # Back Left
510+
'cone6': (79.0, 10.0, 0.5, 0.5), # Front Right (wide)
511+
'cone7': (83.0, 10.0, 0.5, 0.5) # Back Right
512512
}
513513

514514
# Populate the obstacle states

GEMstack/onboard/perception/parking_detection.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -247,16 +247,16 @@ def update(self, state: AllState):
247247
x, y, yaw = self.parking_goal
248248
goal_pose = ObjectPose(
249249
t=current_time,
250-
x=x,
251-
y=y,
250+
x=x+1,
251+
y=y+1,
252252
z=0.0,
253253
yaw=yaw,
254254
pitch=0.0,
255255
roll=0.0,
256256
frame=ObjectFrameEnum.START
257257
)
258258

259-
DISTANCE_THRESHOLD = 8.0
259+
DISTANCE_THRESHOLD = 20
260260
if self.euclidean_distance((x,y), state) > DISTANCE_THRESHOLD: # we are not close enough to the parking spot
261261
return None
262262

Lines changed: 45 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,45 @@
1+
import numpy as np
import matplotlib.pyplot as plt  # hoisted: imports belong at the top (PEP 8)

# Example: list of cone positions in vehicle frame (X forward, Y left, Z up)
cones = np.array([
    [10.17, 10.62],
    [12.44, 11.22],
    [11.78, 7.60],
    [12.48, 7.63],
    [14.68, 7.54],
    [17.04, 7.53]
])

# Step 1: sort cones by their second coordinate (column 1).
# NOTE(review): the header declares X forward / Y left, but the original
# comment called Y the "forward distance" — confirm which axis is actually
# forward before trusting the mouth/rear split below.
cones_sorted = cones[cones[:, 1].argsort()]

# Step 2: assume the two lowest-Y cones form the spot's mouth and the
# next two form the rear.
mouth = cones_sorted[:2]
rear = cones_sorted[2:4]

# Step 3: midpoints of the mouth and rear cone pairs.
mouth_center = np.mean(mouth, axis=0)
rear_center = np.mean(rear, axis=0)

# Step 4: the parking goal sits halfway between the two midpoints.
goal_pos = (mouth_center + rear_center) / 2

# Step 5: heading (yaw) points from the rear toward the mouth.
delta = mouth_center - rear_center
yaw = np.arctan2(delta[1], delta[0])

print(f"Goal Position: x={goal_pos[0]:.2f}, y={goal_pos[1]:.2f}, yaw={np.degrees(yaw):.2f} deg")

# Visualize the cones and the computed goal pose.
plt.scatter(cones[:, 0], cones[:, 1], c='red', label='Cones')
plt.scatter(*goal_pos, c='blue', label='Goal')
plt.arrow(goal_pos[0], goal_pos[1], 0.5*np.cos(yaw), 0.5*np.sin(yaw),
          head_width=0.3, color='blue')

plt.legend()
plt.gca().set_aspect('equal')
plt.title('Parking Spot and Goal Pose')
plt.show()
Lines changed: 121 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,121 @@
1+
import numpy as np
2+
from scipy.spatial import distance
3+
from collections import defaultdict
4+
5+
def order_points(points):
    """Return the points sorted by polar angle about their centroid.

    Sorting by ``arctan2`` angle walks the quad counter-clockwise in
    standard (y-up) axes, which reads as clockwise in image (y-down)
    coordinates.
    """
    center = np.mean(points, axis=0)
    offsets = points - center
    order = np.argsort(np.arctan2(offsets[:, 1], offsets[:, 0]))
    return points[order]
11+
12+
def find_parking_spots(cone_positions, tolerance=0.1):
    """Detect rectangular parking spots among cone positions.

    Strategy: two cone pairs that share the same (quantized) midpoint and
    the same length are the diagonals of a quadrilateral whose diagonals
    are equal and bisect each other — i.e. a rectangle.

    Bug fixed: the previous validation required exactly 2 unique pairwise
    distances among the 4 corners, which only holds for SQUARES. A
    non-square rectangle has 3 unique distances (two side lengths plus the
    diagonal), so genuine rectangular spots were rejected. The check now
    accepts 2 or 3 unique distances, and additionally rejects degenerate
    quadruples containing coincident cones (minimum pairwise distance must
    exceed the tolerance).

    Parameters
    ----------
    cone_positions : np.ndarray, shape (n, >=2)
        Cone coordinates; only the first two columns (x, y) are used.
    tolerance : float
        Quantization step used when grouping midpoints and lengths.

    Returns
    -------
    list of np.ndarray
        One (4, 2) array of corner points per detected rectangle, ordered
        by angle around the centroid (same ordering as ``order_points``).
    """
    cones_2d = cone_positions[:, :2]
    n = len(cones_2d)
    pairs = []

    # Describe every cone pair by its quantized midpoint and length.
    for i in range(n):
        for j in range(i + 1, n):
            p1 = cones_2d[i]
            p2 = cones_2d[j]
            midpoint = (round((p1[0] + p2[0]) / 2 / tolerance) * tolerance,
                        round((p1[1] + p2[1]) / 2 / tolerance) * tolerance)
            dist = round(distance.euclidean(p1, p2) / tolerance) * tolerance
            pairs.append((midpoint, dist, i, j))

    # Pairs sharing both midpoint and length are candidate diagonals.
    groups = defaultdict(list)
    for midpoint, dist, i, j in pairs:
        groups[(midpoint[0], midpoint[1], dist)].append((i, j))

    processed = set()
    rectangles = []

    for group_pairs in groups.values():
        # Need at least two coincident equal-length pairs (two diagonals).
        if len(group_pairs) < 2:
            continue

        for a in range(len(group_pairs)):
            for b in range(a + 1, len(group_pairs)):
                indices = set(group_pairs[a] + group_pairs[b])

                # Two diagonals must involve exactly 4 distinct cones.
                if len(indices) != 4:
                    continue

                key = tuple(sorted(indices))
                if key in processed:
                    continue

                points = cones_2d[list(key)]
                dists = distance.pdist(points)
                unique_dists = np.unique(np.round(dists / tolerance) * tolerance)

                # Reject coincident cones; accept 2 (square) or 3
                # (general rectangle) unique distances.
                if np.min(dists) > tolerance and len(unique_dists) <= 3:
                    # Order corners by angle about their centroid
                    # (same ordering as the module's order_points helper,
                    # inlined here to keep this function self-contained).
                    centroid = points.mean(axis=0)
                    angles = np.arctan2(points[:, 1] - centroid[1],
                                        points[:, 0] - centroid[0])
                    rectangles.append(points[np.argsort(angles)])
                    processed.add(key)

    return rectangles
67+
68+
def point_in_polygon(point, polygon):
    """Test whether *point* lies inside *polygon* using ray casting.

    A point coinciding with a polygon vertex counts as inside. The test
    ray extends from the point in the +x direction; each edge crossing
    toggles the inside/outside state.
    """
    x, y = point
    edge_count = len(polygon)
    inside = False

    for idx in range(edge_count):
        ax, ay = polygon[idx]
        bx, by = polygon[(idx + 1) % edge_count]

        # A vertex hit is inside by definition.
        hits_a = np.isclose(x, ax) and np.isclose(y, ay)
        hits_b = np.isclose(x, bx) and np.isclose(y, by)
        if hits_a or hits_b:
            return True

        # Toggle when the horizontal ray crosses this edge to the right
        # of the point.
        if (ay > y) != (by > y):
            crossing_x = (y - ay) * (bx - ax) / (by - ay) + ax
            if x < crossing_x:
                inside = not inside

    return inside
90+
91+
def detect_parking_spots(cone_positions, object_positions):
    """Locate rectangular spots and flag those containing an object.

    Returns a list of dicts, one per detected spot, with keys
    ``"corners"`` (ordered corner array from ``find_parking_spots``) and
    ``"occupied"`` (True when any object's (x, y) lies inside the spot).
    """
    spots = []
    for corners in find_parking_spots(cone_positions):
        # A spot is occupied if any object's 2D position falls inside it.
        has_object = any(
            point_in_polygon(obj[:2], corners) for obj in object_positions
        )
        spots.append({"corners": corners, "occupied": has_object})
    return spots
104+
105+
# Example Usage
if __name__ == "__main__":
    # Two adjacent spots sharing the cone row at x=4. Each cone is listed
    # once: the previous example duplicated [4,1,0] and [4,4,0], and the
    # coincident points produced a degenerate zero-width "spot" in the
    # pair-grouping detector.
    cone_positions = np.array([
        [1, 1, 0], [1, 4, 0], [4, 1, 0], [4, 4, 0],  # Spot 1
        [9, 1, 0], [9, 4, 0]                          # Spot 2 (shares the x=4 cones)
    ])

    object_positions = np.array([
        [2.5, 2.5, 0]  # Object inside Spot 1
    ])

    spots = detect_parking_spots(cone_positions, object_positions)

    for i, spot in enumerate(spots):
        status = "Occupied" if spot["occupied"] else "Available"
        print(f"Parking Spot {i+1}: {status}")
        print(f"Corners: {spot['corners']}\n")

0 commit comments

Comments
 (0)