Skip to content

Commit 323cba2

Browse files
authored
Merge pull request #117 from krishauser/infra_a_main
Integrate HW1 - Team A Infra
2 parents 1be8acf + 66884ba commit 323cba2

9 files changed

Lines changed: 793 additions & 53 deletions

File tree

.gitignore

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -168,4 +168,8 @@ cython_debug/
168168

169169
.vscode/
170170
setup/zed_sdk.run
171-
cuda/
171+
172+
cuda/
173+
#Ignore ROS bags
174+
*.bag
175+
Lines changed: 60 additions & 52 deletions
Original file line numberDiff line numberDiff line change
@@ -1,99 +1,107 @@
1-
# ROS Headers
1+
#!/usr/bin/env python3
2+
"""
3+
Capture ROS messages and save each run in its own time-stamped folder.
4+
"""
5+
26
import rospy
3-
from sensor_msgs.msg import Image,PointCloud2
7+
from sensor_msgs.msg import Image, PointCloud2
48
import sensor_msgs.point_cloud2 as pc2
59
import ctypes
610
import struct
7-
8-
# OpenCV and cv2 bridge
911
import cv2
1012
from cv_bridge import CvBridge
1113
import numpy as np
1214
import os
15+
import datetime
1316

1417
# Most recent message received on each subscribed topic; each stays None
# until the corresponding callback fires for the first time.
lidar_points = None
camera_image = None
depth_image = None
# Shared converter between sensor_msgs/Image messages and OpenCV arrays.
bridge = CvBridge()
1821

19-
def lidar_callback(lidar: PointCloud2):
    """Remember the latest lidar point cloud for the capture loop."""
    global lidar_points
    lidar_points = lidar
2225

23-
def camera_callback(img: Image):
    """Remember the latest color image for the capture loop."""
    global camera_image
    camera_image = img
2629

27-
def depth_callback(img: Image):
    """Remember the latest depth image for the capture loop."""
    global depth_image
    depth_image = img
3033

31-
def pc2_to_numpy(pc2_msg, want_rgb=False):
    """
    Convert a sensor_msgs PointCloud2 message into a numpy array.

    Returns an (N, 3) float32 array of XYZ points, or an (N, 6) array of
    XYZRGB rows when want_rgb is True (color channels in the 0-255 range).
    Raises ValueError when RGB is requested but the cloud carries no
    packed-color fourth field.
    """
    points = np.array(list(pc2.read_points(pc2_msg, skip_nans=True)),
                      dtype=np.float32)
    if not want_rgb:
        return points[:, :3]

    if points.shape[1] != 4:
        raise ValueError("PointCloud2 does not have points")
    xyzrgb = np.empty((points.shape[0], 6))
    xyzrgb[:, :3] = points[:, :3]
    for row, point in enumerate(points):
        # The color is packed bitwise into the bytes of the fourth float;
        # reinterpret the float32 as an unsigned int to recover channels.
        raw = struct.unpack('>l', struct.pack('>f', point[3]))[0]
        packed = ctypes.c_uint32(raw).value
        red = (packed & 0x00FF0000) >> 16
        green = (packed & 0x0000FF00) >> 8
        blue = packed & 0x000000FF
        xyzrgb[row, 3:] = (red, green, blue)
    return xyzrgb
5454

55-
def save_scan(lidar_fn, color_fn, depth_fn):
    """
    Save the most recent lidar/color/depth messages to disk.

    lidar_fn : .npz holding the (N, 3) point cloud
    color_fn : color image written as-is (png)
    depth_fn : 16-bit image, depth scaled by 0xffff/4000

    Assumes the module-level lidar_points / camera_image / depth_image
    globals have all been populated by the subscriber callbacks.
    """
    print("Saving scan to", lidar_fn, color_fn, depth_fn)
    pc = pc2_to_numpy(lidar_points, want_rgb=False)
    np.savez(lidar_fn, pc)
    cv2.imwrite(color_fn, bridge.imgmsg_to_cv2(camera_image))
    dimage = bridge.imgmsg_to_cv2(depth_image)
    dimage_non_nan = dimage[np.isfinite(dimage)]
    # Guard: an all-NaN/inf depth frame would make np.min/np.max raise.
    if dimage_non_nan.size:
        print("Depth range", np.min(dimage_non_nan), np.max(dimage_non_nan))
    dimage = np.nan_to_num(dimage, nan=0, posinf=0, neginf=0)
    dimage = (dimage / 4000 * 0xffff)
    # Clamp before casting: depths beyond 4000 units would otherwise
    # exceed 0xffff and wrap around on astype(np.uint16).
    dimage = np.clip(dimage, 0, 0xffff)
    print("Depth pixel range", np.min(dimage), np.max(dimage))
    dimage = dimage.astype(np.uint16)
    cv2.imwrite(depth_fn, dimage)
68+
69+
def main(base_folder='data', start_index=1):
    """
    Interactive capture loop: subscribe to lidar + ZED topics and save a
    synchronized scan (npz / png / tif) on every keypress.

    base_folder : parent directory; each run gets a time-stamped subfolder
    start_index : numeric suffix of the first saved scan
    """
    # Unique folder for this capture run, named by the current date/time.
    run_timestamp = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    run_folder = os.path.join(base_folder, run_timestamp)
    os.makedirs(run_folder, exist_ok=True)
    print("Capture run folder:", run_folder)

    rospy.init_node("capture_lidar_zed", disable_signals=True)
    rospy.Subscriber("/lidar1/velodyne_points", PointCloud2, lidar_callback)
    rospy.Subscriber("/zed2/zed_node/rgb/image_rect_color", Image, camera_callback)
    rospy.Subscriber("/zed2/zed_node/depth/depth_registered", Image, depth_callback)

    index = start_index
    print("Press any key to:")
    print(" - store lidar point clouds as npz")
    print(" - store color images as png")
    print(" - store depth images (m scaled by 0xffff/4000) as 16-bit tif")
    print("Press Escape or Ctrl+C to quit")

    while not rospy.is_shutdown():
        if camera_image is not None:
            cv2.imshow("result", bridge.imgmsg_to_cv2(camera_image))
        key = cv2.waitKey(30)
        if key == 27:
            break  # Escape key pressed.
        if key == -1:
            continue  # No key pressed.
        # Any other key: save a scan, provided all topics have delivered.
        if lidar_points is None or camera_image is None or depth_image is None:
            print("Missing some messages?")
            continue
        files = [
            os.path.join(run_folder, 'lidar{}.npz'.format(index)),
            os.path.join(run_folder, 'color{}.png'.format(index)),
            os.path.join(run_folder, 'depth{}.tif'.format(index))
        ]
        save_scan(*files)
        index += 1
99107

@@ -105,4 +113,4 @@ def main(folder='data',start_index=1):
105113
folder = sys.argv[1]
106114
if len(sys.argv) >= 3:
107115
start_index = int(sys.argv[2])
108-
main(folder,start_index)
116+
main(folder, start_index)
Lines changed: 187 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,187 @@
1+
#!/usr/bin/env python3
2+
"""
3+
This client interacts with S3 to upload (push) or download (pull) a specific folder.
4+
5+
Before running this client make sure you have defined .env in root with following values:
6+
AWS_ACCESS_KEY_ID=example
7+
AWS_SECRET_ACCESS_KEY=example
8+
AWS_DEFAULT_REGION=us-east-1
9+
10+
Requires:
11+
pip install boto3 python-dotenv
12+
13+
Example Usage:
14+
15+
# Push a folder named "2025-02-12_15-30-00" inside "data/" to S3:
16+
python3 GEMStack/offboard/log_management/s3.py \
17+
--action push \
18+
--folder 2025-02-12_15-30-00 \
19+
--bucket cs588 \
20+
--s3-prefix captures
21+
22+
# Pull (download) that same folder from S3 into local "download/" directory:
23+
python3 GEMStack/offboard/log_management/s3.py \
24+
--action pull \
25+
--folder 2025-02-12_15-30-00 \
26+
--bucket cs588 \
27+
--s3-prefix captures \
28+
--dest-dir download
29+
"""
30+
31+
import argparse
32+
import boto3
33+
import os
34+
import sys
35+
from dotenv import load_dotenv
36+
37+
def get_s3_client():
    """
    Initialize and return a boto3 S3 client using AWS credentials from
    environment variables.

    Expects:
        - AWS_ACCESS_KEY_ID
        - AWS_SECRET_ACCESS_KEY
        - AWS_DEFAULT_REGION
    Exits the process with an error message if any of these are missing.
    """
    # Load environment variables from .env; override=True lets the .env
    # file win over any pre-existing local AWS configuration.
    load_dotenv(override=True)

    access_key = os.environ.get('AWS_ACCESS_KEY_ID')
    secret_key = os.environ.get('AWS_SECRET_ACCESS_KEY')
    region = os.environ.get('AWS_DEFAULT_REGION')

    if not access_key or not secret_key or not region:
        sys.exit(
            "Error: AWS credentials not set. Please set AWS_ACCESS_KEY_ID, "
            "AWS_SECRET_ACCESS_KEY, and AWS_DEFAULT_REGION environment variables (in .env)."
        )

    # SECURITY: never print the credentials -- the previous debug print
    # leaked the secret access key to stdout (and any captured logs).
    return boto3.client(
        's3',
        aws_access_key_id=access_key,
        aws_secret_access_key=secret_key,
        region_name=region
    )
67+
68+
def check_s3_connection(s3_client, bucket):
    """Verify the bucket is reachable with these credentials; exit if not."""
    try:
        s3_client.head_bucket(Bucket=bucket)
    except Exception as e:
        sys.exit(f"Error: Could not connect to S3 bucket '{bucket}': {e}")
    else:
        print(f"Connection check: Successfully accessed bucket '{bucket}'")
77+
78+
def push_folder_to_s3(folder_path, bucket, s3_prefix):
    """
    Walk through the folder and upload each file to S3 under the given prefix.

    Files are stored under the key: s3_prefix/folder_name/<relative_file_path>.
    Individual upload failures are reported and skipped; exits with an error
    if nothing at all was uploaded.
    """
    s3_client = get_s3_client()
    check_s3_connection(s3_client, bucket)

    folder_name = os.path.basename(folder_path)
    files_uploaded = 0

    for root, _, files in os.walk(folder_path):
        for file_name in files:
            local_path = os.path.join(root, file_name)
            # Determine the file's path relative to the folder being pushed.
            relative_path = os.path.relpath(local_path, folder_path)
            # S3 keys always use '/': os.path.join would emit '\' on
            # Windows and create broken keys, so build the key explicitly.
            s3_key = "/".join([s3_prefix, folder_name]
                              + relative_path.split(os.sep))
            try:
                print(f"Uploading {local_path} to s3://{bucket}/{s3_key}")
                s3_client.upload_file(local_path, bucket, s3_key)
                files_uploaded += 1
            except Exception as e:
                print(f"Error uploading {local_path}: {e}")

    if files_uploaded == 0:
        sys.exit(f"Error: No files were uploaded from folder: {folder_path}")
104+
105+
def pull_folder_from_s3(bucket, s3_prefix, folder_name, dest_dir):
    """
    Download all objects from S3 whose key begins with s3_prefix/folder_name.

    The folder structure is recreated under `dest_dir/folder_name`.
    Individual download failures are reported and skipped; exits with an
    error if no matching objects exist at all.
    """
    s3_client = get_s3_client()
    check_s3_connection(s3_client, bucket)

    # S3 keys always use '/', never os.sep -- os.path.join would build a
    # wrong prefix on Windows.
    prefix = f"{s3_prefix}/{folder_name}"

    paginator = s3_client.get_paginator('list_objects_v2')
    pages = paginator.paginate(Bucket=bucket, Prefix=prefix)

    found_files = False
    for page in pages:
        # Pages with no matches simply have no 'Contents' key.
        for obj in page.get('Contents', []):
            key = obj['Key']
            # Skip "directory placeholder" objects (keys with a trailing slash).
            if key.endswith("/"):
                continue

            found_files = True
            # Rebuild the same structure locally from the key's tail,
            # translating the '/'-separated key into native path segments.
            relative_path = key[len(prefix):].lstrip("/")
            local_path = os.path.join(dest_dir, folder_name,
                                      *relative_path.split("/"))

            # Ensure the local directory exists.
            os.makedirs(os.path.dirname(local_path), exist_ok=True)

            print(f"Downloading s3://{bucket}/{key} to {local_path}")
            try:
                s3_client.download_file(bucket, key, local_path)
            except Exception as e:
                print(f"Error downloading s3://{bucket}/{key}: {e}")

    if not found_files:
        sys.exit(f"Error: No files found in bucket '{bucket}' with prefix '{prefix}'")
146+
147+
def main():
    """Parse command-line arguments and dispatch to push or pull."""
    parser = argparse.ArgumentParser(
        description="Push or pull a specific folder to/from an S3 bucket."
    )
    parser.add_argument("--action", choices=["push", "pull"], required=True,
                        help="Choose whether to push (upload) or pull (download) a folder.")
    parser.add_argument("--folder", required=True,
                        help="Folder name (e.g. 2025-02-12_15-30-00)")
    parser.add_argument("--base-dir", default="data",
                        help="Local base directory where capture runs are stored (for push).")
    parser.add_argument("--dest-dir", default="download",
                        help="Local directory to place the downloaded folder (for pull).")
    parser.add_argument("--bucket", required=True,
                        help="S3 bucket name.")
    parser.add_argument("--s3-prefix", default="captures",
                        help="S3 prefix (folder) where data is stored or will be uploaded.")
    args = parser.parse_args()

    if args.action == "push":
        # Pushing from local 'base_dir/folder' to S3.
        folder_path = os.path.join(args.base_dir, args.folder)
        if not os.path.exists(folder_path):
            sys.exit(f"Error: Folder does not exist: {folder_path}")

        # Refuse to push a folder containing no files anywhere in its tree.
        if not any(files for _, _, files in os.walk(folder_path)):
            sys.exit(f"Error: Folder is empty: {folder_path}")

        push_folder_to_s3(folder_path, args.bucket, args.s3_prefix)

    elif args.action == "pull":
        # Pulling from S3 (prefix/folder) to local 'dest_dir/folder'.
        pull_folder_from_s3(args.bucket, args.s3_prefix, args.folder, args.dest_dir)

if __name__ == '__main__':
    main()

0 commit comments

Comments
 (0)