-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy path: process_pointcloud.py
More file actions
89 lines (74 loc) · 3.22 KB
/
process_pointcloud.py
File metadata and controls
89 lines (74 loc) · 3.22 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
import time
import numpy as np
import torch
import open3d as o3d
from tqdm import tqdm
import os
from dataset.projector import Projector
from utils.constants import BASE_WORKSPACE_MAX, BASE_WORKSPACE_MIN, IMAGE_SIZE
def process(colors_list, depths_list, cam_id, INTRINSICS, IMAGE_SIZE, projector, voxel_size=0.005):
    """Convert paired color/depth frames into workspace-cropped, voxel-downsampled
    colored point clouds expressed in the robot base frame.

    Args:
        colors_list: iterable of color images, each with a leading batch dim of 1
            (squeezed here), uint8-compatible.
        depths_list: iterable of depth images, same layout, uint16-compatible raw
            depth units.
        cam_id: camera identifier string; keys ``INTRINSICS`` and
            ``projector.cam_to_base``.
        INTRINSICS: mapping cam_id -> 3x3 pinhole intrinsic matrix.
        IMAGE_SIZE: (height, width) tuple. Unused by the current implementation;
            kept for interface compatibility with callers.
        projector: object exposing ``cam_to_base[cam_id]`` as a 4x4 homogeneous
            transform.
        voxel_size: voxel edge length in meters used for downsampling.

    Returns:
        list of (M_i, 6) float arrays, one per input frame, each row
        ``[x, y, z, r, g, b]`` with xyz in meters (base frame) and rgb in [0, 1].
    """
    fx, fy = INTRINSICS[cam_id][0, 0], INTRINSICS[cam_id][1, 1]
    cx, cy = INTRINSICS[cam_id][0, 2], INTRINSICS[cam_id][1, 2]
    # Depth units differ per camera family: 'f*' cameras use 4000 raw units/m.
    # BUG FIX: this scale was previously computed but never passed to Open3D
    # (depth_scale was hard-coded to 1000.0), so 'f*' cameras were decoded at
    # the wrong depth scale.
    depth_scale = 1000.0 if 'f' not in cam_id else 4000.0
    # Camera->base transform is invariant per cam_id; hoist out of the loop.
    # NOTE(review): projector.cam_to_base[cam_id] is inverted before use —
    # the dict name suggests it already maps cam->base; confirm it actually
    # stores base->cam as this inversion implies.
    cam_to_base = np.linalg.inv(projector.cam_to_base[cam_id])
    points_list = []
    for colors, depths in zip(colors_list, depths_list):
        # Drop the batch dim and force the dtypes/contiguity Open3D expects.
        colors = np.ascontiguousarray(colors.squeeze(0), dtype=np.uint8)
        depths = np.ascontiguousarray(depths.squeeze(0), dtype=np.uint16)
        rgbd = o3d.geometry.RGBDImage.create_from_color_and_depth(
            o3d.geometry.Image(colors),
            o3d.geometry.Image(depths),
            depth_scale=depth_scale,  # raw depth units -> meters
            depth_trunc=3.0,          # discard returns beyond 3 m
            convert_rgb_to_intensity=False
        )
        intrinsic = o3d.camera.PinholeCameraIntrinsic(
            width=colors.shape[1],
            height=colors.shape[0],
            fx=fx, fy=fy,
            cx=cx, cy=cy
        )
        cloud = o3d.geometry.PointCloud.create_from_rgbd_image(rgbd, intrinsic)
        points = np.asarray(cloud.points)
        colors = np.asarray(cloud.colors)
        points = transform_pointcloud_cpu(points, cam_to_base)
        # Crop to the robot workspace BEFORE downsampling so voxels are not
        # wasted on out-of-workspace geometry.
        ws_mask = (
            (points[:, 0] >= BASE_WORKSPACE_MIN[0]) & (points[:, 0] <= BASE_WORKSPACE_MAX[0]) &
            (points[:, 1] >= BASE_WORKSPACE_MIN[1]) & (points[:, 1] <= BASE_WORKSPACE_MAX[1]) &
            (points[:, 2] >= BASE_WORKSPACE_MIN[2]) & (points[:, 2] <= BASE_WORKSPACE_MAX[2])
        )
        points = points[ws_mask]
        colors = colors[ws_mask]
        cloud = o3d.geometry.PointCloud()
        cloud.points = o3d.utility.Vector3dVector(points)
        cloud.colors = o3d.utility.Vector3dVector(colors)
        # Voxel downsample, then pack xyz+rgb into a single (M, 6) array.
        cloud_down = cloud.voxel_down_sample(voxel_size)
        points = np.asarray(cloud_down.points)
        colors = np.asarray(cloud_down.colors)
        points_list.append(np.concatenate([points, colors], axis=1))
    return points_list
def transform_pointcloud_cpu(points, transform_mat):
    """Apply a 4x4 homogeneous transform to an (N, 3) point cloud on the CPU.

    Args:
        points: (N, 3) numpy array of xyz coordinates.
        transform_mat: (4, 4) numpy homogeneous transformation matrix.

    Returns:
        (N, 3) numpy array of transformed xyz coordinates.
    """
    pts = points.astype(np.float32)
    # Lift to homogeneous coordinates by appending a column of ones: (N, 4).
    homogeneous = np.concatenate(
        [pts, np.ones((pts.shape[0], 1), dtype=np.float32)], axis=1
    )
    # Row-vector convention: p' = p_hom @ M^T; drop the trailing w column.
    return (homogeneous @ transform_mat.T)[:, :3]