| """ |
| Preprocessing Script for ToF-360 |
| |
| Author: Mahdi Chamseddine (mahdi.chamseddine@dfki.de) |
| Please cite our work if the code is helpful to you. |
| """ |
| from pathlib import Path |
|
|
| import cv2 |
| import numpy as np |
| import open3d as o3d |
|
|
|
|
def map_label(label: int) -> int:
    """Map a raw ToF-360 semantic label to a contiguous training class id.

    Labels 0/33/34 are mapped to -1 (ignore); several raw labels are merged
    into shared classes; anything unlisted falls into catch-all class 12.
    Note: class ids 3 and 11 are deliberately absent from the mapping.
    """
    remap = {
        0: -1, 33: -1, 34: -1,   # ignored labels
        2: 0, 20: 0, 42: 0,
        3: 1, 18: 1,
        1: 2, 40: 2,
        14: 4,
        8: 5,
        7: 6,
        12: 7,
        5: 8,
        4: 9,
        31: 10,
    }
    # Unknown raw labels collapse into the catch-all class 12.
    return remap.get(label, 12)
|
|
|
|
def downsample(xyz: np.ndarray, voxel_size: float = 0.01) -> np.ndarray:
    """Voxel-grid downsample a point cloud, keeping one random point per voxel.

    Args:
        xyz: (N, 3) array of point coordinates.
        voxel_size: edge length of the voxel grid cells.

    Returns:
        1-D array of indices into ``xyz``, one per occupied voxel.
    """
    cloud = o3d.geometry.PointCloud()
    cloud.points = o3d.utility.Vector3dVector(xyz)
    lower_bound = np.min(xyz, axis=0)
    upper_bound = np.max(xyz, axis=0)
    # Third return value is, per voxel, the list of original point indices
    # that fell into that voxel.
    _, _, per_voxel_indices = cloud.voxel_down_sample_and_trace(
        voxel_size, lower_bound, upper_bound
    )
    # Keep a single randomly chosen representative from each voxel.
    kept = [np.random.choice(members) for members in per_voxel_indices]
    return np.asarray(kept)
|
|
|
|
def preprocess_scans(area_path: Path) -> None:
    """Preprocess every scan found under ``area_path/XYZ``.

    Args:
        area_path: directory of one area; its ``XYZ`` subfolder holds the
            ``<scan>_XYZ.npy`` coordinate files that define the scan names.
    """
    xyz_dir = area_path / "XYZ"
    # Match only the coordinate files; the previous "*.npy*" pattern also
    # picked up stray files such as backups (e.g. "*.npy.bak").
    for scan_path in xyz_dir.glob("*_XYZ.npy"):
        # removesuffix only strips the suffix when it is actually present,
        # unlike blind fixed-length slicing of the stem.
        scan_name = scan_path.stem.removesuffix("_XYZ")
        parse_scan(scan_name, area_path)
|
|
|
|
def parse_scan(scan_name: str, area_path: Path, debug: bool = False):
    """Load one scan's data, convert coordinate frames, downsample, and save.

    Reads the raw XYZ points (mm), RGB image, per-pixel semantic labels, and
    normal map for ``scan_name`` under ``area_path``, converts everything to
    a common convention, voxel-downsamples, and writes ``coord.npy``,
    ``color.npy``, ``normal.npy`` and ``segment.npy`` into the mirrored
    ``preprocessed`` output tree.

    Args:
        scan_name: base name of the scan (without the ``_XYZ``/``_rgb`` etc.
            suffixes).
        area_path: directory of the area that contains the scan.
        debug: currently unused; kept for interface compatibility.

    Raises:
        FileNotFoundError: if the RGB or normal image cannot be read.
    """
    output_name = area_path.stem + "_" + scan_name
    print(f"Parsing scan: {output_name}", flush=True)
    processed_path = (
        area_path.parent.parent / "preprocessed" / area_path.parent.stem / output_name
    )
    processed_path.mkdir(parents=True, exist_ok=True)

    print(f"--- [{output_name}] reading point cloud", flush=True)
    xyz_path = area_path / "XYZ" / (scan_name + "_XYZ.npy")
    temp = np.load(xyz_path)

    # Millimetres -> metres, then remap axes (x, y, z) -> (x, z, -y).
    temp = temp.reshape(-1, 3) / 1000
    coord = temp.copy()
    coord[:, 1] = temp[:, 2]
    coord[:, 2] = -temp[:, 1]

    png_path = area_path / "RGB" / (scan_name + "_rgb.png")
    # cv2.imread expects a string path, not a pathlib.Path.
    color = cv2.imread(str(png_path.resolve()))
    if color is None:
        # imread returns None on failure instead of raising; fail loudly here
        # rather than with a cryptic cvtColor error below.
        raise FileNotFoundError(f"Could not read RGB image: {png_path}")
    color = cv2.cvtColor(color, cv2.COLOR_BGR2RGB).reshape(-1, 3) / 255

    print(f"--- [{output_name}] loading labels", flush=True)
    semantic_path = area_path / "semantics" / (scan_name + "_semantic.npy")
    segment = np.load(semantic_path).reshape(-1)
    segment = np.vectorize(map_label)(segment)

    normal_path = area_path / "normal" / (scan_name + "_normal.png")
    temp = cv2.imread(str(normal_path.resolve()))
    if temp is None:
        raise FileNotFoundError(f"Could not read normal image: {normal_path}")
    # Decode normals stored in [0, 255] back to roughly [-1, 1], then apply
    # the same axis remap as the coordinates.
    temp = cv2.cvtColor(temp, cv2.COLOR_BGR2RGB).reshape(-1, 3) * 2 / 255
    temp = temp - 1
    normal = temp.copy()
    normal[:, 1] = temp[:, 2]
    normal[:, 2] = -temp[:, 1]

    print(f"--- [{output_name}] down sampling", flush=True)
    idx = downsample(coord)

    print(f"--- [{output_name}] saving", flush=True)
    coord = np.ascontiguousarray(coord[idx, :], dtype=np.float32)
    np.save(Path(processed_path, "coord.npy"), coord)
    color = np.ascontiguousarray(color[idx, :], dtype=np.float32)
    np.save(Path(processed_path, "color.npy"), color)
    normal = np.ascontiguousarray(normal[idx, :], dtype=np.float32)
    np.save(Path(processed_path, "normal.npy"), normal)
    segment = np.ascontiguousarray(segment[idx], dtype=np.int32)
    np.save(Path(processed_path, "segment.npy"), segment)
|
|
|
|
def main():
    """Entry point: run preprocessing on every known area in each split."""
    splits = [""]
    dataset_directory = "path/to/ToF-360/"
    areas = ["Hospital", "Office_Room_1", "Office_Room_2", "Parking_Lot"]

    for split in splits:
        split_path = Path(dataset_directory + split)
        # Bail out entirely on the first missing split directory.
        if not split_path.is_dir():
            print(
                f"Error: '{split_path.resolve()}' is not a valid directory.",
                flush=True,
            )
            return
        for candidate in split_path.iterdir():
            if candidate.is_dir() and candidate.stem in areas:
                preprocess_scans(candidate)
|
|
|
|
| if __name__ == "__main__": |
| main() |
|
|