|
| 1 | +mod utils; |
| 2 | +use std::time::Duration; |
| 3 | + |
| 4 | +use kiss3d::window::Window; |
| 5 | +use nalgebra::Point3; |
| 6 | +use orbbec_sdk::{ |
| 7 | + Context, Format, PermissionType, SensorType, |
| 8 | + device::DeviceProperty, |
| 9 | + filter::{AlignFilter, Filter, PointCloudFilter}, |
| 10 | + pipeline::{Config, Pipeline}, |
| 11 | +}; |
| 12 | +use std::sync::mpsc::channel; |
| 13 | +use std::thread; |
| 14 | + |
/// Depth stream resolution in pixels.
const DEPTH_WIDTH: u16 = 848;
const DEPTH_HEIGHT: u16 = 480;
/// Color stream resolution in pixels.
const COLOR_WIDTH: u16 = 1280;
const COLOR_HEIGHT: u16 = 720;
/// Frame rate requested for both the depth and color streams.
const FPS: u8 = 15;

const RGB_POINT_CLOUD: bool = true; // Set to false to disable color in point cloud
| 22 | + |
| 23 | +/// Convert raw point cloud data to a vector of 3D points with color |
| 24 | +fn convert_pointcloud(data: &[u8], with_color: bool) -> Vec<(Point3<f32>, Point3<f32>)> { |
| 25 | + let point_size = if with_color { 6 } else { 3 }; // Each point has 3 coordinates (x, y, z) and optionally 3 color channels (r, g, b). |
| 26 | + let num_points = (data.len() / (point_size * 4)) as usize; // Each float is 4 bytes |
| 27 | + let mut points = Vec::with_capacity(num_points); |
| 28 | + |
| 29 | + for i in 0..num_points { |
| 30 | + let base_index = i * point_size * 4; |
| 31 | + let x = f32::from_le_bytes(data[base_index..base_index + 4].try_into().unwrap()); |
| 32 | + let y = f32::from_le_bytes(data[base_index + 4..base_index + 8].try_into().unwrap()); |
| 33 | + let z = f32::from_le_bytes(data[base_index + 8..base_index + 12].try_into().unwrap()); |
| 34 | + |
| 35 | + if with_color { |
| 36 | + let r = f32::from_le_bytes(data[base_index + 12..base_index + 16].try_into().unwrap()) |
| 37 | + / 255.0; |
| 38 | + let g = f32::from_le_bytes(data[base_index + 16..base_index + 20].try_into().unwrap()) |
| 39 | + / 255.0; |
| 40 | + let b = f32::from_le_bytes(data[base_index + 20..base_index + 24].try_into().unwrap()) |
| 41 | + / 255.0; |
| 42 | + points.push((Point3::new(x, y, z), Point3::new(r, g, b))); |
| 43 | + } else { |
| 44 | + points.push((Point3::new(x, y, z), Point3::new(0.5, 0.5, 0.5))); // Default color (gray) |
| 45 | + } |
| 46 | + } |
| 47 | + |
| 48 | + points |
| 49 | +} |
| 50 | + |
| 51 | +fn main() { |
| 52 | + // Create context and get device list |
| 53 | + let context = Context::new().unwrap(); |
| 54 | + let devices = context.query_device_list().unwrap(); |
| 55 | + |
| 56 | + if devices.is_empty() { |
| 57 | + eprintln!("No Orbbec devices found."); |
| 58 | + return; |
| 59 | + } |
| 60 | + |
| 61 | + // Get the first device available |
| 62 | + let mut device = devices.get(0).unwrap(); |
| 63 | + |
| 64 | + // Load the "High Accuracy" preset |
| 65 | + device.load_preset("High Accuracy").unwrap(); |
| 66 | + |
| 67 | + // Enable depth noise filter |
| 68 | + let hw_noise = DeviceProperty::HWNoiseRemoveFilterEnable(true); |
| 69 | + if device |
| 70 | + .is_property_supported(hw_noise, PermissionType::Write) |
| 71 | + .unwrap() |
| 72 | + { |
| 73 | + // HW filter is supported, use it instead of SW filter |
| 74 | + device.set_property(hw_noise).unwrap(); |
| 75 | + device |
| 76 | + .set_property(DeviceProperty::HWNoiseRemoveFilterThreshold(0.2)) |
| 77 | + .unwrap(); |
| 78 | + device |
| 79 | + .set_property(DeviceProperty::DepthNoiseRemovalFilter(false)) |
| 80 | + .unwrap(); |
| 81 | + println!("Using HW depth noise filter."); |
| 82 | + } else { |
| 83 | + // HW filter not supported, use SW filter |
| 84 | + device |
| 85 | + .set_property(DeviceProperty::DepthNoiseRemovalFilter(true)) |
| 86 | + .unwrap(); |
| 87 | + device |
| 88 | + .set_property(DeviceProperty::DepthNoiseRemovalFilterMaxDiff(256)) |
| 89 | + .unwrap(); |
| 90 | + device |
| 91 | + .set_property(DeviceProperty::DepthNoiseRemovalFilterMaxSpeckleSize(80)) |
| 92 | + .unwrap(); |
| 93 | + println!("Using SW depth noise filter."); |
| 94 | + } |
| 95 | + |
| 96 | + // Create pipeline |
| 97 | + let mut config = Config::new().unwrap(); |
| 98 | + let mut pipeline = Pipeline::new(&device).unwrap(); |
| 99 | + |
| 100 | + // Get depth stream profile |
| 101 | + let depth_profiles = pipeline.get_stream_profiles(SensorType::Depth).unwrap(); |
| 102 | + let depth_profile = depth_profiles |
| 103 | + .get_video_stream_profile(DEPTH_WIDTH, DEPTH_HEIGHT, Format::Y16, FPS) |
| 104 | + .unwrap(); |
| 105 | + |
| 106 | + // Get color stream profile |
| 107 | + let color_profiles = pipeline.get_stream_profiles(SensorType::Color).unwrap(); |
| 108 | + let color_profile = color_profiles |
| 109 | + .get_video_stream_profile(COLOR_WIDTH, COLOR_HEIGHT, Format::MJPG, FPS) |
| 110 | + .unwrap(); |
| 111 | + |
| 112 | + // Enable depth and color streams |
| 113 | + config.enable_stream_with_profile(&depth_profile).unwrap(); |
| 114 | + config.enable_stream_with_profile(&color_profile).unwrap(); |
| 115 | + |
| 116 | + // Create align filter and set to align to color stream |
| 117 | + let mut align_filter = AlignFilter::new().unwrap(); |
| 118 | + align_filter |
| 119 | + .set_align_to_stream_profile(&color_profile) |
| 120 | + .unwrap(); |
| 121 | + |
| 122 | + // Create point cloud filter |
| 123 | + let mut pc_filter = PointCloudFilter::new().unwrap(); |
| 124 | + pc_filter.set_color(RGB_POINT_CLOUD).unwrap(); |
| 125 | + |
| 126 | + // Enable sync mode |
| 127 | + pipeline.set_frame_sync(true).unwrap(); |
| 128 | + |
| 129 | + // Start streaming |
| 130 | + pipeline.start(&config).unwrap(); |
| 131 | + |
| 132 | + let (tx, rx) = channel(); |
| 133 | + |
| 134 | + // Spawn a thread for rendering |
| 135 | + thread::spawn(move || { |
| 136 | + let mut window = Window::new("Point Cloud"); |
| 137 | + window.set_point_size(1.0); |
| 138 | + |
| 139 | + // Rendering loop |
| 140 | + while window.render() { |
| 141 | + // Receive points from the main thread |
| 142 | + let points = rx.recv().unwrap(); |
| 143 | + |
| 144 | + // Draw all points |
| 145 | + for (point, color) in points { |
| 146 | + window.draw_point(&point, &color); |
| 147 | + } |
| 148 | + } |
| 149 | + }); |
| 150 | + |
| 151 | + // Main loop |
| 152 | + loop { |
| 153 | + // Get frameset |
| 154 | + let frameset = match pipeline |
| 155 | + .wait_for_frames(Duration::from_millis(100)) |
| 156 | + .unwrap() |
| 157 | + { |
| 158 | + Some(frameset) => frameset, |
| 159 | + None => { |
| 160 | + eprintln!("Timeout waiting for frames."); |
| 161 | + continue; |
| 162 | + } |
| 163 | + }; |
| 164 | + |
| 165 | + // Check if color frame is available |
| 166 | + if let None = frameset.get_color_frame().unwrap() { |
| 167 | + eprintln!("No color frame found."); |
| 168 | + continue; |
| 169 | + } |
| 170 | + |
| 171 | + // Check if depth frame is available |
| 172 | + if let None = frameset.get_depth_frame().unwrap() { |
| 173 | + eprintln!("No depth frame found."); |
| 174 | + continue; |
| 175 | + } |
| 176 | + |
| 177 | + // Align depth to color |
| 178 | + let aligned_frame = align_filter.process(&frameset).unwrap(); |
| 179 | + |
| 180 | + // Generate point cloud |
| 181 | + let pc_frame = pc_filter.process(&aligned_frame).unwrap(); |
| 182 | + |
| 183 | + // Convert raw data to point cloud |
| 184 | + let points = convert_pointcloud(pc_frame.raw_data(), pc_frame.has_color()); |
| 185 | + |
| 186 | + // Send points to rendering thread |
| 187 | + tx.send(points).unwrap(); |
| 188 | + } |
| 189 | +} |
0 commit comments