This repository has been archived by the owner on Feb 27, 2021. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 17
/
Copy pathcapture_images_async.rs
190 lines (163 loc) · 6.13 KB
/
capture_images_async.rs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
use anyhow::Result;
#[cfg(all(feature = "with-image", feature = "with-nalgebra"))]
mod example {
use anyhow::Result;
use crossbeam::channel;
use image::ImageFormat;
use kiss3d::{
light::Light,
window::{State, Window},
};
use nalgebra::Point3;
use realsense_rust::{
prelude::*, Config, Format, Pipeline, PointCloud, Resolution, StreamKind,
};
use std::time::Duration;
/// A single point-cloud sample: a 3D vertex plus the RGB color looked up
/// from the color frame's texture for that vertex.
#[derive(Debug)]
struct PointWithColor {
// Vertex position in the camera's 3D coordinate frame (meters, per librealsense).
vertex: Point3<f32>,
// RGB color with each channel normalized to [0, 1] (see `example::main`,
// which divides 8-bit channels by 255).
color: Point3<f32>,
}
/// State driven by kiss3d's render loop: receives point clouds from the
/// capture loop over a channel and caches the most recent one for drawing.
#[derive(Debug)]
struct PcdVizState {
// Receiving end of the channel fed by `example::main`.
rx: channel::Receiver<Vec<PointWithColor>>,
// Most recently received point cloud; `None` until the first one arrives.
points: Option<Vec<PointWithColor>>,
}
impl PcdVizState {
    /// Creates a visualizer state that renders point clouds received on `rx`.
    ///
    /// No point cloud is cached until the first message arrives.
    // Fix: the original bound the value to a local and immediately returned it
    // (clippy::let_and_return); return the struct expression directly.
    pub fn new(rx: channel::Receiver<Vec<PointWithColor>>) -> Self {
        Self { rx, points: None }
    }
}
impl State for PcdVizState {
    /// Called by kiss3d once per rendered frame: refresh the cached point
    /// cloud if a new one is available, then draw the axes and the points.
    fn step(&mut self, window: &mut Window) {
        // Non-blocking poll: keep the previous cloud when nothing new arrived.
        if let Ok(points) = self.rx.try_recv() {
            self.points = Some(points);
        }

        // Draw the X/Y/Z axes from the origin, each colored after itself
        // (unit direction doubles as the RGB color).
        let axes = [
            Point3::new(1.0, 0.0, 0.0),
            Point3::new(0.0, 1.0, 0.0),
            Point3::new(0.0, 0.0, 1.0),
        ];
        for axis in &axes {
            window.draw_line(&Point3::origin(), axis, axis);
        }

        // Render the most recent point cloud, if we have received one.
        if let Some(points) = &self.points {
            for p in points {
                window.draw_point(&p.vertex, &p.color);
            }
        }
    }
}
/// Streams depth and color frames asynchronously from a RealSense device,
/// saves each pair as PNG files, and forwards a colored point cloud to the
/// kiss3d visualizer running on its own thread.
///
/// Captures at most 1000 composite frames, skipping iterations that time
/// out. Returns any pipeline, frame-access, or image-encoding error.
pub async fn main() -> Result<()> {
    let (tx, rx) = channel::unbounded();
    // kiss3d's render loop blocks, so the visualizer gets a dedicated thread;
    // this async task keeps ownership of the sending half.
    std::thread::spawn(move || {
        let state = PcdVizState::new(rx);
        let mut window = Window::new("point cloud");
        window.set_light(Light::StickToCamera);
        window.render_loop(state);
    });
    // Point-cloud processing block: maps the color texture onto depth vertices.
    let mut pointcloud = PointCloud::create()?;
    // Configure and start the pipeline: width 640, 0 lets librealsense choose
    // the other dimension, 30 fps.
    let pipeline = Pipeline::new()?;
    let config = Config::new()?
        .enable_stream(StreamKind::Depth, 0, 640, 0, Format::Z16, 30)?
        .enable_stream(StreamKind::Color, 0, 640, 0, Format::Rgb8, 30)?;
    let mut pipeline = pipeline.start_async(&config).await?;
    // Print the negotiated stream profiles.
    let profile = pipeline.profile();
    for (idx, stream_result) in profile.streams()?.try_into_iter()?.enumerate() {
        let stream = stream_result?;
        println!("stream data {}: {:#?}", idx, stream.get_data()?);
    }
    // Capture loop.
    for _ in 0usize..1000 {
        let timeout = Duration::from_millis(1000);
        let frames = match pipeline.wait_async(timeout).await? {
            Some(frame) => frame,
            None => {
                println!("timeout error");
                continue;
            }
        };
        println!("frame number = {}", frames.number()?);
        // Both streams were enabled above, so both frames are expected;
        // `expect` documents the invariant instead of a bare `unwrap`.
        let color_frame = frames
            .color_frame()?
            .expect("color stream enabled but no color frame in frameset");
        let depth_frame = frames
            .depth_frame()?
            .expect("depth stream enabled but no depth frame in frameset");
        // Save the color frame. Fix: the filename prefix previously said
        // "sync-", a copy-paste from the synchronous example.
        {
            let image = color_frame.owned_image()?;
            image.save_with_format(
                format!("async-video-example-{}.png", color_frame.number()?),
                ImageFormat::Png,
            )?;
        }
        // Save the depth frame and report the distance at the image center.
        {
            let Resolution { width, height } = depth_frame.resolution()?;
            let distance = depth_frame.distance(width / 2, height / 2)?;
            println!("distance = {}", distance);
            let image = depth_frame.owned_image()?;
            image.save_with_format(
                format!("async-depth-example-{}.png", depth_frame.number()?),
                ImageFormat::Png,
            )?;
        }
        // Raw color-texture layout, used below to look up per-vertex colors.
        let pixel_data = color_frame.data()?;
        let frame_width = color_frame.width()? as f32;
        let frame_height = color_frame.height()? as f32;
        let bytes_per_pixel = color_frame.bits_per_pixel()? / 8;
        let pixel_stride = color_frame.stride_in_bytes()?;
        // Compute the textured point cloud for this depth/color pair.
        pointcloud.map_to(color_frame.clone())?;
        let points_frame = pointcloud.calculate(depth_frame.clone())?;
        let points = points_frame
            .vertices()?
            .iter()
            .zip(points_frame.texture_coordinates()?)
            .map(|(vertex, tex_coord)| {
                let [x, y, z] = vertex.xyz;
                // Vertices whose texture coordinate falls outside [0, 1)
                // have no color sample; default to black.
                let mut color = Point3::new(0f32, 0f32, 0f32);
                let (u, v) = (tex_coord.u, tex_coord.v);
                if u >= 0f32 && u < 1f32 && v >= 0f32 && v < 1f32 {
                    let x = (u * frame_width) as usize;
                    let y = (v * frame_height) as usize;
                    let idx = x * bytes_per_pixel + y * pixel_stride;
                    let r = pixel_data[idx] as f32;
                    let g = pixel_data[idx + 1] as f32;
                    let b = pixel_data[idx + 2] as f32;
                    // Normalize 8-bit RGB to [0, 1] for kiss3d.
                    color = Point3::new(r, g, b) / 255f32;
                }
                PointWithColor {
                    vertex: Point3::new(x, y, z),
                    color,
                }
            })
            .collect::<Vec<_>>();
        // Fix: use `.is_err()` instead of `if let Err(_) = …` (clippy
        // redundant_pattern_matching). A send error means the visualizer
        // window was closed, so stop capturing.
        if tx.send(points).is_err() {
            break;
        }
    }
    Ok(())
}
}
#[cfg(all(feature = "with-image", feature = "with-nalgebra"))]
#[tokio::main]
async fn main() -> Result<()> {
// Entry point when both required features are enabled: delegate to the
// feature-gated example implementation.
example::main().await
}
// Fallback entry point compiled when either feature is missing; aborts with
// an instructive message instead of failing to compile the whole example.
#[cfg(not(all(feature = "with-image", feature = "with-nalgebra")))]
fn main() {
panic!("please enable with-image and with-nalgebra features to run this example");
}