Use OpenCV to capture video

This commit is contained in:
grabbit 2025-03-19 09:02:00 +08:00
parent c00f904657
commit 7bbbea6140
5 changed files with 493 additions and 244 deletions

Cargo.toml

@@ -8,7 +8,6 @@ description = "A Raspberry Pi based meteor detection system"
 [dependencies]
 # Hardware interfaces
 rppal = "0.22.1"          # Raspberry Pi hardware access
-v4l = "0.14.0"            # Video4Linux2 bindings
 serialport = "4.2.0"      # Serial port for GPS
 embedded-hal = "0.2.7"    # Hardware abstraction layer
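
The hunk removes v4l without adding anything in its place, so the opencv crate must already be declared elsewhere in [dependencies] (the new src/camera/opencv.rs imports it). A representative entry would look like the line below; the version is an assumption, not shown in this diff:

    opencv = "0.93"       # Rust bindings for OpenCV (version assumed)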

src/camera/controller.rs

@@ -8,17 +8,17 @@ use tokio::sync::broadcast;
 use tokio::time;
 use crate::camera::frame_buffer::{Frame, FrameBuffer, SharedFrameBuffer};
-use crate::camera::v4l2::{V4l2Camera, V4l2CaptureStream};
+use crate::camera::opencv::{OpenCVCamera, OpenCVCaptureStream};
 use crate::camera::{CameraSettings, MeteorEvent, Resolution, ExposureMode};

 /// Camera controller manages camera operations and frame capture
 pub struct CameraController {
     /// Camera settings
     settings: CameraSettings,
-    /// The V4L2 camera driver
-    camera: Option<V4l2Camera>,
-    /// The V4L2 capture stream
-    stream: Option<V4l2CaptureStream>,
+    /// The OpenCV camera driver
+    camera: Option<OpenCVCamera>,
+    /// The OpenCV capture stream
+    stream: Option<OpenCVCaptureStream>,
     /// Circular buffer for storing recent frames
     frame_buffer: SharedFrameBuffer,
     /// Frame counter

@@ -64,7 +64,7 @@ impl CameraController {
     /// Initialize the camera with current settings
     pub async fn initialize(&mut self) -> Result<()> {
         // Open the camera
-        let mut camera = V4l2Camera::open(&self.settings.device)
+        let mut camera = OpenCVCamera::open(&self.settings.device)
             .context("Failed to open camera")?;

         // Configure camera parameters
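
One operational note: OpenCVCaptureStream::capture_frame blocks inside VideoCapture::read, so the tokio-based controller should keep it off the async executor's worker threads. A minimal sketch of one way to do that, assuming the multi-threaded tokio runtime (the helper name is hypothetical, not part of this commit):

    use tokio::task;

    // Hypothetical helper: run the blocking OpenCV read without stalling the
    // async runtime. block_in_place requires the multi-threaded runtime; a
    // dedicated capture thread is the usual alternative.
    async fn next_frame(stream: &mut OpenCVCaptureStream) -> anyhow::Result<opencv::core::Mat> {
        task::block_in_place(|| stream.capture_frame())
    }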

src/camera/mod.rs

@@ -1,9 +1,11 @@
 mod controller;
-mod v4l2;
+// mod v4l2;
+mod opencv;
 mod frame_buffer;

 pub use controller::CameraController;
 pub use frame_buffer::{Frame, FrameBuffer};
+pub use opencv::OpenCVCamera;

 use anyhow::Result;
 use chrono::DateTime;

src/camera/opencv.rs (new file, 248 lines)

@@ -0,0 +1,248 @@
use anyhow::{anyhow, Context, Result};
use log::{debug, error, info, warn};
use std::path::Path;
use opencv::{core, prelude::*, videoio};
use crate::camera::{ExposureMode, Resolution};
/// OpenCV camera driver
pub struct OpenCVCamera {
/// The VideoCapture instance
capture: videoio::VideoCapture,
/// Camera width
width: u32,
/// Camera height
height: u32,
/// Whether the camera is currently streaming
is_streaming: bool,
/// Device index or path
device: String,
}
impl OpenCVCamera {
/// Open a camera device by path or index
pub fn open<P: AsRef<Path>>(path: P) -> Result<Self> {
let path_str = path.as_ref().to_str()
.ok_or_else(|| anyhow!("Invalid path"))?;
let is_device_path = path_str.starts_with("/dev/");
let mut capture = if is_device_path {
// For device files like /dev/video0, we need to extract the number
if let Some(num_str) = path_str.strip_prefix("/dev/video") {
if let Ok(device_index) = num_str.parse::<i32>() {
videoio::VideoCapture::new(device_index, videoio::CAP_ANY)?
} else {
return Err(anyhow!("Invalid device number in path: {}", path_str));
}
} else {
return Err(anyhow!("Unsupported device path format: {}", path_str));
}
} else {
// For other paths, try to open directly (e.g., video files, URLs)
videoio::VideoCapture::from_file(path_str, videoio::CAP_ANY)?
};
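// NOTE (editorial assumption): CAP_ANY lets OpenCV pick whichever backend it
// finds first (often GStreamer when OpenCV is built with it); passing
// videoio::CAP_V4L2 here instead would pin the V4L2 backend on the Pi.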
if !capture.is_opened()? {
return Err(anyhow!("Failed to open camera: {}", path_str));
}
// Get initial resolution
let width = capture.get(videoio::CAP_PROP_FRAME_WIDTH)? as u32;
let height = capture.get(videoio::CAP_PROP_FRAME_HEIGHT)? as u32;
info!(
"Opened camera: {} ({}x{})",
path_str, width, height
);
Ok(Self {
capture,
width,
height,
is_streaming: false,
device: path_str.to_string(),
})
}
/// Set the camera resolution and pixel format
pub fn set_format(&mut self, resolution: Resolution) -> Result<()> {
let (width, height) = resolution.dimensions();
// Set resolution
self.capture.set(videoio::CAP_PROP_FRAME_WIDTH, width as f64)?;
self.capture.set(videoio::CAP_PROP_FRAME_HEIGHT, height as f64)?;
// Read back actual resolution (might be different from requested)
let actual_width = self.capture.get(videoio::CAP_PROP_FRAME_WIDTH)? as u32;
let actual_height = self.capture.get(videoio::CAP_PROP_FRAME_HEIGHT)? as u32;
if actual_width != width || actual_height != height {
warn!(
"Requested resolution {}x{} but got {}x{}",
width, height, actual_width, actual_height
);
}
self.width = actual_width;
self.height = actual_height;
info!("Set camera format: {}×{}", self.width, self.height);
Ok(())
}
/// Set the camera frame rate
pub fn set_fps(&mut self, fps: u32) -> Result<()> {
self.capture.set(videoio::CAP_PROP_FPS, fps as f64)?;
// Read back actual FPS
let actual_fps = self.capture.get(videoio::CAP_PROP_FPS)?;
if (actual_fps - fps as f64).abs() > 0.1 {
warn!("Requested {} fps but got {} fps", fps, actual_fps);
}
info!("Set camera frame rate: {} fps", actual_fps);
Ok(())
}
/// Set camera exposure mode and value
pub fn set_exposure(&mut self, mode: ExposureMode) -> Result<()> {
match mode {
ExposureMode::Auto => {
// Set auto exposure mode
self.capture.set(videoio::CAP_PROP_AUTO_EXPOSURE, 0.75)?; // 0.75 selects auto mode via OpenCV's V4L2 backend; other backends use different values
info!("Set camera exposure: Auto");
},
ExposureMode::Manual(exposure_time) => {
// First disable auto exposure
self.capture.set(videoio::CAP_PROP_AUTO_EXPOSURE, 0.25)?; // 0.25 selects manual mode via OpenCV's V4L2 backend
// Then set the exposure value; the scale is camera- and backend-dependent
let exposure_value = exposure_time as f64 / 10000.0; // Heuristic: scale microseconds into the backend's exposure units
self.capture.set(videoio::CAP_PROP_EXPOSURE, exposure_value)?;
info!("Set camera exposure: Manual ({})", exposure_time);
}
}
Ok(())
}
/// Set camera gain (ISO)
pub fn set_gain(&mut self, gain: u8) -> Result<()> {
self.capture.set(videoio::CAP_PROP_GAIN, gain as f64)?;
let actual_gain = self.capture.get(videoio::CAP_PROP_GAIN)?;
info!("Set camera gain: {} (actual: {})", gain, actual_gain);
Ok(())
}
/// Lock focus at infinity (if supported)
pub fn lock_focus_at_infinity(&mut self) -> Result<()> {
// First, set focus mode to manual
if self.capture.set(videoio::CAP_PROP_AUTOFOCUS, 0.0).is_ok() {
// Then set focus to infinity (typically maximum value)
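// NOTE (editorial assumption): OpenCV's V4L2 backend normalizes control
// values to the range [0, 1], so 1.0 here maps to the driver's maximum
// focus value.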
if self.capture.set(videoio::CAP_PROP_FOCUS, 1.0).is_ok() {
info!("Locked focus at infinity");
return Ok(());
}
}
warn!("Camera does not support focus control");
Ok(())
}
/// Start streaming from the camera
pub fn start_streaming(&mut self) -> Result<OpenCVCaptureStream> {
// Ensure capture is opened
if !self.capture.is_opened()? {
return Err(anyhow!("Camera is not open"));
}
// Create a separate VideoCapture for the stream to avoid concurrent access issues
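// NOTE (editorial assumption): most V4L2 drivers tolerate a second open of
// the same device while only one handle streams; if the driver reports
// EBUSY here, the primary `capture` handle would have to be released before
// opening the stream.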
let device = self.device.clone();
let is_device_path = device.starts_with("/dev/");
let stream_capture = if is_device_path {
if let Some(num_str) = device.strip_prefix("/dev/video") {
if let Ok(device_index) = num_str.parse::<i32>() {
// Open with same settings
let mut cap = videoio::VideoCapture::new(device_index, videoio::CAP_ANY)?;
cap.set(videoio::CAP_PROP_FRAME_WIDTH, self.width as f64)?;
cap.set(videoio::CAP_PROP_FRAME_HEIGHT, self.height as f64)?;
cap
} else {
return Err(anyhow!("Invalid device number in path: {}", device));
}
} else {
return Err(anyhow!("Unsupported device path format: {}", device));
}
} else {
// For other paths, try to open directly
videoio::VideoCapture::from_file(&device, videoio::CAP_ANY)?
};
if !stream_capture.is_opened()? {
return Err(anyhow!("Failed to open camera stream"));
}
self.is_streaming = true;
info!("Started camera streaming");
Ok(OpenCVCaptureStream {
capture: stream_capture,
})
}
/// Stop streaming from the camera
pub fn stop_streaming(&mut self) -> Result<()> {
self.is_streaming = false;
info!("Stopped camera streaming");
Ok(())
}
/// Check if the camera is currently streaming
pub fn is_streaming(&self) -> bool {
self.is_streaming
}
/// Get current format width
pub fn width(&self) -> u32 {
self.width
}
/// Get current format height
pub fn height(&self) -> u32 {
self.height
}
}
/// Wrapper around OpenCV VideoCapture for streaming
pub struct OpenCVCaptureStream {
capture: videoio::VideoCapture,
}
impl OpenCVCaptureStream {
/// Capture a single frame from the camera
pub fn capture_frame(&mut self) -> Result<core::Mat> {
let mut frame = core::Mat::default()?;
if self.capture.read(&mut frame)? {
if frame.empty()? {
return Err(anyhow!("Captured frame is empty"));
}
Ok(frame)
} else {
Err(anyhow!("Failed to capture frame"))
}
}
}
impl Drop for OpenCVCaptureStream {
fn drop(&mut self) {
debug!("OpenCV capture stream dropped");
}
}
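
For orientation, a hedged smoke-test sketch of the new driver as defined above; the Resolution variant name is an assumption (the real variants live in src/camera/mod.rs), everything else follows the signatures in this file:

    use anyhow::Result;
    use crate::camera::{ExposureMode, OpenCVCamera, Resolution};

    fn smoke_test() -> Result<()> {
        // Open the first V4L2 device and apply a typical night-sky setup.
        let mut camera = OpenCVCamera::open("/dev/video0")?;
        camera.set_format(Resolution::Hd1080)?; // variant name assumed
        camera.set_fps(30)?;
        camera.set_exposure(ExposureMode::Auto)?;
        camera.lock_focus_at_infinity()?;

        // Pull a few frames through the dedicated stream handle.
        let mut stream = camera.start_streaming()?;
        for i in 0..10 {
            let frame = stream.capture_frame()?;
            log::info!("frame {}: {}x{}", i, frame.cols(), frame.rows());
        }
        camera.stop_streaming()
    }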

src/camera/v4l2.rs

@@ -1,287 +1,287 @@ (every line of the old V4L2 driver is commented out; the new state is shown)
// use anyhow::{anyhow, Context, Result};
// use log::{debug, error, info, warn};
// use std::path::Path;
// use v4l::buffer::Type;
// use v4l::io::traits::CaptureStream;
// use v4l::prelude::*;
// use v4l::video::Capture;
// use v4l::{Format, FourCC};
// use opencv::{core, imgproc, prelude::*};
// use crate::camera::{ExposureMode, Resolution};

// /// V4L2 camera driver for star-light cameras
// pub struct V4l2Camera {
//     /// The open device handle
//     device: Device,
//     /// The current camera format
//     format: Format,
//     /// Whether the camera is currently streaming
//     is_streaming: bool,
// }

// impl V4l2Camera {
//     /// Open a camera device by path
//     pub fn open<P: AsRef<Path>>(path: P) -> Result<Self> {
//         let device = Device::with_path(path.as_ref())
//             .context("Failed to open camera device")?;
//         info!(
//             "Opened camera: {} ({})",
//             device.info().card,
//             device.info().driver
//         );
//         // Get the current format
//         let format = device
//             .format()
//             .context("Failed to get camera format")?;
//         debug!("Initial camera format: {:?}", format);
//         Ok(Self {
//             device,
//             format,
//             is_streaming: false,
//         })
//     }

//     /// Set the camera resolution and pixel format
//     pub fn set_format(&mut self, resolution: Resolution) -> Result<()> {
//         let (width, height) = resolution.dimensions();
//         // Try to set format to MJPEG or YUYV first, then fall back to others
//         let formats = [FourCC::new(b"MJPG"), FourCC::new(b"YUYV")];
//         let mut success = false;
//         let mut last_error = None;
//         for &fourcc in &formats {
//             let mut format = Format::new(width, height, fourcc);
//             match self.device.set_format(&mut format) {
//                 Ok(_) => {
//                     self.format = format;
//                     success = true;
//                     break;
//                 }
//                 Err(e) => {
//                     last_error = Some(e);
//                     warn!("Failed to set format {:?}: {}", fourcc, last_error.as_ref().unwrap());
//                 }
//             }
//         }
//         if !success {
//             return Err(anyhow!(
//                 "Failed to set any supported format: {:?}",
//                 last_error.unwrap()
//             ));
//         }
//         info!(
//             "Set camera format: {}×{} {}",
//             self.format.width, self.format.height,
//             String::from_utf8_lossy(&self.format.fourcc.repr)
//         );
//         Ok(())
//     }

//     /// Set the camera frame rate
//     pub fn set_fps(&mut self, fps: u32) -> Result<()> {
//         if let Some(params) = self.device.params() {
//             let mut params = params.context("Failed to get camera parameters")?;
//             params.set_frames_per_second(fps, 1);
//             self.device
//                 .set_params(&params)
//                 .context("Failed to set frame rate")?;
//             info!("Set camera frame rate: {} fps", fps);
//         } else {
//             warn!("Camera does not support frame rate adjustment");
//         }
//         Ok(())
//     }

//     /// Set camera exposure mode and value
//     pub fn set_exposure(&mut self, mode: ExposureMode) -> Result<()> {
//         // First, set auto/manual mode
//         let ctrl_id = v4l::control::id::EXPOSURE_AUTO;
//         let auto_value = match mode {
//             ExposureMode::Auto => 3, // V4L2_EXPOSURE_AUTO
//             ExposureMode::Manual(_) => 1, // V4L2_EXPOSURE_MANUAL
//         };
//         self.device
//             .set_control(ctrl_id, auto_value)
//             .context("Failed to set exposure mode")?;
//         // If manual, set the exposure value
//         if let ExposureMode::Manual(exposure_time) = mode {
//             // Exposure time in microseconds
//             let ctrl_id = v4l::control::id::EXPOSURE_ABSOLUTE;
//             self.device
//                 .set_control(ctrl_id, exposure_time as i64)
//                 .context("Failed to set exposure time")?;
//         }
//         info!("Set camera exposure: {:?}", mode);
//         Ok(())
//     }

//     /// Set camera gain (ISO)
//     pub fn set_gain(&mut self, gain: u8) -> Result<()> {
//         let ctrl_id = v4l::control::id::GAIN;
//         self.device
//             .set_control(ctrl_id, gain as i64)
//             .context("Failed to set gain")?;
//         info!("Set camera gain: {}", gain);
//         Ok(())
//     }

//     /// Lock focus at infinity (if supported)
//     pub fn lock_focus_at_infinity(&mut self) -> Result<()> {
//         // First, set focus mode to manual
//         let auto_focus_id = v4l::control::id::FOCUS_AUTO;
//         if let Ok(_) = self.device.set_control(auto_focus_id, 0) {
//             // Then set focus to infinity (typically maximum value)
//             let focus_id = v4l::control::id::FOCUS_ABSOLUTE;
//             // Get the range of the control
//             if let Ok(control) = self.device.control(focus_id) {
//                 let max_focus = control.maximum();
//                 if let Ok(_) = self.device.set_control(focus_id, max_focus) {
//                     info!("Locked focus at infinity (value: {})", max_focus);
//                     return Ok(());
//                 }
//             }
//             warn!("Failed to set focus to infinity");
//         } else {
//             warn!("Camera does not support focus control");
//         }
//         Ok(())
//     }

//     /// Start streaming from the camera
//     pub fn start_streaming(&mut self) -> Result<V4l2CaptureStream> {
//         let queue = MmapStream::with_buffers(&self.device, Type::VideoCapture, 4)
//             .context("Failed to create capture stream")?;
//         self.is_streaming = true;
//         info!("Started camera streaming");
//         Ok(V4l2CaptureStream {
//             stream: queue,
//             format: self.format.clone(),
//         })
//     }

//     /// Stop streaming from the camera
//     pub fn stop_streaming(&mut self) -> Result<()> {
//         // The streaming will be stopped when the CaptureStream is dropped
//         self.is_streaming = false;
//         info!("Stopped camera streaming");
//         Ok(())
//     }

//     /// Check if the camera is currently streaming
//     pub fn is_streaming(&self) -> bool {
//         self.is_streaming
//     }

//     /// Get current format width
//     pub fn width(&self) -> u32 {
//         self.format.width
//     }

//     /// Get current format height
//     pub fn height(&self) -> u32 {
//         self.format.height
//     }

//     /// Get current format pixel format
//     pub fn pixel_format(&self) -> FourCC {
//         self.format.fourcc
//     }
// }

// /// Wrapper around V4L2 capture stream
// pub struct V4l2CaptureStream {
//     stream: MmapStream,
//     format: Format,
// }

// impl V4l2CaptureStream {
//     /// Capture a single frame from the camera
//     pub fn capture_frame(&mut self) -> Result<core::Mat> {
//         let buffer = self.stream.next()
//             .context("Failed to capture frame")?;
//         let width = self.format.width as i32;
//         let height = self.format.height as i32;
//         // Convert the buffer to an OpenCV Mat based on the pixel format
//         let mat = match self.format.fourcc {
//             // MJPEG format
//             f if f == FourCC::new(b"MJPG") => {
//                 // Decode JPEG data
//                 let data = buffer.data();
//                 let vec_data = unsafe {
//                     std::slice::from_raw_parts(data.as_ptr(), data.len())
//                 }.to_vec();
//                 let buf = core::Vector::from_slice(&vec_data);
//                 let img = opencv::imgcodecs::imdecode(&buf, opencv::imgcodecs::IMREAD_COLOR)?;
//                 img
//             },
//             // YUYV format
//             f if f == FourCC::new(b"YUYV") => {
//                 let data = buffer.data();
//                 // Create a Mat from the YUYV data
//                 let mut yuyv = unsafe {
//                     let bytes_per_pixel = 2; // YUYV is 2 bytes per pixel
//                     let step = width as usize * bytes_per_pixel;
//                     core::Mat::new_rows_cols_with_data(
//                         height,
//                         width,
//                         core::CV_8UC2,
//                         data.as_ptr() as *mut _,
//                         step,
//                     )?
//                 };
//                 // Convert YUYV to BGR
//                 let mut bgr = core::Mat::default()?;
//                 imgproc::cvt_color(&yuyv, &mut bgr, imgproc::COLOR_YUV2BGR_YUYV, 0)?;
//                 bgr
//             },
//             // Unsupported format
//             _ => {
//                 return Err(anyhow!(
//                     "Unsupported pixel format: {}",
//                     String::from_utf8_lossy(&self.format.fourcc.repr)
//                 ));
//             }
//         };
//         Ok(mat)
//     }
// }

// impl Drop for V4l2CaptureStream {
//     fn drop(&mut self) {
//         debug!("V4L2 capture stream dropped");
//     }
// }