Use OpenCV to capture video

grabbit 2025-03-19 09:02:00 +08:00
parent c00f904657
commit 7bbbea6140
5 changed files with 493 additions and 244 deletions

Cargo.toml

@@ -8,7 +8,6 @@ description = "A Raspberry Pi based meteor detection system"
 [dependencies]
 # Hardware interfaces
 rppal = "0.22.1" # Raspberry Pi hardware access
-v4l = "0.14.0" # Video4Linux2 bindings
 serialport = "4.2.0" # Serial port for GPS
 embedded-hal = "0.2.7" # Hardware abstraction layer
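
The new OpenCV backend needs a matching entry in [dependencies] that this hunk does not show; it is presumably added elsewhere in Cargo.toml. A hypothetical sketch of such an entry (the version number is an assumption, not taken from this commit):

opencv = "0.93"  # hypothetical version; Rust bindings used by src/camera/opencv.rs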

src/camera/controller.rs

@@ -8,17 +8,17 @@ use tokio::sync::broadcast;
 use tokio::time;

 use crate::camera::frame_buffer::{Frame, FrameBuffer, SharedFrameBuffer};
-use crate::camera::v4l2::{V4l2Camera, V4l2CaptureStream};
+use crate::camera::opencv::{OpenCVCamera, OpenCVCaptureStream};
 use crate::camera::{CameraSettings, MeteorEvent, Resolution, ExposureMode};

 /// Camera controller manages camera operations and frame capture
 pub struct CameraController {
     /// Camera settings
     settings: CameraSettings,
-    /// The V4L2 camera driver
-    camera: Option<V4l2Camera>,
-    /// The V4L2 capture stream
-    stream: Option<V4l2CaptureStream>,
+    /// The OpenCV camera driver
+    camera: Option<OpenCVCamera>,
+    /// The OpenCV capture stream
+    stream: Option<OpenCVCaptureStream>,
     /// Circular buffer for storing recent frames
     frame_buffer: SharedFrameBuffer,
     /// Frame counter
@@ -64,7 +64,7 @@ impl CameraController {
     /// Initialize the camera with current settings
     pub async fn initialize(&mut self) -> Result<()> {
         // Open the camera
-        let mut camera = V4l2Camera::open(&self.settings.device)
+        let mut camera = OpenCVCamera::open(&self.settings.device)
             .context("Failed to open camera")?;

         // Configure camera parameters
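
The hunk ends before the configuration code; judging from the setters defined in src/camera/opencv.rs below, the elided body presumably continues along these lines (a sketch only — the CameraSettings field names besides `device` are assumptions):

        // Hypothetical continuation of initialize():
        camera.set_format(self.settings.resolution)?;
        camera.set_fps(self.settings.fps)?;
        camera.set_exposure(self.settings.exposure)?;
        camera.set_gain(self.settings.gain)?;
        camera.lock_focus_at_infinity()?;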

src/camera/mod.rs

@@ -1,9 +1,11 @@
 mod controller;
-mod v4l2;
+// mod v4l2;
+mod opencv;
 mod frame_buffer;

 pub use controller::CameraController;
 pub use frame_buffer::{Frame, FrameBuffer};
+pub use opencv::OpenCVCamera;

 use anyhow::Result;
 use chrono::DateTime;
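
With the re-export in place, code outside the module can reach the new driver through the camera facade, e.g.:

use crate::camera::OpenCVCamera;

let camera = OpenCVCamera::open("/dev/video0")?; // device path is an example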

src/camera/opencv.rs (new file, +248 lines)

@@ -0,0 +1,248 @@
use anyhow::{anyhow, Result};
use log::{debug, info, warn};
use std::path::Path;
use opencv::{core, prelude::*, videoio};

use crate::camera::{ExposureMode, Resolution};

/// OpenCV camera driver
pub struct OpenCVCamera {
    /// The VideoCapture instance
    capture: videoio::VideoCapture,
    /// Camera width
    width: u32,
    /// Camera height
    height: u32,
    /// Whether the camera is currently streaming
    is_streaming: bool,
    /// Device index or path
    device: String,
}
impl OpenCVCamera {
    /// Open a camera device by path or index
    pub fn open<P: AsRef<Path>>(path: P) -> Result<Self> {
        let path_str = path.as_ref().to_str()
            .ok_or_else(|| anyhow!("Invalid path"))?;

        let is_device_path = path_str.starts_with("/dev/");
        let mut capture = if is_device_path {
            // For device files like /dev/video0, we need to extract the number
            if let Some(num_str) = path_str.strip_prefix("/dev/video") {
                if let Ok(device_index) = num_str.parse::<i32>() {
                    videoio::VideoCapture::new(device_index, videoio::CAP_ANY)?
                } else {
                    return Err(anyhow!("Invalid device number in path: {}", path_str));
                }
            } else {
                return Err(anyhow!("Unsupported device path format: {}", path_str));
            }
        } else {
            // For other paths, try to open directly (e.g., video files, URLs)
            videoio::VideoCapture::from_file(path_str, videoio::CAP_ANY)?
        };

        if !capture.is_opened()? {
            return Err(anyhow!("Failed to open camera: {}", path_str));
        }

        // Get initial resolution
        let width = capture.get(videoio::CAP_PROP_FRAME_WIDTH)? as u32;
        let height = capture.get(videoio::CAP_PROP_FRAME_HEIGHT)? as u32;

        info!("Opened camera: {} ({}x{})", path_str, width, height);

        Ok(Self {
            capture,
            width,
            height,
            is_streaming: false,
            device: path_str.to_string(),
        })
    }
    /// Set the camera resolution and pixel format
    pub fn set_format(&mut self, resolution: Resolution) -> Result<()> {
        let (width, height) = resolution.dimensions();

        // Set resolution
        self.capture.set(videoio::CAP_PROP_FRAME_WIDTH, width as f64)?;
        self.capture.set(videoio::CAP_PROP_FRAME_HEIGHT, height as f64)?;

        // Read back the actual resolution (may differ from the requested one)
        let actual_width = self.capture.get(videoio::CAP_PROP_FRAME_WIDTH)? as u32;
        let actual_height = self.capture.get(videoio::CAP_PROP_FRAME_HEIGHT)? as u32;

        if actual_width != width || actual_height != height {
            warn!(
                "Requested resolution {}x{} but got {}x{}",
                width, height, actual_width, actual_height
            );
        }

        self.width = actual_width;
        self.height = actual_height;

        info!("Set camera format: {}×{}", self.width, self.height);
        Ok(())
    }
    /// Set the camera frame rate
    pub fn set_fps(&mut self, fps: u32) -> Result<()> {
        self.capture.set(videoio::CAP_PROP_FPS, fps as f64)?;

        // Read back the actual FPS
        let actual_fps = self.capture.get(videoio::CAP_PROP_FPS)?;
        if (actual_fps - fps as f64).abs() > 0.1 {
            warn!("Requested {} fps but got {} fps", fps, actual_fps);
        }

        info!("Set camera frame rate: {} fps", actual_fps);
        Ok(())
    }
    /// Set camera exposure mode and value
    pub fn set_exposure(&mut self, mode: ExposureMode) -> Result<()> {
        match mode {
            ExposureMode::Auto => {
                // 0.75 selects auto exposure on OpenCV's V4L2 backend
                self.capture.set(videoio::CAP_PROP_AUTO_EXPOSURE, 0.75)?;
                info!("Set camera exposure: Auto");
            },
            ExposureMode::Manual(exposure_time) => {
                // 0.25 selects manual exposure on OpenCV's V4L2 backend
                self.capture.set(videoio::CAP_PROP_AUTO_EXPOSURE, 0.25)?;
                // Then set the exposure value; the scale is driver-dependent,
                // so this conversion from microseconds may need tuning per camera
                let exposure_value = exposure_time as f64 / 10000.0;
                self.capture.set(videoio::CAP_PROP_EXPOSURE, exposure_value)?;
                info!("Set camera exposure: Manual ({})", exposure_time);
            }
        }
        Ok(())
    }
    /// Set camera gain (ISO)
    pub fn set_gain(&mut self, gain: u8) -> Result<()> {
        self.capture.set(videoio::CAP_PROP_GAIN, gain as f64)?;
        let actual_gain = self.capture.get(videoio::CAP_PROP_GAIN)?;
        info!("Set camera gain: {} (actual: {})", gain, actual_gain);
        Ok(())
    }
    /// Lock focus at infinity (if supported)
    pub fn lock_focus_at_infinity(&mut self) -> Result<()> {
        // First, switch off autofocus; `set` returns Ok(false) when the
        // property is unsupported, so check the returned flag, not just Ok
        if self.capture.set(videoio::CAP_PROP_AUTOFOCUS, 0.0).unwrap_or(false) {
            // Then set focus to infinity (typically the maximum value)
            if self.capture.set(videoio::CAP_PROP_FOCUS, 1.0).unwrap_or(false) {
                info!("Locked focus at infinity");
                return Ok(());
            }
        }
        warn!("Camera does not support focus control");
        Ok(())
    }
    /// Start streaming from the camera
    pub fn start_streaming(&mut self) -> Result<OpenCVCaptureStream> {
        // Ensure capture is opened
        if !self.capture.is_opened()? {
            return Err(anyhow!("Camera is not open"));
        }

        // Create a separate VideoCapture for the stream to avoid concurrent
        // access issues (note that many V4L2 devices allow only one active
        // reader, in which case the original handle must be reused instead)
        let device = self.device.clone();
        let is_device_path = device.starts_with("/dev/");

        let stream_capture = if is_device_path {
            if let Some(num_str) = device.strip_prefix("/dev/video") {
                if let Ok(device_index) = num_str.parse::<i32>() {
                    // Re-apply the resolution; other properties (fps, exposure)
                    // are not carried over to this second handle
                    let mut cap = videoio::VideoCapture::new(device_index, videoio::CAP_ANY)?;
                    cap.set(videoio::CAP_PROP_FRAME_WIDTH, self.width as f64)?;
                    cap.set(videoio::CAP_PROP_FRAME_HEIGHT, self.height as f64)?;
                    cap
                } else {
                    return Err(anyhow!("Invalid device number in path: {}", device));
                }
            } else {
                return Err(anyhow!("Unsupported device path format: {}", device));
            }
        } else {
            // For other paths, try to open directly
            videoio::VideoCapture::from_file(&device, videoio::CAP_ANY)?
        };

        if !stream_capture.is_opened()? {
            return Err(anyhow!("Failed to open camera stream"));
        }

        self.is_streaming = true;
        info!("Started camera streaming");

        Ok(OpenCVCaptureStream {
            capture: stream_capture,
        })
    }
    /// Stop streaming from the camera
    pub fn stop_streaming(&mut self) -> Result<()> {
        self.is_streaming = false;
        info!("Stopped camera streaming");
        Ok(())
    }

    /// Check if the camera is currently streaming
    pub fn is_streaming(&self) -> bool {
        self.is_streaming
    }

    /// Get current format width
    pub fn width(&self) -> u32 {
        self.width
    }

    /// Get current format height
    pub fn height(&self) -> u32 {
        self.height
    }
}
/// Wrapper around OpenCV VideoCapture for streaming
pub struct OpenCVCaptureStream {
    capture: videoio::VideoCapture,
}

impl OpenCVCaptureStream {
    /// Capture a single frame from the camera
    pub fn capture_frame(&mut self) -> Result<core::Mat> {
        let mut frame = core::Mat::default()?;
        if self.capture.read(&mut frame)? {
            // `empty()` is fallible in the same opencv API generation
            // that makes Mat::default() fallible, hence the `?`
            if frame.empty()? {
                return Err(anyhow!("Captured frame is empty"));
            }
            Ok(frame)
        } else {
            Err(anyhow!("Failed to capture frame"))
        }
    }
}

impl Drop for OpenCVCaptureStream {
    fn drop(&mut self) {
        debug!("OpenCV capture stream dropped");
    }
}
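
Putting the new driver together, a minimal capture loop might look like the following sketch (the device path and frame count are illustrative assumptions; errors propagate via anyhow as in the module itself):

use anyhow::Result;
use crate::camera::{ExposureMode, OpenCVCamera};

fn grab_frames() -> Result<()> {
    let mut camera = OpenCVCamera::open("/dev/video0")?; // hypothetical device
    camera.set_fps(30)?;
    camera.set_exposure(ExposureMode::Auto)?;

    let mut stream = camera.start_streaming()?;
    for _ in 0..10 {
        let frame = stream.capture_frame()?; // BGR Mat, ready for imgproc
        let size = frame.size()?;
        println!("captured {}x{} frame", size.width, size.height);
    }
    camera.stop_streaming()
}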

src/camera/v4l2.rs

This commit comments out the entire previous V4L2 implementation: the new revision of the file is identical except that every line gains a `//` prefix (the module is also disabled in src/camera/mod.rs above). The original code:

@@ -1,287 +1,287 @@
use anyhow::{anyhow, Context, Result};
use log::{debug, error, info, warn};
use std::path::Path;
use v4l::buffer::Type;
use v4l::io::traits::CaptureStream;
use v4l::prelude::*;
use v4l::video::Capture;
use v4l::{Format, FourCC};
use opencv::{core, imgproc, prelude::*};
use crate::camera::{ExposureMode, Resolution};
/// V4L2 camera driver for star-light cameras
pub struct V4l2Camera {
/// The open device handle
device: Device,
/// The current camera format
format: Format,
/// Whether the camera is currently streaming
is_streaming: bool,
}
impl V4l2Camera {
/// Open a camera device by path
pub fn open<P: AsRef<Path>>(path: P) -> Result<Self> {
let device = Device::with_path(path.as_ref())
.context("Failed to open camera device")?;
info!(
"Opened camera: {} ({})",
device.info().card,
device.info().driver
);
// Get the current format
let format = device
.format()
.context("Failed to get camera format")?;
debug!("Initial camera format: {:?}", format);
Ok(Self {
device,
format,
is_streaming: false,
})
}
/// Set the camera resolution and pixel format
pub fn set_format(&mut self, resolution: Resolution) -> Result<()> {
let (width, height) = resolution.dimensions();
// Try to set format to MJPEG or YUYV first, then fall back to others
let formats = [FourCC::new(b"MJPG"), FourCC::new(b"YUYV")];
let mut success = false;
let mut last_error = None;
for &fourcc in &formats {
let mut format = Format::new(width, height, fourcc);
match self.device.set_format(&mut format) {
Ok(_) => {
self.format = format;
success = true;
break;
}
Err(e) => {
last_error = Some(e);
warn!("Failed to set format {:?}: {}", fourcc, last_error.as_ref().unwrap());
}
}
}
if !success {
return Err(anyhow!(
"Failed to set any supported format: {:?}",
last_error.unwrap()
));
}
info!(
"Set camera format: {}×{} {}",
self.format.width, self.format.height,
String::from_utf8_lossy(&self.format.fourcc.repr)
);
Ok(())
}
/// Set the camera frame rate
pub fn set_fps(&mut self, fps: u32) -> Result<()> {
if let Some(params) = self.device.params() {
let mut params = params.context("Failed to get camera parameters")?;
params.set_frames_per_second(fps, 1);
self.device
.set_params(&params)
.context("Failed to set frame rate")?;
info!("Set camera frame rate: {} fps", fps);
} else {
warn!("Camera does not support frame rate adjustment");
}
Ok(())
}
/// Set camera exposure mode and value
pub fn set_exposure(&mut self, mode: ExposureMode) -> Result<()> {
// First, set auto/manual mode
let ctrl_id = v4l::control::id::EXPOSURE_AUTO;
let auto_value = match mode {
ExposureMode::Auto => 3, // V4L2_EXPOSURE_AUTO
ExposureMode::Manual(_) => 1, // V4L2_EXPOSURE_MANUAL
};
self.device
.set_control(ctrl_id, auto_value)
.context("Failed to set exposure mode")?;
// If manual, set the exposure value
if let ExposureMode::Manual(exposure_time) = mode {
// Exposure time in microseconds
let ctrl_id = v4l::control::id::EXPOSURE_ABSOLUTE;
self.device
.set_control(ctrl_id, exposure_time as i64)
.context("Failed to set exposure time")?;
}
info!("Set camera exposure: {:?}", mode);
Ok(())
}
/// Set camera gain (ISO)
pub fn set_gain(&mut self, gain: u8) -> Result<()> {
let ctrl_id = v4l::control::id::GAIN;
self.device
.set_control(ctrl_id, gain as i64)
.context("Failed to set gain")?;
info!("Set camera gain: {}", gain);
Ok(())
}
/// Lock focus at infinity (if supported)
pub fn lock_focus_at_infinity(&mut self) -> Result<()> {
// First, set focus mode to manual
let auto_focus_id = v4l::control::id::FOCUS_AUTO;
if let Ok(_) = self.device.set_control(auto_focus_id, 0) {
// Then set focus to infinity (typically maximum value)
let focus_id = v4l::control::id::FOCUS_ABSOLUTE;
// Get the range of the control
if let Ok(control) = self.device.control(focus_id) {
let max_focus = control.maximum();
if let Ok(_) = self.device.set_control(focus_id, max_focus) {
info!("Locked focus at infinity (value: {})", max_focus);
return Ok(());
}
}
warn!("Failed to set focus to infinity");
} else {
warn!("Camera does not support focus control");
}
Ok(())
}
/// Start streaming from the camera
pub fn start_streaming(&mut self) -> Result<V4l2CaptureStream> {
let queue = MmapStream::with_buffers(&self.device, Type::VideoCapture, 4)
.context("Failed to create capture stream")?;
self.is_streaming = true;
info!("Started camera streaming");
Ok(V4l2CaptureStream {
stream: queue,
format: self.format.clone(),
})
}
/// Stop streaming from the camera
pub fn stop_streaming(&mut self) -> Result<()> {
// The streaming will be stopped when the CaptureStream is dropped
self.is_streaming = false;
info!("Stopped camera streaming");
Ok(())
}
/// Check if the camera is currently streaming
pub fn is_streaming(&self) -> bool {
self.is_streaming
}
/// Get current format width
pub fn width(&self) -> u32 {
self.format.width
}
/// Get current format height
pub fn height(&self) -> u32 {
self.format.height
}
/// Get current format pixel format
pub fn pixel_format(&self) -> FourCC {
self.format.fourcc
}
}
/// Wrapper around V4L2 capture stream
pub struct V4l2CaptureStream {
stream: MmapStream,
format: Format,
}
impl V4l2CaptureStream {
/// Capture a single frame from the camera
pub fn capture_frame(&mut self) -> Result<core::Mat> {
let buffer = self.stream.next()
.context("Failed to capture frame")?;
let width = self.format.width as i32;
let height = self.format.height as i32;
// Convert the buffer to an OpenCV Mat based on the pixel format
let mat = match self.format.fourcc {
// MJPEG format
f if f == FourCC::new(b"MJPG") => {
// Decode JPEG data
let data = buffer.data();
let vec_data = unsafe {
std::slice::from_raw_parts(data.as_ptr(), data.len())
}.to_vec();
let buf = core::Vector::from_slice(&vec_data);
let img = opencv::imgcodecs::imdecode(&buf, opencv::imgcodecs::IMREAD_COLOR)?;
img
},
// YUYV format
f if f == FourCC::new(b"YUYV") => {
let data = buffer.data();
// Create a Mat from the YUYV data
let mut yuyv = unsafe {
let bytes_per_pixel = 2; // YUYV is 2 bytes per pixel
let step = width as usize * bytes_per_pixel;
core::Mat::new_rows_cols_with_data(
height,
width,
core::CV_8UC2,
data.as_ptr() as *mut _,
step,
)?
};
// Convert YUYV to BGR
let mut bgr = core::Mat::default()?;
imgproc::cvt_color(&yuyv, &mut bgr, imgproc::COLOR_YUV2BGR_YUYV, 0)?;
bgr
},
// Unsupported format
_ => {
return Err(anyhow!(
"Unsupported pixel format: {}",
String::from_utf8_lossy(&self.format.fourcc.repr)
));
}
};
Ok(mat)
}
}
impl Drop for V4l2CaptureStream {
fn drop(&mut self) {
debug!("V4L2 capture stream dropped");
}
}