use anyhow::{anyhow, Context, Result};
use log::{debug, error, info, warn};
use std::path::Path;
use std::sync::{Arc, Mutex};
use opencv::{core, prelude::*, videoio};
use crate::camera::{ExposureMode, Resolution};
/// OpenCV camera driver
pub struct OpenCVCamera {
/// The VideoCapture instance
capture: Arc<Mutex<videoio::VideoCapture>>,
/// Camera width
width: u32,
/// Camera height
height: u32,
/// Whether the camera is currently streaming
is_streaming: bool,
/// Device index or path
device: String,
}
impl OpenCVCamera {
/// Open a camera device by path or index
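///
/// A minimal usage sketch (not from the original sources); the device
/// identifier is an assumption, substitute your own index or `/dev/videoN`
/// node:
///
/// ```ignore
/// # fn main() -> anyhow::Result<()> {
/// // "0" is parsed as a device index; "/dev/video0" would use V4L2 on Linux
/// let cam = OpenCVCamera::open("0")?;
/// println!("native resolution: {}x{}", cam.width(), cam.height());
/// # Ok(())
/// # }
/// ```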
pub fn open<P: AsRef<Path>>(path: P) -> Result<Self> {
let path_str = path.as_ref().to_str()
.ok_or_else(|| anyhow!("Invalid path"))?;
let capture = Self::create_capture_from_path(path_str)?;
if !capture.is_opened()? {
return Err(anyhow!("Failed to open camera: {}", path_str));
}
// Get initial resolution
let width = capture.get(videoio::CAP_PROP_FRAME_WIDTH)? as u32;
let height = capture.get(videoio::CAP_PROP_FRAME_HEIGHT)? as u32;
info!(
"Opened camera: {} ({}x{})",
path_str, width, height
);
Ok(Self {
capture: Arc::new(Mutex::new(capture)),
width,
height,
is_streaming: false,
device: path_str.to_string(),
})
}
/// Open a video file for input
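///
/// A minimal sketch; `clip.mp4` is a placeholder path, not a file from this
/// project:
///
/// ```ignore
/// # fn main() -> anyhow::Result<()> {
/// let video = OpenCVCamera::open_file("clip.mp4")?;
/// println!("video frame size: {}x{}", video.width(), video.height());
/// # Ok(())
/// # }
/// ```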
pub fn open_file<P: AsRef<Path>>(path: P) -> Result<Self> {
let path_str = path.as_ref().to_str()
.ok_or_else(|| anyhow!("Invalid path"))?;
let capture = videoio::VideoCapture::from_file(path_str, videoio::CAP_ANY)?;
if !capture.is_opened()? {
return Err(anyhow!("Failed to open video file: {}", path_str));
}
// Get video properties
let width = capture.get(videoio::CAP_PROP_FRAME_WIDTH)? as u32;
let height = capture.get(videoio::CAP_PROP_FRAME_HEIGHT)? as u32;
let fps = capture.get(videoio::CAP_PROP_FPS)? as u32;
let total_frames = capture.get(videoio::CAP_PROP_FRAME_COUNT)? as u32;
info!(
"Opened video file: {} ({}x{} @ {} fps, {} frames)",
path_str, width, height, fps, total_frames
);
Ok(Self {
capture: Arc::new(Mutex::new(capture)),
width,
height,
is_streaming: false,
device: path_str.to_string(),
})
}
/// Create a VideoCapture instance from a path or device index
fn create_capture_from_path(path_str: &str) -> Result<videoio::VideoCapture> {
// Try to parse as integer index first
if let Ok(device_index) = path_str.parse::<i32>() {
return Ok(videoio::VideoCapture::new(device_index, videoio::CAP_ANY)?);
}
// Handle platform-specific device paths
#[cfg(target_os = "linux")]
{
// For Linux device files like /dev/video0
if let Some(num_str) = path_str.strip_prefix("/dev/video") {
if let Ok(device_index) = num_str.parse::<i32>() {
// On Linux, access the camera directly via V4L2
info!("Opening camera on Linux using V4L2: {}", path_str);
return Ok(videoio::VideoCapture::new(device_index, videoio::CAP_V4L2)?);
} else {
return Err(anyhow!("Invalid device number in path: {}", path_str));
}
}
}
#[cfg(target_os = "macos")]
{
// macOS doesn't use /dev/video* paths, but it might have a special format
// If we get a path with "camera" or "facetime" or other macOS camera identifiers
if path_str.contains("camera") || path_str.contains("facetime") || path_str.contains("avfoundation") {
// For macOS, try to extract any numbers in the path
let nums: Vec<&str> = path_str.split(|c: char| !c.is_digit(10))
.filter(|s| !s.is_empty())
.collect();
if let Some(num_str) = nums.first() {
if let Ok(device_index) = num_str.parse::<i32>() {
return Ok(videoio::VideoCapture::new(device_index, videoio::CAP_AVFOUNDATION)?);
}
}
// If we can't extract a number, try device 0 with AVFoundation
return Ok(videoio::VideoCapture::new(0, videoio::CAP_AVFOUNDATION)?);
}
}
// For URLs, video files, or any other path type
Ok(videoio::VideoCapture::from_file(path_str, videoio::CAP_ANY)?)
}
/// Set the camera resolution
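///
/// A usage sketch; `Resolution::Hd720` is a hypothetical variant name used
/// for illustration, use whatever presets `crate::camera::Resolution`
/// actually defines:
///
/// ```ignore
/// # fn main() -> anyhow::Result<()> {
/// # let mut cam = OpenCVCamera::open("0")?;
/// // The driver may pick the closest mode it supports; the actual size is
/// // read back and exposed through `width()` / `height()`.
/// cam.set_format(Resolution::Hd720)?; // hypothetical 1280x720 preset
/// # Ok(())
/// # }
/// ```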
pub fn set_format(&mut self, resolution: Resolution) -> Result<()> {
let (width, height) = resolution.dimensions();
// Acquire the mutex guard
let mut capture_guard = self.capture.lock().unwrap();
// Set resolution
capture_guard.set(videoio::CAP_PROP_FRAME_WIDTH, width as f64)?;
capture_guard.set(videoio::CAP_PROP_FRAME_HEIGHT, height as f64)?;
// Read back actual resolution (might be different from requested)
let actual_width = capture_guard.get(videoio::CAP_PROP_FRAME_WIDTH)? as u32;
let actual_height = capture_guard.get(videoio::CAP_PROP_FRAME_HEIGHT)? as u32;
if actual_width != width || actual_height != height {
warn!(
"Requested resolution {}x{} but got {}x{}",
width, height, actual_width, actual_height
);
}
self.width = actual_width;
self.height = actual_height;
info!("Set camera format: {}×{}", self.width, self.height);
Ok(())
}
/// Set the camera frame rate
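///
/// A small sketch; the driver may clamp the request, and the rate the backend
/// reports back is what gets logged:
///
/// ```ignore
/// # fn main() -> anyhow::Result<()> {
/// # let mut cam = OpenCVCamera::open("0")?;
/// cam.set_fps(30)?;
/// # Ok(())
/// # }
/// ```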
pub fn set_fps(&mut self, fps: u32) -> Result<()> {
// Acquire the mutex guard
let mut capture_guard = self.capture.lock().unwrap();
capture_guard.set(videoio::CAP_PROP_FPS, fps as f64)?;
// Read back actual FPS
let actual_fps = capture_guard.get(videoio::CAP_PROP_FPS)?;
if (actual_fps - fps as f64).abs() > 0.1 {
warn!("Requested {} fps but got {} fps", fps, actual_fps);
}
info!("Set camera frame rate: {} fps", actual_fps);
Ok(())
}
/// Set camera exposure mode and value
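///
/// A sketch of both modes; the `Manual` payload is treated as microseconds by
/// this driver (it is divided by 10000 before being passed to OpenCV), and the
/// gain call is shown only as the related control:
///
/// ```ignore
/// # fn main() -> anyhow::Result<()> {
/// # let mut cam = OpenCVCamera::open("0")?;
/// cam.set_exposure(ExposureMode::Auto)?;
/// // Roughly a 10 ms exposure, assuming the payload is a microsecond count
/// cam.set_exposure(ExposureMode::Manual(10_000))?;
/// cam.set_gain(50)?;
/// # Ok(())
/// # }
/// ```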
pub fn set_exposure(&mut self, mode: ExposureMode) -> Result<()> {
// Acquire the mutex guard
let mut capture_guard = self.capture.lock().unwrap();
match mode {
ExposureMode::Auto => {
// Set auto exposure mode
capture_guard.set(videoio::CAP_PROP_AUTO_EXPOSURE, 0.75)?; // 0.75 is auto mode in OpenCV
info!("Set camera exposure: Auto");
},
ExposureMode::Manual(exposure_time) => {
// First disable auto exposure
capture_guard.set(videoio::CAP_PROP_AUTO_EXPOSURE, 0.25)?; // 0.25 is manual mode in OpenCV
// Then set exposure value - might need conversion based on camera
let exposure_value = exposure_time as f64 / 10000.0; // Convert microseconds to OpenCV units
capture_guard.set(videoio::CAP_PROP_EXPOSURE, exposure_value)?;
info!("Set camera exposure: Manual ({})", exposure_time);
}
}
Ok(())
}
/// Set camera gain (ISO)
pub fn set_gain(&mut self, gain: u8) -> Result<()> {
// Acquire the mutex guard
let mut capture_guard = self.capture.lock().unwrap();
capture_guard.set(videoio::CAP_PROP_GAIN, gain as f64)?;
let actual_gain = capture_guard.get(videoio::CAP_PROP_GAIN)?;
info!("Set camera gain: {} (actual: {})", gain, actual_gain);
Ok(())
}
/// Lock focus at infinity (if supported)
pub fn lock_focus_at_infinity(&mut self) -> Result<()> {
// Acquire the mutex guard
let mut capture_guard = self.capture.lock().unwrap();
// First, set focus mode to manual. `set` returns Ok(true) only when the
// backend actually accepts the property, so check the flag, not just the Result.
if capture_guard.set(videoio::CAP_PROP_AUTOFOCUS, 0.0).unwrap_or(false) {
// Then set focus to infinity (typically the maximum value)
if capture_guard.set(videoio::CAP_PROP_FOCUS, 1.0).unwrap_or(false) {
info!("Locked focus at infinity");
return Ok(());
}
}
warn!("Camera does not support focus control");
Ok(())
}
/// Start streaming from the camera
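///
/// A minimal end-to-end sketch: open a device, start the stream, and grab one
/// frame (the device index is an assumption):
///
/// ```ignore
/// # use opencv::prelude::*;
/// # fn main() -> anyhow::Result<()> {
/// let mut cam = OpenCVCamera::open("0")?;
/// let mut stream = cam.start_streaming()?;
/// let frame = stream.capture_frame()?;
/// println!("captured a {}x{} frame", frame.cols(), frame.rows());
/// # Ok(())
/// # }
/// ```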
pub fn start_streaming(&mut self) -> Result<OpenCVCaptureStream> {
// Acquire the mutex guard to check that the camera is open
{
let capture_guard = self.capture.lock().unwrap();
if !capture_guard.is_opened()? {
return Err(anyhow!("Camera is not open"));
}
}
self.is_streaming = true;
info!("Started camera streaming");
// Reuse the same capture instance to avoid reopening the device
Ok(OpenCVCaptureStream::new(self.capture.clone(), false))
}
/// Start streaming from the camera with loop option for video files
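///
/// A sketch for looping playback of a file source (`clip.mp4` is a
/// placeholder):
///
/// ```ignore
/// # fn main() -> anyhow::Result<()> {
/// let mut video = OpenCVCamera::open_file("clip.mp4")?;
/// // With `true`, capture_frame seeks back to frame 0 when the file ends
/// let mut stream = video.start_streaming_with_loop(true)?;
/// let _first = stream.capture_frame()?;
/// # Ok(())
/// # }
/// ```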
pub fn start_streaming_with_loop(&mut self, loop_video: bool) -> Result<OpenCVCaptureStream> {
// Acquire the mutex guard to check that the camera is open
{
let capture_guard = self.capture.lock().unwrap();
if !capture_guard.is_opened()? {
return Err(anyhow!("Camera is not open"));
}
}
self.is_streaming = true;
if loop_video {
info!("Started streaming with loop enabled");
} else {
info!("Started streaming");
}
// Reuse the same capture instance to avoid reopening the device
Ok(OpenCVCaptureStream::new(self.capture.clone(), loop_video))
}
/// Stop streaming from the camera
pub fn stop_streaming(&mut self) -> Result<()> {
self.is_streaming = false;
info!("Stopped camera streaming");
Ok(())
}
/// Check if the camera is currently streaming
pub fn is_streaming(&self) -> bool {
self.is_streaming
}
/// Get current format width
pub fn width(&self) -> u32 {
self.width
}
/// Get current format height
pub fn height(&self) -> u32 {
self.height
}
}
/// Wrapper around OpenCV VideoCapture for streaming
pub struct OpenCVCaptureStream {
capture: Arc<Mutex<videoio::VideoCapture>>,
loop_video: bool,
}
impl OpenCVCaptureStream {
/// Create a new capture stream
pub fn new(capture: Arc<Mutex<videoio::VideoCapture>>, loop_video: bool) -> Self {
Self {
capture,
loop_video,
}
}
/// Capture a single frame from the camera or video file
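///
/// A polling-loop sketch; without looping enabled, the stream reports an error
/// once the source ends, which is used here to break out:
///
/// ```ignore
/// # fn main() -> anyhow::Result<()> {
/// # let mut video = OpenCVCamera::open_file("clip.mp4")?;
/// # let mut stream = video.start_streaming()?;
/// loop {
///     match stream.capture_frame() {
///         Ok(_frame) => { /* hand the frame to the processing pipeline */ }
///         Err(err) => {
///             eprintln!("capture stopped: {}", err);
///             break;
///         }
///     }
/// }
/// # Ok(())
/// # }
/// ```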
pub fn capture_frame(&mut self) -> Result<core::Mat> {
let mut frame = core::Mat::default();
// Acquire the mutex guard
let mut capture_guard = self.capture.lock().unwrap();
// `read` returns Ok(false) at the end of a stream, while some backends
// instead return Ok(true) with an empty frame; treat both as end of input.
let got_frame = capture_guard.read(&mut frame)?;
if got_frame && !frame.empty() {
return Ok(frame);
}
// If the video has ended and looping is requested, rewind and retry
if self.loop_video {
debug!("Video file ended, looping back to start");
// Seek back to the start of the video
capture_guard.set(videoio::CAP_PROP_POS_FRAMES, 0.0)?;
// Read the frame again
if capture_guard.read(&mut frame)? {
if frame.empty() {
return Err(anyhow!("Frame is still empty after looping"));
}
return Ok(frame);
}
return Err(anyhow!("Failed to read frame after looping"));
}
if got_frame {
Err(anyhow!("End of video reached"))
} else {
Err(anyhow!("Failed to capture frame"))
}
}
}
impl Drop for OpenCVCaptureStream {
fn drop(&mut self) {
debug!("OpenCV capture stream dropped");
}
}