501 lines
19 KiB
Rust
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

use anyhow::{anyhow, Context, Result};
use log::{debug, error, info, warn};
use std::path::Path;
use opencv::{core, prelude::*, videoio};
use crate::camera::{ExposureMode, Resolution};
/// OpenCV camera driver.
///
/// Wraps an OpenCV `VideoCapture` together with the frame geometry the
/// backend reported and the device identifier it was opened from.
pub struct OpenCVCamera {
    /// The VideoCapture instance
    capture: videoio::VideoCapture,
    /// Camera frame width in pixels, as last read back from the backend
    width: u32,
    /// Camera frame height in pixels, as last read back from the backend
    height: u32,
    /// Whether the camera is currently streaming
    is_streaming: bool,
    /// Device index, device path, or GStreamer pipeline string
    /// (pipelines are recognized elsewhere by containing `!`)
    device: String,
}
impl OpenCVCamera {
/// Open a camera device by path, numeric index, or GStreamer pipeline.
///
/// Delegates device probing to `create_capture_from_path`, verifies the
/// capture actually opened, then records the resolution the backend
/// reports (which may later be renegotiated via `set_format`).
///
/// # Errors
/// Returns an error when the path is not valid UTF-8, or when the
/// backend fails to open the device.
pub fn open<P: AsRef<Path>>(path: P) -> Result<Self> {
    // Fix: include the offending path in the error instead of a bare
    // "Invalid path", so the failure is diagnosable from logs.
    let path_str = path
        .as_ref()
        .to_str()
        .ok_or_else(|| anyhow!("Invalid path (not valid UTF-8): {}", path.as_ref().display()))?;
    // Fix: `mut` was unnecessary — only `&self` accessors are called
    // before the capture is moved into the struct.
    let capture = Self::create_capture_from_path(path_str)?;
    if !capture.is_opened()? {
        return Err(anyhow!("Failed to open camera: {}", path_str));
    }
    // Get initial resolution as negotiated by the backend.
    let width = capture.get(videoio::CAP_PROP_FRAME_WIDTH)? as u32;
    let height = capture.get(videoio::CAP_PROP_FRAME_HEIGHT)? as u32;
    info!(
        "Opened camera: {} ({}x{})",
        path_str, width, height
    );
    Ok(Self {
        capture,
        width,
        height,
        is_streaming: false,
        device: path_str.to_string(),
    })
}
/// Create a VideoCapture instance from a path or device index.
///
/// Tries several strategies, in order:
/// 1. GStreamer pipeline (any string containing `!`), with a sanity
///    check of GStreamer itself and, on Raspberry Pi, a set of
///    alternate fallback pipelines;
/// 2. bare integer device index via the default backend;
/// 3. Linux `/dev/videoN` path via the V4L2 backend, falling back to
///    the default backend;
/// 4. macOS camera hints via AVFoundation;
/// 5. OpenCV's generic file-open as a last resort.
///
/// Extra diagnostics (`v4l2-ctl` output) are logged on Raspberry Pi.
fn create_capture_from_path(path_str: &str) -> Result<videoio::VideoCapture> {
    info!("Attempting to open camera with path/pipeline: {}", path_str);
    // Detect whether we are running on a Raspberry Pi, to provide more
    // debugging information ("BCM" covers older /proc/cpuinfo formats).
    let is_raspberry_pi = std::fs::read_to_string("/proc/cpuinfo")
        .map(|content| content.contains("Raspberry Pi") || content.contains("BCM"))
        .unwrap_or(false);
    if is_raspberry_pi {
        info!("Raspberry Pi detected, will try multiple approaches to open camera");
        // Best-effort: list the available video devices via v4l2-ctl.
        match std::process::Command::new("v4l2-ctl").arg("--list-devices").output() {
            Ok(output) => {
                let devices = String::from_utf8_lossy(&output.stdout);
                info!("Available video devices:\n{}", devices);
            },
            Err(e) => warn!("Could not list video devices: {}", e)
        }
    }
    // Detect a GStreamer pipeline (pipelines normally contain the `!`
    // element separator).
    if path_str.contains("!") {
        info!("Detected GStreamer pipeline, using from_file with GSTREAMER backend");
        // For GStreamer pipelines, try a few different approaches.
        // First, probe with a very basic pipeline to confirm GStreamer
        // support is present and functional at all.
        let simple_pipeline = "videotestsrc ! videoconvert ! appsink";
        info!("First trying a very basic test pipeline to check GStreamer: {}", simple_pipeline);
        match videoio::VideoCapture::from_file(simple_pipeline, videoio::CAP_GSTREAMER) {
            Ok(cap) => {
                if cap.is_opened().unwrap_or(false) {
                    info!("Basic GStreamer test pipeline works! Now trying the real pipeline");
                } else {
                    warn!("Basic GStreamer test pipeline opened but not ready - GStreamer may not be working properly");
                }
            },
            Err(e) => warn!("Basic GStreamer test failed: {} - GStreamer may not be available", e)
        }
        // Now try the caller's original pipeline.
        match videoio::VideoCapture::from_file(path_str, videoio::CAP_GSTREAMER) {
            Ok(cap) => {
                if cap.is_opened().unwrap_or(false) {
                    info!("Successfully opened GStreamer pipeline");
                    return Ok(cap);
                } else {
                    warn!("GStreamer pipeline opened but not ready");
                }
            },
            Err(e) => {
                warn!("Failed to open with GStreamer: {}. Will try alternative methods.", e);
            }
        }
        // On a Raspberry Pi, try pipelines with different source/format
        // combinations before giving up on GStreamer.
        if is_raspberry_pi {
            let alternate_pipelines = [
                "v4l2src ! video/x-raw,format=YUY2 ! videoconvert ! appsink",
                "v4l2src ! video/x-raw,format=BGR ! videoconvert ! appsink",
                "v4l2src device=/dev/video0 ! videoconvert ! appsink",
                "v4l2src device=/dev/video0 ! video/x-raw,format=YUY2 ! videoconvert ! appsink"
            ];
            for pipeline in alternate_pipelines.iter() {
                info!("Trying alternate pipeline: {}", pipeline);
                match videoio::VideoCapture::from_file(pipeline, videoio::CAP_GSTREAMER) {
                    Ok(cap) => {
                        if cap.is_opened().unwrap_or(false) {
                            info!("Success with alternate pipeline: {}", pipeline);
                            return Ok(cap);
                        }
                    },
                    Err(e) => warn!("Failed with alternate pipeline: {}", e)
                }
            }
        }
    }
    // Try to parse as integer index first
    if let Ok(device_index) = path_str.parse::<i32>() {
        info!("Opening camera by index: {}", device_index);
        // On Raspberry Pi, gather extra V4L2 diagnostics before opening.
        if is_raspberry_pi {
            // Check whether the device node actually exists.
            if std::path::Path::new(&format!("/dev/video{}", device_index)).exists() {
                info!("Device /dev/video{} exists", device_index);
            } else {
                warn!("Device /dev/video{} does not exist!", device_index);
            }
            // Query device information via v4l2-ctl (best-effort).
            match std::process::Command::new("v4l2-ctl")
                .args(&["-d", &format!("/dev/video{}", device_index), "-D"])
                .output()
            {
                Ok(output) => {
                    let info = String::from_utf8_lossy(&output.stdout);
                    info!("Device info for /dev/video{}:\n{}", device_index, info);
                },
                Err(e) => warn!("Could not get device info: {}", e)
            }
        }
        let cap = videoio::VideoCapture::new(device_index, videoio::CAP_ANY)?;
        if cap.is_opened()? {
            return Ok(cap);
        } else {
            warn!("Device index {} opened but not ready", device_index);
        }
    }
    // Handle platform-specific device paths
    #[cfg(target_os = "linux")]
    {
        // For Linux device files like /dev/video0
        if let Some(num_str) = path_str.strip_prefix("/dev/video") {
            if let Ok(device_index) = num_str.parse::<i32>() {
                info!("Opening Linux camera from /dev/video{}", device_index);
                // Try the V4L2 backend first; it is usually the most
                // reliable on Raspberry Pi.
                match videoio::VideoCapture::new(device_index, videoio::CAP_V4L2) {
                    Ok(cap) => {
                        if cap.is_opened()? {
                            info!("Successfully opened camera with V4L2 backend");
                            return Ok(cap);
                        }
                    },
                    Err(e) => warn!("Failed to open with V4L2: {}", e)
                }
                // Fall back to the generic backend.
                info!("Falling back to default backend");
                return Ok(videoio::VideoCapture::new(device_index, videoio::CAP_ANY)?);
            } else {
                return Err(anyhow!("Invalid device number in path: {}", path_str));
            }
        }
    }
    #[cfg(target_os = "macos")]
    {
        // macOS doesn't use /dev/video* paths, but it might have a special format
        if path_str.contains("camera") || path_str.contains("facetime") || path_str.contains("avfoundation") {
            // For macOS, try to extract any numbers in the path
            let nums: Vec<&str> = path_str.split(|c: char| !c.is_digit(10))
                .filter(|s| !s.is_empty())
                .collect();
            if let Some(num_str) = nums.first() {
                if let Ok(device_index) = num_str.parse::<i32>() {
                    info!("Opening macOS camera with AVFoundation: {}", device_index);
                    return Ok(videoio::VideoCapture::new(device_index, videoio::CAP_AVFOUNDATION)?);
                }
            }
            // If we can't extract a number, try device 0 with AVFoundation
            info!("Falling back to default macOS camera (index 0)");
            return Ok(videoio::VideoCapture::new(0, videoio::CAP_AVFOUNDATION)?);
        }
    }
    // Finally, fall back to OpenCV's generic file-open method.
    info!("Using generic file opening method for: {}", path_str);
    let cap = videoio::VideoCapture::from_file(path_str, videoio::CAP_ANY)?;
    if !cap.is_opened()? {
        warn!("Camera opened but not ready. This might indicate a configuration issue.");
    }
    Ok(cap)
}
/// Set the camera resolution and pixel format.
///
/// For GStreamer pipelines the resolution is fixed by the pipeline
/// string and is not renegotiated here. In all cases the resolution the
/// backend actually applied is read back, stored in `self`, and a
/// warning is logged if it differs from the request.
pub fn set_format(&mut self, resolution: Resolution) -> Result<()> {
    let (width, height) = resolution.dimensions();
    // GStreamer pipelines carry their own caps; detect them by the `!`
    // element separator in the device string.
    let is_gstreamer_pipeline = self.device.contains("!");
    if !is_gstreamer_pipeline {
        // Non-GStreamer devices: request the resolution normally.
        info!("Setting resolution to {}x{}", width, height);
        self.capture.set(videoio::CAP_PROP_FRAME_WIDTH, width as f64)?;
        self.capture.set(videoio::CAP_PROP_FRAME_HEIGHT, height as f64)?;
    } else {
        // Resolution is already encoded in the pipeline string.
        info!("Using resolution from GStreamer pipeline (cannot be changed at runtime)");
    }
    // Read back what the backend actually negotiated; fall back to the
    // requested values when the property cannot be read.
    let actual_width = match self.capture.get(videoio::CAP_PROP_FRAME_WIDTH) {
        Ok(w) if w > 0.0 => w as u32,
        _ => {
            warn!("Could not read actual width, using requested width");
            width
        }
    };
    let actual_height = match self.capture.get(videoio::CAP_PROP_FRAME_HEIGHT) {
        Ok(h) if h > 0.0 => h as u32,
        _ => {
            warn!("Could not read actual height, using requested height");
            height
        }
    };
    if actual_width != width || actual_height != height {
        warn!(
            "Requested resolution {}x{} but got {}x{}",
            width, height, actual_width, actual_height
        );
    }
    self.width = actual_width;
    self.height = actual_height;
    // Fix: use ASCII 'x' (the original used U+00D7 '×', inconsistent
    // with every other resolution log message in this file and the
    // source of the "ambiguous Unicode" lint).
    info!("Camera format: {}x{}", self.width, self.height);
    Ok(())
}
/// Set the camera frame rate.
///
/// For GStreamer pipelines the rate is fixed by the pipeline string and
/// not renegotiated. Otherwise the FPS property is set best-effort; the
/// rate the backend reports afterwards is logged, with a warning when
/// it differs from the request by more than 0.1 fps.
pub fn set_fps(&mut self, fps: u32) -> Result<()> {
    let requested = fps as f64;
    if self.device.contains("!") {
        // The pipeline string already encodes the frame rate.
        info!("Using FPS from GStreamer pipeline (cannot be changed at runtime)");
    } else {
        info!("Setting FPS to {}", fps);
        if let Err(e) = self.capture.set(videoio::CAP_PROP_FPS, requested) {
            warn!("Could not set FPS: {}. This might be expected for some cameras.", e);
        }
    }
    // Read back the effective rate; fall back to the requested value
    // when the property is unreadable or reports a non-positive rate.
    let actual_fps = self
        .capture
        .get(videoio::CAP_PROP_FPS)
        .ok()
        .filter(|f| *f > 0.0)
        .unwrap_or_else(|| {
            warn!("Could not read actual FPS, using requested FPS");
            requested
        });
    if (actual_fps - requested).abs() > 0.1 {
        warn!("Requested {} fps but got {} fps", fps, actual_fps);
    }
    info!("Camera frame rate: {} fps", actual_fps);
    Ok(())
}
/// Set camera exposure mode and value.
///
/// OpenCV encodes auto/manual exposure as magic values of the
/// `CAP_PROP_AUTO_EXPOSURE` property: 0.75 selects auto, 0.25 manual.
pub fn set_exposure(&mut self, mode: ExposureMode) -> Result<()> {
    match mode {
        ExposureMode::Auto => {
            // 0.75 = auto-exposure mode in OpenCV.
            self.capture.set(videoio::CAP_PROP_AUTO_EXPOSURE, 0.75)?;
            info!("Set camera exposure: Auto");
        }
        ExposureMode::Manual(exposure_time) => {
            // Switch to manual mode (0.25) before writing the value.
            self.capture.set(videoio::CAP_PROP_AUTO_EXPOSURE, 0.25)?;
            // Scale from microseconds down to the units OpenCV expects;
            // the exact conversion may vary per camera/backend.
            let exposure_value = exposure_time as f64 / 10000.0;
            self.capture.set(videoio::CAP_PROP_EXPOSURE, exposure_value)?;
            info!("Set camera exposure: Manual ({})", exposure_time);
        }
    }
    Ok(())
}
/// Set camera gain (ISO).
///
/// The backend may clamp or quantize the value; the effective gain is
/// read back and logged next to the requested one.
pub fn set_gain(&mut self, gain: u8) -> Result<()> {
    let requested = gain as f64;
    self.capture.set(videoio::CAP_PROP_GAIN, requested)?;
    let actual_gain = self.capture.get(videoio::CAP_PROP_GAIN)?;
    info!("Set camera gain: {} (actual: {})", gain, actual_gain);
    Ok(())
}
/// Lock focus at infinity (if supported).
///
/// Best-effort: when either property write is rejected, a warning is
/// logged and `Ok(())` is still returned.
pub fn lock_focus_at_infinity(&mut self) -> Result<()> {
    // Disable autofocus first, then drive the focus property to its
    // maximum (treated as infinity). The short-circuit `&&` preserves
    // the order: focus is only touched if autofocus could be disabled.
    let locked = self.capture.set(videoio::CAP_PROP_AUTOFOCUS, 0.0).is_ok()
        && self.capture.set(videoio::CAP_PROP_FOCUS, 1.0).is_ok();
    if locked {
        info!("Locked focus at infinity");
    } else {
        warn!("Camera does not support focus control");
    }
    Ok(())
}
/// Start streaming from the camera.
///
/// Opens a second `VideoCapture` on the same device (via
/// `create_capture_from_path`) so the stream does not contend with this
/// struct's own capture, re-applies the stored resolution for
/// non-GStreamer devices, marks the camera as streaming, and returns
/// the stream handle.
///
/// NOTE(review): opening a second capture on the same device may fail
/// on backends that take exclusive ownership of the device — confirm on
/// target hardware.
///
/// # Errors
/// Fails when this capture is not open, when the second capture cannot
/// be created, or when the new stream does not report itself opened.
pub fn start_streaming(&mut self) -> Result<OpenCVCaptureStream> {
    // Ensure capture is opened
    if !self.capture.is_opened()? {
        return Err(anyhow!("Camera is not open"));
    }
    info!("Creating stream from device: {}", self.device);
    // GStreamer pipelines are recognized by the `!` element separator.
    let is_gstreamer_pipeline = self.device.contains("!");
    if is_gstreamer_pipeline {
        info!("Using GStreamer pipeline streaming approach");
    }
    // Create a separate VideoCapture for the stream to avoid concurrent access issues
    let device = self.device.clone();
    let stream_capture_result = Self::create_capture_from_path(&device);
    let mut stream_capture = match stream_capture_result {
        Ok(cap) => cap,
        Err(e) => {
            error!("Failed to create stream capture: {}", e);
            if is_gstreamer_pipeline {
                error!("GStreamer pipeline error. This could indicate:");
                error!("1. GStreamer is not properly installed");
                error!("2. Pipeline syntax is incorrect");
                error!("3. The specified video device doesn't exist");
                error!("4. OpenCV was not compiled with GStreamer support");
            }
            return Err(anyhow!("Failed to create stream capture: {}", e));
        }
    };
    // Apply the same settings if not using a GStreamer pipeline
    // (GStreamer pipeline already has configuration in the pipeline string)
    if !is_gstreamer_pipeline {
        // Try to apply the stored resolution, but treat failures as
        // non-fatal — some backends reject these property writes.
        if let Err(e) = stream_capture.set(videoio::CAP_PROP_FRAME_WIDTH, self.width as f64) {
            warn!("Failed to set stream width (non-critical): {}", e);
        }
        if let Err(e) = stream_capture.set(videoio::CAP_PROP_FRAME_HEIGHT, self.height as f64) {
            warn!("Failed to set stream height (non-critical): {}", e);
        }
    }
    // Make sure the stream really opened before handing it out.
    match stream_capture.is_opened() {
        Ok(is_open) => {
            if !is_open {
                return Err(anyhow!("Failed to open camera stream - camera reports not opened"));
            }
        },
        Err(e) => {
            return Err(anyhow!("Failed to check if camera stream is open: {}", e));
        }
    }
    self.is_streaming = true;
    info!("Started camera streaming successfully");
    Ok(OpenCVCaptureStream {
        capture: stream_capture,
    })
}
/// Stop streaming from the camera.
///
/// Only clears the internal flag; the actual stream capture is owned by
/// the `OpenCVCaptureStream` handle and is released when that handle is
/// dropped.
pub fn stop_streaming(&mut self) -> Result<()> {
    self.is_streaming = false;
    info!("Stopped camera streaming");
    Ok(())
}
/// Capture one test frame from the main camera instance (without
/// creating a separate stream).
///
/// # Errors
/// Fails when the camera is not open, when no frame could be grabbed,
/// or when the grabbed frame is empty.
pub fn capture_test_frame(&mut self) -> Result<core::Mat> {
    info!("Capturing test frame from main camera instance");
    if !self.capture.is_opened()? {
        return Err(anyhow!("Camera is not open"));
    }
    let mut frame = core::Mat::default();
    // `read` returns false when the backend could not produce a frame.
    if !self.capture.read(&mut frame)? {
        return Err(anyhow!("Failed to capture test frame"));
    }
    if frame.empty() {
        return Err(anyhow!("Captured frame is empty"));
    }
    info!("Successfully captured test frame, size: {}x{}", frame.cols(), frame.rows());
    Ok(frame)
}
/// Check if the camera is currently streaming.
///
/// Reflects only the flag toggled by `start_streaming` /
/// `stop_streaming`, not the liveness of any stream handle.
pub fn is_streaming(&self) -> bool {
    self.is_streaming
}
/// Get current format width in pixels (as last read back from the
/// backend in `open` / `set_format`).
pub fn width(&self) -> u32 {
    self.width
}
/// Get current format height in pixels (as last read back from the
/// backend in `open` / `set_format`).
pub fn height(&self) -> u32 {
    self.height
}
}
/// Wrapper around OpenCV VideoCapture for streaming.
///
/// Owns a capture opened separately from the parent `OpenCVCamera`;
/// dropping the stream releases it.
pub struct OpenCVCaptureStream {
    /// The stream's own VideoCapture instance
    capture: videoio::VideoCapture,
}
impl OpenCVCaptureStream {
    /// Capture a single frame from the camera.
    ///
    /// # Errors
    /// Fails when the backend cannot grab a frame or returns an empty
    /// one.
    pub fn capture_frame(&mut self) -> Result<core::Mat> {
        let mut frame = core::Mat::default();
        // `read` returns false when no frame could be grabbed.
        if !self.capture.read(&mut frame)? {
            return Err(anyhow!("Failed to capture frame"));
        }
        if frame.empty() {
            return Err(anyhow!("Captured frame is empty"));
        }
        Ok(frame)
    }
}
impl Drop for OpenCVCaptureStream {
    fn drop(&mut self) {
        // Only log here; the inner `VideoCapture` field is dropped
        // after this body runs, which is what actually releases the
        // underlying device resources.
        debug!("OpenCV capture stream dropped");
    }
}