- Add OpenCV as default camera backend, nokhwa as optional alternative
- Make camera backends mutually exclusive via feature flags (opencv_camera, nokhwa_camera)
- Remove deprecated hardware_camera feature, use nokhwa_camera instead
- Add main thread camera initialization for macOS TCC authorization
- Add pre-opened capture storage via static Mutex for async compatibility
- Add pixel format conversion utilities (pixel_convert.rs)
- Update all cfg guards from hardware_camera to nokhwa_camera

macOS requires camera authorization requests on the main thread. OpenCV's VideoCapture::new() is now called before the tokio runtime starts, with the handle stored for later use by async code.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
//! OpenCV-based camera implementation
//!
//! Uses opencv::videoio::VideoCapture for camera capture.
//! Provides reliable resource management with explicit release() on shutdown.
//!
//! On macOS, camera authorization must happen on the main thread.
//! Use `open_capture_on_main_thread()` before starting the tokio runtime,
//! then `set_pre_opened_capture()` to store it for later use.
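//!
//! # Example
//!
//! A minimal sketch of the intended startup order; module paths, error handling
//! at the call site, and the `CameraConfig` value are omitted, so the snippet is
//! marked `ignore`:
//!
//! ```ignore
//! // Still on the main thread: trigger the macOS TCC prompt and open the device.
//! let capture = OpenCVCamera::open_capture_on_main_thread(0)?;
//! set_pre_opened_capture(capture);
//!
//! // Only now start tokio; async code picks the handle up via OpenCVCamera::new().
//! tokio::runtime::Runtime::new()?.block_on(async {
//!     let mut camera = OpenCVCamera::new(config)?;
//!     camera.initialize().await?;
//!     // ... capture loop ...
//!     anyhow::Ok(())
//! })?;
//! ```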

use anyhow::{Context, Result};
use chrono::Utc;
use std::pin::Pin;
use std::sync::{Arc, Mutex};

use opencv::core::Mat;
use opencv::prelude::*;
use opencv::videoio::{
    VideoCapture, CAP_ANY, CAP_PROP_FRAME_WIDTH, CAP_PROP_FRAME_HEIGHT, CAP_PROP_FPS,
};

// Thread-safe storage for pre-opened capture (macOS main thread requirement)
static PRE_OPENED_CAPTURE: Mutex<Option<VideoCapture>> = Mutex::new(None);

/// Store a pre-opened capture for later use by OpenCVCamera::new()
/// Call this from main thread BEFORE starting tokio runtime
pub fn set_pre_opened_capture(capture: VideoCapture) {
    *PRE_OPENED_CAPTURE.lock().unwrap() = Some(capture);
}

/// Take the pre-opened capture; only the first call yields it, later calls return None
fn take_pre_opened_capture() -> Option<VideoCapture> {
    PRE_OPENED_CAPTURE.lock().unwrap().take()
}

use super::interface::{
    CameraConfig, CameraInterface, CameraMetadata, CameraType, CapturedFrame, FrameMetadata,
};
use crate::memory::frame_data::{FrameData, FrameFormat};

/// OpenCV-based camera implementation
pub struct OpenCVCamera {
    capture: Option<VideoCapture>,
    config: CameraConfig,
    frame_count: u64,
    is_running: bool,
    actual_width: u32,
    actual_height: u32,
    actual_fps: f64,
}

// Safety: OpenCVCamera is only accessed from one thread at a time through &mut self
// The VideoCapture raw pointer is not shared across threads
unsafe impl Send for OpenCVCamera {}
unsafe impl Sync for OpenCVCamera {}

impl OpenCVCamera {
    /// Create a new OpenCV camera instance
    /// Will use pre-opened capture from main thread if available (macOS)
    pub fn new(config: CameraConfig) -> Result<Self> {
        // Check for pre-opened capture from main thread (macOS authorization)
        if let Some(capture) = take_pre_opened_capture() {
            println!("🎥 Using pre-opened camera from main thread (macOS)");
            return Self::with_capture(capture, config);
        }

        println!("🎥 Creating OpenCV camera...");
        Ok(Self {
            capture: None,
            config,
            frame_count: 0,
            is_running: false,
            actual_width: 0,
            actual_height: 0,
            actual_fps: 0.0,
        })
    }

    /// Open camera on main thread (required for macOS authorization)
    /// Call this BEFORE starting tokio runtime
    pub fn open_capture_on_main_thread(device_id: i32) -> Result<VideoCapture> {
        println!("🎥 Opening camera on main thread (macOS authorization)...");
        println!("   Device ID: {}", device_id);

        let cam = VideoCapture::new(device_id, CAP_ANY)
            .context("Failed to create VideoCapture")?;

        if !cam.is_opened()? {
            anyhow::bail!("Failed to open camera device {}", device_id);
        }

        println!("✅ Camera opened successfully on main thread");
        Ok(cam)
    }

    /// Create camera with pre-opened capture handle
    pub fn with_capture(capture: VideoCapture, config: CameraConfig) -> Result<Self> {
        // Get actual resolution from the opened capture
        let actual_width = capture.get(CAP_PROP_FRAME_WIDTH)? as u32;
        let actual_height = capture.get(CAP_PROP_FRAME_HEIGHT)? as u32;
        let actual_fps = capture.get(CAP_PROP_FPS)?;

        println!("🎥 Creating OpenCV camera with pre-opened capture...");
        println!("   Resolution: {}x{} @ {:.1} FPS", actual_width, actual_height, actual_fps);

        Ok(Self {
            capture: Some(capture),
            config,
            frame_count: 0,
            is_running: false,
            actual_width,
            actual_height,
            actual_fps,
        })
    }

    /// Get device ID from config
    fn get_device_id(&self) -> i32 {
        match &self.config.camera_type {
            CameraType::Production { device_id, .. } => {
                device_id.parse().unwrap_or(0)
            }
            _ => 0,
        }
    }

    /// Convert OpenCV Mat to FrameData
    fn mat_to_frame_data(&self, mat: &Mat) -> Result<FrameData> {
        let rows = mat.rows();
        let cols = mat.cols();
        let channels = mat.channels();

        if rows <= 0 || cols <= 0 {
            anyhow::bail!("Invalid frame dimensions: {}x{}", cols, rows);
        }

        let width = cols as u32;
        let height = rows as u32;

        // Get raw data from Mat.
        // Note: this assumes a continuous Mat (no row padding), which is typically
        // the case for frames returned by VideoCapture::read.
        let data_ptr = mat.data();
        let data_len = (rows * cols * channels) as usize;

        if data_ptr.is_null() {
            anyhow::bail!("Mat data pointer is null");
        }

        // Copy data from Mat (OpenCV uses BGR format by default)
        let bgr_data = unsafe { std::slice::from_raw_parts(data_ptr, data_len) };

        // Convert BGR to grayscale for meteor detection
        let grayscale_data = if channels == 3 {
            bgr_to_grayscale(bgr_data, width as usize, height as usize)
        } else if channels == 1 {
            bgr_data.to_vec()
        } else {
            // For other formats, just take the first channel of each pixel
            bgr_data.iter().step_by(channels as usize).copied().collect()
        };

        Ok(FrameData::new(
            grayscale_data,
            width,
            height,
            FrameFormat::Grayscale,
        ))
    }
}

/// Convert BGR to grayscale using standard luminance formula
fn bgr_to_grayscale(bgr_data: &[u8], width: usize, height: usize) -> Vec<u8> {
    let mut grayscale = Vec::with_capacity(width * height);

    for pixel in bgr_data.chunks_exact(3) {
        let b = pixel[0] as f32;
        let g = pixel[1] as f32;
        let r = pixel[2] as f32;
        // ITU-R BT.601 luminance formula
        let gray = (0.299 * r + 0.587 * g + 0.114 * b) as u8;
        grayscale.push(gray);
    }

    grayscale
}

impl CameraInterface for OpenCVCamera {
    fn initialize(
        &mut self,
    ) -> Pin<Box<dyn std::future::Future<Output = Result<()>> + Send + '_>> {
        Box::pin(async move {
            // Check if capture was pre-opened on main thread (macOS authorization)
            if self.capture.is_some() {
                println!("🎥 OpenCV camera already opened (main thread initialization)");
                println!("   Resolution: {}x{} @ {:.1} FPS",
                    self.actual_width, self.actual_height, self.actual_fps);

                // Configure resolution and FPS
                if let Some(cam) = self.capture.as_mut() {
                    let _ = cam.set(CAP_PROP_FRAME_WIDTH, self.config.resolution.0 as f64);
                    let _ = cam.set(CAP_PROP_FRAME_HEIGHT, self.config.resolution.1 as f64);
                    let _ = cam.set(CAP_PROP_FPS, self.config.fps);

                    // Read back actual values
                    self.actual_width = cam.get(CAP_PROP_FRAME_WIDTH)? as u32;
                    self.actual_height = cam.get(CAP_PROP_FRAME_HEIGHT)? as u32;
                    self.actual_fps = cam.get(CAP_PROP_FPS)?;
                }

                println!("✅ OpenCV camera ready");
                println!("   Actual resolution: {}x{}", self.actual_width, self.actual_height);
                println!("   Actual FPS: {:.1}", self.actual_fps);

                self.is_running = true;
                return Ok(());
            }

            // Fallback: open camera in async context (works on Linux/Windows)
            let device_id = self.get_device_id();
            println!("🎥 Initializing OpenCV camera...");
            println!("   Device ID: {}", device_id);
            println!("   Target resolution: {}x{}", self.config.resolution.0, self.config.resolution.1);
            println!("   Target FPS: {}", self.config.fps);

            // Create VideoCapture
            // Note: VideoCapture::new is blocking, but we're in an async context
            // On macOS, this may fail if not called from main thread
            let mut cam = VideoCapture::new(device_id, CAP_ANY)
                .context("Failed to create VideoCapture")?;

            if !cam.is_opened()? {
                anyhow::bail!("Failed to open camera device {}", device_id);
            }

            // Set resolution and FPS
            let _ = cam.set(CAP_PROP_FRAME_WIDTH, self.config.resolution.0 as f64);
            let _ = cam.set(CAP_PROP_FRAME_HEIGHT, self.config.resolution.1 as f64);
            let _ = cam.set(CAP_PROP_FPS, self.config.fps);

            // Read back actual values
            self.actual_width = cam.get(CAP_PROP_FRAME_WIDTH)? as u32;
            self.actual_height = cam.get(CAP_PROP_FRAME_HEIGHT)? as u32;
            self.actual_fps = cam.get(CAP_PROP_FPS)?;

            println!("✅ OpenCV camera initialized successfully");
            println!("   Actual resolution: {}x{}", self.actual_width, self.actual_height);
            println!("   Actual FPS: {:.1}", self.actual_fps);

            self.capture = Some(cam);
            self.is_running = true;

            Ok(())
        })
    }

    fn capture_frame(
        &mut self,
    ) -> Pin<Box<dyn std::future::Future<Output = Result<CapturedFrame>> + Send + '_>> {
        Box::pin(async move {
            let cam = self.capture.as_mut()
                .ok_or_else(|| anyhow::anyhow!("Camera not initialized"))?;

            let mut mat = Mat::default();

            // Read frame (blocking call)
            let success = cam.read(&mut mat)?;

            if !success || mat.empty() {
                anyhow::bail!("Failed to capture frame - empty or read failed");
            }

            self.frame_count += 1;

            // Convert Mat to FrameData
            let frame_data = self.mat_to_frame_data(&mat)?;

            // Log progress periodically
            if self.frame_count == 1 || self.frame_count % 30 == 0 {
                println!("📸 Frame {} captured ({}x{})",
                    self.frame_count, frame_data.width, frame_data.height);
            }

            Ok(CapturedFrame::new(
                Arc::new(frame_data),
                self.frame_count,
                Utc::now(),
                FrameMetadata::default(),
            ))
        })
    }

    fn get_metadata(&self) -> CameraMetadata {
        CameraMetadata {
            camera_id: self.get_device_id().to_string(),
            camera_type: "OpenCV".to_string(),
            supported_formats: vec![FrameFormat::Grayscale, FrameFormat::RGB888],
            max_resolution: (1920, 1080),
            current_resolution: (self.actual_width, self.actual_height),
            target_fps: self.actual_fps,
            is_real_time: true,
            total_frames: None,
        }
    }

    fn is_running(&self) -> bool {
        self.is_running
    }

    fn frame_count(&self) -> u64 {
        self.frame_count
    }

    fn shutdown(
        &mut self,
    ) -> Pin<Box<dyn std::future::Future<Output = Result<()>> + Send + '_>> {
        Box::pin(async move {
            println!("🛑 Shutting down OpenCV camera...");

            if let Some(mut cam) = self.capture.take() {
                // Explicitly release the camera resource
                // This is the key advantage over nokhwa - reliable resource cleanup
                cam.release().context("Failed to release VideoCapture")?;
                println!("✅ OpenCV camera released successfully");
            }

            self.is_running = false;
            Ok(())
        })
    }
}

impl Drop for OpenCVCamera {
    fn drop(&mut self) {
        // Ensure camera is released even if shutdown wasn't called
        if let Some(mut cam) = self.capture.take() {
            let _ = cam.release();
            println!("🗑️ OpenCV camera dropped and released");
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_bgr_to_grayscale() {
        // Test BGR to grayscale conversion
        let bgr = vec![
            0, 0, 255,     // Pure red -> should be ~76
            0, 255, 0,     // Pure green -> should be ~150
            255, 0, 0,     // Pure blue -> should be ~29
            255, 255, 255, // White -> should be 255
        ];

        let gray = bgr_to_grayscale(&bgr, 4, 1);

        assert_eq!(gray.len(), 4);
        // Check approximate values (luminance formula)
        assert!(gray[0] > 70 && gray[0] < 80, "Red should be ~76, got {}", gray[0]);
        assert!(gray[1] > 145 && gray[1] < 155, "Green should be ~150, got {}", gray[1]);
        assert!(gray[2] > 25 && gray[2] < 35, "Blue should be ~29, got {}", gray[2]);
        assert_eq!(gray[3], 255, "White should be 255");
    }
}