meteor_detect/src/camera/controller.rs

use anyhow::{Context, Result};
use chrono::Utc;
use log::{debug, error, info, warn};
use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
use std::sync::{Arc, Mutex};
use std::time::Duration;
use tokio::sync::broadcast;
use tokio::time;

use crate::camera::frame_buffer::{Frame, FrameBuffer, SharedFrameBuffer};
use crate::camera::opencv::{OpenCVCamera, OpenCVCaptureStream};
use crate::camera::{CameraSettings, ExposureMode, MeteorEvent, Resolution};
/// Camera controller manages camera operations and frame capture
pub struct CameraController {
    /// Camera settings
    settings: CameraSettings,
    /// The OpenCV camera driver
    camera: Option<OpenCVCamera>,
    /// The OpenCV capture stream (handed to the capture task while running)
    stream: Option<OpenCVCaptureStream>,
    /// Circular buffer for storing recent frames
    frame_buffer: SharedFrameBuffer,
    /// Frame counter, shared with the capture task
    frame_count: Arc<AtomicU64>,
    /// Whether the camera is currently running
    is_running: bool,
    /// Flag used to ask the capture task to shut down
    stop_flag: Option<Arc<AtomicBool>>,
    /// Channel for broadcasting new frames
    frame_tx: broadcast::Sender<Frame>,
    /// Path to save event videos
    events_dir: PathBuf,
}

impl CameraController {
    /// Create a new camera controller with the given configuration
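    ///
    /// Illustrative usage (not compiled as a doc-test; assumes the caller has
    /// already produced a `crate::Config`, e.g. via a hypothetical `load_config`):
    ///
    /// ```ignore
    /// let config = load_config()?; // hypothetical helper returning crate::Config
    /// let mut controller = CameraController::new(&config).await?;
    /// controller.initialize().await?;
    /// controller.start_capture().await?;
    /// ```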
    pub async fn new(config: &crate::Config) -> Result<Self> {
        // Extract camera settings from config (placeholder for now)
        let settings = config.clone().camera;
        // Create frame buffer with capacity for 10 minutes of video at settings.fps
        let buffer_capacity = (10 * 60 * settings.fps) as usize;
        let frame_buffer = Arc::new(FrameBuffer::new(buffer_capacity));
        // Create broadcast channel for frames
        let (frame_tx, _) = broadcast::channel(30);
        // Create events directory if it doesn't exist
        let events_dir = PathBuf::from("events");
        std::fs::create_dir_all(&events_dir).context("Failed to create events directory")?;
        Ok(Self {
            settings,
            camera: None,
            stream: None,
            frame_buffer,
            frame_count: Arc::new(AtomicU64::new(0)),
            is_running: false,
            stop_flag: None,
            frame_tx,
            events_dir,
        })
    }

    /// Initialize the camera with current settings
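    ///
    /// Opens the device named in the settings and applies resolution, frame
    /// rate, exposure, gain and (optionally) an infinity focus lock. Must be
    /// called before [`start_capture`](Self::start_capture).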
    pub async fn initialize(&mut self) -> Result<()> {
        // Open the camera
        let mut camera =
            OpenCVCamera::open(&self.settings.device).context("Failed to open camera")?;
        // Configure camera parameters
        camera
            .set_format(self.settings.resolution)
            .context("Failed to set camera format")?;
        camera
            .set_fps(self.settings.fps)
            .context("Failed to set camera FPS")?;
        camera
            .set_exposure(self.settings.exposure)
            .context("Failed to set camera exposure")?;
        camera
            .set_gain(self.settings.gain)
            .context("Failed to set camera gain")?;
        if self.settings.focus_locked {
            camera
                .lock_focus_at_infinity()
                .context("Failed to lock focus at infinity")?;
        }
        self.camera = Some(camera);
        info!("Camera initialized successfully");
        Ok(())
    }

    /// Start camera capture in a background task
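    ///
    /// Spawns a Tokio task that pulls frames at the configured frame rate,
    /// pushes them into the shared frame buffer and broadcasts them to any
    /// subscribers obtained via [`subscribe_to_frames`](Self::subscribe_to_frames).
    /// The task runs until [`stop_capture`](Self::stop_capture) is called.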
    pub async fn start_capture(&mut self) -> Result<()> {
        if self.is_running {
            warn!("Camera capture is already running");
            return Ok(());
        }
        let camera = self
            .camera
            .as_mut()
            .ok_or_else(|| anyhow::anyhow!("Camera not initialized"))?;
        // Start the camera streaming
        let stream = camera
            .start_streaming()
            .context("Failed to start camera streaming")?;
        self.stream = Some(stream);
        // Fresh stop flag for this capture session; stop_capture() sets it
        let stop_flag = Arc::new(AtomicBool::new(false));
        self.stop_flag = Some(stop_flag.clone());
        self.is_running = true;
        // Clone shared state for the capture task
        let frame_buffer = self.frame_buffer.clone();
        let frame_tx = self.frame_tx.clone();
        let frame_count = self.frame_count.clone();
        let fps = self.settings.fps;
        // The capture task takes ownership of the stream while it runs
        let mut stream = self
            .stream
            .take()
            .expect("Stream just initialized but is None");
        // Start capture task
        tokio::spawn(async move {
            let frame_interval = Duration::from_secs_f64(1.0 / fps as f64);
            let mut interval = time::interval(frame_interval);
            info!("Starting camera capture at {} fps", fps);
            loop {
                interval.tick().await;
                // Exit once stop_capture() has signalled shutdown; dropping the
                // stream here stops the underlying capture
                if stop_flag.load(Ordering::Relaxed) {
                    break;
                }
                match stream.capture_frame() {
                    Ok(mat) => {
                        // Create a new frame with timestamp and monotonic index
                        let index = frame_count.fetch_add(1, Ordering::Relaxed);
                        let frame = Frame::new(mat, Utc::now(), index);
                        // Add to frame buffer
                        if let Err(e) = frame_buffer.push(frame.clone()) {
                            error!("Failed to add frame to buffer: {}", e);
                        }
                        // Broadcast frame to listeners (an error only means
                        // there are currently no subscribers)
                        let _ = frame_tx.send(frame);
                    }
                    Err(e) => {
                        error!("Failed to capture frame: {}", e);
                        // Small delay to avoid a tight error loop
                        time::sleep(Duration::from_millis(100)).await;
                    }
                }
            }
            info!("Camera capture task exited");
        });
        info!("Camera capture started");
        Ok(())
    }

    /// Stop camera capture
    pub async fn stop_capture(&mut self) -> Result<()> {
        if !self.is_running {
            warn!("Camera capture is not running");
            return Ok(());
        }
        // Ask the capture task to exit; it drops the stream when it stops
        if let Some(stop_flag) = self.stop_flag.take() {
            stop_flag.store(true, Ordering::Relaxed);
        }
        if let Some(camera) = &mut self.camera {
            camera.stop_streaming()?;
        }
        self.is_running = false;
        info!("Camera capture stopped");
        Ok(())
    }

    /// Get a subscriber to receive new frames
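    ///
    /// A minimal consumer sketch (not compiled as a doc-test). Slow receivers
    /// can observe `RecvError::Lagged`, since the broadcast channel only
    /// buffers the most recent frames:
    ///
    /// ```ignore
    /// use tokio::sync::broadcast::error::RecvError;
    ///
    /// let mut rx = controller.subscribe_to_frames();
    /// tokio::spawn(async move {
    ///     loop {
    ///         match rx.recv().await {
    ///             Ok(frame) => { /* run detection on `frame` */ }
    ///             Err(RecvError::Lagged(skipped)) => log::warn!("dropped {} frames", skipped),
    ///             Err(RecvError::Closed) => break,
    ///         }
    ///     }
    /// });
    /// ```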
    pub fn subscribe_to_frames(&self) -> broadcast::Receiver<Frame> {
        self.frame_tx.subscribe()
    }

    /// Get a clone of the frame buffer
    pub fn get_frame_buffer(&self) -> SharedFrameBuffer {
        self.frame_buffer.clone()
    }

    /// Check if the camera is currently running
    pub fn is_running(&self) -> bool {
        self.is_running
    }

    /// Update camera settings
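    ///
    /// If capture is running it is stopped, the camera is reconfigured with
    /// the new settings, and capture is restarted. Illustrative call (not
    /// compiled as a doc-test; `new_settings` is a hypothetical
    /// `CameraSettings` value prepared by the caller):
    ///
    /// ```ignore
    /// controller.update_settings(new_settings).await?;
    /// ```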
    pub async fn update_settings(&mut self, new_settings: CameraSettings) -> Result<()> {
        // If the camera is running, we need to stop it first
        let was_running = self.is_running();
        if was_running {
            self.stop_capture().await?;
        }
        // Update settings
        self.settings = new_settings;
        // Re-configure the camera with the new settings
        if let Some(mut camera) = self.camera.take() {
            // Configure camera parameters
            camera.set_format(self.settings.resolution)?;
            camera.set_fps(self.settings.fps)?;
            camera.set_exposure(self.settings.exposure)?;
            camera.set_gain(self.settings.gain)?;
            if self.settings.focus_locked {
                camera.lock_focus_at_infinity()?;
            }
            self.camera = Some(camera);
        } else {
            self.initialize().await?;
        }
        // Restart capture if it was running
        if was_running {
            self.start_capture().await?;
        }
        info!("Camera settings updated");
        Ok(())
    }

    /// Save a meteor event with video
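    ///
    /// Frames from `seconds_before` before to `seconds_after` after `timestamp`
    /// are pulled from the ring buffer and written under `events/<event id>/`.
    /// Illustrative call with example values (not compiled as a doc-test):
    ///
    /// ```ignore
    /// let event = controller
    ///     .save_meteor_event(chrono::Utc::now(), 0.93, (120, 80, 64, 64), 5, 10)
    ///     .await?;
    /// println!("event video: {}", event.video_path);
    /// ```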
    pub async fn save_meteor_event(
        &self,
        timestamp: chrono::DateTime<Utc>,
        confidence: f32,
        bounding_box: (u32, u32, u32, u32),
        seconds_before: i64,
        seconds_after: i64,
    ) -> Result<MeteorEvent> {
        // Extract frames from the buffer
        let frames = self
            .frame_buffer
            .extract_event_frames(timestamp, seconds_before, seconds_after);
        if frames.is_empty() {
            return Err(anyhow::anyhow!("No frames found for event"));
        }
        // Create a unique ID for the event
        let event_id = uuid::Uuid::new_v4();
        // Create a directory for the event
        let event_dir = self.events_dir.join(event_id.to_string());
        std::fs::create_dir_all(&event_dir).context("Failed to create event directory")?;
        // Save frames to the event directory
        for (i, frame) in frames.iter().enumerate() {
            let frame_path = event_dir.join(format!("frame_{:04}.jpg", i));
            frame.save_to_file(&frame_path)?;
        }
        // Create a video file name
        let video_path = event_dir.join("event.mp4").to_string_lossy().to_string();
        // TODO: Call FFmpeg to convert frames to video
        // This would be done by spawning an external process
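        // A typical invocation might look like the following (hypothetical;
        // exact flags depend on the chosen encoder and the configured fps):
        //   ffmpeg -framerate 30 -i frame_%04d.jpg -c:v libx264 -pix_fmt yuv420p event.mp4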
        // Create and return the event
        let event = MeteorEvent {
            id: event_id,
            timestamp,
            confidence,
            bounding_box,
            video_path,
        };
        info!("Saved meteor event: {}", event_id);
        Ok(event)
    }

    /// Get the current status of the camera
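    ///
    /// Returns a JSON object with `running`, `frame_count`, `settings`,
    /// `camera_info`, `frame_buffer` utilization, the most recent frame and
    /// `events_dir`. Illustrative usage (not compiled as a doc-test):
    ///
    /// ```ignore
    /// let status = controller.get_status().await?;
    /// println!("{}", serde_json::to_string_pretty(&status)?);
    /// ```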
    pub async fn get_status(&self) -> Result<serde_json::Value> {
        let frame_buffer_stats = {
            let length = self.frame_buffer.len();
            let capacity = self.frame_buffer.capacity();
            serde_json::json!({
                "length": length,
                "capacity": capacity,
                "utilization_percent": if capacity > 0 {
                    (length as f64 / capacity as f64) * 100.0
                } else {
                    0.0
                }
            })
        };
        let recent_frame = {
            // Get the most recent frame (index 0)
            if let Some(frame) = self.frame_buffer.get(0) {
                let timestamp = frame.timestamp.to_rfc3339();
                serde_json::json!({
                    "timestamp": timestamp,
                    "frame_number": frame.index
                })
            } else {
                serde_json::json!(null)
            }
        };
        let camera_info = serde_json::json!({
            "device": self.settings.device,
            "resolution": format!("{:?}", self.settings.resolution),
            "fps": self.settings.fps,
            "exposure_mode": format!("{:?}", self.settings.exposure),
            "gain": self.settings.gain,
            "focus_locked": self.settings.focus_locked
        });
        let status = serde_json::json!({
            "running": self.is_running,
            "frame_count": self.frame_count.load(Ordering::Relaxed),
            "settings": {
                "device": self.settings.device,
                "resolution": format!("{:?}", self.settings.resolution),
                "fps": self.settings.fps
            },
            "camera_info": camera_info,
            "frame_buffer": frame_buffer_stats,
            "recent_frame": recent_frame,
            "events_dir": self.events_dir.to_string_lossy()
        });
        debug!("Camera status: {}", status);
        Ok(status)
    }
}