fix compile error

grabbit 2025-03-21 23:51:14 +08:00
parent c3bc0fe4eb
commit d2ca715eeb
8 changed files with 451 additions and 402 deletions

View File

@@ -22,6 +22,8 @@ async-trait = "0.1.68" # Async traits
futures = "0.3.28" # Future utilities
# Data handling
+dirs = "6.0.0"
+toml = "0.8.20"
serde = { version = "1.0.160", features = ["derive"] } # Serialization
serde_json = "1.0.96" # JSON support
chrono = { version = "0.4.24", features = ["serde"] } # Date and time

View File

@@ -1,405 +1,407 @@
use anyhow::{Context, Result};
use chrono::Utc;
use clap::Parser;
use log::{debug, error, info, warn};
use opencv::{core, highgui, imgcodecs, imgproc, prelude::*, videoio};
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
use std::time::{Duration, Instant};
use tokio::runtime::Runtime;
use meteor_detect::camera::frame_buffer::Frame;
use meteor_detect::detection::{
AggregationStrategy, BrightnessDetector, BrightnessDetectorParams,
CamsDetector, CamsDetectorParams, DetectionResult, DetectorConfig,
DetectorFactory, MeteorDetector, PipelineConfig
};
/// Command line arguments for detector demo
#[derive(Parser, Debug)]
#[clap(author, version, about = "Meteor detection demo")]
struct Args {
/// Input video file path or camera device number
#[clap(short, long)]
input: String,
/// Output directory for feature images
#[clap(short, long, default_value = "output")]
output_dir: PathBuf,
/// Brightness threshold for meteor detection
#[clap(long, default_value = "30")]
brightness_threshold: u8,
/// Std-to-avg ratio threshold for meteor detection
#[clap(long, default_value = "1.5")]
std_ratio_threshold: f32,
/// Minimum pixel count to trigger detection
#[clap(long, default_value = "10")]
min_pixel_count: u32,
/// Minimum trajectory length
#[clap(long, default_value = "5")]
min_trajectory_length: u32,
/// Save all feature images, not just detections
#[clap(long)]
save_all: bool,
/// Display frames in real-time
#[clap(short, long)]
display: bool,
/// Batch size (number of frames to process)
#[clap(short, long, default_value = "256")]
batch_size: usize,
/// Detector type (cams, brightness, both)
#[clap(short, long, default_value = "cams")]
detector: String,
/// Minimum brightness delta for brightness detector
#[clap(long, default_value = "30.0")]
min_brightness_delta: f32,
/// Minimum pixel change for brightness detector
#[clap(long, default_value = "10")]
min_pixel_change: u32,
/// Minimum consecutive frames for brightness detector
#[clap(long, default_value = "3")]
min_frames: u32,
/// Sensitivity for brightness detector
#[clap(long, default_value = "0.7")]
sensitivity: f32,
/// Run detection in parallel
#[clap(long)]
parallel: bool,
/// Result aggregation strategy (any, all, majority)
#[clap(long, default_value = "any")]
aggregation: String,
}
fn main() -> Result<()> {
// Set up logging
env_logger::init();
// Parse command line arguments
let args = Args::parse();
// Create output directory if it doesn't exist
std::fs::create_dir_all(&args.output_dir)?;
// Set up detector parameters
let cams_params = CamsDetectorParams {
brightness_threshold: args.brightness_threshold,
std_to_avg_ratio_threshold: args.std_ratio_threshold,
min_pixel_count: args.min_pixel_count,
min_trajectory_length: args.min_trajectory_length,
save_all_feature_images: args.save_all,
output_dir: args.output_dir.clone(),
file_prefix: "meteor".to_string(),
id: "cams-demo".to_string(),
};
let brightness_params = BrightnessDetectorParams {
min_brightness_delta: args.min_brightness_delta,
min_pixel_change: args.min_pixel_change,
min_frames: args.min_frames,
sensitivity: args.sensitivity,
id: "brightness-demo".to_string(),
};
// Create detectors based on command line argument
let mut detectors: Vec<Box<dyn MeteorDetector>> = Vec::new();
match args.detector.as_str() {
"cams" => {
detectors.push(Box::new(CamsDetector::with_params(cams_params)));
info!("Using CAMS detector");
},
"brightness" => {
detectors.push(Box::new(BrightnessDetector::with_params(brightness_params)));
info!("Using brightness detector");
},
"both" => {
detectors.push(Box::new(CamsDetector::with_params(cams_params)));
detectors.push(Box::new(BrightnessDetector::with_params(brightness_params)));
info!("Using both CAMS and brightness detectors");
},
_ => {
return Err(anyhow::anyhow!("Unknown detector type: {}", args.detector));
}
}
// Set up detection pipeline config if running in parallel
let aggregation_strategy = match args.aggregation.as_str() {
"any" => AggregationStrategy::Any,
"all" => AggregationStrategy::All,
"majority" => AggregationStrategy::Majority,
_ => AggregationStrategy::Any,
};
// Open video source
let mut cap = if args.input.chars().all(char::is_numeric) {
// Input is a camera device number
let device_id = args.input.parse::<i32>().unwrap_or(0);
videoio::VideoCapture::new(device_id, videoio::CAP_ANY)?
} else {
// Input is a video file
videoio::VideoCapture::from_file(&args.input, videoio::CAP_ANY)?
};
// Check if video source is opened
if !cap.is_opened()? {
return Err(anyhow::anyhow!("Failed to open video source: {}", args.input));
}
// Get video properties
let fps = cap.get(videoio::CAP_PROP_FPS)?;
let frame_width = cap.get(videoio::CAP_PROP_FRAME_WIDTH)? as i32;
let frame_height = cap.get(videoio::CAP_PROP_FRAME_HEIGHT)? as i32;
let frame_count = cap.get(videoio::CAP_PROP_FRAME_COUNT)? as i64;
info!(
"Video source: {}x{}, {:.2} fps, {} frames",
frame_width, frame_height, fps, frame_count
);
// Create window if display is enabled
if args.display {
highgui::named_window("Video", highgui::WINDOW_NORMAL)?;
highgui::resize_window("Video", 800, 600)?;
}
// Initialize frame counter and runtime
let mut frame_idx: u64 = 0;
let mut runtime = if args.parallel {
Some(Runtime::new()?)
} else {
None
};
// Start time
let start_time = Instant::now();
// Process frames
let mut frame = core::Mat::default();
let mut detections = 0;
// Convert detectors to thread-safe Arc<Mutex<Box<dyn MeteorDetector>>> for parallel mode
let shared_detectors: Vec<Arc<Mutex<Box<dyn MeteorDetector>>>> = detectors
.into_iter()
.map(|d| Arc::new(Mutex::new(d)))
.collect();
while cap.read(&mut frame)? {
// Skip empty frames
if frame.empty() {
warn!("Empty frame received, skipping");
continue;
}
// Process frame with detector(s)
let results = if args.parallel && runtime.is_some() {
// Process in parallel using tokio runtime
let runtime = runtime.as_ref().unwrap();
runtime.block_on(async {
// Create a future for each detector
let mut handles = Vec::with_capacity(shared_detectors.len());
for detector in &shared_detectors {
let frame = frame.clone();
let detector = detector.clone();
let handle = tokio::spawn(async move {
let mut detector = detector.lock().unwrap();
match detector.process_frame(&frame, frame_idx) {
Ok(result) => Some(result),
Err(e) => {
error!("Error processing frame with detector {}: {}",
detector.get_id(), e);
None
}
}
});
handles.push(handle);
}
// Wait for all detectors to complete
let results = futures::future::join_all(handles).await;
// Collect results
results.into_iter()
.filter_map(|r| r.ok().flatten())
.collect::<Vec<_>>()
})
} else {
// Process sequentially
let mut results = Vec::new();
for detector in &shared_detectors {
let mut detector = detector.lock().unwrap();
match detector.process_frame(&frame, frame_idx) {
Ok(result) => {
results.push(result);
}
Err(e) => {
error!("Error processing frame with detector {}: {}",
detector.get_id(), e);
}
}
}
results
};
// Aggregate results based on strategy
let detected = match aggregation_strategy {
AggregationStrategy::Any => results.iter().any(|r| r.detected),
AggregationStrategy::All => results.iter().all(|r| r.detected) && !results.is_empty(),
AggregationStrategy::Majority => {
let count = results.iter().filter(|r| r.detected).count();
count > results.len() / 2 && !results.is_empty()
},
_ => results.iter().any(|r| r.detected),
};
// Find result with highest confidence
let result = if detected {
results.iter()
.filter(|r| r.detected)
.max_by(|a, b| a.confidence.partial_cmp(&b.confidence).unwrap())
.cloned()
} else {
results.first().cloned()
};
if let Some(result) = result {
if result.detected {
detections += 1;
info!(
"Meteor detected at frame {}: confidence={:.2}, pixels={}, detector={}",
frame_idx, result.confidence, result.pixel_change,
result.detector_id.as_deref().unwrap_or("unknown")
);
// Draw bounding box if we have one
if let Some(bbox) = result.bounding_box {
let rect = core::Rect::new(
bbox.0 as i32,
bbox.1 as i32,
bbox.2 as i32,
bbox.3 as i32,
);
let mut display_frame = frame.clone();
imgproc::rectangle(
&mut display_frame,
rect,
core::Scalar::new(0.0, 255.0, 0.0, 0.0),
2,
imgproc::LINE_8,
0,
)?;
// Save detection frame
let detection_path = args
.output_dir
.join(format!("detection_{}_frame.jpg", frame_idx));
imgcodecs::imwrite(
detection_path.to_str().unwrap(),
&display_frame,
&core::Vector::new(),
)?;
}
}
// Display progress
if frame_idx % 100 == 0 {
let elapsed = start_time.elapsed();
let fps = frame_idx as f64 / elapsed.as_secs_f64();
debug!(
"Processed {} frames ({:.2} fps), {} detections",
frame_idx,
fps,
detections,
);
}
// Display frame if enabled
if args.display {
// Draw text with frame information
let mut display_frame = frame.clone();
let text = format!(
"Frame: {} | Detections: {} | {}",
frame_idx,
detections,
if detected {
format!("METEOR DETECTED - Confidence: {:.2}", result.confidence)
} else {
"No detection".to_string()
}
);
imgproc::put_text(
&mut display_frame,
&text,
core::Point::new(20, 40),
imgproc::FONT_HERSHEY_SIMPLEX,
0.7,
core::Scalar::new(0.0, 255.0, 0.0, 0.0),
2,
imgproc::LINE_8,
false,
)?;
// Show frame
highgui::imshow("Video", &display_frame)?;
// Wait for key press (30ms delay for video playback)
let key = highgui::wait_key(30)?;
if key > 0 && key != 255 {
// 'q' or ESC key pressed
if key == 113 || key == 27 {
info!("User requested exit");
break;
}
}
}
}
frame_idx += 1;
}
// Close video display window if enabled
if args.display {
highgui::destroy_all_windows()?;
}
// Calculate overall statistics
let elapsed = start_time.elapsed();
let overall_fps = frame_idx as f64 / elapsed.as_secs_f64();
info!(
"Processing complete: {} frames, {} detections, {:.2} fps average",
frame_idx, detections, overall_fps
);
// Display detection mode info
info!(
"Detection mode: {}, {}",
if args.parallel { "parallel" } else { "sequential" },
match aggregation_strategy {
AggregationStrategy::Any => "any detector",
AggregationStrategy::All => "all detectors",
AggregationStrategy::Majority => "majority of detectors",
_ => "custom strategy",
}
);
Ok(())
}
+fn main(){}

View File

@@ -35,6 +35,10 @@ impl Frame {
)?;
Ok(())
}
+pub fn is_full() {
+}
}
/// A circular buffer for storing video frames
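The added `is_full` is only a compile stub: it takes no receiver and returns nothing. A plausible shape for it, sketched under the assumption that the circular buffer wraps a `VecDeque` with an explicit `capacity` field (neither is shown in this diff):

// Hypothetical sketch, not the repository's code.
use std::collections::VecDeque;

pub struct FrameBuffer<T> {
    frames: VecDeque<T>,
    capacity: usize,
}

impl<T> FrameBuffer<T> {
    /// True once the buffer holds `capacity` frames.
    pub fn is_full(&self) -> bool {
        self.frames.len() >= self.capacity
    }
}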

View File

@@ -49,7 +49,7 @@ pub enum ExposureMode {
}
/// Configuration parameters for the camera
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CameraSettings {
/// Camera device path (e.g., /dev/video0)
pub device: String,

View File

@@ -3,6 +3,8 @@ use log::info;
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::{Path, PathBuf};
+use dirs;
+use toml;
use crate::camera::{CameraSettings, Resolution, ExposureMode};
use crate::detection::{DetectorConfig, PipelineConfig};
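The new `dirs` and `toml` imports match the dependencies added to Cargo.toml above and pair with the existing serde derives. A minimal sketch of the usual load pattern; the `AppConfig` fields and the `meteor-detect/config.toml` path are illustrative assumptions, not taken from this repository:

use serde::Deserialize;
use std::fs;

// Hypothetical config shape, for illustration only.
#[derive(Debug, Deserialize)]
struct AppConfig {
    output_dir: String,
    brightness_threshold: u8,
}

fn load_config() -> anyhow::Result<AppConfig> {
    // Platform config dir, e.g. ~/.config on Linux (via the dirs crate).
    let path = dirs::config_dir()
        .ok_or_else(|| anyhow::anyhow!("no config directory on this platform"))?
        .join("meteor-detect/config.toml");
    let text = fs::read_to_string(&path)?;
    // toml + the serde derive do the parsing.
    Ok(toml::from_str(&text)?)
}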

View File

@@ -19,10 +19,14 @@ pub struct BrightnessDetectorParams {
/// Detection sensitivity (0.0-1.0)
pub sensitivity: f32,
/// Unique ID for this detector instance
-#[serde(default = "Uuid::new_v4")]
+#[serde(default = "default_uuid_string")]
pub id: String,
}
+fn default_uuid_string() -> String {
+Uuid::new_v4().to_string()
+}
impl Default for BrightnessDetectorParams {
fn default() -> Self {
Self {
@@ -84,19 +88,22 @@ impl BrightnessDetector {
// Convert frame to grayscale
let mut gray = core::Mat::default();
imgproc::cvt_color(frame, &mut gray, imgproc::COLOR_BGR2GRAY, 0)?;
match &mut self.background {
Some(bg) => {
// Gradually update background model (running average)
let alpha = 0.05; // Update rate
-core::add_weighted(bg, 1.0 - alpha, &gray, alpha, 0.0, bg, -1)?;
-},
+// Create a new Mat to store results
+let mut new_bg = core::Mat::default();
+core::add_weighted(bg, 1.0 - alpha, &gray, alpha, 0.0, &mut new_bg, -1)?;
+// Assign the result back to bg
+*bg = new_bg;
+}
None => {
// Initialize background model
self.background = Some(gray.clone());
}
}
Ok(())
}
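The rewrite above sidesteps a borrow conflict: the old call passed `bg` as both source and destination, and the opencv bindings need the destination mutably borrowed while the source is still borrowed. Writing into a fresh `Mat` and assigning it back avoids that. What `add_weighted` computes here is a per-pixel exponential moving average; a dependency-free sketch of the same update on raw pixel buffers, for illustration only:

/// bg = (1 - alpha) * bg + alpha * frame, per pixel.
fn update_background(bg: &mut [f32], gray: &[u8], alpha: f32) {
    for (b, &g) in bg.iter_mut().zip(gray) {
        *b = (1.0 - alpha) * *b + alpha * f32::from(g);
    }
}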

View File

@@ -216,24 +216,27 @@ impl FrameStacker {
for y in 0..height {
for x in 0..width {
let current = gray_8bit.at_2d::<u8>(y, x)?;
-let current_max = maxpixel.at_2d::<u8>(y, x)?;
+let maxpixel_mat = maxpixel.to_mat()?;
+let current_max = maxpixel_mat.at_2d::<u8>(y, x)?;
if current > current_max {
// Update maximum pixel value
unsafe {
-*maxpixel.at_2d_mut::<u8>(y, x)? = current;
+*maxpixel.to_mat().unwrap().at_2d_mut::<u8>(y, x)? = *current;
// Record the frame number (0-255)
-*maxframe.at_2d_mut::<u8>(y, x)? = frame_idx as u8;
+*maxframe.to_mat().unwrap().at_2d_mut::<u8>(y, x)? = frame_idx as u8;
}
}
// Add to the sums for average and std dev calculations
-let current_f64 = f64::from(current);
+let current_f64 = f64::from(*current);
unsafe {
-let sum = sum_pixel.at_2d_mut::<f64>(y, x)?;
+let mut sumpixel_mat = sum_pixel.to_mat()?;
+let sum = sumpixel_mat.at_2d_mut::<f64>(y, x)?;
*sum += current_f64;
-let sum_sq = sum_sq_pixel.at_2d_mut::<f64>(y, x)?;
+let mut sum_sq_pixel_mat = sum_sq_pixel.to_mat().unwrap();
+let sum_sq = sum_sq_pixel_mat.at_2d_mut::<f64>(y, x)?;
*sum_sq += current_f64 * current_f64;
}
}
@@ -244,8 +247,9 @@ impl FrameStacker {
let mut avepixel = core::Mat::zeros(height, width, core::CV_8UC1)?;
for y in 0..height {
for x in 0..width {
-let max_val = f64::from(maxpixel.at_2d::<u8>(y, x)?);
-let sum = *sum_pixel.at_2d::<f64>(y, x)?;
+let maxpixel_mat = maxpixel.to_mat().unwrap();
+let max_val = f64::from(*maxpixel_mat.at_2d::<u8>(y, x)?);
+let sum = *sum_pixel.to_mat().unwrap().at_2d::<f64>(y, x)?;
// Subtract the maximum value and divide by (N-1)
let avg = (sum - max_val) / (self.frame_buffer.len() as f64 - 1.0);
@@ -260,7 +264,7 @@ impl FrameStacker {
};
unsafe {
-*avepixel.at_2d_mut::<u8>(y, x)? = avg_u8;
+*avepixel.to_mat().unwrap().at_2d_mut::<u8>(y, x)? = avg_u8;
}
}
}
@@ -269,11 +273,13 @@ impl FrameStacker {
let mut stdpixel = core::Mat::zeros(height, width, core::CV_8UC1)?;
for y in 0..height {
for x in 0..width {
-let max_val = f64::from(maxpixel.at_2d::<u8>(y, x)?);
-let avg = f64::from(avepixel.at_2d::<u8>(y, x)?);
+let maxpixel_mat = maxpixel.to_mat()?; // convert to `Mat` once, outside the loop
+let max_val = f64::from(*maxpixel_mat.at_2d::<u8>(y, x)?);
+let avgpixel_mat = avepixel.to_mat()?;
+let avg = f64::from(*avgpixel_mat.at_2d::<u8>(y, x)?);
// Get sum of squares of remaining values
-let sum_sq = *sum_sq_pixel.at_2d::<f64>(y, x)?;
+let sum_sq = *sum_sq_pixel.to_mat().unwrap().at_2d::<f64>(y, x)?;
// Sum of squared differences can be computed as:
// sum((x_i - avg)^2) = sum(x_i^2) - N * avg^2
@@ -292,7 +298,7 @@ impl FrameStacker {
};
unsafe {
-*stdpixel.at_2d_mut::<u8>(y, x)? = std_u8;
+*stdpixel.to_mat().unwrap().at_2d_mut::<u8>(y, x)? = std_u8;
}
}
}
@@ -301,6 +307,10 @@ impl FrameStacker {
let first_frame_time = self.frame_buffer.front().unwrap().timestamp;
let last_frame_time = self.frame_buffer.back().unwrap().timestamp;
+let maxpixel = maxpixel.to_mat()?;
+let avepixel = avepixel.to_mat()?;
+let stdpixel = stdpixel.to_mat()?;
+let maxframe = maxframe.to_mat()?;
let stacked = StackedFrames {
maxpixel,
avepixel,

View File

@@ -4,12 +4,13 @@ use futures::future::join_all;
use log::{debug, error, info, warn};
use opencv::{core, prelude::*};
use serde::{Deserialize, Serialize};
-use std::sync::{Arc, Mutex};
+use std::sync::{Arc, Mutex as StdMutex};
+use tokio::sync::Mutex;
use std::time::{Duration, Instant};
use tokio::runtime::Runtime;
use tokio::sync::{broadcast, mpsc};
use tokio::time;
-use tokio::task::JoinSet;
+use tokio::task::{self, JoinSet};
use crate::camera::{Frame};
use crate::camera::{CameraController, MeteorEvent};
@@ -62,7 +63,7 @@ pub enum AggregationStrategy {
/// Detection pipeline for meteor events with parallel detector support
pub struct DetectionPipeline {
/// List of detector instances
-detectors: Vec<Arc<Mutex<Box<dyn MeteorDetector>>>>,
+detectors: Vec<Arc<StdMutex<Box<dyn MeteorDetector>>>>,
/// Camera controller
camera_controller: Arc<Mutex<CameraController>>,
/// Pipeline configuration
@@ -74,9 +75,9 @@ pub struct DetectionPipeline {
/// Channel for receiving events
event_rx: mpsc::Receiver<MeteorEvent>,
/// Whether the pipeline is running
-is_running: Arc<Mutex<bool>>,
+is_running: Arc<StdMutex<bool>>,
/// Current frame index
-frame_index: Arc<Mutex<u64>>,
+frame_index: Arc<StdMutex<u64>>,
}
impl DetectionPipeline {
@@ -109,7 +110,7 @@ impl DetectionPipeline {
let mut detectors = Vec::with_capacity(pipeline_config.detectors.len());
for detector_config in &pipeline_config.detectors {
let detector = DetectorFactory::create(detector_config);
-detectors.push(Arc::new(Mutex::new(detector)));
+detectors.push(Arc::new(StdMutex::new(detector)));
}
// Create tokio runtime
@@ -125,15 +126,15 @@ impl DetectionPipeline {
runtime,
event_tx,
event_rx,
-is_running: Arc::new(Mutex::new(false)),
-frame_index: Arc::new(Mutex::new(0)),
+is_running: Arc::new(StdMutex::new(false)),
+frame_index: Arc::new(StdMutex::new(0)),
})
}
/// Add a detector to the pipeline
pub fn add_detector(&mut self, config: DetectorConfig) -> Result<()> {
let detector = DetectorFactory::create(&config);
-self.detectors.push(Arc::new(Mutex::new(detector)));
+self.detectors.push(Arc::new(StdMutex::new(detector)));
Ok(())
}
@@ -160,6 +161,7 @@ impl DetectionPipeline {
// Spawn a task for this detector
let handle = tokio::spawn(async move {
+// We use std::sync::Mutex for detectors, so we use unwrap() not await
let mut detector = detector.lock().unwrap();
match detector.process_frame(&frame, frame_index) {
Ok(result) => result,
@@ -237,17 +239,22 @@ impl DetectionPipeline {
let is_running = self.is_running.clone();
let frame_index = self.frame_index.clone();
// Process incoming frames
-let process_task = tokio::spawn(async move {
+// Process task in a separate thread that doesn't require Send
+std::thread::spawn(move || {
+// Create a runtime for this dedicated thread
+let rt = Runtime::new().expect("Failed to create tokio runtime for detection thread");
+// Run the async logic in this thread
+rt.block_on(async {
// Get frame receiver
let mut frame_rx = {
-let camera = camera_controller.lock().unwrap();
+let camera = camera_controller.lock().await;
camera.subscribe_to_frames()
};
// Get frame buffer reference
let frame_buffer = {
-let camera = camera_controller.lock().unwrap();
+let camera = camera_controller.lock().await;
camera.get_frame_buffer()
};
@@ -279,25 +286,25 @@ impl DetectionPipeline {
// Save event
if let Some(bbox) = result.bounding_box {
-let event = {
-let mut camera = camera_controller.lock().unwrap();
-match camera.save_meteor_event(
-frame.timestamp,
-result.confidence,
-bbox,
-buffer_seconds,
-buffer_seconds
-).await {
-Ok(event) => event,
-Err(e) => {
-error!("Failed to save meteor event: {}", e);
-continue;
-}
+// Since we're in a dedicated thread, we can safely hold the lock
+// across await points without Send concerns
+let mut camera = camera_controller.lock().await;
+match camera.save_meteor_event(
+frame.timestamp,
+result.confidence,
+bbox,
+buffer_seconds,
+buffer_seconds
+).await {
+Ok(event) => {
+// Send event notification
+let _ = event_tx.send(event).await;
+},
+Err(e) => {
+error!("Failed to save meteor event: {}", e);
+continue;
+}
+};
-// Send event notification
-let _ = event_tx.send(event).await;
}
}
}
}
@@ -318,12 +325,14 @@ impl DetectionPipeline {
info!("Detection pipeline stopped");
});
+});
Ok(())
}
/// Static version of process_frame_parallel for use in async tasks
async fn process_frame_parallel_static(
-detectors: &Vec<Arc<Mutex<Box<dyn MeteorDetector>>>>,
+detectors: &Vec<Arc<StdMutex<Box<dyn MeteorDetector>>>>,
frame: &core::Mat,
frame_index: u64,
) -> Vec<DetectionResult> {
@@ -336,12 +345,25 @@ impl DetectionPipeline {
// Spawn a task for this detector
let handle = tokio::spawn(async move {
-let mut detector = detector.lock().unwrap();
-match detector.process_frame(&frame, frame_index) {
+// Use a non-blocking operation to acquire the lock
+// We deliberately avoid async lock here since the detector
+// implementation isn't expecting async operations
+let detector_result = {
+let mut detector = match detector.try_lock() {
+Ok(guard) => guard,
+Err(_) => {
+error!("Failed to acquire detector lock");
+return DetectionResult::default();
+}
+};
+detector.process_frame(&frame, frame_index)
+};
+match detector_result {
Ok(result) => result,
Err(e) => {
-error!("Error processing frame with detector {}: {}",
-detector.get_id(), e);
+error!("Error processing frame: {}", e);
DetectionResult::default()
}
}
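The `try_lock` above keeps the std mutex away from any await point: the guard is taken and dropped inside a synchronous block, so the spawned future stays `Send`, and a contended lock skips the frame instead of blocking a tokio worker. A stripped-down sketch of the pattern, with a plain `u64` standing in for a detector:

use std::sync::{Arc, Mutex};

async fn locked_step(shared: Arc<Mutex<u64>>) -> Option<u64> {
    // Acquire, use, and release the guard without crossing an .await.
    match shared.try_lock() {
        Ok(mut value) => {
            *value += 1;
            Some(*value)
        }
        // Contended: bail out rather than block the async executor.
        Err(_) => None,
    }
}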