initial commit

commit 844f2552f2

.gitignore (vendored, new file, 33 lines)
@@ -0,0 +1,33 @@

# Rust build artifacts
/target/
**/*.rs.bk
Cargo.lock

# Generated by Cargo
# For binary executables
*.pdb

# Backup files
*.bak
*.swp
*~

# Local configuration
config.toml
.env

# Data directories
/data/
/events/

# IDE specific files
.idea/
.vscode/
*.iml

# Operating system files
.DS_Store
Thumbs.db

# Log files
*.log

Cargo.toml (new file, 55 lines)
@@ -0,0 +1,55 @@

[package]
name = "meteor_detect"
version = "0.1.0"
edition = "2021"
authors = ["Meteor Detection Team"]
description = "A Raspberry Pi based meteor detection system"

[dependencies]
# Hardware interfaces
rppal = "0.14.1"          # Raspberry Pi hardware access
v4l = "0.14.0"            # Video4Linux2 bindings
serialport = "4.2.0"      # Serial port for GPS
embedded-hal = "0.2.7"    # Hardware abstraction layer

# Video processing
opencv = { version = "0.79.0", features = ["contrib"] }   # OpenCV bindings
image = "0.24.6"          # Image processing

# Concurrency and async
tokio = { version = "1.28.0", features = ["full"] }       # Async runtime
async-trait = "0.1.68"    # Async traits
futures = "0.3.28"        # Future utilities

# Data handling
serde = { version = "1.0.160", features = ["derive"] }    # Serialization
serde_json = "1.0.96"     # JSON support
chrono = { version = "0.4.24", features = ["serde"] }     # Date and time
rusqlite = { version = "0.29.0", features = ["bundled"] } # SQLite

# Networking and communication
rumqttc = "0.20.0"        # MQTT client
actix-web = "4.3.1"       # Web framework for REST API
reqwest = { version = "0.11.17", features = ["json"] }    # HTTP client
gstreamer = "0.20.0"      # GStreamer bindings for media streaming
gstreamer-rtsp-server = "0.20.0"  # RTSP server

# Logging and monitoring
log = "0.4.17"            # Logging facade
env_logger = "0.10.0"     # Logger implementation
sysinfo = "0.29.0"        # System information

# Utilities
anyhow = "1.0.70"         # Error handling
thiserror = "1.0.40"      # Error definitions
config = "0.13.3"         # Configuration management
uuid = { version = "1.3.3", features = ["v4", "serde"] }  # UUIDs
clap = { version = "4.2.5", features = ["derive"] }       # Command line argument parsing

[dev-dependencies]
criterion = "0.4.0"       # Benchmarking
mockall = "0.11.4"        # Mocking for tests

[[example]]
name = "cams_detector_demo"
path = "examples/cams_detector_demo.rs"

README.md (new file, 130 lines)
@@ -0,0 +1,130 @@

# Meteor Detection System

A Raspberry Pi based real-time meteor monitoring system that captures and analyzes video from a low-light (starlight-grade) camera.

## Features

- **Real-time video capture**: supports low-light cameras with dynamically adjustable parameters
- **Automatic meteor detection**: analyzes video frames with computer-vision algorithms
- **GPS time and position synchronization**: provides precise event metadata
- **Local ring-buffer storage**: continuously records recent video
- **Cloud upload of detection events**: uploads event clips together with their metadata
- **Remote control interface**: via REST API and MQTT
- **System health monitoring**: automatic recovery and fault handling

## Project Structure

```
meteor_detect/
├── src/
│   ├── camera/              # Camera control and video capture
│   │   ├── controller.rs    # Camera controller
│   │   ├── frame_buffer.rs  # Video frame buffer
│   │   └── v4l2.rs          # V4L2 camera driver
│   ├── gps/                 # GPS and time synchronization
│   │   ├── controller.rs    # GPS controller
│   │   └── nmea.rs          # NMEA parser
│   ├── sensors/             # Environmental sensors
│   │   ├── controller.rs    # Sensor controller
│   │   └── dht22.rs         # Temperature/humidity sensor driver
│   ├── detection/           # Meteor detection algorithms
│   ├── storage/             # Data storage management
│   ├── communication/       # Communication and remote control
│   ├── monitoring/          # System monitoring
│   ├── config.rs            # Configuration management
│   └── main.rs              # Application entry point
├── Cargo.toml               # Project configuration and dependencies
├── config-example.toml      # Example configuration file
├── build.sh                 # Build script
└── README.md                # Project documentation
```

## Hardware Requirements

- **Raspberry Pi**: 3B+/4B/5 (recommended)
- **Low-light camera**: IMX477/IMX462 sensors supported
- **GPS module**: with PPS signal output (precise timing)
- **Optional sensors**: temperature/humidity (DHT22), light sensor
- **Storage**: 32 GB+ high-speed microSD card

## Software Dependencies

- **Rust 1.70+** (2021 edition)
- **OpenCV 4.x**
- **V4L2** camera utilities
- **SQLite 3**

## Quick Start

### Install dependencies

Install all dependencies in one step with the setup script:

```bash
./build.sh setup
```

Or install them manually:

```bash
# Install Rust
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh

# Install system dependencies
sudo apt update
sudo apt install -y git curl build-essential pkg-config \
    libssl-dev libv4l-dev v4l-utils \
    libopencv-dev libsqlite3-dev
```

### Build the project

```bash
# Debug build
./build.sh build

# Release build (optimized)
./build.sh build-release
```

### Configure the system

```bash
# Create a default configuration
./build.sh create-config

# Edit the configuration file
nano ~/.config/meteor_detect/config.toml
```

### Run the system

```bash
# Via the build script
./build.sh run

# Or run directly
cargo run --release
```

## Configuration Options

The configuration file lives at `~/.config/meteor_detect/config.toml` and covers these main areas:

- **Camera parameters**: resolution, exposure, gain
- **GPS settings**: port, baud rate, PPS configuration
- **Detection parameters**: sensitivity, trigger thresholds
- **Storage policy**: retention time, compression settings
- **Communication options**: MQTT broker, API configuration

See `config-example.toml` for a detailed example.

## Development Guide

- Run tests with `cargo test`
- Clean build artifacts with `./build.sh clean`
- See the [design document](docs/design.md) for the system architecture

## License

MIT License

build.sh (executable, new file, 159 lines)
@@ -0,0 +1,159 @@

#!/bin/bash
set -e

# Meteor Detection System build script

# Define colors for output
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m' # No Color

# Default configuration
CONFIG_DIR="$HOME/.config/meteor_detect"

# Print help message
function print_help {
    echo -e "${YELLOW}Meteor Detection System Build Script${NC}"
    echo ""
    echo "Usage: $0 [command]"
    echo ""
    echo "Commands:"
    echo "  build          Build the application in debug mode"
    echo "  build-release  Build the application in release mode"
    echo "  run            Run the application"
    echo "  clean          Clean build artifacts"
    echo "  setup          Install development dependencies"
    echo "  test           Run tests"
    echo "  create-config  Create a default configuration file"
    echo "  help           Show this help message"
    echo ""
}

# Build the application in debug mode
function build_debug {
    echo -e "${GREEN}Building in debug mode...${NC}"
    cargo build
}

# Build the application in release mode
function build_release {
    echo -e "${GREEN}Building in release mode...${NC}"
    cargo build --release
}

# Run the application
function run_app {
    echo -e "${GREEN}Running application...${NC}"
    cargo run
}

# Clean build artifacts
function clean {
    echo -e "${GREEN}Cleaning build artifacts...${NC}"
    cargo clean
}

# Run tests
function run_tests {
    echo -e "${GREEN}Running tests...${NC}"
    cargo test
}

# Install development dependencies
function setup {
    echo -e "${GREEN}Installing development dependencies...${NC}"

    # Check if running on Raspberry Pi
    if [ -f /etc/os-release ]; then
        . /etc/os-release
        if [[ "$ID" == "raspbian" ]]; then
            echo -e "${YELLOW}Detected Raspberry Pi OS${NC}"

            # Install system dependencies
            echo -e "${GREEN}Installing system dependencies...${NC}"
            sudo apt update
            sudo apt install -y git curl build-essential pkg-config \
                libssl-dev libv4l-dev v4l-utils \
                libopencv-dev libsqlite3-dev

            # Install Rust if not already installed
            if ! command -v rustc &> /dev/null; then
                echo -e "${GREEN}Installing Rust...${NC}"
                curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
                source "$HOME/.cargo/env"
            fi

            # Create configuration directory
            mkdir -p "$CONFIG_DIR"
        else
            echo -e "${YELLOW}Not running on Raspberry Pi OS, skipping system dependencies${NC}"
        fi
    fi

    # Check for Rust installation
    if ! command -v rustc &> /dev/null; then
        echo -e "${RED}Rust is not installed. Please install Rust from https://rustup.rs/${NC}"
        exit 1
    fi

    # Update Rust toolchain
    echo -e "${GREEN}Updating Rust toolchain...${NC}"
    rustup update

    echo -e "${GREEN}Setup complete!${NC}"
}

# Create default configuration
function create_config {
    echo -e "${GREEN}Creating default configuration...${NC}"

    mkdir -p "$CONFIG_DIR"

    if [ -f "$CONFIG_DIR/config.toml" ]; then
        echo -e "${YELLOW}Configuration file already exists at $CONFIG_DIR/config.toml${NC}"
        read -p "Overwrite? (y/N) " confirm
        if [[ "$confirm" != "y" && "$confirm" != "Y" ]]; then
            echo "Aborted"
            return 0
        fi
    fi

    cp config-example.toml "$CONFIG_DIR/config.toml"
    echo -e "${GREEN}Default configuration created at $CONFIG_DIR/config.toml${NC}"
}

# Main script entry point
case "$1" in
    "build")
        build_debug
        ;;
    "build-release")
        build_release
        ;;
    "run")
        run_app
        ;;
    "clean")
        clean
        ;;
    "setup")
        setup
        ;;
    "test")
        run_tests
        ;;
    "create-config")
        create_config
        ;;
    "help"|"")
        print_help
        ;;
    *)
        echo -e "${RED}Unknown command: $1${NC}"
        print_help
        exit 1
        ;;
esac

exit 0

config-example.toml (new file, 165 lines)
@@ -0,0 +1,165 @@

# Meteor Detection System Configuration

# Unique identifier for this detector (will be auto-generated if not specified)
device_id = "meteor-detector-01"

# Logging level (trace, debug, info, warn, error)
log_level = "info"

# Camera settings
[camera]
# Camera device path
device = "/dev/video0"
# Resolution (options: HD1080p, HD720p, VGA)
resolution = "HD720p"
# Frames per second
fps = 30
# Exposure mode (Auto or Manual exposure time in microseconds)
exposure = "Auto"
# Gain/ISO setting (0-255)
gain = 128
# Whether to lock focus at infinity
focus_locked = true

# GPS and time synchronization
[gps]
# Whether to enable GPS functionality
enable_gps = true
# Serial port for GPS module
port = "/dev/ttyAMA0"
# Baud rate
baud_rate = 9600
# Whether to use PPS signal for precise timing
use_pps = true
# GPIO pin for PPS signal (BCM numbering)
pps_pin = 18
# Allow system to run without GPS (using fallback position)
allow_degraded_mode = true

# Camera orientation
[gps.camera_orientation]
# Azimuth/heading in degrees (0 = North, 90 = East)
azimuth = 0.0
# Elevation/pitch in degrees (0 = horizontal, 90 = straight up)
elevation = 90.0

# Fallback GPS position (used when GPS is not available)
[gps.fallback_position]
# Latitude in degrees (positive is North, negative is South)
latitude = 34.0522
# Longitude in degrees (positive is East, negative is West)
longitude = -118.2437
# Altitude in meters above sea level
altitude = 85.0

# Environmental sensors
[sensors]
# Whether to use DHT22 temperature/humidity sensor
use_dht22 = true
# GPIO pin for DHT22 data (BCM numbering)
dht22_pin = 4
# Whether to use light sensor
use_light_sensor = true
# GPIO pin for light sensor analog input
light_sensor_pin = 0
# Sampling interval in seconds
sampling_interval = 10
# Whether to allow operation without sensors (using fallback values)
allow_degraded_mode = true
# Default temperature value when sensor is unavailable (Celsius)
fallback_temperature = 25.0
# Default humidity value when sensor is unavailable (0-100%)
fallback_humidity = 50.0
# Default sky brightness value when sensor is unavailable (0-1)
fallback_sky_brightness = 0.05

# Storage settings
[storage]
# Directory for storing raw video data
raw_video_dir = "data/raw"
# Directory for storing event video clips
event_video_dir = "data/events"
# Maximum disk space to use for storage (in MB)
max_disk_usage_mb = 10000
# Number of days to keep event data
event_retention_days = 30
# Whether to compress video files
compress_video = true

# Detection settings
[detection]
# Minimum brightness change to trigger detection
min_brightness_delta = 30.0
# Minimum number of pixels changed to trigger detection
min_pixel_change = 10
# Minimum number of consecutive frames to confirm event
min_frames = 3
# Number of seconds to save before/after event
event_buffer_seconds = 10
# Detection sensitivity (0.0-1.0)
sensitivity = 0.7

# Detection pipeline configuration (for multiple detectors)
[detection.pipeline]
# Maximum number of parallel detector workers
max_parallel_workers = 4
# Aggregation strategy: "any", "all", "majority"
aggregation_strategy = "any"

# Detector configurations
# You can include multiple detectors by adding more [[detection.pipeline.detectors]] sections

# Brightness detector configuration
[[detection.pipeline.detectors]]
type = "brightness"
# Unique ID for this detector
id = "brightness-main"
# Minimum brightness change to trigger detection
min_brightness_delta = 30.0
# Minimum number of pixels changed to trigger detection
min_pixel_change = 10
# Minimum number of consecutive frames to confirm event
min_frames = 3
# Detection sensitivity (0.0-1.0)
sensitivity = 0.7

# CAMS detector configuration
[[detection.pipeline.detectors]]
type = "cams"
# Unique ID for this detector
id = "cams-main"
# Brightness threshold for meteor detection in maxpixel image
brightness_threshold = 30
# Minimum ratio of stdpixel to avepixel for meteor detection
std_to_avg_ratio_threshold = 1.5
# Minimum number of pixels that must exceed thresholds
min_pixel_count = 10
# Minimum trajectory length (pixels) to be considered a meteor
min_trajectory_length = 5
# Whether to save feature images for all batches (not just detections)
save_all_feature_images = false
# Directory to save feature images
output_dir = "output/cams"
# Prefix for saved files
file_prefix = "meteor"

# Communication settings
[communication]
# MQTT broker URL
mqtt_broker = "mqtt://localhost:1883"
# MQTT client ID (will be auto-generated if not specified)
mqtt_client_id = "meteor-detector-01"
# MQTT credentials (optional)
mqtt_username = ""
mqtt_password = ""
# Topic for event notifications
event_topic = "meteor/events"
# Topic for system status
status_topic = "meteor/status"
# HTTP API port
api_port = 8080
# Whether to enable SSL for HTTP API
api_use_ssl = false
# Path to SSL certificate and key (only needed if api_use_ssl = true)
api_cert_path = ""
api_key_path = ""

docs/cams_ftp_format.md (new file, 88 lines)
@@ -0,0 +1,88 @@

# CAMS FTP Format Documentation

## Overview

The CAMS FTP (Cameras for All-sky Meteor Surveillance File Transfer Protocol) format is a data compression technique used in meteor detection systems. It was developed by Peter Jenniskens at the SETI Institute for the NASA-sponsored CAMS meteor survey. The format allows for efficient storage and processing of video data for meteor detection while preserving critical temporal and statistical information.

## Format Description

The CAMS FTP format compresses video data by taking a batch of 256 consecutive frames and generating 4 special images that retain the essential data needed for meteor detection:

1. **maxpixel**: Records the maximum brightness value for each pixel position across all 256 frames
2. **avepixel**: Calculates the average brightness of each pixel after excluding the maximum value
3. **stdpixel**: Computes the standard deviation of each pixel after excluding the maximum value
4. **maxframe**: Records which frame (0-255) contained the maximum brightness value for each pixel

By generating these four images, the storage requirement is reduced from 256 frames to just 4 images while preserving the most important information for meteor detection.

## Implementation Details

Our implementation in `src/detection/frame_stacker.rs` provides a complete solution for generating CAMS FTP format images:

### FrameStackerConfig

Configuration options for the frame stacker:

- `frames_per_stack`: Number of frames to stack (typically 256 for CAMS format)
- `save_stacked_frames`: Whether to save the produced images to disk
- `output_directory`: Directory for storing the output images
- `write_fits`: Option to write in FITS astronomical format instead of PNG
- `max_pixel_value`: Maximum pixel value (typically 255 for 8-bit images)

### Processing Algorithm

The frame stacking algorithm works as follows:

1. The system collects 256 consecutive frames (or the configured number)
2. For each pixel position across all frames:
   - **maxpixel**: Records the highest brightness value
   - **maxframe**: Records which frame contained that maximum value
   - **avepixel**: Calculates the average brightness after excluding the maximum value
   - **stdpixel**: Computes the standard deviation after excluding the maximum value

### Mathematical Formulation

Let's denote the pixel value at position (x,y) in frame i as p[i](x,y), where i ranges from 0 to 255:

- **maxpixel(x,y)** = max{p[0](x,y), p[1](x,y), ..., p[255](x,y)}
- **maxframe(x,y)** = argmax{p[i](x,y)} (the i value where p[i](x,y) equals maxpixel(x,y))
- **avepixel(x,y)** = (sum{p[i](x,y)} - maxpixel(x,y)) / 255
- **stdpixel(x,y)** = sqrt((sum{(p[i](x,y) - avepixel(x,y))²} - (maxpixel(x,y) - avepixel(x,y))²) / 255)

In the standard deviation calculation, we exclude the maximum value to focus on the background variation.
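
The following is a minimal, self-contained sketch of these formulas (it is not the crate's `frame_stacker.rs`; the function name and the flat `Vec<u8>` frame layout are assumptions for illustration):

```rust
// Compute the four CAMS feature images for a stack of grayscale frames.
// Frames are equally sized, row-major u8 buffers; the stack size is
// typically 256, so frame indices fit in the u8 maxframe image.
fn stack_frames(frames: &[Vec<u8>]) -> (Vec<u8>, Vec<u8>, Vec<u8>, Vec<u8>) {
    let n = frames.len();
    let len = frames[0].len();
    let mut maxpixel = vec![0u8; len];
    let mut maxframe = vec![0u8; len];
    let mut avepixel = vec![0u8; len];
    let mut stdpixel = vec![0u8; len];

    for p in 0..len {
        // Pass 1: running sum, maximum value, and the first frame reaching it.
        let (mut max_v, mut max_i) = (0u8, 0usize);
        let mut sum = 0.0f64;
        for (i, f) in frames.iter().enumerate() {
            let v = f[p];
            sum += v as f64;
            if v > max_v {
                max_v = v;
                max_i = i;
            }
        }
        // Average excluding the maximum sample: (sum - max) / (n - 1).
        let mean = (sum - max_v as f64) / (n - 1) as f64;

        // Pass 2: sum of squared deviations, minus the maximum sample's
        // contribution, matching the stdpixel formula above.
        let mut ss = 0.0f64;
        for f in frames {
            let d = f[p] as f64 - mean;
            ss += d * d;
        }
        ss -= (max_v as f64 - mean).powi(2);
        let std = (ss.max(0.0) / (n - 1) as f64).sqrt();

        maxpixel[p] = max_v;
        maxframe[p] = max_i as u8;
        avepixel[p] = mean.round().clamp(0.0, 255.0) as u8;
        stdpixel[p] = std.round().clamp(0.0, 255.0) as u8;
    }
    (maxpixel, maxframe, avepixel, stdpixel)
}
```

Sums are accumulated in f64 and results are rounded and clipped to 0-255, mirroring the considerations below.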

### Special Considerations

1. **Multiple Maximum Values**: If multiple frames have the same maximum value, our implementation uses the first occurrence.

2. **Handling Color Frames**: If input frames are not already grayscale, our system automatically converts them.

3. **Numerical Precision**: For accurate calculations, sums are accumulated in 64-bit floating point.

4. **Rounding and Clipping**: Results are rounded and clipped to 8-bit values (0-255) for the output images.

## Usage for Meteor Detection

The CAMS FTP format is specifically designed for meteor detection:

- **maxpixel**: Shows the meteor streak across the sky
- **maxframe**: Provides timing information for the meteor's trajectory
- **avepixel**: Represents the background sky
- **stdpixel**: Helps distinguish noise from real meteors

Meteors typically appear as bright streaks against the background in the maxpixel image, with sequential values in the maxframe image. By analyzing these patterns, our detection algorithms can identify meteor events while filtering out noise, aircraft, and other false positives.

## Performance Considerations

Processing 256 frames for CAMS FTP format can be computationally intensive. Our implementation:

- Uses efficient pixel-wise operations
- Handles memory carefully for larger frame sizes
- Processes in a background thread to avoid blocking the main application

For real-time applications on resource-constrained devices (like Raspberry Pi), consider reducing resolution or frame rate if processing cannot keep up with the incoming video.

## References

1. Jenniskens, P., Gural, P. S., Grigsby, B., et al. (2011). "CAMS: Cameras for Allsky Meteor Surveillance to establish minor meteor showers." Icarus, 216(1), 40-61.
2. CAMS Project Website: [http://cams.seti.org/](http://cams.seti.org/)

docs/design.md (new file, 172 lines)
@@ -0,0 +1,172 @@

# Meteor Detection System - Design Document

## Architecture Overview

The meteor detection system is a Raspberry Pi based embedded system designed for real-time monitoring and recording of meteor events. It uses a modular architecture in which components communicate through well-defined interfaces, keeping the system extensible and maintainable.

## Architecture Diagram

```
+---------------------------------------------------+
|                Application Layer                  |
+---------------------------------------------------+
      |             |              |          |
      v             v              v          v
+-----------+ +-----------+ +-------------+ +------------+
| Detection | |  Storage  | |Communication| | Monitoring |
+-----------+ +-----------+ +-------------+ +------------+
      |             |              |          |
      v             v              v          v
+---------------------------------------------------+
|          Hardware Abstraction Layer (HAL)         |
+---------------------------------------------------+
         |                |                |
         v                v                v
   +------------+   +------------+   +------------+
   |   Camera   |   |    GPS     |   |  Sensors   |
   +------------+   +------------+   +------------+
         |                |                |
         v                v                v
+---------------------------------------------------+
|                     Hardware                      |
+---------------------------------------------------+
```

## Module Details

### 1. Hardware Abstraction Layer (HAL)

#### 1.1 Camera Module

The camera module interacts with the low-light camera and provides:

- Camera initialization and parameter configuration
- Video stream capture and frame buffer management
- Video quality control and automatic parameter adjustment

Main components:
- `CameraController`: camera controller that manages the camera lifecycle and operations
- `FrameBuffer`: frame buffer that manages the video ring buffer
- `V4L2Driver`: low-level camera driver that talks to the hardware via V4L2

#### 1.2 GPS Module

The GPS module obtains precise time and position information:

- NMEA data parsing
- PPS signal handling for precise time synchronization
- Storage of camera pointing information

Main components:
- `GpsController`: GPS controller that manages the GPS device and data processing
- `NmeaParser`: NMEA protocol parser for raw GPS data

#### 1.3 Sensor Module

The sensor module collects environmental data:

- Temperature and humidity acquisition
- Light-level measurement
- Sensor data synchronization and caching

Main components:
- `SensorController`: sensor controller that manages all sensor types
- `Dht22Sensor`: DHT22 temperature/humidity sensor driver
- `LightSensor`: light-level sensor (camera-based or standalone)

### 2. Application Layer

#### 2.1 Detection Module

Computer-vision based meteor detection:

- Background modeling and frame differencing
- Brightness-change detection
- Trajectory extraction and analysis
- Event triggering and classification

Main components:
- `DetectionPipeline`: detection pipeline that coordinates the detection process
- `BackgroundSubtractionDetector`: background-subtraction based detector
- `MeteorTracker`: meteor trajectory tracker

#### 2.2 Storage Module

Manages local and remote data storage:

- Ring buffer for raw video
- Storage of event video clips
- Metadata management and database operations
- Storage space management and cleanup

Main components:
- `StorageManager`: storage manager that coordinates all storage operations

#### 2.3 Communication Module

Provides remote control and data transfer:

- MQTT event publishing and subscription
- HTTP REST API
- Remote configuration and status queries

Main components:
- `CommunicationManager`: communication manager that coordinates all channels

#### 2.4 Monitoring Module

Monitors system health and resource usage:

- CPU/memory/storage monitoring
- Temperature monitoring and overheat protection
- Log management and error reporting
- Self-healing on faults

Main components:
- `SystemMonitor`: system monitor that collects and analyzes system state

### 3. Configuration Management

A unified configuration system:

- TOML-based configuration files
- Configuration validation and default handling
- Runtime configuration updates

Main components:
- `Config`: configuration struct containing all system settings

## Data Flow

1. **Capture flow**: camera -> frame buffer -> detection algorithms -> event detection (see the sketch after this list)
2. **Storage flow**: event detection -> video clip extraction -> local storage -> cloud upload
3. **Notification flow**: event detection -> MQTT publish -> cloud services / mobile apps
4. **Control flow**: REST API -> configuration update -> modules apply the new configuration
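
A minimal sketch of the capture-to-storage path as channel-connected stages (the types and stage logic here are illustrative, not the crate's actual modules; the real system uses async tasks rather than plain threads):

```rust
use std::sync::mpsc;
use std::thread;

struct Frame { index: u64 }
struct Event { frame_index: u64 }

fn main() {
    let (frame_tx, frame_rx) = mpsc::channel::<Frame>();
    let (event_tx, event_rx) = mpsc::channel::<Event>();

    // Capture stage: pushes frames downstream.
    thread::spawn(move || {
        for index in 0..10 {
            frame_tx.send(Frame { index }).unwrap();
        }
    });

    // Detection stage: consumes frames, emits events (here, every third frame).
    thread::spawn(move || {
        for frame in frame_rx {
            if frame.index % 3 == 0 {
                event_tx.send(Event { frame_index: frame.index }).unwrap();
            }
        }
    });

    // Storage stage: persists events (here, just prints them).
    for event in event_rx {
        println!("storing event from frame {}", event.frame_index);
    }
}
```

Because each stage owns its state and communicates only through channels, a failure in one stage cannot corrupt the others, which is the basis of the fault-isolation strategy below.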

## Error Handling and Recovery Strategy

1. **Layered error handling**: each module handles its own errors internally so they do not affect the system as a whole
2. **Graceful degradation**: core functions take priority; secondary functions are automatically degraded when resources are constrained
3. **Automatic restart**: critical components restart automatically on failure
4. **Fault isolation**: components communicate via message passing to prevent fault propagation

## Performance Considerations

1. **Real-time behavior**: detection pipeline latency is kept under 100 ms
2. **Resource usage**: optimized for the Raspberry Pi's limited resources, with bounded memory and CPU use
3. **Storage efficiency**: storage space is managed automatically to prevent the disk from filling up
4. **Power control**: performance and features adapt to the power state

## Extensibility

1. **Modular design**: each functional module can be updated and replaced independently
2. **Plugin architecture**: detection algorithms support plugin-style extension
3. **Configuration driven**: most features can be enabled or disabled through configuration without code changes
4. **API first**: all functionality is accessed through internal APIs, making extension easier

## Security Considerations

1. **Encrypted communication**: all external communication uses TLS
2. **Access control**: API access requires authentication and authorization
3. **Firmware signing**: system update packages are verified with digital signatures
4. **Secure defaults**: the default configuration favors security

docs/detector_api.md (new file, 184 lines)
@@ -0,0 +1,184 @@

# Meteor Detector Interface and Parallel Pipeline

This document describes the detector abstraction and the parallel processing pipeline in the meteor detection system.

## Detector Interface

All meteor detectors implement a common interface, `MeteorDetector`, which makes them interchangeable and allows several to be used at once.

```rust
pub trait MeteorDetector: Send + Sync {
    // Process a single frame and return the detection result
    fn process_frame(&mut self, frame: &core::Mat, frame_index: u64) -> Result<DetectionResult>;

    // Reset the detector state
    fn reset(&mut self);

    // Get the detector configuration
    fn get_config(&self) -> DetectorConfig;

    // Get the detector's unique identifier
    fn get_id(&self) -> &str;
}
```

### Built-in Detectors

The system currently provides two detector implementations:

1. **BrightnessDetector**: a detector based on frame-to-frame brightness comparison, suited to fast meteors with pronounced brightness changes.

2. **CamsDetector**: a detector based on the CAMS FTP format, which uses the statistical features of 256 frames (maxpixel, avepixel, stdpixel, maxframe) to detect meteors.

### Detector Configuration

Detectors are configured through the `DetectorConfig` enum:

```rust
pub enum DetectorConfig {
    Brightness(BrightnessDetectorParams),
    Cams(CamsDetectorParams),
}
```

Each detector type has its own parameter struct holding the values that tune its behavior.

## Parallel Detection Pipeline

`DetectionPipeline` provides a framework for running multiple detectors in parallel. It can:

1. Run several detectors on the same frame simultaneously
2. Aggregate the results of multiple detectors
3. Derive the final detection result according to an aggregation strategy

### Pipeline Configuration

```rust
pub struct PipelineConfig {
    // List of detectors to use
    pub detectors: Vec<DetectorConfig>,
    // Maximum number of parallel worker threads
    pub max_parallel_workers: usize,
    // Event buffer duration (seconds)
    pub event_buffer_seconds: u32,
    // Result aggregation strategy
    pub aggregation_strategy: AggregationStrategy,
}
```

### Aggregation Strategies

The system supports several strategies for combining the results of multiple detectors into a final verdict:

```rust
pub enum AggregationStrategy {
    // Any detector reporting a detection counts as valid
    Any,
    // All detectors must report a detection
    All,
    // A majority of detectors must report a detection
    Majority,
    // Custom threshold: e.g. Threshold(0.6) requires at least 60% of detectors
    Threshold(f32),
}
```
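
As a sketch of how these variants could be folded into a single verdict (the helper name is hypothetical; the real pipeline aggregates richer `DetectionResult` values, not plain booleans):

```rust
fn aggregate(strategy: &AggregationStrategy, results: &[bool]) -> bool {
    let positives = results.iter().filter(|&&r| r).count();
    let total = results.len();
    match strategy {
        // At least one positive result.
        AggregationStrategy::Any => positives > 0,
        // Every detector agrees (an empty set does not count as a detection).
        AggregationStrategy::All => total > 0 && positives == total,
        // Strictly more than half agree.
        AggregationStrategy::Majority => positives * 2 > total,
        // At least the given fraction agrees.
        AggregationStrategy::Threshold(t) => {
            total > 0 && (positives as f32 / total as f32) >= *t
        }
    }
}
```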

## Usage Examples

### Creating a Single Detector

```rust
// Create a brightness detector with default parameters
let brightness_detector = BrightnessDetector::new();

// Create a CAMS detector with custom parameters
let params = CamsDetectorParams {
    brightness_threshold: 30,
    std_to_avg_ratio_threshold: 1.5,
    min_pixel_count: 10,
    min_trajectory_length: 5,
    save_all_feature_images: false,
    output_dir: PathBuf::from("output"),
    file_prefix: "meteor".to_string(),
    id: "cams-1".to_string(),
};
let cams_detector = CamsDetector::with_params(params);
```

### Creating a Parallel Pipeline

```rust
// Define configurations for two detectors
let detector_configs = vec![
    DetectorConfig::Brightness(BrightnessDetectorParams::default()),
    DetectorConfig::Cams(CamsDetectorParams::default()),
];

// Create the pipeline configuration
let pipeline_config = PipelineConfig {
    detectors: detector_configs,
    max_parallel_workers: 4,
    event_buffer_seconds: 10,
    aggregation_strategy: AggregationStrategy::Any,
};

// Create the detection pipeline
let pipeline = DetectionPipeline::new(
    camera_controller,
    &config,
    Some(pipeline_config),
)?;

// Start the pipeline
pipeline.run().await?;
```

### Defining the Pipeline in the Configuration File

The entire detection pipeline can be defined in `config.toml`:

```toml
[detection]
min_brightness_delta = 30.0
min_pixel_change = 10
min_frames = 3
event_buffer_seconds = 10
sensitivity = 0.7

[detection.pipeline]
max_parallel_workers = 4
aggregation_strategy = "any"  # one of "any", "all", "majority"

[[detection.pipeline.detectors]]
type = "brightness"
min_brightness_delta = 30.0
min_pixel_change = 10
min_frames = 3
sensitivity = 0.7
id = "brightness-1"

[[detection.pipeline.detectors]]
type = "cams"
brightness_threshold = 30
std_to_avg_ratio_threshold = 1.5
min_pixel_count = 10
min_trajectory_length = 5
save_all_feature_images = false
output_dir = "output/cams"
file_prefix = "meteor"
id = "cams-1"
```

## Performance Considerations

- Parallel execution improves throughput but increases memory and CPU usage
- On resource-constrained devices (such as a Raspberry Pi), consider reducing the number of parallel detectors or running them sequentially
- Some detectors (such as CamsDetector) need to accumulate many frames before they can complete a detection

## Extending

To add a new detector type (see the sketch after this list):

1. Create a new struct and implement the `MeteorDetector` trait for it
2. Add a new variant to the `DetectorConfig` enum
3. Update the `DetectorFactory::create()` method to construct the new detector
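
A hedged sketch of step 1 for a hypothetical `LineDetector` (the struct, its fields, and the placeholder logic are illustrative; this assumes `DetectionResult` implements `Default` and `DetectorConfig` implements `Clone`):

```rust
pub struct LineDetector {
    id: String,
    config: DetectorConfig,
}

impl MeteorDetector for LineDetector {
    fn process_frame(&mut self, frame: &core::Mat, frame_index: u64) -> Result<DetectionResult> {
        // Placeholder: a real detector would analyze the frame here,
        // e.g. extract line segments and track them across frames.
        let _ = (frame, frame_index);
        Ok(DetectionResult::default())
    }

    fn reset(&mut self) {
        // Clear any state accumulated across frames.
    }

    fn get_config(&self) -> DetectorConfig {
        self.config.clone()
    }

    fn get_id(&self) -> &str {
        &self.id
    }
}
```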

docs/detector_configuration.md (new file, 217 lines)
@@ -0,0 +1,217 @@

# Meteor Detector Configuration Guide

This document explains how to configure the available meteor detectors and how to choose between them. The system supports two detector types:

1. **BrightnessDetector** - a simple detector based on frame-to-frame brightness comparison
2. **CamsDetector** - a frame-stacking detector based on the CAMS FTP format

## Brightness Detector (BrightnessDetector)

The brightness detector finds meteors by analyzing brightness changes between consecutive video frames. It triggers a detection event when the change exceeds a threshold and persists for a minimum number of frames.

### Configuration Parameters

The brightness detector takes the following parameters:

| Parameter | Type | Default | Description |
|------------------------|--------|----------------|------------------------------------------------|
| `min_brightness_delta` | f32 | 30.0 | Minimum brightness change threshold (0-255) |
| `min_pixel_change` | u32 | 10 | Minimum number of changed pixels to trigger |
| `min_frames` | u32 | 3 | Minimum number of consecutive frames required |
| `sensitivity` | f32 | 0.7 | Sensitivity factor (0.0-1.0) |
| `id` | String | auto-generated | Unique detector identifier |

### Recommended Settings

1. **Urban, light-polluted sites**:
   - `min_brightness_delta`: 40-50 (a higher threshold reduces false positives)
   - `min_pixel_change`: 15-20
   - `min_frames`: 4-5 (more confirmation frames suppress flashes and noise)
   - `sensitivity`: 0.6-0.7

2. **Dark rural sites**:
   - `min_brightness_delta`: 20-30 (a lower threshold captures fainter meteors)
   - `min_pixel_change`: 8-10
   - `min_frames`: 2-3 (fewer confirmation frames capture fast meteors)
   - `sensitivity`: 0.8-0.9

### When to Use It

The brightness detector is a good fit for:
- Resource-constrained devices (such as a Raspberry Pi)
- Scenarios that require real-time detection
- Detecting bright, fast meteors
- Systems with too little memory to stack 256 frames

## CAMS Detector (CamsDetector)

The CAMS detector is based on the CAMS (Cameras for Allsky Meteor Surveillance) FTP format: it collects 256 video frames and generates four feature images (maxpixel, avepixel, stdpixel, maxframe) to detect meteors. This approach can find very faint meteors but needs more compute and memory. A sketch of how its thresholds combine follows below.
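
A heavily hedged sketch of how the parameters below plausibly combine into a per-batch candidate test (the function is hypothetical; the real `CamsDetector` logic lives in the crate and additionally checks trajectory length via maxframe):

```rust
// Count pixels that are both bright in maxpixel and unusually variable
// relative to the background (stdpixel / avepixel above the ratio threshold).
fn is_candidate(
    maxpixel: &[u8],
    avepixel: &[u8],
    stdpixel: &[u8],
    brightness_threshold: u8,
    std_to_avg_ratio_threshold: f32,
    min_pixel_count: u32,
) -> bool {
    let mut count: u32 = 0;
    for i in 0..maxpixel.len() {
        let bright = maxpixel[i] > brightness_threshold;
        let avg = avepixel[i].max(1) as f32; // guard against division by zero
        let variable = stdpixel[i] as f32 / avg > std_to_avg_ratio_threshold;
        if bright && variable {
            count += 1;
        }
    }
    count >= min_pixel_count
}
```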

### Configuration Parameters

The CAMS detector takes the following parameters:

| Parameter | Type | Default | Description |
|------------------------------|---------|----------------|----------------------------------------------------------------------|
| `brightness_threshold` | u8 | 30 | Brightness threshold in the maxpixel image (0-255) |
| `std_to_avg_ratio_threshold` | f32 | 1.5 | Threshold on the stdpixel-to-avepixel ratio |
| `min_pixel_count` | u32 | 10 | Minimum number of pixels meeting the criteria |
| `min_trajectory_length` | u32 | 5 | Minimum trajectory length (pixels) |
| `save_all_feature_images` | bool | false | Whether to save feature images for all batches, not just detections |
| `output_dir` | PathBuf | "output" | Output directory |
| `file_prefix` | String | "meteor" | File prefix |
| `id` | String | auto-generated | Unique detector identifier |

### Recommended Settings

1. **Urban, light-polluted sites**:
   - `brightness_threshold`: 35-45
   - `std_to_avg_ratio_threshold`: 1.8-2.0 (a higher ratio reduces the impact of light pollution)
   - `min_pixel_count`: 12-15
   - `min_trajectory_length`: 6-8

2. **Dark rural sites**:
   - `brightness_threshold`: 20-30
   - `std_to_avg_ratio_threshold`: 1.3-1.5
   - `min_pixel_count`: 8-10
   - `min_trajectory_length`: 4-5

### When to Use It

The CAMS detector is a good fit for:
- Well-resourced devices (such as desktop machines or high-performance single-board computers)
- Research-grade meteor detection
- Scenarios that need to capture faint meteors
- Scenarios with high accuracy requirements

## Configuring Detectors in the Configuration File

The detection pipeline and its detectors can be configured in `config.toml`:

```toml
# Basic detection parameters (kept for backward compatibility)
[detection]
min_brightness_delta = 30.0
min_pixel_change = 10
min_frames = 3
event_buffer_seconds = 10
sensitivity = 0.7

# Detection pipeline configuration
[detection.pipeline]
max_parallel_workers = 4
# Aggregation strategy: any (any detector), all (all detectors), majority (most detectors)
aggregation_strategy = "any"

# Brightness detector configuration
[[detection.pipeline.detectors]]
type = "brightness"
min_brightness_delta = 30.0
min_pixel_change = 10
min_frames = 3
sensitivity = 0.7
id = "brightness-main"

# CAMS detector configuration
[[detection.pipeline.detectors]]
type = "cams"
brightness_threshold = 30
std_to_avg_ratio_threshold = 1.5
min_pixel_count = 10
min_trajectory_length = 5
save_all_feature_images = false
output_dir = "output/cams"
file_prefix = "meteor"
id = "cams-main"
```

## Using the Example Program from the Command Line

The example program `cams_detector_demo` provides a flexible command-line interface for testing the different detectors:

```bash
# Use the brightness detector
cargo run --example cams_detector_demo -- \
    --input your_video.mp4 \
    --detector brightness \
    --min-brightness-delta 35.0 \
    --min-pixel-change 12 \
    --min-frames 4 \
    --sensitivity 0.8 \
    --display

# Use the CAMS detector
cargo run --example cams_detector_demo -- \
    --input your_video.mp4 \
    --detector cams \
    --brightness-threshold 30 \
    --std-ratio-threshold 1.5 \
    --min-pixel-count 10 \
    --min-trajectory-length 5 \
    --save-all \
    --display

# Use both detectors at once (parallel processing)
cargo run --example cams_detector_demo -- \
    --input your_video.mp4 \
    --detector both \
    --parallel \
    --aggregation majority \
    --display
```

## Choosing a Detector

Guidelines for choosing a detector:

1. **By hardware resources**:
   - On resource-constrained devices (such as a Raspberry Pi), prefer the brightness detector
   - On high-performance devices, use the CAMS detector or both together

2. **By observing environment**:
   - In heavily light-polluted urban environments the brightness detector tends to produce more false positives; the CAMS detector is more stable
   - In dark rural environments both detectors work well

3. **By detection target**:
   - If you mainly care about bright fireballs, the brightness detector is sufficient
   - If you want to capture faint meteors or do statistical science, the CAMS detector is the better fit

4. **Best practice**:
   - If resources allow, run both detectors with the "any" aggregation strategy to maximize the capture rate
   - To reduce false positives, use the "all" aggregation strategy
   - The "majority" strategy offers a balance as more detectors are added

## Inspecting the Current Configuration

The system log shows which detector configuration is in use:

```bash
# Look for detector information in the log
grep "detector" meteor_detect.log

# Run the example program with a more verbose log level
RUST_LOG=debug cargo run --example cams_detector_demo -- --input video.mp4
```

You can also write a small snippet to print the current configuration:

```rust
// Print the current detector configurations
for detector in pipeline.get_detector_configs() {
    match detector {
        DetectorConfig::Brightness(params) => {
            println!("Brightness detector: {}", params.id);
            println!("  min_brightness_delta: {}", params.min_brightness_delta);
            println!("  min_pixel_change: {}", params.min_pixel_change);
            println!("  min_frames: {}", params.min_frames);
            println!("  sensitivity: {}", params.sensitivity);
        },
        DetectorConfig::Cams(params) => {
            println!("CAMS detector: {}", params.id);
            println!("  brightness_threshold: {}", params.brightness_threshold);
            println!("  std_to_avg_ratio_threshold: {}", params.std_to_avg_ratio_threshold);
            println!("  min_pixel_count: {}", params.min_pixel_count);
            println!("  min_trajectory_length: {}", params.min_trajectory_length);
        }
    }
}
```

docs/enhancements.md (new file, 130 lines)
@@ -0,0 +1,130 @@

# Meteor Detection System Enhancements

This document provides an overview of the recent enhancements made to the meteor detection system, focusing on advanced features for video processing, frame analysis, and data sharing.

## Table of Contents

- [Meteor Detection System Enhancements](#meteor-detection-system-enhancements)
  - [Table of Contents](#table-of-contents)
  - [CAMS FTP Format Implementation](#cams-ftp-format-implementation)
  - [Watermark Overlay System](#watermark-overlay-system)
  - [Frame Hook System](#frame-hook-system)
  - [RTSP Streaming](#rtsp-streaming)
  - [System Integration](#system-integration)
  - [Configuration Options](#configuration-options)
  - [Conclusion](#conclusion)

## CAMS FTP Format Implementation

The CAMS FTP format is a specialized technique for meteor detection that compresses 256 consecutive video frames into 4 feature images that preserve key information needed for analysis:

- **maxpixel**: Records maximum brightness value across frames
- **avepixel**: Calculates average brightness (excluding maximum)
- **stdpixel**: Computes standard deviation (excluding maximum)
- **maxframe**: Records which frame contained the maximum value

Our implementation provides a complete, optimized solution for generating these feature images, allowing for efficient storage and analysis of large volumes of video data.

[Detailed Documentation: CAMS FTP Format](cams_ftp_format.md)

## Watermark Overlay System

The watermark overlay system adds critical contextual information directly onto video frames, including:

- Precise timestamps
- GPS coordinates and camera orientation
- Environmental data (temperature, humidity)
- Custom text

This feature ensures that all scientific observations are properly annotated with metadata for validation and analysis. The system is highly configurable, allowing for different positioning, formatting, and content.

[Detailed Documentation: Watermark Overlay](watermark_overlay.md)

## Frame Hook System

The frame hook system provides a flexible framework for processing video frames at different stages in the pipeline. It allows for:

- Adding modular frame processing capabilities
- Enabling/disabling processing features at runtime
- Implementing custom processing hooks
- Organizing hooks in a priority-based execution order

This extensible architecture enables easy addition of new video processing features without modifying core application code.

[Detailed Documentation: Hook System](hook_system.md)

## RTSP Streaming

The RTSP streaming system broadcasts live video feeds over a network using the industry-standard Real-Time Streaming Protocol. Features include:

- Multiple quality presets (Low, Medium, High, Custom)
- Configurable network settings (port, mount point)
- Optional authentication
- Runtime configuration updates

This enables remote monitoring, collaborative observation, and integration with other systems that support standard video streaming protocols.

[Detailed Documentation: RTSP Streaming](rtsp_streaming.md)

## System Integration

All of these enhancements are fully integrated into the meteor detection system:

1. **Data Flow**:
   - Camera frames → Frame hooks (including watermark) → Detection pipeline → CAMS format processing → RTSP streaming

2. **Shared Resources**:
   - GPS data is used for both watermark overlay and meteor trajectory calculations
   - Environmental data is used for both watermarking and sensor calibration
   - Frame buffer is shared between detection and streaming systems

3. **Configuration**:
   - All new features are configurable through the central configuration system
   - Default settings provide sensible values for most deployments
   - Settings can be changed at runtime for many features

## Configuration Options

The enhancements are configured through the application's main configuration file:

```toml
# Configuration for watermark overlay
[watermark]
enabled = true
position = "BottomLeft"  # "TopLeft", "TopRight", "BottomLeft", "BottomRight", or [x, y] custom position
font_scale = 0.6
thickness = 1
color = [255, 255, 255, 255]       # White (BGRA)
background = true
background_color = [0, 0, 0, 128]  # Semi-transparent black (BGRA)
padding = 8
content = ["Timestamp", "GpsCoordinates", "Environment"]  # Can also include "CameraOrientation" or "Custom"
time_format = "%Y-%m-%d %H:%M:%S%.3f"
coordinate_format = "decimal"      # "decimal" or "dms"
temperature_format = "C"           # "C" or "F"

# Configuration for RTSP streaming
[rtsp]
enabled = true
port = 8554
mount_point = "/meteor"
quality = "Medium"      # "Low", "Medium", "High", or "Custom"
custom_width = 1280     # Only used if quality = "Custom"
custom_height = 720     # Only used if quality = "Custom"
custom_bitrate = 1500   # In kbps, only used if quality = "Custom"
custom_framerate = 30   # Only used if quality = "Custom"
username = "admin"      # Optional
password = "password"   # Optional

# Configuration for frame stacking
[detection.stacker]
frames_per_stack = 256
save_stacked_frames = true
output_directory = "data/stacked"
write_fits = false
max_pixel_value = 255
```

## Conclusion

These enhancements significantly expand the capabilities of the meteor detection system, adding critical features for scientific observation, data preservation, and collaboration. The modular design ensures that these features can be further extended and customized as needed, while the comprehensive documentation provides clear guidance for both users and developers.

docs/hook_system.md (new file, 267 lines)
@@ -0,0 +1,267 @@

# Frame Hook System

## Overview

The Frame Hook System provides a flexible and extensible mechanism for processing video frames at various stages in the meteor detection pipeline. It allows for modular frame processing capabilities such as overlay addition, filtering, enhancement, and analysis without modifying the core detection pipeline.

## Purpose

This hook-based architecture offers several key benefits:

1. **Modularity**: New frame processing capabilities can be added without modifying existing code
2. **Configurability**: Individual hooks can be enabled or disabled as needed
3. **Reusability**: Common frame processing tasks can be encapsulated and reused
4. **Prioritization**: Hooks can be ordered to ensure correct processing sequence
5. **Extensibility**: Third-party hooks can be easily integrated

## Core Components

### FrameHook Trait

The foundation of the hook system is the `FrameHook` trait, defined in `src/hooks/mod.rs`:

```rust
pub trait FrameHook: Send + Sync {
    fn process_frame(&mut self, frame: &mut core::Mat, timestamp: DateTime<Utc>) -> Result<()>;
    fn get_id(&self) -> &str;
    fn get_name(&self) -> &str;
    fn get_description(&self) -> &str;
    fn is_enabled(&self) -> bool;
    fn set_enabled(&mut self, enabled: bool);
}
```

This trait defines the essential capabilities that all hooks must implement:

- **Processing Function**: The `process_frame` method receives a frame and applies transformations
- **Identification**: Methods to get the hook's ID, name, and description
- **State Management**: Methods to check and change the hook's enabled state

### HookManager

The `HookManager` struct manages a collection of hooks and is responsible for:

- Registering new hooks
- Removing hooks
- Executing hooks in sequence on frames
- Providing access to hooks for configuration

```rust
pub struct HookManager {
    hooks: Vec<SharedFrameHook>,
}
```

The manager maintains a vector of thread-safe hook references (`SharedFrameHook`) to allow concurrent access.
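
A minimal sketch of the execution step (assuming `SharedFrameHook` is an `Arc<Mutex<Box<dyn FrameHook>>>`; the method body is illustrative rather than the crate's exact implementation):

```rust
impl HookManager {
    // Run every enabled hook over the frame, in registration order.
    pub fn process_frame(&self, frame: &mut core::Mat, timestamp: DateTime<Utc>) -> Result<()> {
        for shared in &self.hooks {
            let mut hook = shared.lock().unwrap();
            if hook.is_enabled() {
                hook.process_frame(frame, timestamp)?;
            }
        }
        Ok(())
    }
}
```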

### BasicFrameHook

For simple use cases, the `BasicFrameHook` implementation provides a convenient way to create hooks with closures:

```rust
pub struct BasicFrameHook {
    id: String,
    name: String,
    description: String,
    enabled: bool,
    processor: Box<dyn Fn(&mut core::Mat, DateTime<Utc>) -> Result<()> + Send + Sync>,
}
```

This allows for creating hooks without implementing the full trait:

```rust
let hook = BasicFrameHook::new(
    "brightness",
    "Brightness Adjustment",
    "Adjusts frame brightness by a configurable amount",
    true,
    |frame, _| {
        // Increase brightness by 30
        frame.convert_to(frame, -1, 1.0, 30.0)?;
        Ok(())
    }
);
```

## Usage Examples

### Watermark Overlay Hook

The watermark overlay system is integrated using the hook system:

```rust
// Create a watermark hook
let watermark_hook = hooks::BasicFrameHook::new(
    "watermark",
    "Watermark Overlay",
    "Adds timestamp, GPS, and sensor data overlay to frames",
    config.watermark.enabled,
    move |frame, timestamp| {
        let mut watermark_instance = watermark.lock().unwrap();
        watermark_instance.apply(frame, timestamp)?;
        Ok(())
    }
);

// Register with the hook manager
hook_manager.lock().await.register_hook(Box::new(watermark_hook));
```

### Image Enhancement Hook

A hook for enhancing low-light frames could be implemented as:

```rust
let enhancement_hook = hooks::BasicFrameHook::new(
    "enhance",
    "Low-light Enhancement",
    "Enhances visibility in low-light conditions",
    true,
    |frame, _| {
        // Convert to YUV color space
        let mut yuv = core::Mat::default();
        imgproc::cvt_color(frame, &mut yuv, imgproc::COLOR_BGR2YUV, 0)?;

        // Split channels
        let mut channels = types::VectorOfMat::new();
        core::split(&yuv, &mut channels)?;

        // Apply CLAHE to Y channel
        let clahe = imgproc::create_clahe(2.0, core::Size::new(8, 8))?;
        clahe.apply(&channels.get(0)?, &mut channels.get_mut(0)?)?;

        // Merge channels back
        core::merge(&channels, &mut yuv)?;

        // Convert back to BGR
        imgproc::cvt_color(&yuv, frame, imgproc::COLOR_YUV2BGR, 0)?;

        Ok(())
    }
);

hook_manager.lock().await.register_hook(Box::new(enhancement_hook));
```

### Debug Information Hook

A hook for adding debug information to frames during development:

```rust
let debug_hook = hooks::BasicFrameHook::new(
    "debug",
    "Debug Overlay",
    "Adds debug information for development",
    cfg!(debug_assertions),
    |frame, timestamp| {
        // Add frame rate information
        static mut LAST_FRAME: Option<DateTime<Utc>> = None;
        static mut FRAME_COUNTER: u32 = 0;
        static mut FRAME_RATE: f64 = 0.0;

        unsafe {
            FRAME_COUNTER += 1;

            if let Some(last) = LAST_FRAME {
                let duration = timestamp - last;
                if duration.num_milliseconds() > 1000 {
                    FRAME_RATE = FRAME_COUNTER as f64 / duration.num_seconds() as f64;
                    FRAME_COUNTER = 0;
                    LAST_FRAME = Some(timestamp);
                }
            } else {
                LAST_FRAME = Some(timestamp);
            }

            imgproc::put_text(
                frame,
                &format!("FPS: {:.1}", FRAME_RATE),
                core::Point::new(10, 30),
                imgproc::FONT_HERSHEY_SIMPLEX,
                1.0,
                core::Scalar::new(0.0, 255.0, 0.0, 255.0),
                2,
                imgproc::LINE_AA,
                false,
            )?;
        }

        Ok(())
    }
);
```

## Advanced Usage

### Hook Prioritization

Although not explicitly implemented in the current version, the hook system design allows for future extension to support ordered hook execution:

```rust
// Future enhancement - add priority to hooks
hook_manager.register_hook_with_priority(Box::new(preprocessing_hook), 10);
hook_manager.register_hook_with_priority(Box::new(enhancement_hook), 20);
hook_manager.register_hook_with_priority(Box::new(watermark_hook), 30);
```

### Conditional Hooks

Hooks can be created that only activate under certain conditions:

```rust
let night_mode_hook = hooks::BasicFrameHook::new(
    "night_mode",
    "Night Mode Enhancement",
    "Enhances frames during night time",
    true,
    move |frame, timestamp| {
        // Only apply during night hours (8 PM to 6 AM)
        let hour = timestamp.hour();
        if hour >= 20 || hour < 6 {
            // Apply night-time enhancement
            // ...
        }
        Ok(())
    }
);
```

### Dynamic Hook Configuration

The hook system allows for runtime configuration changes:

```rust
// Find a hook by ID and update its configuration
if let Some(hook) = hook_manager.lock().await.get_hook("watermark") {
    let mut hook = hook.lock().unwrap();
    hook.set_enabled(new_config.watermark.enabled);
}
```

## Performance Considerations

The hook system is designed with performance in mind:

1. **Minimal Overhead**: Hooks that are disabled have almost zero impact on performance
2. **Thread Safety**: The use of `Arc<Mutex<>>` enables safe concurrent access
3. **Lazy Evaluation**: Hooks are only processed when frames are being processed
4. **Efficient Registration**: Hook registration is a one-time cost at startup

For resource-constrained environments, consider:
- Limiting the number of active hooks
- Optimizing individual hook processing functions
- Using condition checks to skip processing when not needed

## Extending the System

The hook system can be extended in several ways:

1. **Adding New Hooks**: Implement the `FrameHook` trait for new functionality
2. **Hook Factory**: Create a factory pattern for generating hooks from configuration
3. **Hook Pipelines**: Group hooks into pipelines for different processing scenarios
4. **Hook Events**: Add event callbacks for hook lifecycle events

## Conclusion

The Frame Hook System provides a powerful and flexible architecture for extending the frame processing capabilities of the meteor detection system. By breaking down frame processing into modular hooks, it enables a clean separation of concerns and allows for easy addition of new features without modifying existing code.

docs/rtsp_streaming.md (new file, 197 lines)
@@ -0,0 +1,197 @@

# RTSP Streaming System

## Overview

The RTSP (Real-Time Streaming Protocol) Streaming System enables the meteor detection system to broadcast live video feeds over a network. This allows for remote monitoring, collaborative observation, and integration with other systems that support standard video streaming protocols.

## Key Features

1. **Standardized Protocol**: Uses RTSP, a widely supported streaming protocol compatible with many client applications including VLC, FFmpeg, and GStreamer

2. **Configurable Quality**: Supports multiple quality presets to balance bandwidth usage against video detail:
   - Low (480p, low bitrate)
   - Medium (720p, medium bitrate)
   - High (original resolution, high bitrate)
   - Custom (user-defined resolution, bitrate, and framerate)

3. **Network Control**: Provides configuration for server port and mount point for flexible deployment scenarios

4. **Optional Authentication**: Supports username/password authentication for secure access control

5. **Adaptive Behavior**: Can respond to configuration changes at runtime without an application restart

## Implementation Details

The RTSP streaming system is implemented in `src/streaming/rtsp.rs` and consists of:

### RtspConfig

Configuration struct that controls streaming behavior:

```rust
pub struct RtspConfig {
    pub enabled: bool,
    pub port: u16,
    pub mount_point: String,
    pub quality: StreamQuality,
    pub custom_width: Option<u32>,
    pub custom_height: Option<u32>,
    pub custom_bitrate: Option<u32>,
    pub custom_framerate: Option<u32>,
    pub username: Option<String>,
    pub password: Option<String>,
}
```

### StreamQuality

Enum specifying predefined quality levels:

```rust
pub enum StreamQuality {
    Low,
    Medium,
    High,
    Custom,
}
```

### RtspServer

The core server struct that manages the stream:

```rust
pub struct RtspServer {
    config: RtspConfig,
    frame_rx: Option<mpsc::Receiver<Frame>>,
    is_running: Arc<Mutex<bool>>,
    gst_process: Option<Child>,
    frame_tx: Option<mpsc::Sender<Frame>>,
}
```

The server struct provides methods for:
- Starting and stopping the stream
- Feeding frames to the stream
- Updating configuration at runtime
- Checking stream status
- Getting the stream URL

## Technical Implementation

The RTSP system uses GStreamer as the underlying streaming engine:

1. **Pipeline Architecture**:
   - Input: OpenCV frames from the camera module
   - Processing: Conversion to H.264 video
   - Output: RTSP streams via rtsp2sink

2. **Data Flow** (see the sketch after this list):
   - Frames are received through a `tokio::sync::mpsc` channel
   - They are processed and fed to the GStreamer pipeline
   - GStreamer handles encoding and network transmission

3. **External Process**:
   - The GStreamer pipeline runs as a separate process
   - Communication happens through standard input/output
   - This improves stability by isolating potential crashes
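
A minimal sketch of the frame-feeding side of this data flow (the `Frame` placeholder and the function are illustrative; the real types live in the crate). A bounded channel plus `try_send` keeps a slow encoder from stalling capture or exhausting memory:

```rust
use tokio::sync::mpsc;

// Placeholder for the crate's Frame type.
struct Frame;

async fn feed_frames(mut camera_rx: mpsc::Receiver<Frame>, server_tx: mpsc::Sender<Frame>) {
    while let Some(frame) = camera_rx.recv().await {
        // If the encoder cannot keep up, drop this frame rather than block:
        // a live stream prefers fresh frames over complete ones.
        let _ = server_tx.try_send(frame);
    }
}
```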
## Integration with the Application

The RTSP server integrates with the meteor detection application as follows (a frame-feeding sketch appears after this list):

1. **Initialization**: In `main.rs`, an RTSP server instance is created during application startup
2. **Configuration**: Settings are loaded from the application configuration file
3. **Frame Feeding**: A dedicated task continuously feeds frames from the frame buffer to the RTSP server
4. **Lifecycle Management**: The server is started and stopped based on configuration and application state
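A minimal sketch of the frame-feeding task, assuming the `CameraController::subscribe_to_frames` broadcast API from `src/camera/controller.rs` and the illustrative `is_running`/`feed_frame` methods above:

```rust
use std::sync::{Arc, Mutex};

use meteor_detect::camera::CameraController;
use meteor_detect::streaming::RtspServer;

// Hypothetical wiring; error handling is reduced to logging, and a lagged
// broadcast receiver is treated as closed for brevity.
fn spawn_frame_feeder(controller: &CameraController, server: Arc<Mutex<RtspServer>>) {
    let mut rx = controller.subscribe_to_frames();
    tokio::spawn(async move {
        while let Ok(frame) = rx.recv().await {
            let mut server = server.lock().unwrap();
            if server.is_running() {
                if let Err(e) = server.feed_frame(frame) {
                    log::warn!("Dropping frame for RTSP stream: {}", e);
                }
            }
        }
    });
}
```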
## Configuration Example

The RTSP server can be configured in the application configuration file:

```toml
[rtsp]
enabled = true
port = 8554
mount_point = "/meteor"
quality = "Medium"      # "Low", "Medium", "High", or "Custom"
custom_width = 1280     # Only used if quality = "Custom"
custom_height = 720     # Only used if quality = "Custom"
custom_bitrate = 2000   # In kbps, only used if quality = "Custom"
custom_framerate = 30   # Only used if quality = "Custom"
username = "admin"      # Optional
password = "password"   # Optional
```

## Quality Presets

The system provides the following quality presets (a code mapping follows the table):

| Quality | Resolution   | Bitrate      | Framerate    |
|---------|--------------|--------------|--------------|
| Low     | 640x480      | 500 kbps     | 15 fps       |
| Medium  | 1280x720     | 1500 kbps    | 30 fps       |
| High    | 1920x1080    | 3000 kbps    | 30 fps       |
| Custom  | User-defined | User-defined | User-defined |
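In code, resolving a preset to concrete encoder parameters could look like the following sketch; the numbers mirror the table above, and the fallbacks for an incomplete `Custom` configuration are assumptions:

```rust
use meteor_detect::streaming::{RtspConfig, StreamQuality};

/// Resolve a preset to (width, height, bitrate_kbps, fps).
/// A sketch mirroring the preset table; Custom falls back to the
/// Medium values when a field is unset.
fn resolve_quality(cfg: &RtspConfig) -> (u32, u32, u32, u32) {
    match cfg.quality {
        StreamQuality::Low => (640, 480, 500, 15),
        StreamQuality::Medium => (1280, 720, 1500, 30),
        StreamQuality::High => (1920, 1080, 3000, 30),
        StreamQuality::Custom => (
            cfg.custom_width.unwrap_or(1280),
            cfg.custom_height.unwrap_or(720),
            cfg.custom_bitrate.unwrap_or(1500),
            cfg.custom_framerate.unwrap_or(30),
        ),
    }
}
```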
## Client Access

Clients can access the stream using any RTSP-compatible player:

```
# Using VLC
vlc rtsp://[username:password@]hostname:port/meteor

# Using FFmpeg (ffplay)
ffplay rtsp://[username:password@]hostname:port/meteor

# Using GStreamer
gst-launch-1.0 playbin uri=rtsp://[username:password@]hostname:port/meteor
```

## Performance Considerations

The streaming system is designed with performance in mind:

1. **Resource Management**: Frames are only encoded when the RTSP server is active and has clients
2. **Buffer Control**: A limited-size frame buffer prevents memory issues with slow clients
3. **Adaptive Encoding**: Different quality presets allow adaptation to the available bandwidth and CPU
4. **Separate Process**: Running GStreamer as a separate process isolates the encoding load from the main application

For resource-constrained environments (such as a Raspberry Pi), consider:

- Using the Low quality preset
- Reducing the framerate
- Disabling streaming when not needed

## Security Considerations

When deploying the RTSP server:

1. **Authentication**: Enable username/password authentication when exposing the stream to untrusted networks
2. **Firewall Rules**: Restrict access to the RTSP port (default: 8554) to trusted IP addresses
3. **TLS Tunneling**: For additional security, consider tunneling RTSP over TLS using a reverse proxy
4. **Privacy**: Be aware that the stream may include sensitive metadata in watermarks (GPS coordinates, etc.)

## Extending the System

The streaming system can be extended in several ways:

1. **Multiple Streams**: Supporting multiple streams with different qualities
2. **Recording**: Adding direct-to-disk recording capability
3. **Stream Health Monitoring**: Adding diagnostics for stream performance
4. **WebRTC Support**: Adding WebRTC for browser-based viewing
5. **Adaptive Bitrate**: Implementing dynamic quality adjustment based on network conditions

## Troubleshooting

Common issues and solutions:

1. **Stream Not Starting**: Check that GStreamer and its RTSP plugins are installed on the system
2. **No Video**: Verify that the camera is producing frames and that the encoding parameters are compatible
3. **High Latency**: Adjust the buffer size or reduce the quality settings
4. **Connection Refused**: Ensure the port is not blocked by a firewall
5. **Poor Quality**: Try increasing the bitrate or resolution in the configuration

## Conclusion

The RTSP Streaming System provides a robust and configurable way to share live video from the meteor detection system. Its use of an industry-standard protocol ensures compatibility with a wide range of client applications and systems, making it suitable for both amateur and professional astronomical observation networks.

149
docs/watermark_overlay.md
Normal file
@ -0,0 +1,149 @@

# Watermark Overlay System

## Overview

The Watermark Overlay System adds contextual information as visual overlays on video frames. This is particularly useful for scientific observations, placing critical metadata, such as timestamps, geographical coordinates, and environmental conditions, directly on the visual recording.

## Key Features

1. **Multiple Data Sources**: Integrates information from:
   - System timestamp
   - GPS coordinates and camera orientation
   - Environmental sensors (temperature, humidity)
   - Custom text

2. **Flexible Positioning**: Supports placement in:
   - Top-left corner
   - Top-right corner
   - Bottom-left corner
   - Bottom-right corner
   - Custom pixel coordinates

3. **Customizable Appearance**:
   - Configurable font scale and thickness
   - Custom text and background colors
   - Optional background with transparency
   - Adjustable padding

4. **Adaptive Formatting**:
   - Configurable timestamp format
   - Multiple coordinate formats (decimal degrees or DMS)
   - Temperature units (Celsius or Fahrenheit)

## Implementation Details

The watermark system is implemented in `src/overlay/watermark.rs` and consists of:

### WatermarkOptions

Configuration struct that controls all aspects of the watermark:

```rust
pub struct WatermarkOptions {
    pub enabled: bool,
    pub position: WatermarkPosition,
    pub font_scale: f64,
    pub thickness: i32,
    pub color: (u8, u8, u8, u8),
    pub background: bool,
    pub background_color: (u8, u8, u8, u8),
    pub padding: i32,
    pub content: Vec<WatermarkContent>,
    pub time_format: String,
    pub coordinate_format: String,
    pub temperature_format: String,
}
```

### WatermarkPosition

Enum defining where the watermark appears on the frame:

```rust
pub enum WatermarkPosition {
    TopLeft,
    TopRight,
    BottomLeft,
    BottomRight,
    Custom(u32, u32),
}
```

### WatermarkContent

Enum specifying what information to include:

```rust
pub enum WatermarkContent {
    Timestamp,
    GpsCoordinates,
    Environment,
    CameraOrientation,
    Custom(String),
}
```

### The Watermark Type

The `Watermark` struct handles the actual rendering of information onto frames, with methods for:

- Creating watermarks with custom options
- Applying the watermark to video frames
- Formatting GPS coordinates, timestamps, and temperature (see the sketch below)
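As an illustration of the coordinate formatting, converting decimal degrees to a degrees-minutes-seconds string might look like the following; the function name and rounding behavior are assumptions, not the module's actual code:

```rust
/// Format a latitude or longitude in degrees-minutes-seconds,
/// e.g. 52.2297 with ('N', 'S') -> "52°13'46.9\"N". Illustrative only.
fn format_dms(value: f64, hemispheres: (char, char)) -> String {
    let hemi = if value >= 0.0 { hemispheres.0 } else { hemispheres.1 };
    let abs = value.abs();
    let degrees = abs.trunc() as u32;
    let minutes_f = (abs - degrees as f64) * 60.0;
    let minutes = minutes_f.trunc() as u32;
    let seconds = (minutes_f - minutes as f64) * 60.0;
    format!("{}°{:02}'{:04.1}\"{}", degrees, minutes, seconds, hemi)
}
```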
## Integration with Frame Hooks

The watermark system is integrated into the application through the frame hook system (sketched below):

1. A `Watermark` instance is created during application startup
2. It is wrapped in a `BasicFrameHook` and registered with the `HookManager`
3. When frames are processed by the pipeline, the hook manager calls the watermark's `apply` method

This implementation allows for:

- Clean separation of concerns
- Dynamically enabling and disabling the watermark
- Concurrent frame processing alongside other hooks
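A minimal registration sketch, assuming the `BasicFrameHook` and `HookManager` names above; the closure-based wrapping and constructor signatures are illustrative:

```rust
use meteor_detect::overlay::{Watermark, WatermarkOptions};

// Hypothetical startup wiring based on the steps above.
fn register_watermark(hooks: &mut HookManager, options: WatermarkOptions) {
    let watermark = Watermark::new(options);
    // Wrap the watermark so the pipeline invokes `apply` on every frame.
    hooks.register(BasicFrameHook::new(move |frame| watermark.apply(frame)));
}
```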
## Example Configuration

The watermark can be configured in the application's configuration file:

```toml
[watermark]
enabled = true
position = "BottomLeft"
font_scale = 0.6
thickness = 1
color = [255, 255, 255, 255]       # White
background = true
background_color = [0, 0, 0, 128]  # Semi-transparent black
padding = 8
content = ["Timestamp", "GpsCoordinates", "Environment"]
time_format = "%Y-%m-%d %H:%M:%S%.3f"
coordinate_format = "decimal"
temperature_format = "C"
```

## Performance Considerations

The watermark system uses OpenCV for text rendering and is designed to be efficient:

1. When disabled, the watermark has minimal overhead because `apply` returns early
2. Text measurement and position calculations are done once per frame
3. Rendering is optimized for minimal impact on the frame processing pipeline
4. The background rectangle is drawn as a single operation

For extremely resource-constrained systems, consider:

- Using a smaller font scale
- Reducing the number of content items
- Enabling the watermark only when necessary

## Use Cases

1. **Scientific Validation**: Adding timestamps and coordinates to establish the scientific validity of observations
2. **Calibration**: Including environmental data for calibrating optical measurements
3. **Field Work**: Recording location information automatically during mobile observations
4. **Data Processing**: Simplifying post-processing by embedding metadata directly in the frames
5. **Public Outreach**: Including attribution or explanatory information on publicly shared videos

The watermark overlay system ensures that all critical metadata stays permanently associated with the visual observations, enhancing the scientific and practical value of the recorded video.

405
examples/cams_detector_demo.rs
Normal file
@ -0,0 +1,405 @@

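//! Demo that runs the CAMS and/or brightness meteor detectors against a
//! video file or a live camera.
//!
//! Example invocations (file names and device numbers are illustrative):
//!
//!     cargo run --example cams_detector_demo -- --input sample.mp4 --display
//!     cargo run --example cams_detector_demo -- --input 0 --detector both --parallel
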
use anyhow::Result;
use clap::Parser;
use log::{debug, error, info, warn};
use opencv::{core, highgui, imgcodecs, imgproc, prelude::*, videoio};
use std::path::PathBuf;
use std::sync::{Arc, Mutex};
use std::time::Instant;
use tokio::runtime::Runtime;

use meteor_detect::detection::{
    AggregationStrategy, BrightnessDetector, BrightnessDetectorParams,
    CamsDetector, CamsDetectorParams, MeteorDetector,
};

/// Command line arguments for the detector demo
#[derive(Parser, Debug)]
#[clap(author, version, about = "Meteor detection demo")]
struct Args {
    /// Input video file path or camera device number
    #[clap(short, long)]
    input: String,

    /// Output directory for feature images
    #[clap(short, long, default_value = "output")]
    output_dir: PathBuf,

    /// Brightness threshold for meteor detection
    #[clap(long, default_value = "30")]
    brightness_threshold: u8,

    /// Std-to-avg ratio threshold for meteor detection
    #[clap(long, default_value = "1.5")]
    std_ratio_threshold: f32,

    /// Minimum pixel count to trigger detection
    #[clap(long, default_value = "10")]
    min_pixel_count: u32,

    /// Minimum trajectory length
    #[clap(long, default_value = "5")]
    min_trajectory_length: u32,

    /// Save all feature images, not just detections
    #[clap(long)]
    save_all: bool,

    /// Display frames in real time
    #[clap(short, long)]
    display: bool,

    /// Batch size (number of frames to process)
    #[clap(short, long, default_value = "256")]
    batch_size: usize,

    /// Detector type (cams, brightness, both); no short flag, to avoid
    /// clashing with `-d` for --display
    #[clap(long, default_value = "cams")]
    detector: String,

    /// Minimum brightness delta for the brightness detector
    #[clap(long, default_value = "30.0")]
    min_brightness_delta: f32,

    /// Minimum pixel change for the brightness detector
    #[clap(long, default_value = "10")]
    min_pixel_change: u32,

    /// Minimum consecutive frames for the brightness detector
    #[clap(long, default_value = "3")]
    min_frames: u32,

    /// Sensitivity for the brightness detector
    #[clap(long, default_value = "0.7")]
    sensitivity: f32,

    /// Run detection in parallel
    #[clap(long)]
    parallel: bool,

    /// Result aggregation strategy (any, all, majority)
    #[clap(long, default_value = "any")]
    aggregation: String,
}

fn main() -> Result<()> {
    // Set up logging
    env_logger::init();

    // Parse command line arguments
    let args = Args::parse();

    // Create the output directory if it doesn't exist
    std::fs::create_dir_all(&args.output_dir)?;

    // Set up detector parameters
    let cams_params = CamsDetectorParams {
        brightness_threshold: args.brightness_threshold,
        std_to_avg_ratio_threshold: args.std_ratio_threshold,
        min_pixel_count: args.min_pixel_count,
        min_trajectory_length: args.min_trajectory_length,
        save_all_feature_images: args.save_all,
        output_dir: args.output_dir.clone(),
        file_prefix: "meteor".to_string(),
        id: "cams-demo".to_string(),
    };

    let brightness_params = BrightnessDetectorParams {
        min_brightness_delta: args.min_brightness_delta,
        min_pixel_change: args.min_pixel_change,
        min_frames: args.min_frames,
        sensitivity: args.sensitivity,
        id: "brightness-demo".to_string(),
    };

    // Create detectors based on the command line argument
    let mut detectors: Vec<Box<dyn MeteorDetector>> = Vec::new();

    match args.detector.as_str() {
        "cams" => {
            detectors.push(Box::new(CamsDetector::with_params(cams_params)));
            info!("Using CAMS detector");
        },
        "brightness" => {
            detectors.push(Box::new(BrightnessDetector::with_params(brightness_params)));
            info!("Using brightness detector");
        },
        "both" => {
            detectors.push(Box::new(CamsDetector::with_params(cams_params)));
            detectors.push(Box::new(BrightnessDetector::with_params(brightness_params)));
            info!("Using both CAMS and brightness detectors");
        },
        _ => {
            return Err(anyhow::anyhow!("Unknown detector type: {}", args.detector));
        }
    }

    // Choose the result aggregation strategy
    let aggregation_strategy = match args.aggregation.as_str() {
        "any" => AggregationStrategy::Any,
        "all" => AggregationStrategy::All,
        "majority" => AggregationStrategy::Majority,
        _ => AggregationStrategy::Any,
    };

    // Open the video source
    let mut cap = if args.input.chars().all(char::is_numeric) {
        // Input is a camera device number
        let device_id = args.input.parse::<i32>().unwrap_or(0);
        videoio::VideoCapture::new(device_id, videoio::CAP_ANY)?
    } else {
        // Input is a video file
        videoio::VideoCapture::from_file(&args.input, videoio::CAP_ANY)?
    };

    // Check that the video source is opened
    if !cap.is_opened()? {
        return Err(anyhow::anyhow!("Failed to open video source: {}", args.input));
    }

    // Get video properties
    let fps = cap.get(videoio::CAP_PROP_FPS)?;
    let frame_width = cap.get(videoio::CAP_PROP_FRAME_WIDTH)? as i32;
    let frame_height = cap.get(videoio::CAP_PROP_FRAME_HEIGHT)? as i32;
    let frame_count = cap.get(videoio::CAP_PROP_FRAME_COUNT)? as i64;

    info!(
        "Video source: {}x{}, {:.2} fps, {} frames",
        frame_width, frame_height, fps, frame_count
    );

    // Create a window if display is enabled
    if args.display {
        highgui::named_window("Video", highgui::WINDOW_NORMAL)?;
        highgui::resize_window("Video", 800, 600)?;
    }

    // Initialize the frame counter and, for parallel mode, a tokio runtime
    let mut frame_idx: u64 = 0;
    let runtime = if args.parallel {
        Some(Runtime::new()?)
    } else {
        None
    };

    // Start time
    let start_time = Instant::now();

    // Process frames
    let mut frame = core::Mat::default();
    let mut detections = 0;

    // Wrap detectors in Arc<Mutex<..>> so they can be shared with spawned tasks
    let shared_detectors: Vec<Arc<Mutex<Box<dyn MeteorDetector>>>> = detectors
        .into_iter()
        .map(|d| Arc::new(Mutex::new(d)))
        .collect();

    while cap.read(&mut frame)? {
        // Skip empty frames
        if frame.empty() {
            warn!("Empty frame received, skipping");
            continue;
        }

        // Process the frame with the detector(s)
        let results = if args.parallel && runtime.is_some() {
            // Process in parallel using the tokio runtime
            let runtime = runtime.as_ref().unwrap();
            runtime.block_on(async {
                // Spawn a task for each detector
                let mut handles = Vec::with_capacity(shared_detectors.len());

                for detector in &shared_detectors {
                    let frame = frame.clone();
                    let detector = detector.clone();

                    let handle = tokio::spawn(async move {
                        let mut detector = detector.lock().unwrap();
                        match detector.process_frame(&frame, frame_idx) {
                            Ok(result) => Some(result),
                            Err(e) => {
                                error!("Error processing frame with detector {}: {}",
                                       detector.get_id(), e);
                                None
                            }
                        }
                    });

                    handles.push(handle);
                }

                // Wait for all detectors to complete
                let results = futures::future::join_all(handles).await;

                // Collect the successful results
                results.into_iter()
                    .filter_map(|r| r.ok().flatten())
                    .collect::<Vec<_>>()
            })
        } else {
            // Process sequentially
            let mut results = Vec::new();

            for detector in &shared_detectors {
                let mut detector = detector.lock().unwrap();
                match detector.process_frame(&frame, frame_idx) {
                    Ok(result) => {
                        results.push(result);
                    }
                    Err(e) => {
                        error!("Error processing frame with detector {}: {}",
                               detector.get_id(), e);
                    }
                }
            }

            results
        };

        // Aggregate the results based on the chosen strategy
        let detected = match aggregation_strategy {
            AggregationStrategy::Any => results.iter().any(|r| r.detected),
            AggregationStrategy::All => results.iter().all(|r| r.detected) && !results.is_empty(),
            AggregationStrategy::Majority => {
                let count = results.iter().filter(|r| r.detected).count();
                count > results.len() / 2 && !results.is_empty()
            },
            _ => results.iter().any(|r| r.detected),
        };

        // Find the result with the highest confidence
        let result = if detected {
            results.iter()
                .filter(|r| r.detected)
                .max_by(|a, b| a.confidence.partial_cmp(&b.confidence).unwrap())
                .cloned()
        } else {
            results.first().cloned()
        };

        if let Some(result) = result {
            if result.detected {
                detections += 1;
                info!(
                    "Meteor detected at frame {}: confidence={:.2}, pixels={}, detector={}",
                    frame_idx, result.confidence, result.pixel_change,
                    result.detector_id.as_deref().unwrap_or("unknown")
                );

                // Draw a bounding box if we have one
                if let Some(bbox) = result.bounding_box {
                    let rect = core::Rect::new(
                        bbox.0 as i32,
                        bbox.1 as i32,
                        bbox.2 as i32,
                        bbox.3 as i32,
                    );
                    let mut display_frame = frame.clone();
                    imgproc::rectangle(
                        &mut display_frame,
                        rect,
                        core::Scalar::new(0.0, 255.0, 0.0, 0.0),
                        2,
                        imgproc::LINE_8,
                        0,
                    )?;

                    // Save the detection frame
                    let detection_path = args
                        .output_dir
                        .join(format!("detection_{}_frame.jpg", frame_idx));
                    imgcodecs::imwrite(
                        detection_path.to_str().unwrap(),
                        &display_frame,
                        &core::Vector::new(),
                    )?;
                }
            }

            // Log progress periodically
            if frame_idx % 100 == 0 {
                let elapsed = start_time.elapsed();
                let fps = frame_idx as f64 / elapsed.as_secs_f64();
                debug!(
                    "Processed {} frames ({:.2} fps), {} detections",
                    frame_idx,
                    fps,
                    detections,
                );
            }

            // Display the frame if enabled
            if args.display {
                // Draw text with frame information
                let mut display_frame = frame.clone();
                let text = format!(
                    "Frame: {} | Detections: {} | {}",
                    frame_idx,
                    detections,
                    if detected {
                        format!("METEOR DETECTED - Confidence: {:.2}", result.confidence)
                    } else {
                        "No detection".to_string()
                    }
                );
                imgproc::put_text(
                    &mut display_frame,
                    &text,
                    core::Point::new(20, 40),
                    imgproc::FONT_HERSHEY_SIMPLEX,
                    0.7,
                    core::Scalar::new(0.0, 255.0, 0.0, 0.0),
                    2,
                    imgproc::LINE_8,
                    false,
                )?;

                // Show the frame
                highgui::imshow("Video", &display_frame)?;

                // Wait for a key press (30 ms delay for video playback)
                let key = highgui::wait_key(30)?;
                if key > 0 && key != 255 {
                    // Exit on 'q' or ESC
                    if key == 113 || key == 27 {
                        info!("User requested exit");
                        break;
                    }
                }
            }
        }

        frame_idx += 1;
    }

    // Close the video display window if enabled
    if args.display {
        highgui::destroy_all_windows()?;
    }

    // Calculate overall statistics
    let elapsed = start_time.elapsed();
    let overall_fps = frame_idx as f64 / elapsed.as_secs_f64();

    info!(
        "Processing complete: {} frames, {} detections, {:.2} fps average",
        frame_idx, detections, overall_fps
    );

    // Log the detection mode
    info!(
        "Detection mode: {}, {}",
        if args.parallel { "parallel" } else { "sequential" },
        match aggregation_strategy {
            AggregationStrategy::Any => "any detector",
            AggregationStrategy::All => "all detectors",
            AggregationStrategy::Majority => "majority of detectors",
            _ => "custom strategy",
        }
    );

    Ok(())
}

280
src/camera/controller.rs
Normal file
@ -0,0 +1,280 @@

use anyhow::{Context, Result};
use chrono::Utc;
use log::{error, info, warn};
use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::broadcast;
use tokio::time;

use crate::camera::frame_buffer::{Frame, FrameBuffer, SharedFrameBuffer};
use crate::camera::v4l2::V4l2Camera;
use crate::camera::{CameraSettings, MeteorEvent};

/// Camera controller manages camera operations and frame capture
pub struct CameraController {
    /// Camera settings
    settings: CameraSettings,
    /// The V4L2 camera driver
    camera: Option<V4l2Camera>,
    /// Circular buffer for storing recent frames
    frame_buffer: SharedFrameBuffer,
    /// Frame counter
    frame_count: u64,
    /// Whether the camera is currently running
    is_running: bool,
    /// Flag used to signal the background capture task to stop
    stop_flag: Arc<AtomicBool>,
    /// Channel for broadcasting new frames
    frame_tx: broadcast::Sender<Frame>,
    /// Path to save event videos
    events_dir: PathBuf,
}

impl CameraController {
    /// Create a new camera controller with the given configuration
    pub async fn new(_config: &crate::Config) -> Result<Self> {
        // Extract camera settings from the config (placeholder for now)
        let settings = CameraSettings::default();

        // Create a frame buffer with capacity for 10 minutes of video at settings.fps
        let buffer_capacity = (10 * 60 * settings.fps) as usize;
        let frame_buffer = Arc::new(FrameBuffer::new(buffer_capacity));

        // Create the broadcast channel for frames
        let (frame_tx, _) = broadcast::channel(30);

        // Create the events directory if it doesn't exist
        let events_dir = PathBuf::from("events");
        std::fs::create_dir_all(&events_dir)
            .context("Failed to create events directory")?;

        Ok(Self {
            settings,
            camera: None,
            frame_buffer,
            frame_count: 0,
            is_running: false,
            stop_flag: Arc::new(AtomicBool::new(false)),
            frame_tx,
            events_dir,
        })
    }

    /// Initialize the camera with current settings
    pub async fn initialize(&mut self) -> Result<()> {
        // Open the camera
        let mut camera = V4l2Camera::open(&self.settings.device)
            .context("Failed to open camera")?;

        // Configure camera parameters
        camera.set_format(self.settings.resolution)
            .context("Failed to set camera format")?;

        camera.set_fps(self.settings.fps)
            .context("Failed to set camera FPS")?;

        camera.set_exposure(self.settings.exposure)
            .context("Failed to set camera exposure")?;

        camera.set_gain(self.settings.gain)
            .context("Failed to set camera gain")?;

        if self.settings.focus_locked {
            camera.lock_focus_at_infinity()
                .context("Failed to lock focus at infinity")?;
        }

        self.camera = Some(camera);
        info!("Camera initialized successfully");

        Ok(())
    }

    /// Start camera capture in a background task
    pub async fn start_capture(&mut self) -> Result<()> {
        if self.is_running {
            warn!("Camera capture is already running");
            return Ok(());
        }

        let camera = self.camera.as_mut()
            .ok_or_else(|| anyhow::anyhow!("Camera not initialized"))?;

        // Start camera streaming; the stream is moved into the capture task
        let mut stream = camera.start_streaming()
            .context("Failed to start camera streaming")?;

        self.is_running = true;
        self.stop_flag.store(false, Ordering::Relaxed);

        // Clone the values needed by the capture task
        let frame_buffer = self.frame_buffer.clone();
        let frame_tx = self.frame_tx.clone();
        let fps = self.settings.fps;
        let stop_flag = self.stop_flag.clone();
        // The task keeps its own counter; the controller's copy is not updated.
        let mut frame_count = self.frame_count;

        // Start the capture task; it owns the stream and drops it
        // (stopping V4L2 streaming) when asked to stop.
        tokio::spawn(async move {
            let frame_interval = Duration::from_secs_f64(1.0 / fps as f64);
            let mut interval = time::interval(frame_interval);

            info!("Starting camera capture at {} fps", fps);

            loop {
                interval.tick().await;

                if stop_flag.load(Ordering::Relaxed) {
                    info!("Capture task stopping");
                    break;
                }

                match stream.capture_frame() {
                    Ok(mat) => {
                        // Create a new frame with timestamp
                        let frame = Frame::new(mat, Utc::now(), frame_count);
                        frame_count += 1;

                        // Add to the frame buffer
                        if let Err(e) = frame_buffer.push(frame.clone()) {
                            error!("Failed to add frame to buffer: {}", e);
                        }

                        // Broadcast the frame to listeners (ignore "no receivers")
                        let _ = frame_tx.send(frame);
                    },
                    Err(e) => {
                        error!("Failed to capture frame: {}", e);
                        // Small delay to avoid a tight error loop
                        time::sleep(Duration::from_millis(100)).await;
                    }
                }
            }
        });

        info!("Camera capture started");
        Ok(())
    }

    /// Stop camera capture
    pub async fn stop_capture(&mut self) -> Result<()> {
        if !self.is_running {
            warn!("Camera capture is not running");
            return Ok(());
        }

        // Signal the capture task to exit; it drops the stream on its way out
        self.stop_flag.store(true, Ordering::Relaxed);

        if let Some(camera) = &mut self.camera {
            camera.stop_streaming()?;
        }

        self.is_running = false;
        info!("Camera capture stopped");

        Ok(())
    }

    /// Get a subscriber to receive new frames
    pub fn subscribe_to_frames(&self) -> broadcast::Receiver<Frame> {
        self.frame_tx.subscribe()
    }

    /// Get a clone of the frame buffer
    pub fn get_frame_buffer(&self) -> SharedFrameBuffer {
        self.frame_buffer.clone()
    }

    /// Check if the camera is currently running
    pub fn is_running(&self) -> bool {
        self.is_running
    }

    /// Update camera settings
    pub async fn update_settings(&mut self, new_settings: CameraSettings) -> Result<()> {
        // If the camera is running, we need to stop it first
        let was_running = self.is_running();
        if was_running {
            self.stop_capture().await?;
        }

        // Update settings
        self.settings = new_settings;

        // Re-initialize the camera with the new settings
        if let Some(mut camera) = self.camera.take() {
            // Configure camera parameters
            camera.set_format(self.settings.resolution)?;
            camera.set_fps(self.settings.fps)?;
            camera.set_exposure(self.settings.exposure)?;
            camera.set_gain(self.settings.gain)?;

            if self.settings.focus_locked {
                camera.lock_focus_at_infinity()?;
            }

            self.camera = Some(camera);
        } else {
            self.initialize().await?;
        }

        // Restart if it was running
        if was_running {
            self.start_capture().await?;
        }

        info!("Camera settings updated");
        Ok(())
    }

    /// Save a meteor event with video
    pub async fn save_meteor_event(
        &self,
        timestamp: chrono::DateTime<Utc>,
        confidence: f32,
        bounding_box: (u32, u32, u32, u32),
        seconds_before: i64,
        seconds_after: i64,
    ) -> Result<MeteorEvent> {
        // Extract frames from the buffer
        let frames = self.frame_buffer.extract_event_frames(
            timestamp,
            seconds_before,
            seconds_after,
        );

        if frames.is_empty() {
            return Err(anyhow::anyhow!("No frames found for event"));
        }

        // Create a unique ID for the event
        let event_id = uuid::Uuid::new_v4();

        // Create a directory for the event
        let event_dir = self.events_dir.join(event_id.to_string());
        std::fs::create_dir_all(&event_dir)
            .context("Failed to create event directory")?;

        // Save frames to the event directory
        for (i, frame) in frames.iter().enumerate() {
            let frame_path = event_dir.join(format!("frame_{:04}.jpg", i));
            frame.save_to_file(&frame_path)?;
        }

        // Create a video file name
        let video_path = event_dir.join("event.mp4").to_string_lossy().to_string();

        // TODO: Call FFmpeg to convert frames to video
        // This would be done by spawning an external process

        // Create and return the event
        let event = MeteorEvent {
            id: event_id,
            timestamp,
            confidence,
            bounding_box,
            video_path,
        };

        info!("Saved meteor event: {}", event_id);
        Ok(event)
    }
}

140
src/camera/frame_buffer.rs
Normal file
@ -0,0 +1,140 @@

use anyhow::Result;
use chrono::{DateTime, Utc};
use opencv::{core, imgcodecs, prelude::*};
use std::collections::VecDeque;
use std::path::Path;
use std::sync::{Arc, Mutex};

/// A single video frame with timestamp and metadata
#[derive(Clone)]
pub struct Frame {
    /// OpenCV Mat containing the image data
    pub mat: core::Mat,
    /// Timestamp when the frame was captured
    pub timestamp: DateTime<Utc>,
    /// Frame index in the capture sequence
    pub index: u64,
}

impl Frame {
    /// Create a new frame
    pub fn new(mat: core::Mat, timestamp: DateTime<Utc>, index: u64) -> Self {
        Self {
            mat,
            timestamp,
            index,
        }
    }

    /// Save the frame to a file
    pub fn save_to_file<P: AsRef<Path>>(&self, path: P) -> Result<()> {
        imgcodecs::imwrite(
            path.as_ref().to_str().unwrap(),
            &self.mat,
            &core::Vector::new(),
        )?;
        Ok(())
    }
}

/// A circular buffer for storing video frames
pub struct FrameBuffer {
    /// The actual buffer containing frames
    buffer: Mutex<VecDeque<Frame>>,
    /// Maximum capacity of the buffer
    capacity: usize,
}

impl FrameBuffer {
    /// Create a new frame buffer with the specified capacity
    pub fn new(capacity: usize) -> Self {
        Self {
            buffer: Mutex::new(VecDeque::with_capacity(capacity)),
            capacity,
        }
    }

    /// Add a frame to the buffer, removing the oldest frame if full
    pub fn push(&self, frame: Frame) -> Result<()> {
        let mut buffer = self.buffer.lock().unwrap();

        // If the buffer is at capacity, remove the oldest frame
        if buffer.len() >= self.capacity {
            buffer.pop_front();
        }

        // Add the new frame
        buffer.push_back(frame);

        Ok(())
    }

    /// Get a specific frame by index (most recent = 0, older frames have higher indices)
    pub fn get(&self, index: usize) -> Option<Frame> {
        let buffer = self.buffer.lock().unwrap();

        if index >= buffer.len() {
            return None;
        }

        // Convert from reverse index (newest = 0) to actual index
        let actual_index = buffer.len() - 1 - index;
        buffer.get(actual_index).cloned()
    }

    /// Get all frames within a specific time range
    pub fn get_frames_in_range(
        &self,
        start_time: DateTime<Utc>,
        end_time: DateTime<Utc>,
    ) -> Vec<Frame> {
        let buffer = self.buffer.lock().unwrap();

        buffer
            .iter()
            .filter(|frame| {
                frame.timestamp >= start_time && frame.timestamp <= end_time
            })
            .cloned()
            .collect()
    }

    /// Get the number of frames currently in the buffer
    pub fn len(&self) -> usize {
        self.buffer.lock().unwrap().len()
    }

    /// Check if the buffer is empty
    pub fn is_empty(&self) -> bool {
        self.buffer.lock().unwrap().is_empty()
    }

    /// Get the capacity of the buffer
    pub fn capacity(&self) -> usize {
        self.capacity
    }

    /// Clear all frames from the buffer
    pub fn clear(&self) {
        self.buffer.lock().unwrap().clear();
    }

    /// Extract frames around an event for saving.
    /// Returns frames from `seconds_before` to `seconds_after` around the given timestamp.
    pub fn extract_event_frames(
        &self,
        event_time: DateTime<Utc>,
        seconds_before: i64,
        seconds_after: i64,
    ) -> Vec<Frame> {
        use chrono::Duration;

        let start_time = event_time - Duration::seconds(seconds_before);
        let end_time = event_time + Duration::seconds(seconds_after);

        self.get_frames_in_range(start_time, end_time)
    }
}

/// Thread-safe frame buffer that can be shared across threads
pub type SharedFrameBuffer = Arc<FrameBuffer>;

92
src/camera/mod.rs
Normal file
@ -0,0 +1,92 @@

mod controller;
mod v4l2;
mod frame_buffer;

pub use controller::CameraController;
pub use frame_buffer::{Frame, FrameBuffer};

use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};

/// Camera resolution options
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum Resolution {
    /// 1920x1080 (1080p)
    HD1080p,
    /// 1280x720 (720p)
    HD720p,
    /// 640x480 (VGA)
    VGA,
    /// Custom resolution
    Custom {
        width: u32,
        height: u32,
    },
}

impl Resolution {
    /// Get the width and height of the resolution
    pub fn dimensions(&self) -> (u32, u32) {
        match *self {
            Resolution::HD1080p => (1920, 1080),
            Resolution::HD720p => (1280, 720),
            Resolution::VGA => (640, 480),
            Resolution::Custom { width, height } => (width, height),
        }
    }
}

/// Camera exposure mode
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum ExposureMode {
    /// Automatic exposure control
    Auto,
    /// Manual exposure control with exposure time in microseconds
    Manual(u32),
}

/// Configuration parameters for the camera
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CameraSettings {
    /// Camera device path (e.g., /dev/video0)
    pub device: String,
    /// Resolution setting
    pub resolution: Resolution,
    /// Frames per second
    pub fps: u32,
    /// Exposure mode
    pub exposure: ExposureMode,
    /// Gain/ISO setting (0-255)
    pub gain: u8,
    /// Whether to lock focus at infinity
    pub focus_locked: bool,
}

impl Default for CameraSettings {
    fn default() -> Self {
        Self {
            device: "/dev/video0".to_string(),
            resolution: Resolution::HD720p,
            fps: 30,
            exposure: ExposureMode::Auto,
            gain: 128, // Mid-range default
            focus_locked: true,
        }
    }
}

/// Represents a captured meteor event with video and metadata
#[derive(Debug, Clone)]
pub struct MeteorEvent {
    /// Unique identifier for the event
    pub id: uuid::Uuid,
    /// Timestamp when the event was detected
    pub timestamp: DateTime<Utc>,
    /// Confidence score (0.0-1.0)
    pub confidence: f32,
    /// Coordinates in the image frame (top-left x, top-left y, width, height)
    pub bounding_box: (u32, u32, u32, u32),
    /// Path to the saved video clip
    pub video_path: String,
}

287
src/camera/v4l2.rs
Normal file
@ -0,0 +1,287 @@

use anyhow::{anyhow, Context, Result};
use log::{debug, info, warn};
use std::path::Path;
use v4l::buffer::Type;
use v4l::io::traits::CaptureStream;
use v4l::prelude::*;
use v4l::video::Capture;
use v4l::{Format, FourCC};

use opencv::{core, imgproc, prelude::*};

use crate::camera::{ExposureMode, Resolution};

/// V4L2 camera driver for star-light cameras
pub struct V4l2Camera {
    /// The open device handle
    device: Device,
    /// The current camera format
    format: Format,
    /// Whether the camera is currently streaming
    is_streaming: bool,
}

impl V4l2Camera {
    /// Open a camera device by path
    pub fn open<P: AsRef<Path>>(path: P) -> Result<Self> {
        let device = Device::with_path(path.as_ref())
            .context("Failed to open camera device")?;

        info!(
            "Opened camera: {} ({})",
            device.info().card,
            device.info().driver
        );

        // Get the current format
        let format = device
            .format()
            .context("Failed to get camera format")?;

        debug!("Initial camera format: {:?}", format);

        Ok(Self {
            device,
            format,
            is_streaming: false,
        })
    }

    /// Set the camera resolution and pixel format
    pub fn set_format(&mut self, resolution: Resolution) -> Result<()> {
        let (width, height) = resolution.dimensions();

        // Try MJPEG first, then fall back to YUYV
        let formats = [FourCC::new(b"MJPG"), FourCC::new(b"YUYV")];

        let mut success = false;
        let mut last_error = None;

        for &fourcc in &formats {
            let mut format = Format::new(width, height, fourcc);

            match self.device.set_format(&mut format) {
                Ok(_) => {
                    self.format = format;
                    success = true;
                    break;
                }
                Err(e) => {
                    warn!("Failed to set format {:?}: {}", fourcc, e);
                    last_error = Some(e);
                }
            }
        }

        if !success {
            return Err(anyhow!(
                "Failed to set any supported format: {:?}",
                last_error.unwrap()
            ));
        }

        info!(
            "Set camera format: {}×{} {}",
            self.format.width, self.format.height,
            String::from_utf8_lossy(&self.format.fourcc.repr)
        );

        Ok(())
    }

    /// Set the camera frame rate
    pub fn set_fps(&mut self, fps: u32) -> Result<()> {
        let mut params = self
            .device
            .params()
            .context("Failed to get camera parameters")?;

        // The capture interval is the reciprocal of the frame rate
        params.interval = v4l::Fraction::new(1, fps);

        self.device
            .set_params(&params)
            .context("Failed to set frame rate")?;

        info!("Set camera frame rate: {} fps", fps);
        Ok(())
    }

    /// Set camera exposure mode and value
    pub fn set_exposure(&mut self, mode: ExposureMode) -> Result<()> {
        // First, set auto/manual mode
        let ctrl_id = v4l::control::id::EXPOSURE_AUTO;
        let auto_value = match mode {
            ExposureMode::Auto => 3,      // V4L2_EXPOSURE_AUTO
            ExposureMode::Manual(_) => 1, // V4L2_EXPOSURE_MANUAL
        };

        self.device
            .set_control(ctrl_id, auto_value)
            .context("Failed to set exposure mode")?;

        // If manual, set the exposure value
        if let ExposureMode::Manual(exposure_time) = mode {
            // Exposure time in microseconds
            let ctrl_id = v4l::control::id::EXPOSURE_ABSOLUTE;
            self.device
                .set_control(ctrl_id, exposure_time as i64)
                .context("Failed to set exposure time")?;
        }

        info!("Set camera exposure: {:?}", mode);
        Ok(())
    }

    /// Set camera gain (ISO)
    pub fn set_gain(&mut self, gain: u8) -> Result<()> {
        let ctrl_id = v4l::control::id::GAIN;

        self.device
            .set_control(ctrl_id, gain as i64)
            .context("Failed to set gain")?;

        info!("Set camera gain: {}", gain);
        Ok(())
    }

    /// Lock focus at infinity (if supported)
    pub fn lock_focus_at_infinity(&mut self) -> Result<()> {
        // First, set focus mode to manual
        let auto_focus_id = v4l::control::id::FOCUS_AUTO;
        if self.device.set_control(auto_focus_id, 0).is_ok() {
            // Then set focus to infinity (typically the maximum value)
            let focus_id = v4l::control::id::FOCUS_ABSOLUTE;

            // Get the range of the control
            if let Ok(control) = self.device.control(focus_id) {
                let max_focus = control.maximum();

                if self.device.set_control(focus_id, max_focus).is_ok() {
                    info!("Locked focus at infinity (value: {})", max_focus);
                    return Ok(());
                }
            }

            warn!("Failed to set focus to infinity");
        } else {
            warn!("Camera does not support focus control");
        }

        Ok(())
    }

    /// Start streaming from the camera
    pub fn start_streaming(&mut self) -> Result<V4l2CaptureStream> {
        let stream = MmapStream::with_buffers(&self.device, Type::VideoCapture, 4)
            .context("Failed to create capture stream")?;

        self.is_streaming = true;
        info!("Started camera streaming");

        Ok(V4l2CaptureStream {
            stream,
            format: self.format.clone(),
        })
    }

    /// Stop streaming from the camera
    pub fn stop_streaming(&mut self) -> Result<()> {
        // Streaming is stopped when the CaptureStream is dropped
        self.is_streaming = false;
        info!("Stopped camera streaming");
        Ok(())
    }

    /// Check if the camera is currently streaming
    pub fn is_streaming(&self) -> bool {
        self.is_streaming
    }

    /// Get the current format width
    pub fn width(&self) -> u32 {
        self.format.width
    }

    /// Get the current format height
    pub fn height(&self) -> u32 {
        self.format.height
    }

    /// Get the current pixel format
    pub fn pixel_format(&self) -> FourCC {
        self.format.fourcc
    }
}

/// Wrapper around a V4L2 capture stream
pub struct V4l2CaptureStream {
    stream: MmapStream,
    format: Format,
}

impl V4l2CaptureStream {
    /// Capture a single frame from the camera
    pub fn capture_frame(&mut self) -> Result<core::Mat> {
        let buffer = self.stream.next()
            .context("Failed to capture frame")?;

        let width = self.format.width as i32;
        let height = self.format.height as i32;

        // Convert the buffer to an OpenCV Mat based on the pixel format
        let mat = match self.format.fourcc {
            // MJPEG format: decode the JPEG data
            f if f == FourCC::new(b"MJPG") => {
                let buf = core::Vector::from_slice(buffer.data());
                opencv::imgcodecs::imdecode(&buf, opencv::imgcodecs::IMREAD_COLOR)?
            },

            // YUYV format: wrap the raw data and convert to BGR
            f if f == FourCC::new(b"YUYV") => {
                let data = buffer.data();

                // Create a Mat that borrows the YUYV data
                let yuyv = unsafe {
                    let bytes_per_pixel = 2; // YUYV is 2 bytes per pixel
                    let step = width as usize * bytes_per_pixel;
                    core::Mat::new_rows_cols_with_data(
                        height,
                        width,
                        core::CV_8UC2,
                        data.as_ptr() as *mut _,
                        step,
                    )?
                };

                // Convert YUYV to BGR
                let mut bgr = core::Mat::default();
                imgproc::cvt_color(&yuyv, &mut bgr, imgproc::COLOR_YUV2BGR_YUYV, 0)?;
                bgr
            },

            // Unsupported format
            _ => {
                return Err(anyhow!(
                    "Unsupported pixel format: {}",
                    String::from_utf8_lossy(&self.format.fourcc.repr)
                ));
            }
        };

        Ok(mat)
    }
}

impl Drop for V4l2CaptureStream {
    fn drop(&mut self) {
        debug!("V4L2 capture stream dropped");
    }
}

100
src/communication/mod.rs
Normal file
@ -0,0 +1,100 @@

use anyhow::Result;
use log::{info, warn};
use std::sync::{Arc, Mutex};
use tokio::sync::mpsc;

use crate::camera::CameraController;
use crate::camera::MeteorEvent;
use crate::config::Config;
use crate::gps::GpsController;

/// Manager for MQTT and HTTP API communication
pub struct CommunicationManager {
    /// Configuration
    config: Config,
    /// Camera controller reference
    camera_controller: Arc<Mutex<CameraController>>,
    /// GPS controller reference
    gps_controller: Arc<Mutex<GpsController>>,
    /// Channel for receiving meteor events
    event_rx: Option<mpsc::Receiver<MeteorEvent>>,
    /// Whether the manager is running
    is_running: Arc<Mutex<bool>>,
}

impl CommunicationManager {
    /// Create a new communication manager
    pub async fn new(
        config: &Config,
        camera_controller: Arc<Mutex<CameraController>>,
        gps_controller: Arc<Mutex<GpsController>>,
    ) -> Result<Self> {
        info!("Initializing communication manager");

        Ok(Self {
            config: config.clone(),
            camera_controller,
            gps_controller,
            event_rx: None,
            is_running: Arc::new(Mutex::new(false)),
        })
    }

    /// Start the communication services
    pub async fn run(&self) -> Result<()> {
        {
            let mut is_running = self.is_running.lock().unwrap();
            if *is_running {
                warn!("Communication manager is already running");
                return Ok(());
            }
            *is_running = true;
        }

        info!("Starting communication manager");

        // In a real implementation, this would:
        // 1. Connect to the MQTT broker
        // 2. Start the HTTP API server
        // 3. Subscribe to event channels
        // 4. Process and forward events

        Ok(())
    }

    /// Stop the communication services
    pub async fn stop(&self) -> Result<()> {
        {
            let mut is_running = self.is_running.lock().unwrap();
            if !*is_running {
                warn!("Communication manager is not running");
                return Ok(());
            }
            *is_running = false;
        }

        info!("Stopping communication manager");
        Ok(())
    }

    /// Set the event receiver
    pub fn set_event_receiver(&mut self, rx: mpsc::Receiver<MeteorEvent>) {
        self.event_rx = Some(rx);
    }

    /// Send a status update
    pub async fn send_status_update(&self) -> Result<()> {
        // This is a placeholder implementation.
        // In a real system, this would send system status via MQTT.
        info!("Sent status update");
        Ok(())
    }

    /// Send an event notification
    pub async fn send_event_notification(&self, event: &MeteorEvent) -> Result<()> {
        // This is a placeholder implementation.
        // In a real system, this would send event data via MQTT.
        info!("Sent notification for event {}", event.id);
        Ok(())
    }
}

227
src/config.rs
Normal file
@ -0,0 +1,227 @@

use anyhow::{Context, Result};
|
||||
use log::info;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use crate::camera::{CameraSettings, Resolution, ExposureMode};
|
||||
use crate::detection::{DetectorConfig, PipelineConfig};
|
||||
use crate::gps::{GpsConfig, CameraOrientation};
|
||||
use crate::overlay::WatermarkOptions;
|
||||
use crate::sensors::SensorConfig;
|
||||
use crate::streaming::RtspConfig;
|
||||
|
||||
/// Configuration for data storage
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct StorageConfig {
|
||||
/// Directory for storing raw video data
|
||||
pub raw_video_dir: PathBuf,
|
||||
/// Directory for storing event video clips
|
||||
pub event_video_dir: PathBuf,
|
||||
/// Maximum disk space to use for storage (in MB)
|
||||
pub max_disk_usage_mb: u64,
|
||||
/// Number of days to keep event data
|
||||
pub event_retention_days: u32,
|
||||
/// Whether to compress video files
|
||||
pub compress_video: bool,
|
||||
}
|
||||
|
||||
impl Default for StorageConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
raw_video_dir: PathBuf::from("data/raw"),
|
||||
event_video_dir: PathBuf::from("data/events"),
|
||||
max_disk_usage_mb: 10000, // 10 GB
|
||||
event_retention_days: 30,
|
||||
compress_video: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Legacy configuration for event detection (backward compatibility)
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct DetectionConfig {
|
||||
/// Minimum brightness change to trigger detection
|
||||
pub min_brightness_delta: f32,
|
||||
/// Minimum number of pixels changed to trigger detection
|
||||
pub min_pixel_change: u32,
|
||||
/// Minimum number of consecutive frames to confirm event
|
||||
pub min_frames: u32,
|
||||
/// Number of seconds to save before/after event
|
||||
pub event_buffer_seconds: u32,
|
||||
/// Detection sensitivity (0.0-1.0)
|
||||
pub sensitivity: f32,
|
||||
/// Detection pipeline configuration
|
||||
#[serde(default)]
|
||||
pub pipeline: Option<PipelineConfig>,
|
||||
}
|
||||
|
||||
impl Default for DetectionConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
min_brightness_delta: 30.0,
|
||||
min_pixel_change: 10,
|
||||
min_frames: 3,
|
||||
event_buffer_seconds: 10,
|
||||
sensitivity: 0.7,
|
||||
pipeline: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Configuration for communication
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct CommunicationConfig {
|
||||
/// MQTT broker URL
|
||||
pub mqtt_broker: String,
|
||||
/// MQTT client ID
|
||||
pub mqtt_client_id: String,
|
||||
/// MQTT username
|
||||
pub mqtt_username: Option<String>,
|
||||
/// MQTT password
|
||||
pub mqtt_password: Option<String>,
|
||||
/// Topic for event notifications
|
||||
pub event_topic: String,
|
||||
/// Topic for system status
|
||||
pub status_topic: String,
|
||||
/// HTTP API port
|
||||
pub api_port: u16,
|
||||
/// Whether to enable SSL for HTTP API
|
||||
pub api_use_ssl: bool,
|
||||
/// Path to SSL certificate (if SSL enabled)
|
||||
pub api_cert_path: Option<PathBuf>,
|
||||
/// Path to SSL key (if SSL enabled)
|
||||
pub api_key_path: Option<PathBuf>,
|
||||
}
|
||||
|
||||
impl Default for CommunicationConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
mqtt_broker: "mqtt://localhost:1883".to_string(),
|
||||
mqtt_client_id: format!("meteor-detector-{}", uuid::Uuid::new_v4()),
|
||||
mqtt_username: None,
|
||||
mqtt_password: None,
|
||||
event_topic: "meteor/events".to_string(),
|
||||
status_topic: "meteor/status".to_string(),
|
||||
api_port: 8080,
|
||||
api_use_ssl: false,
|
||||
api_cert_path: None,
|
||||
api_key_path: None,
|
||||
}
|
||||
}
|
||||
}

/// Main application configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    /// Unique ID for this meteor detector
    pub device_id: String,
    /// Camera configuration
    pub camera: CameraSettings,
    /// GPS configuration
    pub gps: GpsConfig,
    /// Sensor configuration
    pub sensors: SensorConfig,
    /// Storage configuration
    pub storage: StorageConfig,
    /// Detection configuration
    pub detection: DetectionConfig,
    /// Communication configuration
    pub communication: CommunicationConfig,
    /// Watermark configuration
    #[serde(default)]
    pub watermark: WatermarkOptions,
    /// RTSP streaming configuration
    #[serde(default)]
    pub rtsp: RtspConfig,
    /// Logging level
    pub log_level: String,
}

impl Default for Config {
    fn default() -> Self {
        Self {
            device_id: uuid::Uuid::new_v4().to_string(),
            camera: CameraSettings {
                device: "/dev/video0".to_string(),
                resolution: Resolution::HD720p,
                fps: 30,
                exposure: ExposureMode::Auto,
                gain: 128,
                focus_locked: true,
            },
            gps: GpsConfig::default(),
            sensors: SensorConfig::default(),
            storage: StorageConfig::default(),
            detection: DetectionConfig::default(),
            communication: CommunicationConfig::default(),
            watermark: WatermarkOptions::default(),
            rtsp: RtspConfig::default(),
            log_level: "info".to_string(),
        }
    }
}

impl Config {
    /// Load configuration from file
    pub fn load<P: AsRef<Path>>(path: P) -> Result<Self> {
        let config_str = fs::read_to_string(&path)
            .with_context(|| format!("Failed to read config file {:?}", path.as_ref()))?;

        let config: Config = toml::from_str(&config_str)
            .context("Failed to parse config file")?;

        info!("Loaded configuration from {:?}", path.as_ref());
        Ok(config)
    }

    /// Save configuration to file
    pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<()> {
        let config_str = toml::to_string_pretty(self)
            .context("Failed to serialize config")?;

        fs::write(&path, config_str)
            .with_context(|| format!("Failed to write config to {:?}", path.as_ref()))?;

        info!("Saved configuration to {:?}", path.as_ref());
        Ok(())
    }

    /// Create a default configuration file if it doesn't exist
    pub fn create_default<P: AsRef<Path>>(path: P) -> Result<()> {
        if path.as_ref().exists() {
            return Ok(());
        }

        // Create parent directories if they don't exist
        if let Some(parent) = path.as_ref().parent() {
            fs::create_dir_all(parent)
                .with_context(|| format!("Failed to create directory {:?}", parent))?;
        }

        let config = Config::default();
        config.save(path)?;

        Ok(())
    }

    /// Get the default configuration file path
    pub fn default_path() -> PathBuf {
        if let Some(config_dir) = dirs::config_dir() {
            config_dir.join("meteor_detect").join("config.toml")
        } else {
            PathBuf::from("config.toml")
        }
    }
}

/// Load the configuration, creating a default if it doesn't exist
pub fn load_config() -> Result<Config> {
    let config_path = Config::default_path();

    // Create a default config if it doesn't exist
    Config::create_default(&config_path)?;

    // Load the config
    Config::load(config_path)
}
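
// A minimal sketch of the expected load/save round-trip, assuming the TOML
// serialization used by `load` and `save` above; it stays in memory rather
// than touching the filesystem.
#[cfg(test)]
mod config_io_tests {
    use super::*;

    #[test]
    fn config_round_trips_through_toml() {
        let original = Config::default();
        let toml_str = toml::to_string_pretty(&original).expect("serialize");
        let parsed: Config = toml::from_str(&toml_str).expect("parse");
        // The device ID is randomly generated, so it should survive intact.
        assert_eq!(parsed.device_id, original.device_id);
        assert_eq!(parsed.log_level, "info");
    }
}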

275
src/detection/brightness_detector.rs
Normal file
@ -0,0 +1,275 @@
use anyhow::Result;
use opencv::{core, imgproc, prelude::*};
use serde::{Deserialize, Serialize};
use uuid::Uuid;

use crate::config::Config;
use crate::detection::{DetectionResult, DetectorConfig, MeteorDetector};

/// Parameters for the brightness-based detector
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BrightnessDetectorParams {
    /// Minimum brightness change to trigger detection
    pub min_brightness_delta: f32,
    /// Minimum number of pixels changed to trigger detection
    pub min_pixel_change: u32,
    /// Minimum number of consecutive frames to confirm an event
    pub min_frames: u32,
    /// Detection sensitivity (0.0-1.0)
    pub sensitivity: f32,
    /// Unique ID for this detector instance
    // `Uuid::new_v4` returns a `Uuid`, not a `String`, so a string-returning
    // helper is used as the serde default to match the field type.
    #[serde(default = "default_brightness_id")]
    pub id: String,
}

fn default_brightness_id() -> String {
    format!("brightness-{}", Uuid::new_v4())
}

impl Default for BrightnessDetectorParams {
    fn default() -> Self {
        Self {
            min_brightness_delta: 30.0,
            min_pixel_change: 10,
            min_frames: 3,
            sensitivity: 0.7,
            id: default_brightness_id(),
        }
    }
}
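
// Sketch of how the defaults above gate a detection (values from `Default`,
// arithmetic mirroring `process_frame` below): the brightness delta must
// exceed min_brightness_delta * sensitivity = 30.0 * 0.7 = 21.0 gray levels,
// with more than 10 changed contour pixels, on at least 3 consecutive frames.
#[cfg(test)]
mod brightness_threshold_tests {
    use super::*;

    #[test]
    fn default_params_gate_as_documented() {
        let p = BrightnessDetectorParams::default();
        let effective_delta = p.min_brightness_delta * p.sensitivity;
        assert!((effective_delta - 21.0).abs() < 1e-4);
        // A 25-level jump over 12 pixels would pass the per-frame check.
        assert!(25.0 > effective_delta && 12 > p.min_pixel_change);
    }
}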

/// Background-subtraction based meteor detector
pub struct BrightnessDetector {
    /// Background model
    background: Option<core::Mat>,
    /// Previous frame for motion detection
    prev_frame: Option<core::Mat>,
    /// Detection parameters
    params: BrightnessDetectorParams,
    /// Consecutive detections counter
    detection_counter: u32,
    /// Last detection result
    last_result: DetectionResult,
}

impl BrightnessDetector {
    /// Create a new brightness detector with default parameters
    pub fn new() -> Self {
        Self::with_params(BrightnessDetectorParams::default())
    }

    /// Create a new brightness detector with custom parameters
    pub fn with_params(params: BrightnessDetectorParams) -> Self {
        Self {
            background: None,
            prev_frame: None,
            params,
            detection_counter: 0,
            last_result: DetectionResult::default(),
        }
    }

    /// Create a new brightness detector from configuration
    pub fn from_config(config: &Config) -> Self {
        let params = BrightnessDetectorParams {
            min_brightness_delta: config.detection.min_brightness_delta,
            min_pixel_change: config.detection.min_pixel_change,
            min_frames: config.detection.min_frames,
            sensitivity: config.detection.sensitivity,
            id: default_brightness_id(),
        };

        Self::with_params(params)
    }

    /// Update the background model with a new frame
    fn update_background(&mut self, frame: &core::Mat) -> Result<()> {
        // Convert the frame to grayscale
        let mut gray = core::Mat::default();
        imgproc::cvt_color(frame, &mut gray, imgproc::COLOR_BGR2GRAY, 0)?;

        match &mut self.background {
            Some(bg) => {
                // Gradually update the background model (running average).
                // The Rust bindings cannot borrow `bg` as both input and
                // output, so blend into a temporary and store it back.
                let alpha = 0.05; // Update rate
                let mut updated = core::Mat::default();
                core::add_weighted(bg, 1.0 - alpha, &gray, alpha, 0.0, &mut updated, -1)?;
                *bg = updated;
            },
            None => {
                // Initialize the background model
                self.background = Some(gray.clone());
            }
        }

        Ok(())
    }

    /// Calculate the absolute difference between the current frame and the
    /// background (also updates the stored previous frame, hence `&mut self`)
    fn compute_difference(&mut self, frame: &core::Mat) -> Result<core::Mat> {
        // Convert the frame to grayscale
        let mut gray = core::Mat::default();
        imgproc::cvt_color(frame, &mut gray, imgproc::COLOR_BGR2GRAY, 0)?;

        // Calculate the absolute difference from the background
        let mut diff = core::Mat::default();
        if let Some(bg) = &self.background {
            core::absdiff(bg, &gray, &mut diff)?;
        } else if let Some(prev) = &self.prev_frame {
            // If there is no background, use the previous frame if available
            core::absdiff(prev, &gray, &mut diff)?;
        } else {
            // No previous frame either; store this one and return it as-is
            self.prev_frame = Some(gray.clone());
            return Ok(gray);
        }

        // Update the previous frame
        self.prev_frame = Some(gray);

        // Apply a threshold to highlight significant changes
        let mut thresholded = core::Mat::default();
        imgproc::threshold(&diff, &mut thresholded, 25.0, 255.0, imgproc::THRESH_BINARY)?;

        Ok(thresholded)
    }

    /// Find contours in the thresholded difference image
    fn find_meteor_candidates(&self, diff: &core::Mat) -> Result<Vec<core::Vector<core::Point>>> {
        let mut contours = core::Vector::<core::Vector<core::Point>>::new();

        // Find contours in the thresholded image (the hierarchy is not needed)
        imgproc::find_contours(
            diff,
            &mut contours,
            imgproc::RETR_EXTERNAL,
            imgproc::CHAIN_APPROX_SIMPLE,
            core::Point::new(0, 0),
        )?;

        // Convert to Vec for easier processing
        Ok(contours.to_vec())
    }

    /// Calculate the mean brightness of the frame
    fn calculate_brightness(&self, frame: &core::Mat) -> Result<f32> {
        // Convert to grayscale
        let mut gray = core::Mat::default();
        imgproc::cvt_color(frame, &mut gray, imgproc::COLOR_BGR2GRAY, 0)?;

        // Calculate the mean brightness
        let mean = core::mean(&gray, &core::no_array())?;
        Ok(mean[0] as f32)
    }
}

impl MeteorDetector for BrightnessDetector {
    fn process_frame(&mut self, frame: &core::Mat, frame_index: u64) -> Result<DetectionResult> {
        // Skip the first few frames to initialize the background
        if frame_index < 10 {
            self.update_background(frame)?;
            return Ok(DetectionResult::default());
        }

        // Measure the previous frame's brightness before `compute_difference`
        // replaces it with the current frame. The stored frame is already
        // grayscale, so its mean can be taken directly.
        let prev_brightness = match &self.prev_frame {
            Some(prev) => core::mean(prev, &core::no_array())?[0] as f32,
            None => self.calculate_brightness(frame)?,
        };

        // Compute the frame difference
        let diff = self.compute_difference(frame)?;

        // Find potential meteor candidates
        let contours = self.find_meteor_candidates(&diff)?;

        // Calculate the current frame's brightness
        let brightness = self.calculate_brightness(frame)?;

        // Calculate metrics
        let brightness_delta = (brightness - prev_brightness).abs();
        let pixel_change: u32 = contours.iter()
            .map(|c| c.len() as u32)
            .sum();

        // Check whether we have a potential meteor
        let potential_detection =
            brightness_delta > self.params.min_brightness_delta * self.params.sensitivity &&
            pixel_change > self.params.min_pixel_change;

        // Update the detection counter
        if potential_detection {
            self.detection_counter += 1;
        } else {
            self.detection_counter = 0;
        }

        // Check whether we have enough consecutive detections
        let detected = self.detection_counter >= self.params.min_frames;

        // If detected, compute the bounding box and confidence
        let (bounding_box, confidence, trajectory) = if detected && !contours.is_empty() {
            // Find the largest contour (potentially the meteor)
            let largest_contour = contours.iter()
                .max_by_key(|c| c.len())
                .unwrap();

            // Compute the bounding rectangle
            let rect = imgproc::bounding_rect(largest_contour)?;

            // Calculate a simple confidence score based on brightness delta and pixel change
            let conf = (brightness_delta / (self.params.min_brightness_delta * 2.0))
                .min(1.0)
                * (pixel_change as f32 / (self.params.min_pixel_change as f32 * 2.0))
                    .min(1.0);

            // Full trajectories would need tracking across frames; this
            // simplified approach just records the center of the bounding box.
            let center_x = (rect.x + rect.width / 2) as u32;
            let center_y = (rect.y + rect.height / 2) as u32;
            let trajectory = vec![(center_x, center_y)];

            (
                Some((rect.x as u32, rect.y as u32, rect.width as u32, rect.height as u32)),
                conf,
                trajectory,
            )
        } else {
            (None, 0.0, Vec::new())
        };

        // If this is not a strong detection, slowly update the background model
        if !detected {
            self.update_background(frame)?;
        }

        // Create the detection result with detector info
        let result = DetectionResult {
            detected,
            confidence,
            bounding_box,
            brightness_delta,
            pixel_change,
            trajectory,
            detector_id: Some(self.params.id.clone()),
            timestamp: None, // Will be set by the detection pipeline
        };

        self.last_result = result.clone();
        Ok(result)
    }

    fn reset(&mut self) {
        self.background = None;
        self.prev_frame = None;
        self.detection_counter = 0;
        self.last_result = DetectionResult::default();
    }

    fn get_config(&self) -> DetectorConfig {
        DetectorConfig::Brightness(self.params.clone())
    }

    fn get_id(&self) -> &str {
        &self.params.id
    }
}
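
// A small usage sketch for the detector above: construct, confirm the ID
// scheme, and reset. Frame processing itself needs real `Mat` data, so it
// is not exercised here.
#[cfg(test)]
mod brightness_detector_tests {
    use super::*;

    #[test]
    fn new_detector_has_brightness_id_and_resets_cleanly() {
        let mut detector = BrightnessDetector::new();
        assert!(detector.get_id().starts_with("brightness-"));
        detector.reset();
        assert_eq!(detector.detection_counter, 0);
    }
}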

365
src/detection/cams_detector.rs
Normal file
@ -0,0 +1,365 @@
use anyhow::Result;
use chrono::Utc;
use log::{debug, error, info};
use opencv::{core, imgproc, prelude::*};
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use std::time::{Duration, Instant};
use uuid::Uuid;

use crate::camera::frame_buffer::Frame;
use crate::config::Config;
use crate::detection::{
    DetectionResult, DetectorConfig, FeatureImages, FrameStacker, MeteorDetector, SharedFrameStacker,
};

/// Detector parameters for the CAMS FTP detector
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CamsDetectorParams {
    /// Brightness threshold for meteor detection in the maxpixel image
    pub brightness_threshold: u8,
    /// Minimum ratio of stdpixel to avepixel for meteor detection
    pub std_to_avg_ratio_threshold: f32,
    /// Minimum number of pixels that must exceed the thresholds
    pub min_pixel_count: u32,
    /// Minimum trajectory length (pixels) to be considered a meteor
    pub min_trajectory_length: u32,
    /// Whether to save feature images for all batches (not just detections)
    pub save_all_feature_images: bool,
    /// Directory to save feature images
    pub output_dir: PathBuf,
    /// Prefix for saved files
    pub file_prefix: String,
    /// Unique ID for this detector instance
    // As in the brightness detector, `Uuid::new_v4` returns a `Uuid`, so a
    // string-returning helper is used as the serde default.
    #[serde(default = "default_cams_id")]
    pub id: String,
}

fn default_cams_id() -> String {
    format!("cams-{}", Uuid::new_v4())
}

impl Default for CamsDetectorParams {
    fn default() -> Self {
        Self {
            brightness_threshold: 30,        // Minimum brightness in maxpixel
            std_to_avg_ratio_threshold: 1.5, // stdpixel should be higher than avepixel
            min_pixel_count: 10,             // Minimum number of pixels to trigger detection
            min_trajectory_length: 5,        // Minimum trajectory length
            save_all_feature_images: false,  // Only save on detection by default
            output_dir: PathBuf::from("output"),
            file_prefix: "meteor".to_string(),
            id: default_cams_id(),
        }
    }
}
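
// Sketch of the per-pixel CAMS criterion used in `analyze_features` below,
// pulled out as a pure function so the arithmetic is easy to check: a pixel
// counts toward a meteor when its maxpixel value reaches the brightness
// threshold and its stdpixel/avepixel ratio reaches the ratio threshold.
#[cfg(test)]
mod cams_criterion_tests {
    use super::*;

    fn is_candidate(params: &CamsDetectorParams, max_val: u8, avg_val: u8, std_val: u8) -> bool {
        if avg_val == 0 {
            return false; // Avoid division by zero, as in the detector.
        }
        let ratio = std_val as f32 / avg_val as f32;
        max_val >= params.brightness_threshold && ratio >= params.std_to_avg_ratio_threshold
    }

    #[test]
    fn bright_high_variance_pixel_passes_defaults() {
        let p = CamsDetectorParams::default();
        // max 80, avg 20, std 40 -> ratio 2.0 >= 1.5 and 80 >= 30.
        assert!(is_candidate(&p, 80, 20, 40));
        // Bright but steady sky background: ratio 0.5 fails.
        assert!(!is_candidate(&p, 80, 40, 20));
    }
}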

/// CAMS FTP format based meteor detector.
/// Uses frame stacking to generate feature images (maxpixel, avepixel,
/// stdpixel, maxframe) and analyzes these images to detect meteors.
pub struct CamsDetector {
    /// Frame stacker for generating feature images
    stacker: SharedFrameStacker,
    /// Last processed feature images
    last_features: Option<FeatureImages>,
    /// Detection parameters
    params: CamsDetectorParams,
    /// Last detection result
    last_result: DetectionResult,
    /// Last batch processing time
    last_processing_time: Duration,
    /// Whether to force processing on the next frame
    force_process: bool,
}

impl CamsDetector {
    /// Create a new CAMS detector with default parameters
    pub fn new() -> Self {
        Self::with_params(CamsDetectorParams::default())
    }

    /// Create a new CAMS detector with the given parameters
    pub fn with_params(params: CamsDetectorParams) -> Self {
        // Create the output directory if it doesn't exist
        if !params.output_dir.exists() {
            if let Err(e) = std::fs::create_dir_all(&params.output_dir) {
                error!("Failed to create output directory {:?}: {}", params.output_dir, e);
            }
        }

        Self {
            stacker: crate::detection::new_shared_stacker(),
            last_features: None,
            params,
            last_result: DetectionResult::default(),
            last_processing_time: Duration::default(),
            force_process: false,
        }
    }

    /// Create a new CAMS detector with configuration from Config
    pub fn from_config(_config: &Config) -> Self {
        // Extracting detector parameters from the config is a placeholder for now
        let params = CamsDetectorParams::default();
        Self::with_params(params)
    }

    /// Set whether to force processing on the next frame
    pub fn set_force_process(&mut self, force: bool) {
        self.force_process = force;
    }

    /// Get the frame count in the current batch
    pub fn frame_count(&self) -> usize {
        self.stacker.lock().unwrap().frame_count()
    }

    /// Get the batch size
    pub fn batch_size(&self) -> usize {
        self.stacker.lock().unwrap().batch_size()
    }

    /// Get the last processing time
    pub fn last_processing_time(&self) -> Duration {
        self.last_processing_time
    }

    /// Set detector parameters
    pub fn set_params(&mut self, params: CamsDetectorParams) {
        self.params = params;
    }

    /// Analyze feature images to detect meteors
    fn analyze_features(&self, features: &FeatureImages) -> Result<DetectionResult> {
        // Apply a threshold to the maxpixel image to find bright areas
        let mut thresholded = core::Mat::default();
        imgproc::threshold(
            &features.maxpixel,
            &mut thresholded,
            self.params.brightness_threshold as f64,
            255.0,
            imgproc::THRESH_BINARY,
        )?;

        // Find contours in the thresholded image (the hierarchy is not needed)
        let mut contours = core::Vector::<core::Vector<core::Point>>::new();
        imgproc::find_contours(
            &thresholded,
            &mut contours,
            imgproc::RETR_EXTERNAL,
            imgproc::CHAIN_APPROX_SIMPLE,
            core::Point::new(0, 0),
        )?;

        // Convert to Vec for easier processing
        let contours_vec = contours.to_vec();

        // If no contours were found, there is no meteor
        if contours_vec.is_empty() {
            return Ok(DetectionResult::default());
        }

        // Closure to check whether a pixel is likely part of a meteor
        let is_meteor_pixel = |x: i32, y: i32| -> Result<bool> {
            // Get values from the feature images
            let max_val = *features.maxpixel.at_2d::<u8>(y, x)?;
            let avg_val = *features.avepixel.at_2d::<u8>(y, x)?;

            // Avoid division by zero
            if avg_val == 0 {
                return Ok(false);
            }

            let std_val = *features.stdpixel.at_2d::<u8>(y, x)?;
            let std_to_avg_ratio = std_val as f32 / avg_val as f32;

            // Check the thresholds
            Ok(max_val >= self.params.brightness_threshold &&
                std_to_avg_ratio >= self.params.std_to_avg_ratio_threshold)
        };

        // Count the pixels that meet the criteria
        let mut meteor_pixel_count = 0;
        let mut meteor_points = Vec::new();

        // Check each contour
        for contour in contours_vec {
            for point in contour {
                if is_meteor_pixel(point.x, point.y)? {
                    meteor_pixel_count += 1;
                    meteor_points.push((point.x as u32, point.y as u32));
                }
            }
        }

        // Check whether enough pixels meet the criteria
        let detected = meteor_pixel_count >= self.params.min_pixel_count;

        // Calculate the trajectory length (kept squared here; the square
        // root is taken when the confidence is computed)
        let trajectory_length = if meteor_points.len() >= 2 {
            let mut sorted_points = meteor_points.clone();

            // Sort points by x coordinate (for a simple linear trajectory)
            sorted_points.sort_by_key(|p| p.0);

            // Squared Euclidean distance between the first and last point
            let first = sorted_points.first().unwrap();
            let last = sorted_points.last().unwrap();

            let dx = first.0 as i32 - last.0 as i32;
            let dy = first.1 as i32 - last.1 as i32;

            (dx * dx + dy * dy) as f32
        } else {
            0.0
        };

        // Calculate the bounding box of the meteor pixels
        let bounding_box = if !meteor_points.is_empty() {
            let min_x = meteor_points.iter().map(|p| p.0).min().unwrap();
            let max_x = meteor_points.iter().map(|p| p.0).max().unwrap();
            let min_y = meteor_points.iter().map(|p| p.1).min().unwrap();
            let max_y = meteor_points.iter().map(|p| p.1).max().unwrap();

            Some((min_x, min_y, max_x - min_x, max_y - min_y))
        } else {
            None
        };

        // Calculate confidence based on pixel count and trajectory length
        let confidence = if detected {
            let pixel_confidence = (meteor_pixel_count as f32 / self.params.min_pixel_count as f32)
                .min(1.0);
            let trajectory_confidence = (trajectory_length.sqrt() / self.params.min_trajectory_length as f32)
                .min(1.0);

            // Combine the confidence metrics
            (pixel_confidence + trajectory_confidence) / 2.0
        } else {
            0.0
        };

        let result = DetectionResult {
            detected,
            confidence,
            bounding_box,
            brightness_delta: meteor_pixel_count as f32,
            pixel_change: meteor_pixel_count,
            trajectory: meteor_points,
            detector_id: Some(self.params.id.clone()),
            timestamp: Some(Utc::now()),
        };

        Ok(result)
    }

    /// Process the batch and save feature images if a detection occurs
    fn process_and_save(&mut self) -> Result<Option<DetectionResult>> {
        // Process the batch, releasing the stacker lock as soon as possible
        let features = {
            let mut stacker = self.stacker.lock().unwrap();
            match stacker.process_batch()? {
                Some(f) => f,
                None => return Ok(None),
            }
        };

        // Analyze the features for meteor detection
        let result = self.analyze_features(&features)?;

        // Save feature images if a meteor was detected or save_all_feature_images is set
        if result.detected || self.params.save_all_feature_images {
            // Create the output directory if it doesn't exist
            std::fs::create_dir_all(&self.params.output_dir)?;

            // Create a prefix that includes the detection status
            let full_prefix = if result.detected {
                format!("{}_detected_{:.0}pct",
                    self.params.file_prefix,
                    result.confidence * 100.0)
            } else {
                format!("{}_batch", self.params.file_prefix)
            };

            // Save to the output directory with the prefix
            let output_path = self.params.output_dir.join(full_prefix);
            features.save_to_files(&output_path.to_string_lossy())?;

            if result.detected {
                info!("Meteor detected with confidence {:.2}! Feature images saved to {:?}",
                    result.confidence, output_path);
            } else if self.params.save_all_feature_images {
                debug!("Feature images saved to {:?}", output_path);
            }
        }

        // Store the features
        self.last_features = Some(features);

        Ok(Some(result))
    }
}

impl MeteorDetector for CamsDetector {
    fn get_config(&self) -> DetectorConfig {
        DetectorConfig::Cams(self.params.clone())
    }

    fn get_id(&self) -> &str {
        &self.params.id
    }

    fn process_frame(&mut self, frame: &core::Mat, frame_index: u64) -> Result<DetectionResult> {
        let start_time = Instant::now();

        // Create a Frame struct from the OpenCV Mat
        let frame = Frame::new(frame.clone(), Utc::now(), frame_index);

        // Add the frame to the stacker
        let batch_complete = {
            let mut stacker = self.stacker.lock().unwrap();
            stacker.push_frame(frame)
        };

        // Process the batch if it is complete or processing is forced
        let process = batch_complete || self.force_process;
        self.force_process = false;

        let result = if process {
            match self.process_and_save() {
                Ok(Some(res)) => {
                    self.last_result = res.clone();
                    res
                },
                Ok(None) => self.last_result.clone(),
                Err(e) => {
                    error!("Error processing frame batch: {}", e);
                    self.last_result.clone()
                }
            }
        } else {
            self.last_result.clone()
        };

        // Record the processing time
        self.last_processing_time = start_time.elapsed();

        Ok(result)
    }

    fn reset(&mut self) {
        // Reset the frame stacker
        self.stacker.lock().unwrap().reset();

        // Reset state
        self.last_features = None;
        self.last_result = DetectionResult::default();
        self.last_processing_time = Duration::default();
        self.force_process = false;
    }
}
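
// Worked example of the confidence blend in `analyze_features`: the pixel
// and trajectory terms are each capped at 1.0 and then averaged. With the
// defaults (min_pixel_count = 10, min_trajectory_length = 5), 15 candidate
// pixels cap at 1.0 and a 3-pixel trajectory scores 3/5 = 0.6, giving 0.8.
#[cfg(test)]
mod cams_confidence_tests {
    use super::*;

    #[test]
    fn confidence_averages_capped_terms() {
        let p = CamsDetectorParams::default();
        let pixel_count = 15u32;
        let trajectory_length_sq = 9.0f32; // squared distance, as stored

        let pixel_conf = (pixel_count as f32 / p.min_pixel_count as f32).min(1.0);
        let traj_conf = (trajectory_length_sq.sqrt() / p.min_trajectory_length as f32).min(1.0);
        let confidence = (pixel_conf + traj_conf) / 2.0;

        assert!((confidence - 0.8).abs() < 1e-6);
    }
}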

373
src/detection/frame_stacker.rs
Normal file
@ -0,0 +1,373 @@
use anyhow::{Context, Result};
use chrono::{DateTime, Utc};
use log::{debug, info, warn};
use opencv::{core, imgproc, prelude::*};
use serde::{Deserialize, Serialize};
use std::collections::VecDeque;
use std::path::PathBuf;
use std::sync::{Arc, Mutex};

use crate::camera::frame_buffer::Frame;

/// Configuration for the frame stacker
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FrameStackerConfig {
    /// Number of frames to stack (typically 256 for the CAMS format)
    pub frames_per_stack: usize,
    /// Whether to save stacked frames to disk
    pub save_stacked_frames: bool,
    /// Directory to save stacked frames
    pub output_directory: PathBuf,
    /// Write FITS format instead of PNG
    pub write_fits: bool,
    /// Maximum pixel value
    pub max_pixel_value: u8,
}

impl Default for FrameStackerConfig {
    fn default() -> Self {
        Self {
            frames_per_stack: 256,
            save_stacked_frames: true,
            output_directory: PathBuf::from("data/stacked"),
            write_fits: false,
            max_pixel_value: 255,
        }
    }
}

/// Stacked frames output
#[derive(Debug, Clone)]
pub struct StackedFrames {
    /// Maximum pixel value across all frames
    pub maxpixel: core::Mat,
    /// Average pixel value (excluding the maximum)
    pub avepixel: core::Mat,
    /// Standard deviation (excluding the maximum)
    pub stdpixel: core::Mat,
    /// Frame number with the maximum pixel value
    pub maxframe: core::Mat,
    /// Start time of the stack
    pub start_time: DateTime<Utc>,
    /// End time of the stack
    pub end_time: DateTime<Utc>,
    /// Stack ID (timestamp-based)
    pub stack_id: String,
}
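
// Worked example of the per-pixel CAMS statistics computed in `stack_frames`
// below, on a plain slice so the arithmetic is easy to verify without
// OpenCV. For the series [10, 20, 30, 100]:
//   maxpixel = 100
//   avepixel = (160 - 100) / 3 = 20
//   variance = (11400 - 100^2) / 3 - 20^2 = 466.67 - 400 = 66.67
//   stdpixel = sqrt(66.67) ~ 8.16
#[cfg(test)]
mod stacking_math_tests {
    #[test]
    fn cams_statistics_on_a_small_series() {
        let series: [f64; 4] = [10.0, 20.0, 30.0, 100.0];
        let n = series.len() as f64;
        let sum: f64 = series.iter().sum();
        let sum_sq: f64 = series.iter().map(|v| v * v).sum();
        let max = series.iter().cloned().fold(f64::MIN, f64::max);

        let avg = (sum - max) / (n - 1.0);
        let variance = (sum_sq - max * max) / (n - 1.0) - avg * avg;
        let std_dev = variance.max(0.0).sqrt();

        assert_eq!(avg, 20.0);
        assert!((std_dev - (200.0f64 / 3.0).sqrt()).abs() < 1e-9);
    }
}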

/// Frame stacker for the CAMS FTP format
pub struct FrameStacker {
    /// Frame stacker configuration
    config: FrameStackerConfig,
    /// Buffer of frames
    frame_buffer: VecDeque<Frame>,
    /// Whether the stacker is currently processing
    processing: bool,
    /// Current stack ID
    current_stack_id: String,
    /// Start time of the current stack
    start_time: Option<DateTime<Utc>>,
    /// Shared stacker state
    state: Arc<Mutex<FrameStackerState>>,
}

/// State information for the frame stacker
#[derive(Debug, Clone)]
pub struct FrameStackerState {
    /// Number of stacks processed
    pub stacks_processed: usize,
    /// Last stack completed time
    pub last_stack_time: Option<DateTime<Utc>>,
    /// Last stack ID
    pub last_stack_id: Option<String>,
    /// Number of frames in the current stack
    pub current_frame_count: usize,
}

impl Default for FrameStackerState {
    fn default() -> Self {
        Self {
            stacks_processed: 0,
            last_stack_time: None,
            last_stack_id: None,
            current_frame_count: 0,
        }
    }
}

impl FrameStacker {
    /// Create a new frame stacker with the given configuration
    pub fn new(config: FrameStackerConfig) -> Result<Self> {
        // Create the output directory if it doesn't exist
        if config.save_stacked_frames {
            std::fs::create_dir_all(&config.output_directory)
                .context("Failed to create output directory for stacked frames")?;
        }

        Ok(Self {
            config,
            frame_buffer: VecDeque::with_capacity(256),
            processing: false,
            current_stack_id: format!("stack_{}", Utc::now().timestamp()),
            start_time: None,
            state: Arc::new(Mutex::new(FrameStackerState::default())),
        })
    }

    /// Get the current state of the frame stacker
    pub fn get_state(&self) -> FrameStackerState {
        self.state.lock().unwrap().clone()
    }

    /// Process a new frame
    pub fn process_frame(&mut self, frame: Frame) -> Result<Option<StackedFrames>> {
        // Add the frame to the buffer
        self.frame_buffer.push_back(frame.clone());

        // Update state
        {
            let mut state = self.state.lock().unwrap();
            state.current_frame_count = self.frame_buffer.len();
        }

        // Set the start time of the stack if this is the first frame
        if self.start_time.is_none() {
            self.start_time = Some(frame.timestamp);
        }

        // If we've collected enough frames, process them
        if self.frame_buffer.len() >= self.config.frames_per_stack {
            debug!("Processing {} frames into a stack", self.frame_buffer.len());

            self.processing = true;
            let result = self.stack_frames();
            self.processing = false;

            // Remember the ID of the stack that just completed before
            // generating a fresh ID for the next one
            let completed_stack_id = self.current_stack_id.clone();
            self.current_stack_id = format!("stack_{}", Utc::now().timestamp());

            // Reset the start time for the next stack
            self.start_time = None;

            // Update state
            {
                let mut state = self.state.lock().unwrap();
                state.stacks_processed += 1;
                state.last_stack_time = Some(Utc::now());
                state.last_stack_id = Some(completed_stack_id);
                state.current_frame_count = 0;
            }

            return result;
        }

        Ok(None)
    }

    /// Stack the frames in the buffer to produce CAMS FTP format images
    fn stack_frames(&mut self) -> Result<Option<StackedFrames>> {
        if self.frame_buffer.is_empty() {
            return Ok(None);
        }

        // Get the dimensions from the first frame
        let first_frame = &self.frame_buffer[0];
        let width = first_frame.data.cols();
        let height = first_frame.data.rows();
        let channels = first_frame.data.channels();

        if channels != 1 {
            // Any color frames are converted per-frame below
            warn!("Input frames are not grayscale. Converting to grayscale for stacking.");
        }

        // Create the output matrices. In the Rust bindings Mat::zeros
        // returns a MatExpr, so it is materialized with to_mat().
        let mut maxpixel = core::Mat::zeros(height, width, core::CV_8UC1)?.to_mat()?;
        let mut sum_pixel = core::Mat::zeros(height, width, core::CV_64FC1)?.to_mat()?; // f64 accumulator
        let mut sum_sq_pixel = core::Mat::zeros(height, width, core::CV_64FC1)?.to_mat()?; // f64 accumulator
        let mut maxframe = core::Mat::zeros(height, width, core::CV_8UC1)?.to_mat()?;

        // Process all frames
        for (frame_idx, frame) in self.frame_buffer.iter().enumerate() {
            // Convert to grayscale if needed
            let gray_frame = if frame.data.channels() != 1 {
                let mut gray = core::Mat::default();
                imgproc::cvt_color(&frame.data, &mut gray, imgproc::COLOR_BGR2GRAY, 0)?;
                gray
            } else {
                frame.data.clone()
            };

            // Make sure the grayscale image is 8-bit
            let gray_8bit = if gray_frame.depth() != core::CV_8U {
                let mut converted = core::Mat::default();
                gray_frame.convert_to(&mut converted, core::CV_8U, 1.0, 0.0)?;
                converted
            } else {
                gray_frame
            };

            // Update maxpixel and maxframe
            for y in 0..height {
                for x in 0..width {
                    let current = *gray_8bit.at_2d::<u8>(y, x)?;
                    let current_max = *maxpixel.at_2d::<u8>(y, x)?;

                    if current > current_max {
                        // Update the maximum pixel value
                        *maxpixel.at_2d_mut::<u8>(y, x)? = current;
                        // Record the frame number (0-255)
                        *maxframe.at_2d_mut::<u8>(y, x)? = frame_idx as u8;
                    }

                    // Add to the sums for the average and std dev calculations
                    let current_f64 = f64::from(current);
                    *sum_pixel.at_2d_mut::<f64>(y, x)? += current_f64;
                    *sum_sq_pixel.at_2d_mut::<f64>(y, x)? += current_f64 * current_f64;
                }
            }
        }

        // Calculate avepixel (excluding the maximum pixel value)
        let mut avepixel = core::Mat::zeros(height, width, core::CV_8UC1)?.to_mat()?;
        for y in 0..height {
            for x in 0..width {
                let max_val = f64::from(*maxpixel.at_2d::<u8>(y, x)?);
                let sum = *sum_pixel.at_2d::<f64>(y, x)?;

                // Subtract the maximum value and divide by (N-1)
                let avg = (sum - max_val) / (self.frame_buffer.len() as f64 - 1.0);

                // Convert to 8-bit with rounding
                *avepixel.at_2d_mut::<u8>(y, x)? = avg.round().clamp(0.0, 255.0) as u8;
            }
        }

        // Calculate stdpixel (excluding the maximum pixel value)
        let mut stdpixel = core::Mat::zeros(height, width, core::CV_8UC1)?.to_mat()?;
        for y in 0..height {
            for x in 0..width {
                let max_val = f64::from(*maxpixel.at_2d::<u8>(y, x)?);
                let avg = f64::from(*avepixel.at_2d::<u8>(y, x)?);

                // Get the sum of squares over all frames
                let sum_sq = *sum_sq_pixel.at_2d::<f64>(y, x)?;

                // Over the N-1 samples left after excluding the maximum:
                // variance = E[x^2] - E[x]^2 = (sum_sq - max^2)/(N-1) - avg^2
                let variance = (sum_sq - max_val * max_val) / (self.frame_buffer.len() as f64 - 1.0) - avg * avg;

                // Square root for the standard deviation; negative values
                // from numerical error are clamped to zero
                let std_dev = if variance > 0.0 { variance.sqrt() } else { 0.0 };

                // Convert to 8-bit with rounding
                *stdpixel.at_2d_mut::<u8>(y, x)? = std_dev.round().clamp(0.0, 255.0) as u8;
            }
        }

        // Create the stacked frames result
        let first_frame_time = self.frame_buffer.front().unwrap().timestamp;
        let last_frame_time = self.frame_buffer.back().unwrap().timestamp;

        let stacked = StackedFrames {
            maxpixel,
            avepixel,
            stdpixel,
            maxframe,
            start_time: first_frame_time,
            end_time: last_frame_time,
            stack_id: self.current_stack_id.clone(),
        };

        // Save the stacked frames if enabled
        if self.config.save_stacked_frames {
            self.save_stacked_frames(&stacked)?;
        }

        // Clear the buffer for the next stack
        self.frame_buffer.clear();

        Ok(Some(stacked))
    }

    /// Save the stacked frames to disk
    fn save_stacked_frames(&self, stacked: &StackedFrames) -> Result<()> {
        let timestamp = stacked.start_time.format("%Y%m%d_%H%M%S").to_string();
        let base_path = self.config.output_directory.join(&timestamp);

        // Create the directory if it doesn't exist
        std::fs::create_dir_all(base_path.parent().unwrap())
            .context("Failed to create parent directory for stacked frames")?;

        // Save each image
        let maxpixel_path = format!("{}_maxpixel.png", base_path.to_string_lossy());
        let avepixel_path = format!("{}_avepixel.png", base_path.to_string_lossy());
        let stdpixel_path = format!("{}_stdpixel.png", base_path.to_string_lossy());
        let maxframe_path = format!("{}_maxframe.png", base_path.to_string_lossy());

        opencv::imgcodecs::imwrite(&maxpixel_path, &stacked.maxpixel, &core::Vector::new())?;
        opencv::imgcodecs::imwrite(&avepixel_path, &stacked.avepixel, &core::Vector::new())?;
        opencv::imgcodecs::imwrite(&stdpixel_path, &stacked.stdpixel, &core::Vector::new())?;
        opencv::imgcodecs::imwrite(&maxframe_path, &stacked.maxframe, &core::Vector::new())?;

        info!("Saved stacked frames to {}", base_path.to_string_lossy());
        Ok(())
    }

    /// Reset the frame stacker, clearing the buffer
    pub fn reset(&mut self) {
        self.frame_buffer.clear();
        self.start_time = None;
        self.current_stack_id = format!("stack_{}", Utc::now().timestamp());

        let mut state = self.state.lock().unwrap();
        state.current_frame_count = 0;
    }

    /// Check if the stacker is currently processing
    pub fn is_processing(&self) -> bool {
        self.processing
    }

    /// Get the current frame count
    pub fn frame_count(&self) -> usize {
        self.frame_buffer.len()
    }

    /// Get the stack progress (0.0 - 1.0)
    pub fn progress(&self) -> f32 {
        self.frame_buffer.len() as f32 / self.config.frames_per_stack as f32
    }
}
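
// Usage sketch for the stacker above: a small synthetic batch showing the
// batching contract (None until the stack fills, Some(StackedFrames) on the
// frame that completes it). Assumes `Frame::new(mat, timestamp, index)` as
// used by the CAMS detector and `Frame: Clone`.
#[cfg(test)]
mod frame_stacker_tests {
    use super::*;

    #[test]
    fn stack_completes_after_frames_per_stack() -> Result<()> {
        let config = FrameStackerConfig {
            frames_per_stack: 4,
            save_stacked_frames: false, // keep the test off the filesystem
            ..FrameStackerConfig::default()
        };
        let mut stacker = FrameStacker::new(config)?;

        for i in 0..4u64 {
            let mat = core::Mat::zeros(8, 8, core::CV_8UC1)?.to_mat()?;
            let result = stacker.process_frame(Frame::new(mat, Utc::now(), i))?;
            if i < 3 {
                assert!(result.is_none());
            } else {
                assert!(result.is_some());
            }
        }
        Ok(())
    }
}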

102
src/detection/mod.rs
Normal file
@ -0,0 +1,102 @@
mod pipeline;
mod frame_stacker;
mod cams_detector;
mod brightness_detector;

pub use pipeline::{DetectionPipeline, PipelineConfig, AggregationStrategy};
pub use frame_stacker::{FrameStacker, FrameStackerConfig, StackedFrames, FrameStackerState};
pub use cams_detector::{CamsDetector, CamsDetectorParams};
pub use brightness_detector::{BrightnessDetector, BrightnessDetectorParams};

use anyhow::Result;
use chrono::Utc;
use opencv::core;
use serde::{Deserialize, Serialize};

/// Result of meteor detection analysis
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DetectionResult {
    /// Whether a meteor was detected
    pub detected: bool,
    /// Confidence score (0.0-1.0)
    pub confidence: f32,
    /// Bounding box of the meteor (x, y, width, height)
    pub bounding_box: Option<(u32, u32, u32, u32)>,
    /// Brightness delta that triggered the detection
    pub brightness_delta: f32,
    /// Number of pixels that changed
    pub pixel_change: u32,
    /// Trajectory points (if tracking is enabled)
    pub trajectory: Vec<(u32, u32)>,
    /// ID of the detector that produced this result
    pub detector_id: Option<String>,
    /// Timestamp when the detection occurred
    pub timestamp: Option<chrono::DateTime<Utc>>,
}

impl Default for DetectionResult {
    fn default() -> Self {
        Self {
            detected: false,
            confidence: 0.0,
            bounding_box: None,
            brightness_delta: 0.0,
            pixel_change: 0,
            trajectory: Vec::new(),
            detector_id: None,
            timestamp: Some(Utc::now()),
        }
    }
}

/// Configuration for a detector
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum DetectorConfig {
    /// Brightness-based detector
    #[serde(rename = "brightness")]
    Brightness(BrightnessDetectorParams),

    /// CAMS FTP format detector
    #[serde(rename = "cams")]
    Cams(CamsDetectorParams),
}

impl Default for DetectorConfig {
    fn default() -> Self {
        DetectorConfig::Brightness(BrightnessDetectorParams::default())
    }
}

/// Detector factory for creating detectors from configuration
pub struct DetectorFactory;

impl DetectorFactory {
    /// Create a new detector from configuration
    pub fn create(config: &DetectorConfig) -> Box<dyn MeteorDetector> {
        match config {
            DetectorConfig::Brightness(params) => {
                Box::new(BrightnessDetector::with_params(params.clone()))
            },
            DetectorConfig::Cams(params) => {
                Box::new(CamsDetector::with_params(params.clone()))
            },
        }
    }
}

/// Trait for meteor detection algorithms
pub trait MeteorDetector: Send + Sync {
    /// Process a frame and detect meteors
    fn process_frame(&mut self, frame: &core::Mat, frame_index: u64) -> Result<DetectionResult>;

    /// Reset the detector state
    fn reset(&mut self);

    /// Get the detector's configuration
    fn get_config(&self) -> DetectorConfig;

    /// Get the detector's unique ID
    fn get_id(&self) -> &str;
}
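
// Sketch of the on-disk shape of `DetectorConfig`: with `#[serde(tag =
// "type")]` and the renames above, the brightness variant serializes as a
// single object carrying "type": "brightness" alongside its parameters.
#[cfg(test)]
mod detector_config_tests {
    use super::*;

    #[test]
    fn detector_config_is_internally_tagged() {
        let config = DetectorConfig::default();
        let json = serde_json::to_string(&config).expect("serialize");
        assert!(json.contains("\"type\":\"brightness\""));

        let parsed: DetectorConfig = serde_json::from_str(&json).expect("parse");
        assert!(matches!(parsed, DetectorConfig::Brightness(_)));
    }
}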

427
src/detection/pipeline.rs
Normal file
@ -0,0 +1,427 @@
use anyhow::{Context, Result};
use futures::future::join_all;
use log::{debug, error, info, warn};
use opencv::{core, prelude::*};
use serde::{Deserialize, Serialize};
use std::sync::{Arc, Mutex};
use std::time::Duration;
use tokio::runtime::Runtime;
// tokio's Mutex (aliased here) allows a guard to be held across .await
// points inside a spawned task, which the std guard cannot be.
use tokio::sync::{mpsc, Mutex as AsyncMutex};
use tokio::time;

use crate::camera::{CameraController, MeteorEvent};
use crate::config::Config;
use crate::detection::{
    DetectionResult, DetectorConfig, DetectorFactory, MeteorDetector,
};

/// Configuration for the detector pipeline
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PipelineConfig {
    /// List of detectors to use
    pub detectors: Vec<DetectorConfig>,
    /// Maximum number of parallel workers
    pub max_parallel_workers: usize,
    /// Buffer time in seconds for events (before/after detection)
    pub event_buffer_seconds: u32,
    /// Strategy for aggregating multiple detector results
    pub aggregation_strategy: AggregationStrategy,
}

impl Default for PipelineConfig {
    fn default() -> Self {
        Self {
            detectors: vec![DetectorConfig::default()],
            max_parallel_workers: 4,
            event_buffer_seconds: 10,
            aggregation_strategy: AggregationStrategy::Any,
        }
    }
}

/// Strategy for aggregating results from multiple detectors
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub enum AggregationStrategy {
    /// Any detector reports a detection
    #[serde(rename = "any")]
    Any,
    /// All detectors must report a detection
    #[serde(rename = "all")]
    All,
    /// A majority of detectors must report a detection
    #[serde(rename = "majority")]
    Majority,
    /// A custom fraction of detectors must report a detection
    #[serde(rename = "threshold")]
    Threshold(f32),
}
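
// Sketch of the aggregation rule implemented in `aggregate_results_static`
// below, reduced to counts so each strategy is easy to check in isolation.
#[cfg(test)]
mod aggregation_tests {
    use super::*;

    fn triggered(strategy: AggregationStrategy, detected: usize, total: usize) -> bool {
        match strategy {
            AggregationStrategy::Any => detected > 0,
            AggregationStrategy::All => detected == total,
            AggregationStrategy::Majority => detected > total / 2,
            AggregationStrategy::Threshold(t) => (detected as f32 / total as f32) >= t,
        }
    }

    #[test]
    fn strategies_agree_with_their_names() {
        // 2 of 4 detectors fired.
        assert!(triggered(AggregationStrategy::Any, 2, 4));
        assert!(!triggered(AggregationStrategy::All, 2, 4));
        assert!(!triggered(AggregationStrategy::Majority, 2, 4)); // needs > 2
        assert!(triggered(AggregationStrategy::Threshold(0.5), 2, 4));
    }
}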

/// Detection pipeline for meteor events with parallel detector support
pub struct DetectionPipeline {
    /// List of detector instances
    detectors: Vec<Arc<Mutex<Box<dyn MeteorDetector>>>>,
    /// Camera controller (async Mutex so the guard can live across .await)
    camera_controller: Arc<AsyncMutex<CameraController>>,
    /// Pipeline configuration
    config: PipelineConfig,
    /// Tokio runtime for async tasks
    runtime: Runtime,
    /// Channel for sending detected events
    event_tx: mpsc::Sender<MeteorEvent>,
    /// Receiver for events. `mpsc::Receiver` cannot be cloned, so it is
    /// held in an Option and handed out once via `take_event_receiver`.
    event_rx: Option<mpsc::Receiver<MeteorEvent>>,
    /// Whether the pipeline is running
    is_running: Arc<Mutex<bool>>,
    /// Current frame index
    frame_index: Arc<Mutex<u64>>,
}

impl DetectionPipeline {
    /// Create a new detection pipeline from configuration
    pub fn new(
        camera_controller: Arc<AsyncMutex<CameraController>>,
        config: &Config,
        pipeline_config: Option<PipelineConfig>,
    ) -> Result<Self> {
        // Use the provided pipeline config or create a default
        let pipeline_config = pipeline_config.unwrap_or_else(|| {
            // Create a default pipeline with a brightness detector
            let brightness_params = crate::detection::BrightnessDetectorParams {
                min_brightness_delta: config.detection.min_brightness_delta,
                min_pixel_change: config.detection.min_pixel_change,
                min_frames: config.detection.min_frames,
                sensitivity: config.detection.sensitivity,
                id: "brightness-default".to_string(),
            };

            PipelineConfig {
                detectors: vec![DetectorConfig::Brightness(brightness_params)],
                max_parallel_workers: 4,
                event_buffer_seconds: config.detection.event_buffer_seconds,
                aggregation_strategy: AggregationStrategy::Any,
            }
        });

        // Create the detectors from the config
        let mut detectors = Vec::with_capacity(pipeline_config.detectors.len());
        for detector_config in &pipeline_config.detectors {
            let detector = DetectorFactory::create(detector_config);
            detectors.push(Arc::new(Mutex::new(detector)));
        }

        // Create the tokio runtime
        let runtime = Runtime::new().context("Failed to create Tokio runtime")?;

        // Create a channel for event communication
        let (event_tx, event_rx) = mpsc::channel(32);

        Ok(Self {
            detectors,
            camera_controller,
            config: pipeline_config,
            runtime,
            event_tx,
            event_rx: Some(event_rx),
            is_running: Arc::new(Mutex::new(false)),
            frame_index: Arc::new(Mutex::new(0)),
        })
    }

    /// Add a detector to the pipeline
    pub fn add_detector(&mut self, config: DetectorConfig) -> Result<()> {
        let detector = DetectorFactory::create(&config);
        self.detectors.push(Arc::new(Mutex::new(detector)));
        Ok(())
    }

    /// Create from a legacy config (for backward compatibility)
    pub fn from_legacy_config(
        camera_controller: Arc<AsyncMutex<CameraController>>,
        config: &Config,
    ) -> Result<Self> {
        Self::new(camera_controller, config, None)
    }

    /// Process a frame with all detectors in parallel
    async fn process_frame_parallel(
        &self,
        frame: &core::Mat,
        frame_index: u64,
    ) -> Vec<DetectionResult> {
        Self::process_frame_parallel_static(&self.detectors, frame, frame_index).await
    }

    /// Aggregate results from multiple detectors according to the
    /// configured aggregation strategy
    fn aggregate_results(&self, results: Vec<DetectionResult>) -> Option<DetectionResult> {
        Self::aggregate_results_static(&results, self.config.aggregation_strategy)
    }

    /// Start the detection pipeline
    pub async fn run(&self) -> Result<()> {
        {
            let mut is_running = self.is_running.lock().unwrap();
            if *is_running {
                warn!("Detection pipeline is already running");
                return Ok(());
            }
            *is_running = true;
        }

        info!("Starting meteor detection pipeline with {} detectors", self.detectors.len());

        // Clone the shared state for the background processing task
        let camera_controller = self.camera_controller.clone();
        let detectors = self.detectors.clone();
        let event_tx = self.event_tx.clone();
        let buffer_seconds = self.config.event_buffer_seconds as i64;
        let aggregation_strategy = self.config.aggregation_strategy;
        let is_running = self.is_running.clone();
        let frame_index = self.frame_index.clone();

        // Process incoming frames
        let _process_task = tokio::spawn(async move {
            // Get the frame receiver
            let mut frame_rx = {
                let camera = camera_controller.lock().await;
                camera.subscribe_to_frames()
            };

            // Get the frame buffer reference (kept alive for the task's lifetime)
            let _frame_buffer = {
                let camera = camera_controller.lock().await;
                camera.get_frame_buffer()
            };

            while {
                let is_running = is_running.lock().unwrap();
                *is_running
            } {
                // Wait for the next frame or time out
                match tokio::time::timeout(Duration::from_secs(1), frame_rx.recv()).await {
                    Ok(Ok(frame)) => {
                        // Update the frame index
                        let current_frame_index = {
                            let mut idx = frame_index.lock().unwrap();
                            *idx += 1;
                            *idx
                        };

                        // Process the frame with all detectors in parallel
                        // (the Frame's image field is `data`, matching its
                        // use in the frame stacker)
                        let results = Self::process_frame_parallel_static(
                            &detectors, &frame.data, current_frame_index).await;

                        // Aggregate the results
                        if let Some(result) = Self::aggregate_results_static(&results, aggregation_strategy) {
                            // Handle the detection
                            if result.detected {
                                debug!("Meteor detected: confidence={:.2}, bbox={:?}, detector={}",
                                    result.confidence, result.bounding_box,
                                    result.detector_id.as_deref().unwrap_or("unknown"));

                                // Save the event
                                if let Some(bbox) = result.bounding_box {
                                    let event = {
                                        // The async guard may be held across
                                        // the .await below
                                        let mut camera = camera_controller.lock().await;
                                        match camera.save_meteor_event(
                                            frame.timestamp,
                                            result.confidence,
                                            bbox,
                                            buffer_seconds,
                                            buffer_seconds,
                                        ).await {
                                            Ok(event) => event,
                                            Err(e) => {
                                                error!("Failed to save meteor event: {}", e);
                                                continue;
                                            }
                                        }
                                    };

                                    // Send the event notification
                                    let _ = event_tx.send(event).await;
                                }
                            }
                        }
                    },
                    Ok(Err(e)) => {
                        error!("Error receiving frame: {}", e);

                        // Small delay to avoid a tight error loop
                        time::sleep(Duration::from_millis(100)).await;
                    },
                    Err(_) => {
                        // Timeout; loop around and check whether we should exit
                        continue;
                    }
                }
            }

            info!("Detection pipeline stopped");
        });

        Ok(())
    }

    /// Static version of process_frame_parallel for use in async tasks
    async fn process_frame_parallel_static(
        detectors: &[Arc<Mutex<Box<dyn MeteorDetector>>>],
        frame: &core::Mat,
        frame_index: u64,
    ) -> Vec<DetectionResult> {
        let mut handles = Vec::with_capacity(detectors.len());

        // Create a task for each detector
        for detector in detectors {
            let frame = frame.clone();
            let detector = detector.clone();

            // Spawn a task for this detector
            let handle = tokio::spawn(async move {
                let mut detector = detector.lock().unwrap();
                match detector.process_frame(&frame, frame_index) {
                    Ok(result) => result,
                    Err(e) => {
                        error!("Error processing frame with detector {}: {}",
                            detector.get_id(), e);
                        DetectionResult::default()
                    }
                }
            });

            handles.push(handle);
        }

        // Wait for all detectors to complete
        let results = join_all(handles).await;

        // Keep the successfully joined results
        results.into_iter()
            .filter_map(|r| r.ok())
            .collect()
    }

    /// Static version of aggregate_results for use in async tasks; the
    /// strategy is passed in so the configured value is actually honored
    fn aggregate_results_static(
        results: &[DetectionResult],
        strategy: AggregationStrategy,
    ) -> Option<DetectionResult> {
        // If there are no results, return None
        if results.is_empty() {
            return None;
        }

        // Count the detections
        let total = results.len();
        let detected = results.iter().filter(|r| r.detected).count();

        // Check whether the aggregation strategy is satisfied
        let detection_triggered = match strategy {
            AggregationStrategy::Any => detected > 0,
            AggregationStrategy::All => detected == total,
            AggregationStrategy::Majority => detected > total / 2,
            AggregationStrategy::Threshold(threshold) => {
                (detected as f32 / total as f32) >= threshold
            },
        };

        if !detection_triggered {
            return None;
        }

        // Return the result with the highest confidence
        results.iter()
            .filter(|r| r.detected)
            .max_by(|a, b| a.confidence.partial_cmp(&b.confidence).unwrap())
            .cloned()
    }

    /// Take the receiver for meteor events. `mpsc::Receiver` cannot be
    /// cloned, so this succeeds only once; later calls return None.
    pub fn take_event_receiver(&mut self) -> Option<mpsc::Receiver<MeteorEvent>> {
        self.event_rx.take()
    }

    /// Stop the detection pipeline
    pub async fn stop(&self) -> Result<()> {
        {
            let mut is_running = self.is_running.lock().unwrap();
            if !*is_running {
                warn!("Detection pipeline is not running");
                return Ok(());
            }
            *is_running = false;
        }

        info!("Stopping detection pipeline");
        Ok(())
    }

    /// Reset all detectors
    pub fn reset(&self) -> Result<()> {
        for detector in &self.detectors {
            detector.lock().unwrap().reset();
        }

        // Reset the frame index
        let mut frame_index = self.frame_index.lock().unwrap();
        *frame_index = 0;

        Ok(())
    }

    /// Get the number of detectors
    pub fn detector_count(&self) -> usize {
        self.detectors.len()
    }

    /// Get the detector configurations
    pub fn get_detector_configs(&self) -> Vec<DetectorConfig> {
        self.detectors.iter()
            .map(|d| d.lock().unwrap().get_config())
            .collect()
    }
}

471
src/gps/controller.rs
Normal file
@ -0,0 +1,471 @@
use anyhow::{anyhow, Result};
use chrono::{DateTime, Duration, Utc};
use log::{debug, error, info, warn};
use rppal::gpio::{Gpio, InputPin, Trigger};
use serialport::SerialPort;
use std::io::{BufRead, BufReader};
use std::sync::{Arc, Mutex};
use std::thread;
use std::time;
use tokio::sync::broadcast;

use crate::gps::nmea::parse_nmea_sentence;
use crate::gps::{CameraOrientation, GeoPosition, GpsConfig, GpsStatus, SyncStatus};

/// GPS module state information
#[derive(Debug, Clone)]
struct GpsState {
    /// Whether the GPS hardware is initialized
    initialized: bool,
    /// Whether the module is in degraded mode (using fallback values)
    degraded: bool,
    /// Last successful fix time
    last_fix: Option<DateTime<Utc>>,
    /// Initialization failure count
    init_failures: u32,
}

impl Default for GpsState {
    fn default() -> Self {
        Self {
            initialized: false,
            degraded: false,
            last_fix: None,
            init_failures: 0,
        }
    }
}

/// Controller for GPS and time synchronization
pub struct GpsController {
    /// GPS configuration
    config: GpsConfig,
    /// Serial port for GPS communication
    port: Option<Box<dyn SerialPort>>,
    /// GPIO pin for PPS signal
    pps_pin: Option<InputPin>,
    /// Last known position
    position: Arc<Mutex<GeoPosition>>,
    /// Current synchronization status
    sync_status: Arc<Mutex<SyncStatus>>,
    /// Last PPS pulse timestamp
    last_pps: Arc<Mutex<Option<DateTime<Utc>>>>,
    /// Number of satellites in view
    satellites: Arc<Mutex<u8>>,
    /// Last position update timestamp
    last_update: Arc<Mutex<Option<DateTime<Utc>>>>,
    /// Broadcast channel for position updates
    position_tx: broadcast::Sender<GeoPosition>,
    /// Whether the GPS is running
    is_running: Arc<Mutex<bool>>,
    /// GPS module state
    gps_state: GpsState,
}

impl GpsController {
    /// Create a new GPS controller with the given configuration
    pub async fn new(config: &crate::Config) -> Result<Self> {
        // Extract GPS settings from config
        let gps_config = config.gps.clone();

        // Create broadcast channel for position updates
        let (position_tx, _) = broadcast::channel(10);

        // Use the fallback position as the initial position if GPS is not enabled
        let initial_position = if gps_config.enable_gps {
            GeoPosition::default()
        } else {
            gps_config.fallback_position
        };

        Ok(Self {
            config: gps_config,
            port: None,
            pps_pin: None,
            position: Arc::new(Mutex::new(initial_position)),
            sync_status: Arc::new(Mutex::new(SyncStatus::NoSync)),
            last_pps: Arc::new(Mutex::new(None)),
            satellites: Arc::new(Mutex::new(0)),
            last_update: Arc::new(Mutex::new(None)),
            position_tx,
            is_running: Arc::new(Mutex::new(false)),
            gps_state: GpsState::default(),
        })
    }

    /// Initialize the GPS module
    pub async fn initialize(&mut self) -> Result<()> {
        // Check if GPS is enabled in config
        if !self.config.enable_gps {
            info!("GPS module disabled in configuration. Using fallback position.");
            self.gps_state.degraded = true;

            // Set fallback position
            let mut pos = self.position.lock().unwrap();
            *pos = self.config.fallback_position;

            return Ok(());
        }

        // Open the serial port (serialport 4.x uses a builder rather than
        // the old `SerialPortSettings` struct)
        let port_result = serialport::new(&self.config.port, self.config.baud_rate)
            .data_bits(serialport::DataBits::Eight)
            .flow_control(serialport::FlowControl::None)
            .parity(serialport::Parity::None)
            .stop_bits(serialport::StopBits::One)
            .timeout(time::Duration::from_millis(1000))
            .open();

        match port_result {
            Ok(port) => {
                self.port = Some(port);
                self.gps_state.initialized = true;
            },
            Err(e) => {
                self.gps_state.init_failures += 1;

                if self.config.allow_degraded_mode {
                    warn!("Failed to open GPS port {}: {}. Using fallback position.",
                          self.config.port, e);
                    self.gps_state.degraded = true;

                    // Set fallback position
                    let mut pos = self.position.lock().unwrap();
                    *pos = self.config.fallback_position;
                } else {
                    return Err(anyhow!("Failed to open GPS port and degraded mode is not allowed: {}", e));
                }
            }
        }

        // Initialize the PPS pin if enabled and the GPS initialized successfully
        if self.config.use_pps && !self.gps_state.degraded {
            match Gpio::new() {
                Ok(gpio) => {
                    match gpio.get(self.config.pps_pin) {
                        Ok(pin) => {
                            let mut input_pin = pin.into_input();

                            // Set up PPS edge detection (rising edge)
                            let last_pps = self.last_pps.clone();
                            match input_pin.set_async_interrupt(Trigger::RisingEdge, move |_| {
                                let now = Utc::now();
                                let mut last_pps = last_pps.lock().unwrap();
                                *last_pps = Some(now);
                            }) {
                                Ok(_) => {
                                    self.pps_pin = Some(input_pin);
                                    info!("PPS signal detection initialized on GPIO {}", self.config.pps_pin);
                                },
                                Err(e) => {
                                    warn!("Failed to set up PPS interrupt: {}. PPS sync disabled.", e);
                                }
                            }
                        },
                        Err(e) => {
                            warn!("Failed to access GPIO pin {}: {}. PPS sync disabled.",
                                  self.config.pps_pin, e);
                        }
                    }
                },
                Err(e) => {
                    warn!("Failed to access GPIO: {}. PPS sync disabled.", e);
                }
            }
        } else if !self.config.use_pps {
            info!("PPS signal detection disabled in configuration");
        }

        if self.gps_state.degraded {
            if self.config.allow_degraded_mode {
                info!("GPS module initialized in degraded mode (using fallback position)");
                Ok(())
            } else {
                Err(anyhow!("GPS initialization failed and degraded mode is not allowed"))
            }
        } else {
            info!("GPS module initialized successfully");
            Ok(())
        }
    }

    /// Start GPS processing
    pub async fn start(&self) -> Result<()> {
        // Handle degraded mode
        if self.gps_state.degraded {
            info!("Starting GPS in degraded mode (using fallback position)");

            // Set fallback position and status
            {
                let mut pos = self.position.lock().unwrap();
                *pos = self.config.fallback_position;

                let mut status = self.sync_status.lock().unwrap();
                *status = SyncStatus::NoSync;

                // Send an initial position update with the fallback
                let _ = self.position_tx.send(self.config.fallback_position);
            }

            {
                let mut is_running = self.is_running.lock().unwrap();
                *is_running = true;
            }

            return Ok(());
        }

        // Normal mode - require an initialized GPS
        if self.port.is_none() {
            return Err(anyhow!("GPS not initialized"));
        }

        {
            let mut is_running = self.is_running.lock().unwrap();
            if *is_running {
                warn!("GPS is already running");
                return Ok(());
            }
            *is_running = true;
        }

        // Clone Arc references for the background task
        let port_name = self.config.port.clone();
        let baud_rate = self.config.baud_rate;
        let position = self.position.clone();
        let sync_status = self.sync_status.clone();
        let satellites = self.satellites.clone();
        let last_update = self.last_update.clone();
        let position_tx = self.position_tx.clone();
        let is_running = self.is_running.clone();

        // Copy the fallback position in case we need it later
        let fallback_position = self.config.fallback_position;
        let allow_degraded = self.config.allow_degraded_mode;

        // Create a separate thread for GPS processing (blocking I/O)
        thread::spawn(move || {
            info!("Starting GPS processing on port {}", port_name);

            // Re-open the port for the blocking reader thread
            // (serialport 4.x builder API)
            let port = match serialport::new(&port_name, baud_rate)
                .timeout(time::Duration::from_millis(1000))
                .open()
            {
                Ok(port) => port,
                Err(e) => {
                    error!("Failed to open GPS port: {}", e);

                    // Use the fallback position if degraded mode is allowed
                    if allow_degraded {
                        warn!("Using fallback position due to GPS port error");
                        let mut pos = position.lock().unwrap();
                        *pos = fallback_position;

                        let _ = position_tx.send(fallback_position);
                    }

                    {
                        let mut is_running = is_running.lock().unwrap();
                        *is_running = false;
                    }
                    return;
                }
            };

            let reader = BufReader::new(port);

            // Set a timeout for acquiring a fix
            let start_time = Utc::now();
            let mut fix_acquired = false;
            let mut fallback_sent = false;

            for line in reader.lines() {
                // Check if we should exit
                {
                    let is_running = is_running.lock().unwrap();
                    if !*is_running {
                        break;
                    }
                }

                // Check if we've been waiting too long for a fix
                // (the fallback is published only once)
                if !fix_acquired && !fallback_sent && allow_degraded {
                    let elapsed = Utc::now() - start_time;
                    if elapsed > Duration::seconds(30) { // 30 second timeout for initial fix
                        warn!("Timeout waiting for GPS fix, using fallback position");

                        // Set fallback position
                        {
                            let mut pos = position.lock().unwrap();
                            *pos = fallback_position;
                            let _ = position_tx.send(fallback_position);
                        }
                        fallback_sent = true;

                        // Keep trying for a real fix; we at least have a fallback now
                    }
                }

                match line {
                    Ok(sentence) => {
                        if let Ok(Some(nmea_pos)) = parse_nmea_sentence(&sentence) {
                            // Update position
                            {
                                let mut pos = position.lock().unwrap();
                                *pos = nmea_pos.position;

                                // Send position update
                                let _ = position_tx.send(nmea_pos.position);
                            }

                            // Update satellites
                            {
                                let mut sats = satellites.lock().unwrap();
                                *sats = nmea_pos.satellites;
                            }

                            // Update sync status
                            {
                                let mut status = sync_status.lock().unwrap();
                                if nmea_pos.fix_quality > 0 {
                                    *status = SyncStatus::GpsOnly;
                                    fix_acquired = true;
                                }
                            }

                            // Update last update time
                            {
                                let mut update = last_update.lock().unwrap();
                                *update = Some(Utc::now());
                            }

                            debug!("GPS update: lat={:.6}, lon={:.6}, alt={:.1}, satellites={}",
                                   nmea_pos.position.latitude,
                                   nmea_pos.position.longitude,
                                   nmea_pos.position.altitude,
                                   nmea_pos.satellites);
                        }
                    }
                    Err(e) => {
                        error!("Error reading GPS data: {}", e);
                    }
                }
            }

            info!("GPS processing stopped");
            {
                let mut is_running = is_running.lock().unwrap();
                *is_running = false;
            }
        });

        info!("GPS processing started");
        Ok(())
    }

    /// Stop GPS processing
    pub async fn stop(&self) -> Result<()> {
        {
            let mut is_running = self.is_running.lock().unwrap();
            if !*is_running {
                warn!("GPS is not running");
                return Ok(());
            }
            *is_running = false;
        }

        // The background thread will notice the is_running flag and exit
        info!("GPS processing stopping (may take a moment to complete)");
        Ok(())
    }

    /// Get the current GPS status
    pub fn get_status(&self) -> GpsStatus {
        let position = self.position.lock().unwrap().clone();
        let satellites = *self.satellites.lock().unwrap();
        let sync_status = match *self.sync_status.lock().unwrap() {
            SyncStatus::NoSync => "no_sync",
            SyncStatus::GpsOnly => "gps_only",
            SyncStatus::FullSync => "full_sync",
        }.to_string();

        // Estimate the time accuracy from the synchronization level
        let time_accuracy_ms = match *self.sync_status.lock().unwrap() {
            SyncStatus::NoSync => 1000.0, // 1 second
            SyncStatus::GpsOnly => 100.0, // 100 ms
            SyncStatus::FullSync => 1.0,  // 1 ms
        };

        GpsStatus {
            position,
            satellites,
            timestamp: Utc::now(),
            sync_status,
            time_accuracy_ms,
            camera_orientation: self.config.camera_orientation,
        }
    }

    /// Get a precise UTC timestamp using PPS if available
    pub fn get_precise_time(&self) -> DateTime<Utc> {
        let now = Utc::now();

        // If we have PPS sync, correct against the last PPS pulse. The pulse
        // marks the top of a UTC second, so snap the recorded pulse time to
        // the nearest whole second and offset by the time elapsed since.
        // (Simply returning `last_pps + elapsed` would just reproduce `now`.)
        if let SyncStatus::FullSync = *self.sync_status.lock().unwrap() {
            if let Some(last_pps) = *self.last_pps.lock().unwrap() {
                // Only use PPS if it's recent (within the last second)
                let elapsed = now - last_pps;
                if elapsed < Duration::seconds(1) {
                    let nanos = last_pps.timestamp_subsec_nanos() as i64;
                    let snap = if nanos >= 500_000_000 {
                        Duration::nanoseconds(1_000_000_000 - nanos)
                    } else {
                        Duration::nanoseconds(-nanos)
                    };
                    return last_pps + snap + elapsed;
                }
            }
        }

        // Fall back to system time
        now
    }

    /// Get the current position
    pub fn get_position(&self) -> GeoPosition {
        self.position.lock().unwrap().clone()
    }

    /// Subscribe to position updates
    pub fn subscribe(&self) -> broadcast::Receiver<GeoPosition> {
        self.position_tx.subscribe()
    }
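
    // Usage sketch (hedged, not part of the original commit): a consumer on
    // the tokio runtime can watch for position changes without polling. `gps`
    // stands for any `GpsController` the caller owns; the loop ends once the
    // controller (and its broadcast sender) is dropped.
    //
    //     let mut rx = gps.subscribe();
    //     tokio::spawn(async move {
    //         while let Ok(pos) = rx.recv().await {
    //             log::info!("new position: {:.6}, {:.6}", pos.latitude, pos.longitude);
    //         }
    //     });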

    /// Check if the GPS has a valid fix
    pub fn has_fix(&self) -> bool {
        // In degraded mode with a fallback, pretend we have a fix
        if self.gps_state.degraded && self.config.allow_degraded_mode {
            return true;
        }

        // Otherwise check the actual sync status
        match *self.sync_status.lock().unwrap() {
            SyncStatus::NoSync => false,
            _ => true,
        }
    }

    /// Update the camera orientation
    pub fn set_camera_orientation(&mut self, orientation: CameraOrientation) {
        self.config.camera_orientation = orientation;
    }

    /// Get the camera orientation
    pub fn get_camera_orientation(&self) -> CameraOrientation {
        self.config.camera_orientation
    }
}

impl Drop for GpsController {
    fn drop(&mut self) {
        // Ensure the GPS processing is stopped
        let mut is_running = self.is_running.lock().unwrap();
        *is_running = false;

        // The PPS pin will be dropped automatically, removing the interrupt
    }
}
115
src/gps/mod.rs
Normal file
@ -0,0 +1,115 @@
mod controller;
mod nmea;

pub use controller::GpsController;

use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};

/// Represents a geographic location with latitude, longitude, and altitude
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub struct GeoPosition {
    /// Latitude in degrees (positive is North, negative is South)
    pub latitude: f64,
    /// Longitude in degrees (positive is East, negative is West)
    pub longitude: f64,
    /// Altitude in meters above sea level
    pub altitude: f64,
}

impl Default for GeoPosition {
    fn default() -> Self {
        Self {
            latitude: 0.0,
            longitude: 0.0,
            altitude: 0.0,
        }
    }
}

/// Camera orientation in 3D space
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub struct CameraOrientation {
    /// Azimuth/heading in degrees (0 = North, 90 = East, etc.)
    pub azimuth: f64,
    /// Elevation/pitch in degrees (0 = horizontal, 90 = straight up)
    pub elevation: f64,
}

impl Default for CameraOrientation {
    fn default() -> Self {
        Self {
            azimuth: 0.0,    // Pointing North
            elevation: 90.0, // Pointing straight up
        }
    }
}

/// GPS/time synchronization status
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SyncStatus {
    /// No GPS signal, using system time
    NoSync,
    /// GPS signal acquired, but no PPS (lower precision)
    GpsOnly,
    /// Full sync with GPS and PPS signal
    FullSync,
}

/// Status and data from the GPS
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GpsStatus {
    /// Current position from GPS
    pub position: GeoPosition,
    /// Number of satellites in view
    pub satellites: u8,
    /// Current time from GPS
    pub timestamp: DateTime<Utc>,
    /// Time synchronization status
    pub sync_status: String,
    /// Estimated time accuracy in milliseconds
    pub time_accuracy_ms: f64,
    /// Camera orientation (from configuration or sensor)
    pub camera_orientation: CameraOrientation,
}

/// Configuration for the GPS module
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GpsConfig {
    /// Whether to enable GPS functionality
    pub enable_gps: bool,
    /// Serial port for GPS (/dev/ttyAMA0, /dev/ttyUSB0, etc.)
    pub port: String,
    /// Baud rate for serial communication
    pub baud_rate: u32,
    /// Whether to use PPS for precise timing
    pub use_pps: bool,
    /// GPIO pin for PPS signal (BCM numbering)
    pub pps_pin: u8,
    /// Fixed camera orientation
    pub camera_orientation: CameraOrientation,
    /// Fallback position to use when GPS is unavailable
    pub fallback_position: GeoPosition,
    /// Allow the system to run without GPS (using the fallback position)
    pub allow_degraded_mode: bool,
}

impl Default for GpsConfig {
    fn default() -> Self {
        Self {
            enable_gps: true,
            port: "/dev/ttyAMA0".to_string(),
            baud_rate: 9600,
            use_pps: true,
            pps_pin: 18, // GPIO 18 (pin 12 on the Raspberry Pi)
            camera_orientation: CameraOrientation::default(),
            fallback_position: GeoPosition {
                latitude: 34.0522, // Los Angeles as an example
                longitude: -118.2437,
                altitude: 85.0,
            },
            allow_degraded_mode: true,
        }
    }
}
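
// A hedged example of the matching `[gps]` section in config.toml, assuming
// the `config` crate deserializes this struct with serde's default field
// names (the values mirror `GpsConfig::default()` above):
//
//     [gps]
//     enable_gps = true
//     port = "/dev/ttyAMA0"
//     baud_rate = 9600
//     use_pps = true
//     pps_pin = 18
//     allow_degraded_mode = true
//
//     [gps.camera_orientation]
//     azimuth = 0.0
//     elevation = 90.0
//
//     [gps.fallback_position]
//     latitude = 34.0522
//     longitude = -118.2437
//     altitude = 85.0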
318
src/gps/nmea.rs
Normal file
@ -0,0 +1,318 @@
use anyhow::{anyhow, Context, Result};
use chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, TimeZone, Utc};
use log::warn;

use crate::gps::GeoPosition;

/// NMEA sentence types we're interested in
#[derive(Debug, PartialEq, Eq)]
pub enum NmeaSentenceType {
    /// Global Positioning System Fix Data
    GGA,
    /// Geographic position - Latitude/Longitude
    GLL,
    /// Recommended minimum navigation information
    RMC,
    /// GPS DOP and active satellites
    GSA,
    /// Satellites in view
    GSV,
    /// Course over ground and ground speed
    VTG,
    /// Time & Date
    ZDA,
    /// Unknown/unsupported sentence
    Unknown,
}

/// Extracted position data from NMEA
#[derive(Debug, Default)]
pub struct NmeaPosition {
    /// Geographic position
    pub position: GeoPosition,
    /// UTC timestamp
    pub timestamp: Option<DateTime<Utc>>,
    /// Number of satellites used in the fix
    pub satellites: u8,
    /// Fix quality (0 = invalid, 1 = GPS fix, 2 = DGPS fix)
    pub fix_quality: u8,
}

/// Parse an NMEA sentence and extract relevant data
pub fn parse_nmea_sentence(sentence: &str) -> Result<Option<NmeaPosition>> {
    // Check that the sentence starts with $ and has a checksum
    if !sentence.starts_with('$') || !sentence.contains('*') {
        return Ok(None);
    }

    // Split the sentence at '*' to get the message and checksum
    let parts: Vec<&str> = sentence.split('*').collect();
    if parts.len() != 2 {
        return Ok(None);
    }

    let message = parts[0];

    // Validate checksum (optional)
    /*
    let checksum = u8::from_str_radix(parts[1], 16)
        .context("Invalid checksum format")?;
    if calculate_checksum(message) != checksum {
        return Err(anyhow!("NMEA checksum validation failed"));
    }
    */

    // Split the message into fields
    let fields: Vec<&str> = message.split(',').collect();
    if fields.len() < 2 {
        return Ok(None);
    }

    // Get the sentence type
    let sentence_type = match &fields[0][1..] {
        "GPGGA" | "GNGGA" => NmeaSentenceType::GGA,
        "GPGLL" | "GNGLL" => NmeaSentenceType::GLL,
        "GPRMC" | "GNRMC" => NmeaSentenceType::RMC,
        "GPGSA" | "GNGSA" => NmeaSentenceType::GSA,
        "GPGSV" | "GNGSV" => NmeaSentenceType::GSV,
        "GPVTG" | "GNVTG" => NmeaSentenceType::VTG,
        "GPZDA" | "GNZDA" => NmeaSentenceType::ZDA,
        _ => NmeaSentenceType::Unknown,
    };

    // Parse based on the sentence type
    match sentence_type {
        NmeaSentenceType::GGA => parse_gga(&fields),
        NmeaSentenceType::RMC => parse_rmc(&fields),
        _ => Ok(None), // Ignore other sentence types for now
    }
}

/// Parse a GGA sentence (Global Positioning System Fix Data)
fn parse_gga(fields: &[&str]) -> Result<Option<NmeaPosition>> {
    if fields.len() < 15 {
        warn!("GGA: not enough fields");
        return Ok(None);
    }

    // Example: $GPGGA,123519,4807.038,N,01131.000,E,1,08,0.9,545.4,M,46.9,M,,*47

    let mut position = NmeaPosition::default();

    // Parse time
    if let Some(time) = parse_nmea_time(fields[1]) {
        // GGA carries only a time of day, not a date; pair it with the
        // current UTC date to form a full timestamp
        let today = Utc::now().date_naive();
        position.timestamp = Some(Utc.from_utc_datetime(&today.and_time(time)));
    }

    // Parse latitude (fields 2 & 3)
    if !fields[2].is_empty() && !fields[3].is_empty() {
        let latitude = parse_nmea_latitude(fields[2], fields[3])
            .context("Failed to parse latitude")?;
        position.position.latitude = latitude;
    }

    // Parse longitude (fields 4 & 5)
    if !fields[4].is_empty() && !fields[5].is_empty() {
        let longitude = parse_nmea_longitude(fields[4], fields[5])
            .context("Failed to parse longitude")?;
        position.position.longitude = longitude;
    }

    // Parse fix quality (field 6)
    if !fields[6].is_empty() {
        position.fix_quality = fields[6].parse::<u8>().unwrap_or(0);
    }

    // Parse satellites (field 7)
    if !fields[7].is_empty() {
        position.satellites = fields[7].parse::<u8>().unwrap_or(0);
    }

    // Parse altitude (field 9)
    if !fields[9].is_empty() {
        position.position.altitude = fields[9].parse::<f64>().unwrap_or(0.0);
    }

    Ok(Some(position))
}

/// Parse an RMC sentence (Recommended Minimum Navigation Information)
fn parse_rmc(fields: &[&str]) -> Result<Option<NmeaPosition>> {
    if fields.len() < 12 {
        warn!("RMC: not enough fields");
        return Ok(None);
    }

    // Example: $GPRMC,123519,A,4807.038,N,01131.000,E,022.4,084.4,230394,003.1,W*6A

    let mut position = NmeaPosition::default();

    // Check status (field 2) - A=active, V=void
    if fields[2] != "A" {
        // Data is not valid
        return Ok(None);
    }

    // Parse time (field 1) and date (field 9)
    if !fields[1].is_empty() && !fields[9].is_empty() {
        if let (Some(time), Some(date)) = (parse_nmea_time(fields[1]), parse_nmea_date(fields[9])) {
            let datetime = NaiveDateTime::new(date, time);
            position.timestamp = Some(Utc.from_utc_datetime(&datetime));
        }
    }

    // Parse latitude (fields 3 & 4)
    if !fields[3].is_empty() && !fields[4].is_empty() {
        let latitude = parse_nmea_latitude(fields[3], fields[4])
            .context("Failed to parse latitude")?;
        position.position.latitude = latitude;
    }

    // Parse longitude (fields 5 & 6)
    if !fields[5].is_empty() && !fields[6].is_empty() {
        let longitude = parse_nmea_longitude(fields[5], fields[6])
            .context("Failed to parse longitude")?;
        position.position.longitude = longitude;
    }

    position.fix_quality = 1; // RMC with status A has a valid fix
    position.satellites = 0;  // RMC doesn't include a satellite count

    Ok(Some(position))
}

/// Parse the NMEA time format (HHMMSS.SSS)
fn parse_nmea_time(time_str: &str) -> Option<NaiveTime> {
    if time_str.is_empty() {
        return None;
    }

    // Parse the time string
    let hours = time_str.get(0..2)?.parse::<u32>().ok()?;
    let minutes = time_str.get(2..4)?.parse::<u32>().ok()?;

    // Seconds can include a decimal part
    let seconds_str = time_str.get(4..)?;
    let seconds_float = seconds_str.parse::<f64>().ok()?;
    let seconds = seconds_float as u32;
    let nanoseconds = ((seconds_float - seconds as f64) * 1_000_000_000.0) as u32;

    NaiveTime::from_hms_nano_opt(hours, minutes, seconds, nanoseconds)
}

/// Parse the NMEA date format (DDMMYY)
fn parse_nmea_date(date_str: &str) -> Option<NaiveDate> {
    if date_str.len() != 6 {
        return None;
    }

    let day = date_str.get(0..2)?.parse::<u32>().ok()?;
    let month = date_str.get(2..4)?.parse::<u32>().ok()?;

    // Add 2000 to years < 80, add 1900 to years >= 80
    let year_short = date_str.get(4..6)?.parse::<u32>().ok()?;
    let year = if year_short < 80 { 2000 + year_short } else { 1900 + year_short };

    NaiveDate::from_ymd_opt(year as i32, month, day)
}

/// Parse the NMEA latitude format (DDMM.MMMM,N/S)
fn parse_nmea_latitude(lat_str: &str, dir: &str) -> Result<f64> {
    if lat_str.is_empty() {
        return Ok(0.0);
    }

    // The format is typically DDMM.MMMM where DD is degrees and MM.MMMM is minutes
    let degrees = lat_str.get(0..2)
        .ok_or_else(|| anyhow!("Invalid latitude format"))?
        .parse::<f64>()?;

    let minutes = lat_str.get(2..)
        .ok_or_else(|| anyhow!("Invalid latitude format"))?
        .parse::<f64>()?;

    let mut latitude = degrees + (minutes / 60.0);

    // Apply the direction
    if dir == "S" {
        latitude = -latitude;
    }

    Ok(latitude)
}

/// Parse the NMEA longitude format (DDDMM.MMMM,E/W)
fn parse_nmea_longitude(lon_str: &str, dir: &str) -> Result<f64> {
    if lon_str.is_empty() {
        return Ok(0.0);
    }

    // The format is typically DDDMM.MMMM where DDD is degrees and MM.MMMM is minutes
    let degrees = lon_str.get(0..3)
        .ok_or_else(|| anyhow!("Invalid longitude format"))?
        .parse::<f64>()?;

    let minutes = lon_str.get(3..)
        .ok_or_else(|| anyhow!("Invalid longitude format"))?
        .parse::<f64>()?;

    let mut longitude = degrees + (minutes / 60.0);

    // Apply the direction
    if dir == "W" {
        longitude = -longitude;
    }

    Ok(longitude)
}

/// Calculate the NMEA checksum (XOR of all bytes between '$' and '*')
#[allow(dead_code)]
fn calculate_checksum(message: &str) -> u8 {
    // Skip the leading $ if present
    let start = if message.starts_with('$') { 1 } else { 0 };

    // XOR all bytes
    message.bytes().skip(start).fold(0, |checksum, byte| checksum ^ byte)
}

/// Unit tests for the NMEA parser
#[cfg(test)]
mod tests {
    use super::*;
    use chrono::{Datelike, Timelike};

    #[test]
    fn test_parse_gga() {
        let sentence = "$GPGGA,123519,4807.038,N,01131.000,E,1,08,0.9,545.4,M,46.9,M,,*47";
        let result = parse_nmea_sentence(sentence).unwrap().unwrap();

        assert_eq!(result.fix_quality, 1);
        assert_eq!(result.satellites, 8);
        assert!((result.position.latitude - 48.1173).abs() < 0.0001);
        assert!((result.position.longitude - 11.5167).abs() < 0.0001);
        assert!((result.position.altitude - 545.4).abs() < 0.0001);
    }

    #[test]
    fn test_parse_rmc() {
        let sentence = "$GPRMC,123519,A,4807.038,N,01131.000,E,022.4,084.4,230394,003.1,W*6A";
        let result = parse_nmea_sentence(sentence).unwrap().unwrap();

        assert_eq!(result.fix_quality, 1);
        assert!((result.position.latitude - 48.1173).abs() < 0.0001);
        assert!((result.position.longitude - 11.5167).abs() < 0.0001);

        if let Some(timestamp) = result.timestamp {
            let naive = timestamp.naive_utc();
            assert_eq!(naive.year(), 1994);
            assert_eq!(naive.month(), 3);
            assert_eq!(naive.day(), 23);
            assert_eq!(naive.hour(), 12);
            assert_eq!(naive.minute(), 35);
            assert_eq!(naive.second(), 19);
        } else {
            panic!("Timestamp should be parsed");
        }
    }
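
    // Added tests (hedged additions, not part of the original commit): they
    // pin down the checksum XOR and the two-digit-year pivot documented above.
    #[test]
    fn test_calculate_checksum() {
        // XOR of every byte between '$' and '*' must equal the hex value
        // after '*', which is 0x47 for the GGA example sentence.
        let message = "$GPGGA,123519,4807.038,N,01131.000,E,1,08,0.9,545.4,M,46.9,M,,";
        assert_eq!(calculate_checksum(message), 0x47);
    }

    #[test]
    fn test_parse_nmea_date_pivot() {
        // Years < 80 map to 20xx, years >= 80 map to 19xx
        assert_eq!(parse_nmea_date("230394"), NaiveDate::from_ymd_opt(1994, 3, 23));
        assert_eq!(parse_nmea_date("010123"), NaiveDate::from_ymd_opt(2023, 1, 1));
    }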
}
151
src/hooks/mod.rs
Normal file
@ -0,0 +1,151 @@
use anyhow::Result;
use chrono::{DateTime, Utc};
use log::error;
use opencv::{core, prelude::*};
use std::sync::{Arc, Mutex};

/// Hook for processing video frames
pub trait FrameHook: Send + Sync {
    /// Process a frame
    fn process_frame(&mut self, frame: &mut core::Mat, timestamp: DateTime<Utc>) -> Result<()>;

    /// Get the hook ID
    fn get_id(&self) -> &str;

    /// Get the hook name
    fn get_name(&self) -> &str;

    /// Get the hook description
    fn get_description(&self) -> &str;

    /// Check if the hook is enabled
    fn is_enabled(&self) -> bool;

    /// Enable or disable the hook
    fn set_enabled(&mut self, enabled: bool);
}

/// Wrapper for a thread-safe frame hook
pub type SharedFrameHook = Arc<Mutex<Box<dyn FrameHook>>>;

/// Hook manager for processing video frames
pub struct HookManager {
    /// List of registered hooks
    hooks: Vec<SharedFrameHook>,
}

impl HookManager {
    /// Create a new hook manager
    pub fn new() -> Self {
        Self {
            hooks: Vec::new(),
        }
    }

    /// Register a hook
    pub fn register_hook(&mut self, hook: Box<dyn FrameHook>) -> SharedFrameHook {
        let hook = Arc::new(Mutex::new(hook));
        self.hooks.push(hook.clone());
        hook
    }

    /// Remove a hook by ID
    pub fn remove_hook(&mut self, id: &str) -> bool {
        let before_len = self.hooks.len();
        self.hooks.retain(|h| {
            let hook = h.lock().unwrap();
            hook.get_id() != id
        });
        self.hooks.len() < before_len
    }

    /// Process a frame through all hooks
    pub fn process_frame(&self, frame: &mut core::Mat, timestamp: DateTime<Utc>) -> Result<()> {
        for hook in &self.hooks {
            let mut hook = hook.lock().unwrap();
            if hook.is_enabled() {
                if let Err(e) = hook.process_frame(frame, timestamp) {
                    error!("Error in frame hook {}: {}", hook.get_id(), e);
                }
            }
        }
        Ok(())
    }

    /// Get all hooks
    pub fn get_hooks(&self) -> &[SharedFrameHook] {
        &self.hooks
    }

    /// Get a hook by ID
    pub fn get_hook(&self, id: &str) -> Option<SharedFrameHook> {
        self.hooks.iter()
            .find(|h| {
                let hook = h.lock().unwrap();
                hook.get_id() == id
            })
            .cloned()
    }
}

/// A basic frame hook implementation
pub struct BasicFrameHook {
    /// Hook ID
    id: String,
    /// Hook name
    name: String,
    /// Hook description
    description: String,
    /// Whether the hook is enabled
    enabled: bool,
    /// Hook processor function
    processor: Box<dyn Fn(&mut core::Mat, DateTime<Utc>) -> Result<()> + Send + Sync>,
}

impl BasicFrameHook {
    /// Create a new basic frame hook
    pub fn new<F>(
        id: impl Into<String>,
        name: impl Into<String>,
        description: impl Into<String>,
        enabled: bool,
        processor: F,
    ) -> Self
    where
        F: Fn(&mut core::Mat, DateTime<Utc>) -> Result<()> + Send + Sync + 'static,
    {
        Self {
            id: id.into(),
            name: name.into(),
            description: description.into(),
            enabled,
            processor: Box::new(processor),
        }
    }
}

impl FrameHook for BasicFrameHook {
    fn process_frame(&mut self, frame: &mut core::Mat, timestamp: DateTime<Utc>) -> Result<()> {
        (self.processor)(frame, timestamp)
    }

    fn get_id(&self) -> &str {
        &self.id
    }

    fn get_name(&self) -> &str {
        &self.name
    }

    fn get_description(&self) -> &str {
        &self.description
    }

    fn is_enabled(&self) -> bool {
        self.enabled
    }

    fn set_enabled(&mut self, enabled: bool) {
        self.enabled = enabled;
    }
}
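
// A small added test (hedged, not in the original commit) showing the intended
// usage: register a `BasicFrameHook` and let the manager dispatch frames to it.
// The frame is an empty `Mat` because the counting hook never reads pixels.
#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::atomic::{AtomicUsize, Ordering};

    #[test]
    fn hook_manager_dispatches_to_enabled_hooks() -> Result<()> {
        let counter = Arc::new(AtomicUsize::new(0));
        let counter_clone = counter.clone();

        let mut manager = HookManager::new();
        manager.register_hook(Box::new(BasicFrameHook::new(
            "counter",
            "Frame Counter",
            "Counts processed frames",
            true,
            move |_frame, _timestamp| {
                counter_clone.fetch_add(1, Ordering::SeqCst);
                Ok(())
            },
        )));

        let mut frame = core::Mat::default();
        manager.process_frame(&mut frame, Utc::now())?;
        manager.process_frame(&mut frame, Utc::now())?;

        assert_eq!(counter.load(Ordering::SeqCst), 2);
        Ok(())
    }
}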
181
src/main.rs
Normal file
@ -0,0 +1,181 @@
mod camera;
mod config;
mod detection;
mod gps;
mod hooks;
mod overlay;
mod sensors;
mod storage;
mod streaming;
mod communication;
mod monitoring;

use anyhow::{Context, Result};
use log::{info, error};
use std::sync::Arc;
use tokio::sync::Mutex;

pub use config::Config;

/// Main entry point for the meteor detection system
#[tokio::main]
async fn main() -> Result<()> {
    // Initialize the logger once; the RUST_LOG environment variable
    // controls filtering, defaulting to "info"
    env_logger::init_from_env(
        env_logger::Env::default().filter_or("RUST_LOG", "info")
    );

    info!("Meteor detection system starting up");

    // Load configuration
    let config = config::load_config()
        .context("Failed to load configuration")?;

    // env_logger can only be initialized once per process (a second init
    // panics), so record the configured log level in the environment for
    // any child processes instead of re-initializing
    std::env::set_var("RUST_LOG", &config.log_level);

    info!("Loaded configuration with device ID: {}", config.device_id);

    // Initialize camera subsystem
    let camera_controller = camera::CameraController::new(&config)
        .await
        .context("Failed to initialize camera")?;
    let camera_controller = Arc::new(Mutex::new(camera_controller));

    // Initialize GPS module
    let gps_controller = gps::GpsController::new(&config)
        .await
        .context("Failed to initialize GPS")?;
    let gps_controller = Arc::new(Mutex::new(gps_controller));

    // Initialize sensor controller
    let sensor_controller = sensors::SensorController::new(&config)
        .await
        .context("Failed to initialize sensors")?;
    let sensor_controller = Arc::new(Mutex::new(sensor_controller));

    // Initialize the watermark overlay. The overlay expects std mutexes
    // (it is locked from synchronous frame hooks, not async code), and the
    // data passed here is a snapshot taken at startup.
    let watermark = {
        let gps_status = gps_controller.lock().await.get_status();
        let env_data = sensor_controller.lock().await.get_current_data();

        overlay::Watermark::new(
            config.watermark.clone(),
            Arc::new(std::sync::Mutex::new(env_data)),
            Arc::new(std::sync::Mutex::new(gps_status)),
        )
    };
    let watermark = Arc::new(std::sync::Mutex::new(watermark));

    // Initialize frame hook manager
    let hook_manager = hooks::HookManager::new();
    let hook_manager = Arc::new(Mutex::new(hook_manager));

    // Initialize RTSP streaming server
    let rtsp_server = streaming::RtspServer::new(config.rtsp.clone());
    let rtsp_server = Arc::new(Mutex::new(rtsp_server));

    // Initialize detection pipeline
    let detection_pipeline = detection::DetectionPipeline::new(
        camera_controller.clone(),
        &config
    ).await.context("Failed to initialize detection pipeline")?;

    // Initialize storage system (kept alive for the lifetime of the
    // process; not yet wired into the pipeline)
    let _storage_manager = storage::StorageManager::new(&config)
        .await
        .context("Failed to initialize storage")?;

    // Initialize communication module
    let comms = communication::CommunicationManager::new(
        &config,
        camera_controller.clone(),
        gps_controller.clone(),
    ).await.context("Failed to initialize communication")?;

    // Initialize health monitoring
    let monitor = monitoring::SystemMonitor::new(&config)
        .await
        .context("Failed to initialize system monitor")?;

    // Start all subsystems
    info!("All subsystems initialized, starting main processing loop");

    // Initialize sensors
    sensor_controller.lock().await.initialize().await
        .context("Failed to initialize sensors")?;
    sensor_controller.lock().await.start().await
        .context("Failed to start sensors")?;

    // Initialize GPS
    gps_controller.lock().await.initialize().await
        .context("Failed to initialize GPS")?;
    gps_controller.lock().await.start().await
        .context("Failed to start GPS")?;

    // Start the RTSP server if enabled
    if config.rtsp.enabled {
        rtsp_server.lock().await.start().await
            .context("Failed to start RTSP server")?;
        info!("RTSP server started at {}", rtsp_server.lock().await.get_url());
    }

    // Run the main event loop
    let mut tasks = Vec::new();

    // Add the watermark hook
    {
        let mut manager = hook_manager.lock().await;
        manager.register_hook(Box::new(hooks::BasicFrameHook::new(
            "watermark",
            "Watermark Overlay",
            "Adds timestamp, GPS, and sensor data overlay to frames",
            config.watermark.enabled,
            move |frame, timestamp| {
                let watermark_instance = watermark.lock().unwrap();
                watermark_instance.apply(frame, timestamp)?;
                Ok(())
            },
        )));
    }

    tasks.push(tokio::spawn(async move {
        if let Err(e) = detection_pipeline.run().await {
            error!("Detection pipeline error: {}", e);
        }
    }));

    tasks.push(tokio::spawn(async move {
        if let Err(e) = comms.run().await {
            error!("Communication manager error: {}", e);
        }
    }));

    tasks.push(tokio::spawn(async move {
        if let Err(e) = monitor.run().await {
            error!("System monitor error: {}", e);
        }
    }));

    // Add the RTSP streaming task
    let rtsp_server_clone = rtsp_server.clone();
    tasks.push(tokio::spawn(async move {
        if config.rtsp.enabled {
            info!("Starting RTSP streaming task");
            // This task would feed frames from the frame buffer to the RTSP
            // server held in `rtsp_server_clone`; implementation placeholder
            let _server = rtsp_server_clone;
        }
    }));

    // Wait for all tasks to complete (they generally shouldn't unless there's an error)
    for task in tasks {
        if let Err(e) = task.await {
            error!("Task panicked: {}", e);
        }
    }

    error!("Main loop exited; this should not happen under normal circumstances");
    Ok(())
}
156
src/monitoring/mod.rs
Normal file
@ -0,0 +1,156 @@
use anyhow::Result;
use log::{info, warn, error};
use serde::{Deserialize, Serialize};
use std::sync::{Arc, Mutex};
use std::time::{Duration, Instant};
use tokio::time;

use crate::config::Config;

/// System health information
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SystemHealth {
    /// CPU usage (0-100%)
    pub cpu_usage: f32,
    /// Memory usage (0-100%)
    pub memory_usage: f32,
    /// Storage usage (0-100%)
    pub storage_usage: f32,
    /// System temperature (Celsius)
    pub temperature: f32,
    /// Uptime in seconds
    pub uptime: u64,
    /// Timestamp of the last health check
    pub timestamp: chrono::DateTime<chrono::Utc>,
}

impl Default for SystemHealth {
    fn default() -> Self {
        Self {
            cpu_usage: 0.0,
            memory_usage: 0.0,
            storage_usage: 0.0,
            temperature: 0.0,
            uptime: 0,
            timestamp: chrono::Utc::now(),
        }
    }
}

/// Monitor for system health and resource usage
pub struct SystemMonitor {
    /// Configuration
    config: Config,
    /// Current system health
    health: Arc<Mutex<SystemHealth>>,
    /// System start time
    start_time: Instant,
    /// Whether the monitor is running
    is_running: Arc<Mutex<bool>>,
}

impl SystemMonitor {
    /// Create a new system monitor
    pub async fn new(config: &Config) -> Result<Self> {
        info!("Initializing system monitor");

        Ok(Self {
            config: config.clone(),
            health: Arc::new(Mutex::new(SystemHealth::default())),
            start_time: Instant::now(),
            is_running: Arc::new(Mutex::new(false)),
        })
    }

    /// Start the monitoring loop
    pub async fn run(&self) -> Result<()> {
        {
            let mut is_running = self.is_running.lock().unwrap();
            if *is_running {
                warn!("System monitor is already running");
                return Ok(());
            }
            *is_running = true;
        }

        info!("Starting system monitor");

        let health = self.health.clone();
        let start_time = self.start_time;
        let is_running = self.is_running.clone();

        // Start the monitoring task
        tokio::spawn(async move {
            let mut interval = time::interval(Duration::from_secs(60));

            while {
                let is_running = is_running.lock().unwrap();
                *is_running
            } {
                interval.tick().await;

                // Update system health
                match Self::check_system_health(start_time) {
                    Ok(new_health) => {
                        let mut health = health.lock().unwrap();
                        *health = new_health;

                        // Check for critical conditions
                        if health.temperature > 80.0 {
                            error!("CRITICAL: System temperature is too high: {:.1}°C", health.temperature);
                            // In a real system, this would trigger cooling or shutdown
                        }

                        if health.storage_usage > 90.0 {
                            error!("CRITICAL: Storage usage is very high: {:.1}%", health.storage_usage);
                            // In a real system, this would trigger cleanup
                        }
                    },
                    Err(e) => {
                        error!("Failed to check system health: {}", e);
                    }
                }
            }

            info!("System monitor stopped");
        });

        Ok(())
    }

    /// Stop the monitoring loop
    pub async fn stop(&self) -> Result<()> {
        {
            let mut is_running = self.is_running.lock().unwrap();
            if !*is_running {
                warn!("System monitor is not running");
                return Ok(());
            }
            *is_running = false;
        }

        info!("Stopping system monitor");
        Ok(())
    }

    /// Get the current system health
    pub fn get_health(&self) -> SystemHealth {
        self.health.lock().unwrap().clone()
    }

    /// Check system health (CPU, memory, storage, temperature)
    fn check_system_health(start_time: Instant) -> Result<SystemHealth> {
        // This is a placeholder implementation; a real system would use a
        // library such as sysinfo (already in Cargo.toml) for actual metrics
        Ok(SystemHealth {
            cpu_usage: 30.0,
            memory_usage: 40.0,
            storage_usage: 50.0,
            temperature: 45.0,
            uptime: start_time.elapsed().as_secs(),
            timestamp: chrono::Utc::now(),
        })
    }
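
    // A hedged sketch of what a real reading could look like with the
    // `sysinfo` crate already listed in Cargo.toml (API names assumed from
    // sysinfo 0.29; temperature and disk probing are platform-dependent and
    // omitted here):
    //
    //     use sysinfo::{CpuExt, System, SystemExt};
    //
    //     fn sample_cpu_and_memory() -> (f32, f32) {
    //         let mut sys = System::new_all();
    //         sys.refresh_all();
    //         let cpu = sys.global_cpu_info().cpu_usage(); // percent
    //         let mem = sys.used_memory() as f32 / sys.total_memory() as f32 * 100.0;
    //         (cpu, mem)
    //     }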
}
3
src/overlay/mod.rs
Normal file
@ -0,0 +1,3 @@
mod watermark;

pub use watermark::{WatermarkOptions, WatermarkPosition, Watermark, WatermarkContent};
320
src/overlay/watermark.rs
Normal file
@ -0,0 +1,320 @@
use anyhow::Result;
use chrono::{DateTime, Utc};
use opencv::{core, imgproc, prelude::*};
use serde::{Deserialize, Serialize};
use std::sync::{Arc, Mutex};

use crate::gps::{GeoPosition, GpsStatus};
use crate::sensors::EnvironmentData;

/// Position of the watermark
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum WatermarkPosition {
    /// Top left corner of the frame
    TopLeft,
    /// Top right corner of the frame
    TopRight,
    /// Bottom left corner of the frame
    BottomLeft,
    /// Bottom right corner of the frame
    BottomRight,
    /// Custom position (x, y) in pixels from the top-left
    Custom(u32, u32),
}

impl Default for WatermarkPosition {
    fn default() -> Self {
        Self::BottomLeft
    }
}

/// Types of content to include in the watermark
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum WatermarkContent {
    /// Timestamp (always included)
    Timestamp,
    /// GPS coordinates
    GpsCoordinates,
    /// Temperature and humidity
    Environment,
    /// Camera orientation (azimuth, elevation)
    CameraOrientation,
    /// Custom text
    Custom(String),
}

/// Options for configuring the watermark
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WatermarkOptions {
    /// Whether watermarking is enabled
    pub enabled: bool,
    /// Position of the watermark
    pub position: WatermarkPosition,
    /// Font scale (1.0 = normal size)
    pub font_scale: f64,
    /// Font thickness
    pub thickness: i32,
    /// Text color (B, G, R, A)
    pub color: (u8, u8, u8, u8),
    /// Include a background behind the text
    pub background: bool,
    /// Background color (B, G, R, A)
    pub background_color: (u8, u8, u8, u8),
    /// Padding around the text (in pixels)
    pub padding: i32,
    /// Content to include in the watermark
    pub content: Vec<WatermarkContent>,
    /// Date/time format string
    pub time_format: String,
    /// Coordinate format (decimal degrees, DMS, etc.)
    pub coordinate_format: String,
    /// Temperature format (Celsius, Fahrenheit)
    pub temperature_format: String,
}

impl Default for WatermarkOptions {
    fn default() -> Self {
        Self {
            enabled: true,
            position: WatermarkPosition::BottomLeft,
            font_scale: 0.6,
            thickness: 1,
            color: (255, 255, 255, 255), // White
            background: true,
            // Note: the alpha component is carried here, but OpenCV's drawing
            // primitives ignore it unless blending is done manually
            background_color: (0, 0, 0, 128), // Semi-transparent black
            padding: 8,
            content: vec![
                WatermarkContent::Timestamp,
                WatermarkContent::GpsCoordinates,
                WatermarkContent::Environment,
            ],
            time_format: "%Y-%m-%d %H:%M:%S%.3f".to_string(),
            coordinate_format: "decimal".to_string(), // "decimal" or "dms"
            temperature_format: "C".to_string(),      // "C" or "F"
        }
    }
}
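
// A hedged example of the matching `[watermark]` section in config.toml,
// assuming serde's default externally-tagged enum encoding for the unit
// variants used here (values mirror `WatermarkOptions::default()`):
//
//     [watermark]
//     enabled = true
//     position = "BottomLeft"
//     font_scale = 0.6
//     thickness = 1
//     color = [255, 255, 255, 255]
//     background = true
//     background_color = [0, 0, 0, 128]
//     padding = 8
//     content = ["Timestamp", "GpsCoordinates", "Environment"]
//     time_format = "%Y-%m-%d %H:%M:%S%.3f"
//     coordinate_format = "decimal"
//     temperature_format = "C"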

/// Watermark for overlaying information on video frames
pub struct Watermark {
    /// Watermark configuration options
    options: WatermarkOptions,
    /// Current environment data
    environment: Arc<Mutex<EnvironmentData>>,
    /// Current GPS status
    gps_status: Arc<Mutex<GpsStatus>>,
}

impl Watermark {
    /// Create a new watermark with the given options
    pub fn new(
        options: WatermarkOptions,
        environment: Arc<Mutex<EnvironmentData>>,
        gps_status: Arc<Mutex<GpsStatus>>,
    ) -> Self {
        Self {
            options,
            environment,
            gps_status,
        }
    }

    /// Create a new watermark with default options
    pub fn default(
        environment: Arc<Mutex<EnvironmentData>>,
        gps_status: Arc<Mutex<GpsStatus>>,
    ) -> Self {
        Self::new(
            WatermarkOptions::default(),
            environment,
            gps_status,
        )
    }

    /// Update the watermark options
    pub fn set_options(&mut self, options: WatermarkOptions) {
        self.options = options;
    }

    /// Convert GPS coordinates to a formatted string
    fn format_coordinates(&self, position: &GeoPosition) -> String {
        if self.options.coordinate_format == "dms" {
            // Convert decimal degrees to degrees, minutes, seconds
            let lat_deg = position.latitude.abs().trunc() as i32;
            let lat_min = ((position.latitude.abs() - lat_deg as f64) * 60.0).trunc() as i32;
            let lat_sec = ((position.latitude.abs() - lat_deg as f64 - lat_min as f64 / 60.0) * 3600.0).round() as i32;
            let lat_dir = if position.latitude >= 0.0 { "N" } else { "S" };

            let lon_deg = position.longitude.abs().trunc() as i32;
            let lon_min = ((position.longitude.abs() - lon_deg as f64) * 60.0).trunc() as i32;
            let lon_sec = ((position.longitude.abs() - lon_deg as f64 - lon_min as f64 / 60.0) * 3600.0).round() as i32;
            let lon_dir = if position.longitude >= 0.0 { "E" } else { "W" };

            format!(
                "{}°{:02}'{:02}\"{} {}°{:02}'{:02}\"{} Alt: {:.1}m",
                lat_deg, lat_min, lat_sec, lat_dir,
                lon_deg, lon_min, lon_sec, lon_dir,
                position.altitude
            )
        } else {
            // Decimal degrees format
            format!(
                "Lat: {:.6}° Lon: {:.6}° Alt: {:.1}m",
                position.latitude, position.longitude, position.altitude
            )
        }
    }

    /// Convert a temperature to a formatted string based on the options
    fn format_temperature(&self, temp_c: f32) -> String {
        if self.options.temperature_format == "F" {
            // Convert to Fahrenheit
            let temp_f = temp_c * 9.0 / 5.0 + 32.0;
            format!("{:.1}°F", temp_f)
        } else {
            // Default to Celsius
            format!("{:.1}°C", temp_c)
        }
    }

    /// Apply the watermark to the given frame
    pub fn apply(&self, frame: &mut core::Mat, timestamp: DateTime<Utc>) -> Result<()> {
        if !self.options.enabled {
            return Ok(());
        }

        // Build the text lines to display
        let mut lines = Vec::new();

        for content in &self.options.content {
            match content {
                WatermarkContent::Timestamp => {
                    lines.push(timestamp.format(&self.options.time_format).to_string());
                },
                WatermarkContent::GpsCoordinates => {
                    let gps = self.gps_status.lock().unwrap();
                    lines.push(self.format_coordinates(&gps.position));
                },
                WatermarkContent::Environment => {
                    let env = self.environment.lock().unwrap();
                    lines.push(format!(
                        "Temp: {} Humidity: {:.1}%",
                        self.format_temperature(env.temperature),
                        env.humidity
                    ));
                },
                WatermarkContent::CameraOrientation => {
                    let gps = self.gps_status.lock().unwrap();
                    lines.push(format!(
                        "Az: {:.1}° El: {:.1}°",
                        gps.camera_orientation.azimuth,
                        gps.camera_orientation.elevation
                    ));
                },
                WatermarkContent::Custom(text) => {
                    lines.push(text.clone());
                },
            }
        }

        // Skip if there is no content
        if lines.is_empty() {
            return Ok(());
        }

        // Get the frame dimensions
        let width = frame.cols();
        let height = frame.rows();

        // Calculate text size and position
        let font = imgproc::FONT_HERSHEY_SIMPLEX;
        let font_scale = self.options.font_scale;
        let thickness = self.options.thickness;
        let padding = self.options.padding;

        // Calculate the total height of all lines
        let mut line_heights = Vec::with_capacity(lines.len());
        let mut max_width = 0;

        for line in &lines {
            let size = imgproc::get_text_size(line, font, font_scale, thickness, &mut 0)?;
            line_heights.push(size.height);
            max_width = max_width.max(size.width);
        }

        let total_height: i32 = line_heights.iter().sum();
        let line_spacing = 4; // Space between lines
        let total_space_height = line_spacing * (lines.len() as i32 - 1);
        let text_block_height = total_height + total_space_height + padding * 2;
        let text_block_width = max_width + padding * 2;

        // Calculate the watermark position
        let (x, y) = match self.options.position {
            WatermarkPosition::TopLeft => (padding, padding),
            WatermarkPosition::TopRight => (width - text_block_width - padding, padding),
            WatermarkPosition::BottomLeft => (padding, height - text_block_height - padding),
            WatermarkPosition::BottomRight => (
                width - text_block_width - padding,
                height - text_block_height - padding
            ),
            WatermarkPosition::Custom(x, y) => (x as i32, y as i32),
        };

        // Draw the background rectangle if enabled
        if self.options.background {
            let bg_color = core::Scalar::new(
                self.options.background_color.0 as f64,
                self.options.background_color.1 as f64,
                self.options.background_color.2 as f64,
                self.options.background_color.3 as f64,
            );

            let rect = core::Rect::new(
                x,
                y,
                text_block_width,
                text_block_height
            );

            imgproc::rectangle(
                frame,
                rect,
                bg_color,
                -1, // Fill
                imgproc::LINE_8,
                0,
            )?;
        }

        // Draw the text lines
        let text_color = core::Scalar::new(
            self.options.color.0 as f64,
            self.options.color.1 as f64,
            self.options.color.2 as f64,
            self.options.color.3 as f64,
        );

        let mut current_y = y + padding + line_heights[0];

        for (i, line) in lines.iter().enumerate() {
            imgproc::put_text(
                frame,
                line,
                core::Point::new(x + padding, current_y),
                font,
                font_scale,
                text_color,
                thickness,
                imgproc::LINE_AA,
                false,
            )?;

            if i < lines.len() - 1 {
                current_y += line_heights[i] + line_spacing;
            }
        }

        Ok(())
    }
}
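
// Added test (hedged, not in the original commit): it pins down the DMS
// conversion documented in `format_coordinates`, using the fallback position
// from `GpsConfig::default()` (34.0522, -118.2437 is 34°03'08"N 118°14'37"W).
#[cfg(test)]
mod tests {
    use super::*;
    use crate::gps::CameraOrientation;

    fn test_watermark(coordinate_format: &str) -> Watermark {
        let env = Arc::new(Mutex::new(EnvironmentData {
            temperature: 20.0,
            humidity: 50.0,
            sky_brightness: 0.0,
            timestamp: Utc::now(),
        }));
        let gps = Arc::new(Mutex::new(GpsStatus {
            position: GeoPosition { latitude: 34.0522, longitude: -118.2437, altitude: 85.0 },
            satellites: 0,
            timestamp: Utc::now(),
            sync_status: "no_sync".to_string(),
            time_accuracy_ms: 1000.0,
            camera_orientation: CameraOrientation::default(),
        }));
        let mut options = WatermarkOptions::default();
        options.coordinate_format = coordinate_format.to_string();
        Watermark::new(options, env, gps)
    }

    #[test]
    fn dms_formatting() {
        let w = test_watermark("dms");
        let pos = GeoPosition { latitude: 34.0522, longitude: -118.2437, altitude: 85.0 };
        assert_eq!(
            w.format_coordinates(&pos),
            "34°03'08\"N 118°14'37\"W Alt: 85.0m"
        );
    }
}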
382
src/sensors/controller.rs
Normal file
@ -0,0 +1,382 @@
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use chrono::Utc;
|
||||
use log::{debug, error, info, warn};
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::time::Duration;
|
||||
use tokio::sync::broadcast;
|
||||
use tokio::time;
|
||||
|
||||
use crate::sensors::dht22::Dht22Sensor;
|
||||
use crate::sensors::{EnvironmentData, LightSensor, SensorConfig, TemperatureHumiditySensor};
|
||||
|
||||
/// A simple light sensor implementation that uses camera brightness as a proxy
|
||||
pub struct CameraLightSensor {
|
||||
/// The brightness value (0-1)
|
||||
brightness: Arc<Mutex<f32>>,
|
||||
}
|
||||
|
||||
impl CameraLightSensor {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
brightness: Arc::new(Mutex::new(0.0)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Update the brightness level from camera data
|
||||
pub fn update_brightness(&self, value: f32) {
|
||||
let mut brightness = self.brightness.lock().unwrap();
|
||||
*brightness = value;
|
||||
}
|
||||
}
|
||||
|
||||
impl LightSensor for CameraLightSensor {
|
||||
fn read_light_level(&self) -> Result<f32> {
|
||||
let brightness = self.brightness.lock().unwrap();
|
||||
Ok(*brightness)
|
||||
}
|
||||
}
|
||||
|
||||
/// Sensor state information
|
||||
#[derive(Debug, Clone)]
|
||||
struct SensorState {
|
||||
/// Whether the sensor is initialized
|
||||
initialized: bool,
|
||||
/// Whether the sensor is in degraded mode (using fallback values)
|
||||
degraded: bool,
|
||||
/// Last successful reading time
|
||||
last_reading: Option<DateTime<Utc>>,
|
||||
/// Initialization failure count
|
||||
init_failures: u32,
|
||||
}
|
||||
|
||||
impl Default for SensorState {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
initialized: false,
|
||||
degraded: false,
|
||||
last_reading: None,
|
||||
init_failures: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Controller for environmental sensors
|
||||
pub struct SensorController {
|
||||
/// Sensor configuration
|
||||
config: SensorConfig,
|
||||
/// Temperature and humidity sensor (DHT22)
|
||||
temp_humidity_sensor: Option<Box<dyn TemperatureHumiditySensor>>,
|
||||
/// Light sensor for sky brightness
|
||||
light_sensor: Option<Box<dyn LightSensor>>,
|
||||
/// Current environmental data
|
||||
current_data: Arc<Mutex<EnvironmentData>>,
|
||||
/// Broadcast channel for data updates
|
||||
data_tx: broadcast::Sender<EnvironmentData>,
|
||||
/// Whether the controller is running
|
||||
is_running: Arc<Mutex<bool>>,
|
||||
/// Sensor state tracking
|
||||
temp_sensor_state: SensorState,
|
||||
/// Light sensor state tracking
|
||||
light_sensor_state: SensorState,
|
||||
}
|

impl SensorController {
    /// Create a new sensor controller with the given configuration
    pub async fn new(config: &crate::Config) -> Result<Self> {
        // Extract sensor settings from config
        let sensor_config = config.sensors.clone();

        // Create broadcast channel for data updates
        let (data_tx, _) = broadcast::channel(10);

        // Initialize environmental data with fallback values
        let initial_data = EnvironmentData {
            temperature: sensor_config.fallback_temperature,
            humidity: sensor_config.fallback_humidity,
            sky_brightness: sensor_config.fallback_sky_brightness,
            timestamp: Utc::now(),
        };

        let current_data = Arc::new(Mutex::new(initial_data));

        Ok(Self {
            config: sensor_config,
            temp_humidity_sensor: None,
            light_sensor: None,
            current_data,
            data_tx,
            is_running: Arc::new(Mutex::new(false)),
            temp_sensor_state: SensorState::default(),
            light_sensor_state: SensorState::default(),
        })
    }

    /// Initialize the sensor hardware
    pub async fn initialize(&mut self) -> Result<()> {
        let mut init_failed = false;

        // Initialize temperature/humidity sensor if configured
        if self.config.use_dht22 {
            info!("Initializing DHT22 temperature/humidity sensor");

            match Dht22Sensor::new(self.config.dht22_pin) {
                Ok(dht22) => {
                    self.temp_humidity_sensor = Some(Box::new(dht22));
                    self.temp_sensor_state.initialized = true;
                    info!("DHT22 temperature/humidity sensor initialized successfully");
                },
                Err(e) => {
                    self.temp_sensor_state.init_failures += 1;
                    self.temp_sensor_state.degraded = true;

                    if self.config.allow_degraded_mode {
                        warn!("Failed to initialize DHT22 sensor: {}. Using fallback values.", e);
                        init_failed = true;
                    } else {
                        return Err(anyhow!("Failed to initialize temperature sensor and degraded mode is not allowed: {}", e));
                    }
                }
            }
        } else {
            // Sensor is not enabled, mark as degraded and use fallback
            info!("Temperature sensor disabled in config. Using fallback values.");
            self.temp_sensor_state.degraded = true;
        }

        // Initialize light sensor if configured
        if self.config.use_light_sensor {
            info!("Initializing light sensor");

            // For now, we'll use a camera-based light sensor since we don't have a direct
            // interface for analog light sensors. In a real implementation, this would
            // use an ADC to read from a photodiode or similar sensor.
            let light_sensor = CameraLightSensor::new();

            self.light_sensor = Some(Box::new(light_sensor));
            self.light_sensor_state.initialized = true;
            info!("Light sensor initialized successfully");
        } else {
            // Sensor is not enabled, mark as degraded and use fallback
            info!("Light sensor disabled in config. Using fallback values.");
            self.light_sensor_state.degraded = true;
        }

        if init_failed && !self.config.allow_degraded_mode {
            return Err(anyhow!("Sensor initialization failed and degraded mode is not allowed"));
        }

        info!("Sensor initialization complete");
        Ok(())
    }

    /// Start the sensor sampling loop
    pub async fn start(&self) -> Result<()> {
        {
            let mut is_running = self.is_running.lock().unwrap();
            if *is_running {
                warn!("Sensor controller is already running");
                return Ok(());
            }
            *is_running = true;
        }

        // Clone Arc references for the background task
        let interval = self.config.sampling_interval;
        let current_data = self.current_data.clone();
        let data_tx = self.data_tx.clone();
        let is_running = self.is_running.clone();

        // Clone config for fallback values
        let config = self.config.clone();

        // Clone sensor state
        let mut temp_sensor_state = self.temp_sensor_state.clone();
        let mut light_sensor_state = self.light_sensor_state.clone();

        // The DHT22 handle cannot be cloned, so open a second instance of the
        // sensor on the same pin for the background task.
        let temp_humidity_sensor: Option<Box<dyn TemperatureHumiditySensor>> =
            if self.temp_sensor_state.initialized
                && !self.temp_sensor_state.degraded
                && self.temp_humidity_sensor.is_some()
            {
                match Dht22Sensor::new(self.config.dht22_pin) {
                    Ok(dht22) => Some(Box::new(dht22)),
                    Err(e) => {
                        warn!("Failed to open temperature sensor for sampling task: {}. Using fallback values.", e);
                        temp_sensor_state.degraded = true;
                        None
                    }
                }
            } else {
                None
            };

        // Get camera light sensor if available
        let light_sensor: Option<Box<dyn LightSensor>> =
            if self.light_sensor_state.initialized && !self.light_sensor_state.degraded {
                // For simplicity, we'll just create a new camera light sensor
                // rather than trying to clone the existing one
                Some(Box::new(CameraLightSensor::new()))
            } else {
                None
            };

        // Start sampling task
        tokio::spawn(async move {
            let mut interval = time::interval(Duration::from_secs(interval));

            info!("Starting sensor sampling loop");

            loop {
                interval.tick().await;

                // Check if we should exit
                {
                    let is_running = is_running.lock().unwrap();
                    if !*is_running {
                        break;
                    }
                }

                // Create a new environment data object with fallback values
                let mut data = EnvironmentData {
                    temperature: config.fallback_temperature,
                    humidity: config.fallback_humidity,
                    sky_brightness: config.fallback_sky_brightness,
                    timestamp: Utc::now(),
                };

                // Read temperature if sensor is available
                if let Some(sensor) = &temp_humidity_sensor {
                    match sensor.read_temperature() {
                        Ok(temp) => {
                            data.temperature = temp;
                            temp_sensor_state.last_reading = Some(Utc::now());
                        },
                        Err(e) => {
                            error!("Failed to read temperature: {}. Using fallback value.", e);
                            // Keep using fallback value from data initialization
                        }
                    }

                    match sensor.read_humidity() {
                        Ok(humidity) => {
                            data.humidity = humidity;
                        },
                        Err(e) => {
                            error!("Failed to read humidity: {}. Using fallback value.", e);
                            // Keep using fallback value from data initialization
                        }
                    }
                } else if temp_sensor_state.degraded {
                    debug!("Using fallback temperature value: {:.1}°C", data.temperature);
                }

                // Read light level if sensor is available
                if let Some(sensor) = &light_sensor {
                    match sensor.read_light_level() {
                        Ok(level) => {
                            data.sky_brightness = level;
                            light_sensor_state.last_reading = Some(Utc::now());
                        },
                        Err(e) => {
                            error!("Failed to read light level: {}. Using fallback value.", e);
                            // Keep using fallback value from data initialization
                        }
                    }
                } else if light_sensor_state.degraded {
                    debug!("Using fallback sky brightness value: {:.3}", data.sky_brightness);
                }

                // Update current data
                {
                    let mut current = current_data.lock().unwrap();
                    *current = data.clone();
                }

                debug!("Sensor data updated: temp={:.1}°C, humidity={:.1}%, light={:.3}",
                       data.temperature, data.humidity, data.sky_brightness);

                // Broadcast update (logged first, since `send` takes ownership of `data`)
                let _ = data_tx.send(data);
            }

            info!("Sensor sampling loop stopped");
        });

        info!("Sensor controller started");
        Ok(())
    }

    /// Stop the sensor sampling loop
    pub async fn stop(&self) -> Result<()> {
        {
            let mut is_running = self.is_running.lock().unwrap();
            if !*is_running {
                warn!("Sensor controller is not running");
                return Ok(());
            }
            *is_running = false;
        }

        info!("Sensor controller stopping");
        Ok(())
    }

    /// Get the current environment data
    pub fn get_current_data(&self) -> EnvironmentData {
        self.current_data.lock().unwrap().clone()
    }

    /// Subscribe to environment data updates
    pub fn subscribe(&self) -> broadcast::Receiver<EnvironmentData> {
        self.data_tx.subscribe()
    }

    /// Update the sky brightness from camera data
    pub fn update_sky_brightness(&self, brightness: f32) -> Result<()> {
        // If we have a camera light sensor, update it
        if let Some(sensor) = &self.light_sensor {
            if let Some(camera_sensor) = sensor.as_any().downcast_ref::<CameraLightSensor>() {
                camera_sensor.update_brightness(brightness);

                // Update the current data
                let mut data = self.current_data.lock().unwrap();
                data.sky_brightness = brightness;
                data.timestamp = Utc::now();

                // Broadcast update
                let _ = self.data_tx.send(data.clone());

                return Ok(());
            }
        }

        Err(anyhow!("No camera light sensor available"))
    }
}
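
Putting the controller lifecycle together, here is a minimal usage sketch. It assumes a `crate::Config` with a populated `sensors` section; the wiring function itself is hypothetical and error handling is trimmed:

```rust
use anyhow::Result;

// Hypothetical wiring code: create, initialize, and start the controller,
// then watch the broadcast channel for fresh readings.
async fn run_sensors(app_config: &crate::Config) -> Result<()> {
    let mut controller = SensorController::new(app_config).await?;
    controller.initialize().await?;
    controller.start().await?;

    let mut updates = controller.subscribe();
    while let Ok(data) = updates.recv().await {
        println!(
            "temp={:.1}°C humidity={:.1}% sky={:.3}",
            data.temperature, data.humidity, data.sky_brightness
        );
    }

    controller.stop().await
}
```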

// The `as_any` hook now lives on the `LightSensor` trait itself (see
// src/sensors/mod.rs), which replaces the previous downcast extension trait.
// This forwarding impl lets a boxed sensor be used wherever the trait is
// expected; `as_any` delegates to the inner sensor so downcasting to the
// concrete type (e.g. `CameraLightSensor`) works through the `Box`.
impl LightSensor for Box<dyn LightSensor> {
    fn read_light_level(&self) -> Result<f32> {
        (**self).read_light_level()
    }

    fn as_any(&self) -> &dyn std::any::Any {
        (**self).as_any()
    }
}
168
src/sensors/dht22.rs
Normal file
@ -0,0 +1,168 @@
use anyhow::{anyhow, Context, Result};
use log::{debug, error, warn};
use rppal::gpio::{Gpio, Level, Mode};
use std::sync::Mutex;
use std::thread;
use std::time::{Duration, Instant};

use crate::sensors::TemperatureHumiditySensor;

/// Cached readings, kept behind a mutex so the `&self` trait methods
/// can update them after a successful read
struct Dht22State {
    /// Last temperature reading
    last_temperature: f32,
    /// Last humidity reading
    last_humidity: f32,
    /// Last successful reading time
    last_reading: Instant,
}

/// DHT22 temperature and humidity sensor driver
pub struct Dht22Sensor {
    /// GPIO pin number (BCM)
    pin: u8,
    /// Cached sensor state
    state: Mutex<Dht22State>,
}

impl Dht22Sensor {
    /// Create a new DHT22 sensor on the specified GPIO pin
    pub fn new(pin: u8) -> Result<Self> {
        Ok(Self {
            pin,
            state: Mutex::new(Dht22State {
                last_temperature: 0.0,
                last_humidity: 0.0,
                last_reading: Instant::now() - Duration::from_secs(10),
            }),
        })
    }

    /// Read raw data from the DHT22 sensor
    fn read_raw(&self) -> Result<(f32, f32)> {
        let mut state = self.state.lock().unwrap();

        // Ensure we don't read too frequently (DHT22 needs 2+ seconds between readings)
        let elapsed = state.last_reading.elapsed();
        if elapsed < Duration::from_secs(2) {
            thread::sleep(Duration::from_secs(2) - elapsed);
        }

        let gpio = Gpio::new().context("Failed to access GPIO")?;

        // Get a handle to the GPIO pin in bidirectional (I/O) mode
        let mut pin = gpio
            .get(self.pin)
            .context(format!("Failed to access GPIO pin {}", self.pin))?
            .into_io(Mode::Output);

        // Send start signal
        pin.set_low();
        thread::sleep(Duration::from_millis(20)); // At least 18ms for DHT22
        pin.set_high();

        // Switch to input mode to read the response
        pin.set_mode(Mode::Input);

        // Wait for the sensor to respond
        let mut cycles = 0;
        while pin.read() == Level::High {
            cycles += 1;
            if cycles > 100_000 {
                return Err(anyhow!("DHT22 did not respond"));
            }
        }

        // Read 40 bits of data: 16 bits humidity, 16 bits temperature, 8 bits checksum
        let mut data = [0u8; 5];

        for i in 0..40 {
            // Wait for the pin to go high (start of bit)
            let mut cycles = 0;
            while pin.read() == Level::Low {
                cycles += 1;
                if cycles > 100_000 {
                    return Err(anyhow!("DHT22 timeout while waiting for bit start"));
                }
            }

            // Measure how long the pin stays high to determine the bit value
            let start = Instant::now();
            let mut cycles = 0;
            while pin.read() == Level::High {
                cycles += 1;
                if cycles > 100_000 {
                    return Err(anyhow!("DHT22 timeout while reading bit"));
                }
            }
            let duration = start.elapsed();

            // If the high pulse is longer than ~30 microseconds, the bit is 1, otherwise 0
            if duration > Duration::from_micros(30) {
                data[i / 8] |= 1 << (7 - (i % 8));
            }
        }

        // Verify checksum
        let checksum = ((data[0] as u16 + data[1] as u16 + data[2] as u16 + data[3] as u16) & 0xFF) as u8;
        if checksum != data[4] {
            return Err(anyhow!("DHT22 checksum failed"));
        }

        // Parse humidity (16 bits, scale factor 10)
        let humidity = ((data[0] as u16) << 8 | data[1] as u16) as f32 / 10.0;

        // Parse temperature (16 bits, scale factor 10)
        // MSB of data[2] is the sign bit
        let temperature = if data[2] & 0x80 != 0 {
            // Negative temperature
            -((((data[2] & 0x7F) as u16) << 8 | data[3] as u16) as f32 / 10.0)
        } else {
            // Positive temperature
            ((data[2] as u16) << 8 | data[3] as u16) as f32 / 10.0
        };

        // Store readings
        state.last_temperature = temperature;
        state.last_humidity = humidity;
        state.last_reading = Instant::now();

        debug!("DHT22 read: temperature={:.1}°C, humidity={:.1}%", temperature, humidity);

        Ok((temperature, humidity))
    }
}
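
To make the 40-bit frame decoding concrete, here is a small self-contained sketch that applies the same checksum and scaling rules as `read_raw`. The sample frame in `main` is invented for illustration, not real sensor output:

```rust
/// Decode a 5-byte DHT22 frame into (temperature °C, relative humidity %).
fn decode_frame(data: [u8; 5]) -> Option<(f32, f32)> {
    // Checksum is the low byte of the sum of the first four bytes
    let checksum = ((data[0] as u16 + data[1] as u16 + data[2] as u16 + data[3] as u16) & 0xFF) as u8;
    if checksum != data[4] {
        return None;
    }

    let humidity = ((data[0] as u16) << 8 | data[1] as u16) as f32 / 10.0;
    let raw_temp = ((data[2] & 0x7F) as u16) << 8 | data[3] as u16;
    let temperature = if data[2] & 0x80 != 0 {
        -(raw_temp as f32) / 10.0 // sign bit set: negative temperature
    } else {
        raw_temp as f32 / 10.0
    };
    Some((temperature, humidity))
}

fn main() {
    // Invented frame: 0x028C = 652 -> 65.2% RH, 0x015F = 351 -> 35.1 °C,
    // checksum (0x02 + 0x8C + 0x01 + 0x5F) & 0xFF = 0xEE
    let (temp, hum) = decode_frame([0x02, 0x8C, 0x01, 0x5F, 0xEE]).unwrap();
    assert!((temp - 35.1).abs() < 0.01 && (hum - 65.2).abs() < 0.01);
}
```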

impl TemperatureHumiditySensor for Dht22Sensor {
    fn read_temperature(&self) -> Result<f32> {
        // If we've read recently, return the cached value. The guard is dropped
        // at the end of this block so read_raw() can take the lock itself.
        {
            let state = self.state.lock().unwrap();
            if state.last_reading.elapsed() < Duration::from_secs(2) {
                return Ok(state.last_temperature);
            }
        }

        // Try to read new values
        match self.read_raw() {
            Ok((temp, _)) => Ok(temp),
            Err(e) => {
                error!("Failed to read DHT22 temperature: {}", e);
                let state = self.state.lock().unwrap();
                if state.last_reading.elapsed() < Duration::from_secs(60) {
                    // If the last reading was recent, return the cached value
                    warn!("Using cached temperature value");
                    Ok(state.last_temperature)
                } else {
                    Err(e)
                }
            }
        }
    }

    fn read_humidity(&self) -> Result<f32> {
        // If we've read recently, return the cached value
        {
            let state = self.state.lock().unwrap();
            if state.last_reading.elapsed() < Duration::from_secs(2) {
                return Ok(state.last_humidity);
            }
        }

        // Try to read new values
        match self.read_raw() {
            Ok((_, humidity)) => Ok(humidity),
            Err(e) => {
                error!("Failed to read DHT22 humidity: {}", e);
                let state = self.state.lock().unwrap();
                if state.last_reading.elapsed() < Duration::from_secs(60) {
                    // If the last reading was recent, return the cached value
                    warn!("Using cached humidity value");
                    Ok(state.last_humidity)
                } else {
                    Err(e)
                }
            }
        }
    }
}
86
src/sensors/mod.rs
Normal file
@ -0,0 +1,86 @@
mod controller;
mod dht22;

pub use controller::SensorController;

use anyhow::Result;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};

/// Environmental data from sensors
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EnvironmentData {
    /// Temperature in Celsius
    pub temperature: f32,
    /// Relative humidity (0-100%)
    pub humidity: f32,
    /// Sky brightness level (0-1, where 0 is completely dark)
    pub sky_brightness: f32,
    /// Timestamp when the data was collected
    pub timestamp: DateTime<Utc>,
}

impl Default for EnvironmentData {
    fn default() -> Self {
        Self {
            temperature: 20.0,   // 20°C
            humidity: 50.0,      // 50%
            sky_brightness: 0.0, // Dark
            timestamp: Utc::now(),
        }
    }
}

/// Sensor configuration parameters
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SensorConfig {
    /// Whether to use the DHT22 temperature/humidity sensor
    pub use_dht22: bool,
    /// GPIO pin for DHT22 data (BCM numbering)
    pub dht22_pin: u8,
    /// Whether to use a light sensor for sky brightness
    pub use_light_sensor: bool,
    /// GPIO pin for light sensor analog input
    pub light_sensor_pin: u8,
    /// Sampling interval in seconds
    pub sampling_interval: u64,
    /// Default temperature value when sensor is unavailable (Celsius)
    pub fallback_temperature: f32,
    /// Default humidity value when sensor is unavailable (0-100%)
    pub fallback_humidity: f32,
    /// Default sky brightness value when sensor is unavailable (0-1)
    pub fallback_sky_brightness: f32,
    /// If true, the system can start without sensors
    pub allow_degraded_mode: bool,
}

impl Default for SensorConfig {
    fn default() -> Self {
        Self {
            use_dht22: true,
            dht22_pin: 4,          // GPIO 4 (pin 7 on Raspberry Pi)
            use_light_sensor: true,
            light_sensor_pin: 0,   // Assuming ADC channel 0
            sampling_interval: 10, // Sample every 10 seconds
            fallback_temperature: 25.0,
            fallback_humidity: 50.0,
            fallback_sky_brightness: 0.05,
            allow_degraded_mode: true,
        }
    }
}
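
Since `SensorConfig` derives `Serialize`/`Deserialize`, it can be loaded straight from JSON (or, through the `config` crate, from TOML). A minimal round-trip sketch using `serde_json`, assuming `SensorConfig` from this module is in scope; the field values are purely illustrative:

```rust
use anyhow::Result;

fn main() -> Result<()> {
    // Every field is spelled out because none carries a serde default attribute.
    let json = r#"{
        "use_dht22": true,
        "dht22_pin": 4,
        "use_light_sensor": false,
        "light_sensor_pin": 0,
        "sampling_interval": 30,
        "fallback_temperature": 25.0,
        "fallback_humidity": 50.0,
        "fallback_sky_brightness": 0.05,
        "allow_degraded_mode": true
    }"#;

    let config: SensorConfig = serde_json::from_str(json)?;
    assert_eq!(config.sampling_interval, 30);
    assert!(!config.use_light_sensor);
    Ok(())
}
```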

/// Interface for temperature/humidity sensors
pub trait TemperatureHumiditySensor: Send + Sync {
    /// Read temperature in Celsius
    fn read_temperature(&self) -> Result<f32>;

    /// Read relative humidity (0-100%)
    fn read_humidity(&self) -> Result<f32>;
}

/// Interface for light sensors
pub trait LightSensor: Send + Sync {
    /// Read the light level (0-1, where 0 is dark)
    fn read_light_level(&self) -> Result<f32>;

    /// Expose the concrete type for downcasting (e.g. to `CameraLightSensor`)
    fn as_any(&self) -> &dyn std::any::Any;
}
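
For unit tests it can be handy to plug a fixed-value sensor into code that consumes these traits. A minimal sketch; `FixedSensor` is hypothetical and not part of the crate:

```rust
use anyhow::Result;

/// A hypothetical stand-in sensor that always returns fixed values,
/// useful for exercising the controller off-hardware.
struct FixedSensor {
    temperature: f32,
    humidity: f32,
    light: f32,
}

impl TemperatureHumiditySensor for FixedSensor {
    fn read_temperature(&self) -> Result<f32> {
        Ok(self.temperature)
    }

    fn read_humidity(&self) -> Result<f32> {
        Ok(self.humidity)
    }
}

impl LightSensor for FixedSensor {
    fn read_light_level(&self) -> Result<f32> {
        Ok(self.light)
    }

    fn as_any(&self) -> &dyn std::any::Any {
        self
    }
}
```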
55
src/storage/mod.rs
Normal file
@ -0,0 +1,55 @@
use anyhow::Result;
use log::info;
use std::path::PathBuf;

use crate::camera::MeteorEvent;
use crate::config::Config;

/// Manager for data storage operations
pub struct StorageManager {
    /// Root directory for storage
    storage_dir: PathBuf,
    /// Configuration
    config: Config,
}

impl StorageManager {
    /// Create a new storage manager
    pub async fn new(config: &Config) -> Result<Self> {
        let storage_dir = config.storage.raw_video_dir.clone();

        // Create storage directories if they don't exist
        std::fs::create_dir_all(&storage_dir)?;
        std::fs::create_dir_all(&config.storage.event_video_dir)?;

        info!("Storage manager initialized with root directory: {:?}", storage_dir);

        Ok(Self {
            storage_dir,
            config: config.clone(),
        })
    }

    /// Save an event to storage
    pub async fn save_event(&self, event: &MeteorEvent) -> Result<()> {
        // This is a placeholder implementation
        // In a real system, this would save event metadata to a database
        info!("Saved event {} to storage", event.id);
        Ok(())
    }

    /// Clean up old data according to retention policy
    pub async fn cleanup_old_data(&self) -> Result<()> {
        // This is a placeholder implementation
        // In a real system, this would delete old files based on retention policy
        info!("Cleaned up old data according to retention policy");
        Ok(())
    }

    /// Get the amount of free space available
    pub async fn get_free_space(&self) -> Result<u64> {
        // This is a placeholder implementation
        // In a real system, this would check the actual free space on the filesystem
        Ok(1_000_000_000) // 1GB
    }
}
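
One way to replace the `get_free_space` placeholder is to query disk statistics through the `sysinfo` crate already in the dependency list. A sketch, assuming the 0.29-series API where disks hang off `System`; the helper name is ours:

```rust
use anyhow::{anyhow, Result};
use std::path::Path;
use sysinfo::{DiskExt, System, SystemExt};

/// Return the available bytes on the disk that holds `path`,
/// picking the mount point with the longest matching prefix.
fn free_space_for(path: &Path) -> Result<u64> {
    let mut sys = System::new();
    sys.refresh_disks_list();

    sys.disks()
        .iter()
        .filter(|d| path.starts_with(d.mount_point()))
        .max_by_key(|d| d.mount_point().as_os_str().len())
        .map(|d| d.available_space())
        .ok_or_else(|| anyhow!("No disk found for {:?}", path))
}
```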
3
src/streaming/mod.rs
Normal file
@ -0,0 +1,3 @@
mod rtsp;

pub use rtsp::{RtspServer, RtspConfig, StreamQuality};
312
src/streaming/rtsp.rs
Normal file
@ -0,0 +1,312 @@
use anyhow::{anyhow, Context, Result};
use log::{debug, error, info, warn};
use serde::{Deserialize, Serialize};
use std::process::{Child, Command, Stdio};
use std::sync::{Arc, Mutex};
use std::time::Duration;
use tokio::sync::mpsc;

use crate::camera::frame_buffer::Frame;

/// Stream quality settings
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum StreamQuality {
    /// Low quality (e.g., 480p, low bitrate)
    Low,
    /// Medium quality (e.g., 720p, medium bitrate)
    Medium,
    /// High quality (original resolution, high bitrate)
    High,
    /// Custom quality settings
    Custom,
}

impl Default for StreamQuality {
    fn default() -> Self {
        Self::Medium
    }
}

/// RTSP server configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RtspConfig {
    /// Whether the RTSP server is enabled
    pub enabled: bool,
    /// Port for the RTSP server
    pub port: u16,
    /// Stream mount point (path)
    pub mount_point: String,
    /// Stream quality
    pub quality: StreamQuality,
    /// Custom width (if quality is Custom)
    pub custom_width: Option<u32>,
    /// Custom height (if quality is Custom)
    pub custom_height: Option<u32>,
    /// Custom bitrate in kbps (if quality is Custom)
    pub custom_bitrate: Option<u32>,
    /// Custom framerate (if quality is Custom)
    pub custom_framerate: Option<u32>,
    /// Stream username (if authentication is enabled)
    pub username: Option<String>,
    /// Stream password (if authentication is enabled)
    pub password: Option<String>,
}

impl Default for RtspConfig {
    fn default() -> Self {
        Self {
            enabled: false,
            port: 8554,
            mount_point: "/meteor".to_string(),
            quality: StreamQuality::Medium,
            custom_width: None,
            custom_height: None,
            custom_bitrate: None,
            custom_framerate: None,
            username: None,
            password: None,
        }
    }
}

/// RTSP server for streaming video
pub struct RtspServer {
    /// RTSP server configuration
    config: RtspConfig,
    /// Channel for receiving frames
    frame_rx: Option<mpsc::Receiver<Frame>>,
    /// Whether the server is running
    is_running: Arc<Mutex<bool>>,
    /// GStreamer process
    gst_process: Option<Child>,
    /// Frame sender channel
    frame_tx: Option<mpsc::Sender<Frame>>,
}

impl RtspServer {
    /// Create a new RTSP server with the given configuration
    pub fn new(config: RtspConfig) -> Self {
        Self {
            config,
            frame_rx: None,
            is_running: Arc::new(Mutex::new(false)),
            gst_process: None,
            frame_tx: None,
        }
    }

    /// Start the RTSP server
    pub async fn start(&mut self) -> Result<()> {
        if !self.config.enabled {
            info!("RTSP server is disabled in configuration");
            return Ok(());
        }

        {
            let mut is_running = self.is_running.lock().unwrap();
            if *is_running {
                warn!("RTSP server is already running");
                return Ok(());
            }
            *is_running = true;
        }

        // Create a channel for frames
        let (frame_tx, frame_rx) = mpsc::channel(10);
        self.frame_rx = Some(frame_rx);
        self.frame_tx = Some(frame_tx);

        // Get stream dimensions
        let (width, height, bitrate, framerate) = self.get_stream_parameters();

        info!(
            "Starting RTSP server on port {} with quality {:?} ({}x{}, {}kbps, {}fps)",
            self.config.port, self.config.quality, width, height, bitrate, framerate
        );

        // Build the GStreamer pipeline. Note that a pipeline spawned through
        // gst-launch-1.0 cannot actually be fed frames from this process; a
        // full implementation would use the gstreamer / gstreamer-rtsp-server
        // bindings from the dependencies and push frames into the appsrc
        // element directly. rtspclientsink publishes the encoded stream to an
        // RTSP endpoint.
        let mut command = Command::new("gst-launch-1.0");
        command.arg("-v");

        let pipeline = format!(
            "appsrc name=src is-live=true caps=video/x-raw,format=BGR,width={},height={},framerate={}/1 ! \
             videoconvert ! x264enc speed-preset=ultrafast tune=zerolatency bitrate={} ! \
             h264parse ! rtspclientsink location=rtsp://0.0.0.0:{}{} protocols=tcp",
            width, height, framerate, bitrate,
            self.config.port, self.config.mount_point
        );

        command.arg(pipeline);

        // Redirect standard error to a pipe so we can read it
        command.stderr(Stdio::piped());

        // Start the GStreamer process
        let child = command.spawn()
            .context("Failed to start GStreamer RTSP server")?;

        // Store the process handle
        self.gst_process = Some(child);

        // Clone needed values for the background task
        let frame_rx = self.frame_rx.take().unwrap();
        let is_running = self.is_running.clone();

        // Start frame processing task
        tokio::spawn(async move {
            info!("RTSP streaming task started");

            let mut frame_rx = frame_rx;

            while *is_running.lock().unwrap() {
                match tokio::time::timeout(Duration::from_secs(1), frame_rx.recv()).await {
                    Ok(Some(_frame)) => {
                        // Process frame for streaming.
                        // Here we would normally push the frame into GStreamer's appsrc;
                        // in a full implementation, this would feed the pipeline.
                        // For this example, we're just logging.
                        debug!("Streaming frame to RTSP");
                    },
                    Ok(None) => {
                        error!("Frame channel closed");
                        break;
                    },
                    Err(_) => {
                        // Timeout waiting for frame, continue
                    }
                }
            }

            info!("RTSP streaming task stopped");
        });

        info!("RTSP server started on rtsp://0.0.0.0:{}{}", self.config.port, self.config.mount_point);
        Ok(())
    }

    /// Stop the RTSP server
    pub async fn stop(&mut self) -> Result<()> {
        {
            let mut is_running = self.is_running.lock().unwrap();
            if !*is_running {
                warn!("RTSP server is not running");
                return Ok(());
            }
            *is_running = false;
        }

        // Stop the GStreamer process. `kill` is immediate; a more graceful
        // shutdown would send SIGTERM and wait before resorting to kill.
        if let Some(mut process) = self.gst_process.take() {
            if let Err(e) = process.kill() {
                warn!("Failed to kill GStreamer process: {}", e);
            }
        }

        info!("RTSP server stopped");
        Ok(())
    }

    /// Feed a frame to the RTSP server
    pub async fn feed_frame(&self, frame: &Frame) -> Result<()> {
        if let Some(frame_tx) = &self.frame_tx {
            match frame_tx.try_send(frame.clone()) {
                Ok(_) => {
                    debug!("Frame sent to RTSP server");
                    Ok(())
                },
                Err(mpsc::error::TrySendError::Full(_)) => {
                    // Channel is full, drop the frame
                    debug!("RTSP frame channel full, dropping frame");
                    Ok(())
                },
                Err(e) => Err(anyhow!("Failed to send frame to RTSP server: {}", e)),
            }
        } else {
            Err(anyhow!("RTSP server is not running"))
        }
    }

    /// Update the server configuration
    pub async fn update_config(&mut self, config: RtspConfig) -> Result<()> {
        // If running and the config changes meaningfully, restart
        let restart = self.is_running() && (
            self.config.port != config.port ||
            self.config.mount_point != config.mount_point ||
            self.config.quality != config.quality ||
            self.config.custom_width != config.custom_width ||
            self.config.custom_height != config.custom_height ||
            self.config.custom_bitrate != config.custom_bitrate ||
            self.config.custom_framerate != config.custom_framerate
        );

        // Update config
        self.config = config;

        // Restart if needed
        if restart {
            self.stop().await?;
            self.start().await?;
        } else if self.config.enabled && !self.is_running() {
            // Start if newly enabled
            self.start().await?;
        } else if !self.config.enabled && self.is_running() {
            // Stop if newly disabled
            self.stop().await?;
        }

        Ok(())
    }

    /// Check if the server is running
    pub fn is_running(&self) -> bool {
        *self.is_running.lock().unwrap()
    }

    /// Get the stream URL
    pub fn get_url(&self) -> String {
        format!("rtsp://localhost:{}{}", self.config.port, self.config.mount_point)
    }

    /// Get stream parameters (width, height, bitrate in kbps, fps) based on the quality setting
    fn get_stream_parameters(&self) -> (u32, u32, u32, u32) {
        match self.config.quality {
            StreamQuality::Low => (640, 480, 500, 15),
            StreamQuality::Medium => (1280, 720, 1500, 30),
            StreamQuality::High => (1920, 1080, 3000, 30),
            StreamQuality::Custom => {
                let width = self.config.custom_width.unwrap_or(1280);
                let height = self.config.custom_height.unwrap_or(720);
                let bitrate = self.config.custom_bitrate.unwrap_or(1500);
                let framerate = self.config.custom_framerate.unwrap_or(30);
                (width, height, bitrate, framerate)
            }
        }
    }
}
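
A minimal usage sketch tying the pieces together. The wiring function is hypothetical, the frame receiver stands in for whatever the capture pipeline produces, and error handling is trimmed:

```rust
use anyhow::Result;

// Hypothetical wiring code: configure a 720p stream, start the server,
// and push frames as they arrive from the capture side.
async fn run_stream(mut frames: tokio::sync::mpsc::Receiver<Frame>) -> Result<()> {
    let config = RtspConfig {
        enabled: true,
        quality: StreamQuality::Medium,
        ..RtspConfig::default()
    };

    let mut server = RtspServer::new(config);
    server.start().await?;
    println!("Streaming at {}", server.get_url());

    while let Some(frame) = frames.recv().await {
        // Frames are dropped silently when the internal channel is full
        server.feed_frame(&frame).await?;
    }

    server.stop().await
}
```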

impl Drop for RtspServer {
    fn drop(&mut self) {
        // Ensure the GStreamer process is stopped
        if let Some(mut process) = self.gst_process.take() {
            let _ = process.kill();
        }

        // Ensure the running flag is set to false
        let mut is_running = self.is_running.lock().unwrap();
        *is_running = false;
    }
}