// use anyhow::{anyhow, Context, Result};
// use log::{debug, error, info, warn};
// use std::path::Path;
// use v4l::buffer::Type;
// use v4l::io::traits::CaptureStream;
// use v4l::prelude::*;
// use v4l::video::Capture;
// use v4l::{Format, FourCC};
// use opencv::{core, imgproc, prelude::*};
// use crate::camera::{ExposureMode, Resolution};

// /// V4L2 camera driver for starlight cameras
// pub struct V4l2Camera {
//     /// The open device handle
//     device: Device,
//     /// The current camera format
//     format: Format,
//     /// Whether the camera is currently streaming
//     is_streaming: bool,
// }

// impl V4l2Camera {
//     /// Open a camera device by path
//     pub fn open<P: AsRef<Path>>(path: P) -> Result<Self> {
//         let device = Device::with_path(path.as_ref())
//             .context("Failed to open camera device")?;
//         info!(
//             "Opened camera: {} ({})",
//             device.info().card,
//             device.info().driver
//         );
//
//         // Get the current format
//         let format = device
//             .format()
//             .context("Failed to get camera format")?;
//         debug!("Initial camera format: {:?}", format);
//
//         Ok(Self {
//             device,
//             format,
//             is_streaming: false,
//         })
//     }
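
// NOTE: a minimal open-and-identify sketch against the upstream `v4l` crate;
// `Device::with_path` matches its documented API, while `query_caps()` and the
// `card`/`driver` capability fields are assumptions about the crate version in
// use (the `info()` call above may not exist upstream).
//
//     use v4l::Device;
//
//     let dev = Device::with_path("/dev/video0")?;
//     let caps = dev.query_caps()?;
//     println!("Opened camera: {} ({})", caps.card, caps.driver);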

//     /// Set the camera resolution and pixel format
//     pub fn set_format(&mut self, resolution: Resolution) -> Result<()> {
//         let (width, height) = resolution.dimensions();
//
//         // Prefer MJPEG, then fall back to YUYV
//         let formats = [FourCC::new(b"MJPG"), FourCC::new(b"YUYV")];
//         let mut success = false;
//         let mut last_error = None;
//         for &fourcc in &formats {
//             let mut format = Format::new(width, height, fourcc);
//             match self.device.set_format(&mut format) {
//                 Ok(_) => {
//                     self.format = format;
//                     success = true;
//                     break;
//                 }
//                 Err(e) => {
//                     warn!("Failed to set format {:?}: {}", fourcc, e);
//                     last_error = Some(e);
//                 }
//             }
//         }
//
//         if !success {
//             return Err(anyhow!(
//                 "Failed to set any supported format: {:?}",
//                 last_error.unwrap()
//             ));
//         }
//
//         info!(
//             "Set camera format: {}×{} {}",
//             self.format.width, self.format.height,
//             String::from_utf8_lossy(&self.format.fourcc.repr)
//         );
//         Ok(())
//     }
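
// NOTE: for comparison, format negotiation with the upstream `v4l` crate is a
// read-modify-write: `Capture::set_format` returns the format the driver
// actually applied, which may differ from what was requested. A minimal
// sketch, assuming a `Device` handle `dev`:
//
//     use v4l::video::Capture;
//     use v4l::FourCC;
//
//     let mut fmt = dev.format()?;            // current format
//     fmt.width = 1920;
//     fmt.height = 1080;
//     fmt.fourcc = FourCC::new(b"MJPG");
//     let applied = dev.set_format(&fmt)?;    // what the driver really set
//     debug!("Negotiated format: {:?}", applied);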

//     /// Set the camera frame rate
//     pub fn set_fps(&mut self, fps: u32) -> Result<()> {
//         match self.device.params() {
//             Ok(mut params) => {
//                 params.set_frames_per_second(fps, 1);
//                 self.device
//                     .set_params(&params)
//                     .context("Failed to set frame rate")?;
//                 info!("Set camera frame rate: {} fps", fps);
//             }
//             Err(_) => warn!("Camera does not support frame rate adjustment"),
//         }
//         Ok(())
//     }
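
// NOTE: a sketch of the same thing via the upstream crate's capture
// parameters, assuming a `Device` handle `dev`; `Parameters::with_fps` and the
// module path are assumptions about the `v4l` crate version in use:
//
//     use v4l::video::Capture;
//     use v4l::video::capture::Parameters;
//
//     let applied = dev.set_params(&Parameters::with_fps(10))?;
//     info!("Frame interval: {:?}", applied.interval);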

//     /// Set camera exposure mode and value
//     pub fn set_exposure(&mut self, mode: ExposureMode) -> Result<()> {
//         // First, select auto/manual exposure via V4L2_CID_EXPOSURE_AUTO
//         let ctrl_id = v4l::control::id::EXPOSURE_AUTO;
//         let auto_value = match mode {
//             ExposureMode::Auto => 3,      // V4L2_EXPOSURE_APERTURE_PRIORITY ("auto" on most UVC cameras)
//             ExposureMode::Manual(_) => 1, // V4L2_EXPOSURE_MANUAL
//         };
//         self.device
//             .set_control(ctrl_id, auto_value)
//             .context("Failed to set exposure mode")?;
//
//         // If manual, set the exposure time itself.
//         // V4L2_CID_EXPOSURE_ABSOLUTE is specified in 100 µs units.
//         if let ExposureMode::Manual(exposure_time) = mode {
//             let ctrl_id = v4l::control::id::EXPOSURE_ABSOLUTE;
//             self.device
//                 .set_control(ctrl_id, exposure_time as i64)
//                 .context("Failed to set exposure time")?;
//         }
//
//         info!("Set camera exposure: {:?}", mode);
//         Ok(())
//     }
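
// NOTE: for reference, the `v4l2_exposure_auto_type` values from videodev2.h;
// the 3/1 pair used above follows the common UVC convention, where "auto" is
// exposed as aperture-priority:
//
//     const V4L2_EXPOSURE_AUTO: i64 = 0;
//     const V4L2_EXPOSURE_MANUAL: i64 = 1;
//     const V4L2_EXPOSURE_SHUTTER_PRIORITY: i64 = 2;
//     const V4L2_EXPOSURE_APERTURE_PRIORITY: i64 = 3;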

//     /// Set camera gain (ISO)
//     pub fn set_gain(&mut self, gain: u8) -> Result<()> {
//         let ctrl_id = v4l::control::id::GAIN;
//         self.device
//             .set_control(ctrl_id, gain as i64)
//             .context("Failed to set gain")?;
//         info!("Set camera gain: {}", gain);
//         Ok(())
//     }

//     /// Lock focus at infinity (if supported)
//     pub fn lock_focus_at_infinity(&mut self) -> Result<()> {
//         // First, disable autofocus
//         let auto_focus_id = v4l::control::id::FOCUS_AUTO;
//         if self.device.set_control(auto_focus_id, 0).is_ok() {
//             // Then drive the lens to infinity. Per the V4L2 spec, larger
//             // FOCUS_ABSOLUTE values focus closer to the camera, so infinity
//             // corresponds to the control's minimum on most devices.
//             let focus_id = v4l::control::id::FOCUS_ABSOLUTE;
//             if let Ok(control) = self.device.control(focus_id) {
//                 let infinity = control.minimum();
//                 if self.device.set_control(focus_id, infinity).is_ok() {
//                     info!("Locked focus at infinity (value: {})", infinity);
//                     return Ok(());
//                 }
//             }
//             warn!("Failed to set focus to infinity");
//         } else {
//             warn!("Camera does not support focus control");
//         }
//         Ok(())
//     }

//     /// Start streaming from the camera
//     pub fn start_streaming(&mut self) -> Result<V4l2CaptureStream> {
//         let queue = MmapStream::with_buffers(&self.device, Type::VideoCapture, 4)
//             .context("Failed to create capture stream")?;
//         self.is_streaming = true;
//         info!("Started camera streaming");
//         Ok(V4l2CaptureStream {
//             stream: queue,
//             format: self.format.clone(),
//         })
//     }
//
//     /// Stop streaming from the camera
//     pub fn stop_streaming(&mut self) -> Result<()> {
//         // The streaming will be stopped when the CaptureStream is dropped
//         self.is_streaming = false;
//         info!("Stopped camera streaming");
//         Ok(())
//     }
//
//     /// Check if the camera is currently streaming
//     pub fn is_streaming(&self) -> bool {
//         self.is_streaming
//     }
//
//     /// Get current format width
//     pub fn width(&self) -> u32 {
//         self.format.width
//     }
//
//     /// Get current format height
//     pub fn height(&self) -> u32 {
//         self.format.height
//     }
//
//     /// Get current format pixel format
//     pub fn pixel_format(&self) -> FourCC {
//         self.format.fourcc
//     }
// }

// /// Wrapper around a V4L2 capture stream
// pub struct V4l2CaptureStream {
//     stream: MmapStream,
//     format: Format,
// }
//
// impl V4l2CaptureStream {
//     /// Capture a single frame from the camera and return it as a BGR Mat
//     pub fn capture_frame(&mut self) -> Result<core::Mat> {
//         let buffer = self.stream.next()
//             .context("Failed to capture frame")?;
//         let width = self.format.width as i32;
//         let height = self.format.height as i32;
//
//         // Convert the buffer to an OpenCV Mat based on the pixel format
//         let mat = match self.format.fourcc {
//             // MJPEG: decode the JPEG payload
//             f if f == FourCC::new(b"MJPG") => {
//                 let buf = core::Vector::from_slice(buffer.data());
//                 opencv::imgcodecs::imdecode(&buf, opencv::imgcodecs::IMREAD_COLOR)?
//             },
//             // YUYV: wrap the raw buffer and convert to BGR
//             f if f == FourCC::new(b"YUYV") => {
//                 let data = buffer.data();
//                 let yuyv = unsafe {
//                     let bytes_per_pixel = 2; // YUYV packs 2 bytes per pixel
//                     let step = width as usize * bytes_per_pixel;
//                     core::Mat::new_rows_cols_with_data(
//                         height,
//                         width,
//                         core::CV_8UC2,
//                         data.as_ptr() as *mut _,
//                         step,
//                     )?
//                 };
//                 let mut bgr = core::Mat::default()?;
//                 imgproc::cvt_color(&yuyv, &mut bgr, imgproc::COLOR_YUV2BGR_YUYV, 0)?;
//                 bgr
//             },
//             // Unsupported format
//             _ => {
//                 return Err(anyhow!(
//                     "Unsupported pixel format: {}",
//                     String::from_utf8_lossy(&self.format.fourcc.repr)
//                 ));
//             }
//         };
//         Ok(mat)
//     }
// }
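
// NOTE: a minimal capture-loop sketch against the upstream `v4l` crate, whose
// `CaptureStream::next` yields a `(buffer, metadata)` pair in current versions;
// the buffer-object `data()` accessor used above is assumed to come from a
// different or older API:
//
//     use v4l::buffer::Type;
//     use v4l::io::traits::CaptureStream;
//     use v4l::prelude::*;
//
//     let mut dev = Device::with_path("/dev/video0")?;
//     let mut stream = MmapStream::with_buffers(&mut dev, Type::VideoCapture, 4)?;
//     loop {
//         let (buf, meta) = stream.next()?;
//         debug!("Captured {} bytes (sequence {})", buf.len(), meta.sequence);
//     }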

// impl Drop for V4l2CaptureStream {
//     fn drop(&mut self) {
//         debug!("V4L2 capture stream dropped");
//     }
// }
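
// NOTE: intended call sequence for this driver once it is re-enabled, using
// only the methods defined above (error handling elided; `Resolution::Hd1080`
// is a hypothetical variant name, and the `ExposureMode::Manual` units are
// whatever `crate::camera` defines):
//
//     let mut camera = V4l2Camera::open("/dev/video0")?;
//     camera.set_format(Resolution::Hd1080)?;
//     camera.set_fps(10)?;
//     camera.set_exposure(ExposureMode::Manual(5000))?;
//     camera.set_gain(64)?;
//     camera.lock_focus_at_infinity()?;
//     let mut stream = camera.start_streaming()?;
//     let frame = stream.capture_frame()?; // OpenCV BGR Mat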