From ef0f9f85c56043b54a3a6fa1487215d9c1b75466 Mon Sep 17 00:00:00 2001 From: Alireza Vaezi Date: Mon, 28 Jul 2025 18:09:48 -0400 Subject: [PATCH] Add USDA Vision Camera Streaming API and related functionality - Implemented streaming API endpoints for starting, stopping, and retrieving live streams from cameras. - Added support for concurrent streaming and recording operations. - Created test scripts for frame conversion and streaming functionality. - Developed a CameraStreamer class to manage live preview streaming without blocking recording. - Included error handling and logging for camera operations. - Added configuration endpoints for camera settings and real-time updates. - Enhanced testing scenarios for various camera configurations and error handling. --- AI_INTEGRATION_GUIDE.md | 595 ++++++++++++++++++ CAMERA_CONFIG_API.md | 423 +++++++++++++ STREAMING_GUIDE.md | 240 +++++++ api-endpoints.http | 75 ++- camera-api.types.ts | 367 +++++++++++ camera_preview.html | 336 ++++++++++ streaming-api.http | 524 +++++++++++++++ test_frame_conversion.py | 80 +++ test_streaming.py | 199 ++++++ .../api/__pycache__/models.cpython-311.pyc | Bin 12408 -> 15705 bytes .../api/__pycache__/server.cpython-311.pyc | Bin 38538 -> 47317 bytes usda_vision_system/api/models.py | 68 ++ usda_vision_system/api/server.py | 145 ++++- usda_vision_system/camera/__init__.py | 3 +- .../__pycache__/__init__.cpython-311.pyc | Bin 603 -> 671 bytes .../__pycache__/manager.cpython-311.pyc | Bin 23670 -> 30048 bytes .../__pycache__/recorder.cpython-311.pyc | Bin 40946 -> 45890 bytes usda_vision_system/camera/manager.py | 112 ++++ usda_vision_system/camera/recorder.py | 111 ++++ usda_vision_system/camera/streamer.py | 320 ++++++++++ 20 files changed, 3594 insertions(+), 4 deletions(-) create mode 100644 AI_INTEGRATION_GUIDE.md create mode 100644 CAMERA_CONFIG_API.md create mode 100644 STREAMING_GUIDE.md create mode 100644 camera-api.types.ts create mode 100644 camera_preview.html create mode 100644 streaming-api.http create mode 100644 test_frame_conversion.py create mode 100644 test_streaming.py create mode 100644 usda_vision_system/camera/streamer.py diff --git a/AI_INTEGRATION_GUIDE.md b/AI_INTEGRATION_GUIDE.md new file mode 100644 index 0000000..9d881ee --- /dev/null +++ b/AI_INTEGRATION_GUIDE.md @@ -0,0 +1,595 @@ +# ๐Ÿค– AI Integration Guide: USDA Vision Camera Streaming for React Projects + +This guide is specifically designed for AI assistants to understand and implement the USDA Vision Camera streaming functionality in React applications. + +## ๐Ÿ“‹ System Overview + +The USDA Vision Camera system provides live video streaming through REST API endpoints. The streaming uses MJPEG format which is natively supported by HTML `` tags and can be easily integrated into React components. + +### Key Characteristics: +- **Base URL**: `http://vision:8000` (production) or `http://localhost:8000` (development) +- **Stream Format**: MJPEG (Motion JPEG) +- **Content-Type**: `multipart/x-mixed-replace; boundary=frame` +- **Authentication**: None (add if needed for production) +- **CORS**: Enabled for all origins (configure for production) + +### Base URL Configuration: +- **Production**: `http://vision:8000` (requires hostname setup) +- **Development**: `http://localhost:8000` (local testing) +- **Custom IP**: `http://192.168.1.100:8000` (replace with actual IP) +- **Custom hostname**: Configure DNS or /etc/hosts as needed + +## ๐Ÿ”Œ API Endpoints Reference + +### 1. 
Get Camera List +```http +GET /cameras +``` +**Response:** +```json +{ + "camera1": { + "name": "camera1", + "status": "connected", + "is_recording": false, + "last_checked": "2025-01-28T10:30:00", + "device_info": {...} + }, + "camera2": {...} +} +``` + +### 2. Start Camera Stream +```http +POST /cameras/{camera_name}/start-stream +``` +**Response:** +```json +{ + "success": true, + "message": "Started streaming for camera camera1" +} +``` + +### 3. Stop Camera Stream +```http +POST /cameras/{camera_name}/stop-stream +``` +**Response:** +```json +{ + "success": true, + "message": "Stopped streaming for camera camera1" +} +``` + +### 4. Live Video Stream +```http +GET /cameras/{camera_name}/stream +``` +**Response:** MJPEG video stream +**Usage:** Set as `src` attribute of HTML `` element + +## โš›๏ธ React Integration Examples + +### Basic Camera Stream Component + +```jsx +import React, { useState, useEffect } from 'react'; + +const CameraStream = ({ cameraName, apiBaseUrl = 'http://vision:8000' }) => { + const [isStreaming, setIsStreaming] = useState(false); + const [error, setError] = useState(null); + const [loading, setLoading] = useState(false); + + const startStream = async () => { + setLoading(true); + setError(null); + + try { + const response = await fetch(`${apiBaseUrl}/cameras/${cameraName}/start-stream`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + }); + + if (response.ok) { + setIsStreaming(true); + } else { + const errorData = await response.json(); + setError(errorData.detail || 'Failed to start stream'); + } + } catch (err) { + setError(`Network error: ${err.message}`); + } finally { + setLoading(false); + } + }; + + const stopStream = async () => { + setLoading(true); + + try { + const response = await fetch(`${apiBaseUrl}/cameras/${cameraName}/stop-stream`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + }); + + if (response.ok) { + setIsStreaming(false); + } else { + const errorData = await response.json(); + setError(errorData.detail || 'Failed to stop stream'); + } + } catch (err) { + setError(`Network error: ${err.message}`); + } finally { + setLoading(false); + } + }; + + return ( +
+    <div style={{ border: '1px solid #ccc', borderRadius: '8px', padding: '16px' }}>
+      <h3>Camera: {cameraName}</h3>
+
+      {/* Video Stream */}
+      <div style={{ marginBottom: '12px' }}>
+        {isStreaming ? (
+          <img
+            src={`${apiBaseUrl}/cameras/${cameraName}/stream?t=${Date.now()}`}
+            alt={`${cameraName} live stream`}
+            style={{ width: '100%', maxWidth: '640px' }}
+            onError={() => setError('Stream connection lost')}
+          />
+        ) : (
+          <div style={{ padding: '40px', background: '#f0f0f0', textAlign: 'center' }}>
+            No Stream Active
+          </div>
+        )}
+      </div>
+
+      {/* Controls */}
+      <div style={{ display: 'flex', gap: '8px' }}>
+        <button onClick={startStream} disabled={loading || isStreaming}>
+          {loading ? 'Starting...' : 'Start Stream'}
+        </button>
+        <button onClick={stopStream} disabled={loading || !isStreaming}>
+          {loading ? 'Stopping...' : 'Stop Stream'}
+        </button>
+      </div>
+
+      {/* Error Display */}
+      {error && (
+        <div style={{ color: 'red', marginTop: '8px' }}>
+          Error: {error}
+        </div>
+      )}
+    </div>
+ ); +}; + +export default CameraStream; +``` + +### Multi-Camera Dashboard Component + +```jsx +import React, { useState, useEffect } from 'react'; +import CameraStream from './CameraStream'; + +const CameraDashboard = ({ apiBaseUrl = 'http://vision:8000' }) => { + const [cameras, setCameras] = useState({}); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(null); + + useEffect(() => { + fetchCameras(); + + // Refresh camera status every 30 seconds + const interval = setInterval(fetchCameras, 30000); + return () => clearInterval(interval); + }, []); + + const fetchCameras = async () => { + try { + const response = await fetch(`${apiBaseUrl}/cameras`); + if (response.ok) { + const data = await response.json(); + setCameras(data); + setError(null); + } else { + setError('Failed to fetch cameras'); + } + } catch (err) { + setError(`Network error: ${err.message}`); + } finally { + setLoading(false); + } + }; + + if (loading) { + return
+      <div>Loading cameras...</div>;
+  }
+
+  if (error) {
+    return (
+      <div style={{ color: 'red' }}>
+        Error: {error}
+        <button onClick={fetchCameras}>Retry</button>
+      </div>
+    );
+  }
+
+  return (
+    <div>
+      <h1>USDA Vision Camera Dashboard</h1>
+
+      <div style={{ display: 'grid', gridTemplateColumns: 'repeat(auto-fit, minmax(400px, 1fr))', gap: '16px' }}>
+        {Object.entries(cameras).map(([cameraName, cameraInfo]) => (
+          <div key={cameraName} style={{ border: '1px solid #ddd', borderRadius: '8px', padding: '16px' }}>
+            <CameraStream cameraName={cameraName} apiBaseUrl={apiBaseUrl} />
+
+            {/* Camera Status */}
+            <div style={{ marginTop: '8px', fontSize: '14px' }}>
+              <div>Status: {cameraInfo.status}</div>
+              <div>Recording: {cameraInfo.is_recording ? 'Yes' : 'No'}</div>
+              <div>Last Checked: {new Date(cameraInfo.last_checked).toLocaleString()}</div>
+            </div>
+          </div>
+        ))}
+      </div>
+    </div>
+ ); +}; + +export default CameraDashboard; +``` + +### Custom Hook for Camera Management + +```jsx +import { useState, useEffect, useCallback } from 'react'; + +const useCameraStream = (cameraName, apiBaseUrl = 'http://vision:8000') => { + const [isStreaming, setIsStreaming] = useState(false); + const [loading, setLoading] = useState(false); + const [error, setError] = useState(null); + + const startStream = useCallback(async () => { + setLoading(true); + setError(null); + + try { + const response = await fetch(`${apiBaseUrl}/cameras/${cameraName}/start-stream`, { + method: 'POST', + }); + + if (response.ok) { + setIsStreaming(true); + return { success: true }; + } else { + const errorData = await response.json(); + const errorMsg = errorData.detail || 'Failed to start stream'; + setError(errorMsg); + return { success: false, error: errorMsg }; + } + } catch (err) { + const errorMsg = `Network error: ${err.message}`; + setError(errorMsg); + return { success: false, error: errorMsg }; + } finally { + setLoading(false); + } + }, [cameraName, apiBaseUrl]); + + const stopStream = useCallback(async () => { + setLoading(true); + + try { + const response = await fetch(`${apiBaseUrl}/cameras/${cameraName}/stop-stream`, { + method: 'POST', + }); + + if (response.ok) { + setIsStreaming(false); + return { success: true }; + } else { + const errorData = await response.json(); + const errorMsg = errorData.detail || 'Failed to stop stream'; + setError(errorMsg); + return { success: false, error: errorMsg }; + } + } catch (err) { + const errorMsg = `Network error: ${err.message}`; + setError(errorMsg); + return { success: false, error: errorMsg }; + } finally { + setLoading(false); + } + }, [cameraName, apiBaseUrl]); + + const getStreamUrl = useCallback(() => { + return `${apiBaseUrl}/cameras/${cameraName}/stream?t=${Date.now()}`; + }, [cameraName, apiBaseUrl]); + + return { + isStreaming, + loading, + error, + startStream, + stopStream, + getStreamUrl, + }; +}; + +export default useCameraStream; +``` + +## ๐ŸŽจ Styling with Tailwind CSS + +```jsx +const CameraStreamTailwind = ({ cameraName }) => { + const { isStreaming, loading, error, startStream, stopStream, getStreamUrl } = useCameraStream(cameraName); + + return ( +
+    <div className="bg-white rounded-lg shadow-md p-4">
+      <h3 className="text-lg font-semibold mb-2">Camera: {cameraName}</h3>
+
+      {/* Stream Container */}
+      <div className="mb-4">
+        {isStreaming ? (
+          <img
+            src={getStreamUrl()}
+            alt={`${cameraName} live stream`}
+            className="w-full rounded"
+            onError={() => console.error('Stream connection lost')}
+          />
+        ) : (
+          <div className="flex h-48 items-center justify-center rounded bg-gray-100 text-gray-500">
+            No Stream Active
+          </div>
+        )}
+      </div>
+
+      {/* Controls */}
+      <div className="flex gap-2">
+        <button
+          onClick={startStream}
+          disabled={loading || isStreaming}
+          className="rounded bg-green-600 px-4 py-2 text-white disabled:opacity-50"
+        >
+          Start Stream
+        </button>
+        <button
+          onClick={stopStream}
+          disabled={loading || !isStreaming}
+          className="rounded bg-red-600 px-4 py-2 text-white disabled:opacity-50"
+        >
+          Stop Stream
+        </button>
+      </div>
+
+      {/* Error Display */}
+      {error && (
+        <div className="mt-2 text-sm text-red-600">
+          Error: {error}
+        </div>
+      )}
+    </div>
+ ); +}; +``` + +## ๐Ÿ”ง Configuration Options + +### Environment Variables (.env) +```env +# Production configuration (using 'vision' hostname) +REACT_APP_CAMERA_API_URL=http://vision:8000 +REACT_APP_STREAM_REFRESH_INTERVAL=30000 +REACT_APP_STREAM_TIMEOUT=10000 + +# Development configuration (using localhost) +# REACT_APP_CAMERA_API_URL=http://localhost:8000 + +# Custom IP configuration +# REACT_APP_CAMERA_API_URL=http://192.168.1.100:8000 +``` + +### API Configuration +```javascript +const apiConfig = { + baseUrl: process.env.REACT_APP_CAMERA_API_URL || 'http://vision:8000', + timeout: parseInt(process.env.REACT_APP_STREAM_TIMEOUT) || 10000, + refreshInterval: parseInt(process.env.REACT_APP_STREAM_REFRESH_INTERVAL) || 30000, +}; +``` + +### Hostname Setup Guide +```bash +# Option 1: Add to /etc/hosts (Linux/Mac) +echo "127.0.0.1 vision" | sudo tee -a /etc/hosts + +# Option 2: Add to hosts file (Windows) +# Add to C:\Windows\System32\drivers\etc\hosts: +# 127.0.0.1 vision + +# Option 3: Configure DNS +# Point 'vision' hostname to your server's IP address + +# Verify hostname resolution +ping vision +``` + +## ๐Ÿšจ Important Implementation Notes + +### 1. MJPEG Stream Handling +- Use HTML `` tag with `src` pointing to stream endpoint +- Add timestamp query parameter to prevent caching: `?t=${Date.now()}` +- Handle `onError` event for connection issues + +### 2. Error Handling +- Network errors (fetch failures) +- HTTP errors (4xx, 5xx responses) +- Stream connection errors (img onError) +- Timeout handling for long requests + +### 3. Performance Considerations +- Streams consume bandwidth continuously +- Stop streams when components unmount +- Limit concurrent streams based on system capacity +- Consider lazy loading for multiple cameras + +### 4. State Management +- Track streaming state per camera +- Handle loading states during API calls +- Manage error states with user feedback +- Refresh camera list periodically + +## ๐Ÿ“ฑ Mobile Considerations + +```jsx +// Responsive design for mobile +const mobileStyles = { + container: { + padding: '10px', + maxWidth: '100vw', + }, + stream: { + width: '100%', + maxWidth: '100vw', + height: 'auto', + }, + controls: { + display: 'flex', + flexDirection: 'column', + gap: '8px', + }, +}; +``` + +## ๐Ÿงช Testing Integration + +```javascript +// Test API connectivity +const testConnection = async () => { + try { + const response = await fetch(`${apiBaseUrl}/health`); + return response.ok; + } catch { + return false; + } +}; + +// Test camera availability +const testCamera = async (cameraName) => { + try { + const response = await fetch(`${apiBaseUrl}/cameras/${cameraName}/test-connection`, { + method: 'POST', + }); + return response.ok; + } catch { + return false; + } +}; +``` + +## ๐Ÿ“ Additional Files for AI Integration + +### TypeScript Definitions +- `camera-api.types.ts` - Complete TypeScript definitions for all API types +- `streaming-api.http` - REST Client file with all streaming endpoints +- `STREAMING_GUIDE.md` - Comprehensive user guide for streaming functionality + +### Quick Integration Checklist for AI Assistants + +1. **Copy TypeScript types** from `camera-api.types.ts` +2. **Use API endpoints** from `streaming-api.http` +3. **Implement error handling** as shown in examples +4. **Add CORS configuration** if needed for production +5. 
**Test with multiple cameras** using provided examples + +### Key Integration Points + +- **Stream URL Format**: `${baseUrl}/cameras/${cameraName}/stream?t=${Date.now()}` +- **Start Stream**: `POST /cameras/{name}/start-stream` +- **Stop Stream**: `POST /cameras/{name}/stop-stream` +- **Camera List**: `GET /cameras` +- **Error Handling**: Always wrap in try-catch blocks +- **Loading States**: Implement for better UX + +### Production Considerations + +- Configure CORS for specific origins +- Add authentication if required +- Implement rate limiting +- Monitor system resources with multiple streams +- Add reconnection logic for network issues + +This documentation provides everything an AI assistant needs to integrate the USDA Vision Camera streaming functionality into React applications, including complete code examples, error handling, and best practices. diff --git a/CAMERA_CONFIG_API.md b/CAMERA_CONFIG_API.md new file mode 100644 index 0000000..cefd91c --- /dev/null +++ b/CAMERA_CONFIG_API.md @@ -0,0 +1,423 @@ +# ๐ŸŽ›๏ธ Camera Configuration API Guide + +This guide explains how to configure camera settings via API endpoints, including all the advanced settings from your config.json. + +## ๐Ÿ“‹ Configuration Categories + +### โœ… **Real-time Configurable (No Restart Required)** +These settings can be changed while the camera is active: + +- **Basic**: `exposure_ms`, `gain`, `target_fps` +- **Image Quality**: `sharpness`, `contrast`, `saturation`, `gamma` +- **Color**: `auto_white_balance`, `color_temperature_preset` +- **Advanced**: `anti_flicker_enabled`, `light_frequency` +- **HDR**: `hdr_enabled`, `hdr_gain_mode` + +### โš ๏ธ **Restart Required** +These settings require camera restart to take effect: + +- **Noise Reduction**: `noise_filter_enabled`, `denoise_3d_enabled` +- **System**: `machine_topic`, `storage_path`, `enabled`, `bit_depth` + +## ๐Ÿ”Œ API Endpoints + +### 1. Get Camera Configuration +```http +GET /cameras/{camera_name}/config +``` + +**Response:** +```json +{ + "name": "camera1", + "machine_topic": "vibratory_conveyor", + "storage_path": "/storage/camera1", + "enabled": true, + "exposure_ms": 1.0, + "gain": 3.5, + "target_fps": 0, + "sharpness": 120, + "contrast": 110, + "saturation": 100, + "gamma": 100, + "noise_filter_enabled": true, + "denoise_3d_enabled": false, + "auto_white_balance": true, + "color_temperature_preset": 0, + "anti_flicker_enabled": true, + "light_frequency": 1, + "bit_depth": 8, + "hdr_enabled": false, + "hdr_gain_mode": 0 +} +``` + +### 2. Update Camera Configuration +```http +PUT /cameras/{camera_name}/config +Content-Type: application/json +``` + +**Request Body (all fields optional):** +```json +{ + "exposure_ms": 2.0, + "gain": 4.0, + "target_fps": 10.0, + "sharpness": 150, + "contrast": 120, + "saturation": 110, + "gamma": 90, + "noise_filter_enabled": true, + "denoise_3d_enabled": false, + "auto_white_balance": false, + "color_temperature_preset": 1, + "anti_flicker_enabled": true, + "light_frequency": 1, + "hdr_enabled": false, + "hdr_gain_mode": 0 +} +``` + +**Response:** +```json +{ + "success": true, + "message": "Camera camera1 configuration updated", + "updated_settings": ["exposure_ms", "gain", "sharpness"] +} +``` + +### 3. 
Apply Configuration (Restart Camera) +```http +POST /cameras/{camera_name}/apply-config +``` + +**Response:** +```json +{ + "success": true, + "message": "Configuration applied to camera camera1" +} +``` + +## ๐Ÿ“Š Setting Ranges and Descriptions + +### Basic Settings +| Setting | Range | Default | Description | +|---------|-------|---------|-------------| +| `exposure_ms` | 0.1 - 1000.0 | 1.0 | Exposure time in milliseconds | +| `gain` | 0.0 - 20.0 | 3.5 | Camera gain multiplier | +| `target_fps` | 0.0 - 120.0 | 0 | Target FPS (0 = maximum) | + +### Image Quality Settings +| Setting | Range | Default | Description | +|---------|-------|---------|-------------| +| `sharpness` | 0 - 200 | 100 | Image sharpness (100 = no sharpening) | +| `contrast` | 0 - 200 | 100 | Image contrast (100 = normal) | +| `saturation` | 0 - 200 | 100 | Color saturation (color cameras only) | +| `gamma` | 0 - 300 | 100 | Gamma correction (100 = normal) | + +### Color Settings +| Setting | Values | Default | Description | +|---------|--------|---------|-------------| +| `auto_white_balance` | true/false | true | Automatic white balance | +| `color_temperature_preset` | 0-10 | 0 | Color temperature preset (0=auto) | + +### Advanced Settings +| Setting | Values | Default | Description | +|---------|--------|---------|-------------| +| `anti_flicker_enabled` | true/false | true | Reduce artificial lighting flicker | +| `light_frequency` | 0/1 | 1 | Light frequency (0=50Hz, 1=60Hz) | +| `noise_filter_enabled` | true/false | true | Basic noise filtering | +| `denoise_3d_enabled` | true/false | false | Advanced 3D denoising | + +### HDR Settings +| Setting | Values | Default | Description | +|---------|--------|---------|-------------| +| `hdr_enabled` | true/false | false | High Dynamic Range | +| `hdr_gain_mode` | 0-3 | 0 | HDR processing mode | + +## ๐Ÿš€ Usage Examples + +### Example 1: Adjust Exposure and Gain +```bash +curl -X PUT http://localhost:8000/cameras/camera1/config \ + -H "Content-Type: application/json" \ + -d '{ + "exposure_ms": 1.5, + "gain": 4.0 + }' +``` + +### Example 2: Improve Image Quality +```bash +curl -X PUT http://localhost:8000/cameras/camera1/config \ + -H "Content-Type: application/json" \ + -d '{ + "sharpness": 150, + "contrast": 120, + "gamma": 90 + }' +``` + +### Example 3: Configure for Indoor Lighting +```bash +curl -X PUT http://localhost:8000/cameras/camera1/config \ + -H "Content-Type: application/json" \ + -d '{ + "anti_flicker_enabled": true, + "light_frequency": 1, + "auto_white_balance": false, + "color_temperature_preset": 2 + }' +``` + +### Example 4: Enable HDR Mode +```bash +curl -X PUT http://localhost:8000/cameras/camera1/config \ + -H "Content-Type: application/json" \ + -d '{ + "hdr_enabled": true, + "hdr_gain_mode": 1 + }' +``` + +## โš›๏ธ React Integration Examples + +### Camera Configuration Component +```jsx +import React, { useState, useEffect } from 'react'; + +const CameraConfig = ({ cameraName, apiBaseUrl = 'http://localhost:8000' }) => { + const [config, setConfig] = useState(null); + const [loading, setLoading] = useState(false); + const [error, setError] = useState(null); + + // Load current configuration + useEffect(() => { + fetchConfig(); + }, [cameraName]); + + const fetchConfig = async () => { + try { + const response = await fetch(`${apiBaseUrl}/cameras/${cameraName}/config`); + if (response.ok) { + const data = await response.json(); + setConfig(data); + } else { + setError('Failed to load configuration'); + } + } catch (err) { + setError(`Error: 
${err.message}`); + } + }; + + const updateConfig = async (updates) => { + setLoading(true); + try { + const response = await fetch(`${apiBaseUrl}/cameras/${cameraName}/config`, { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(updates) + }); + + if (response.ok) { + const result = await response.json(); + console.log('Updated settings:', result.updated_settings); + await fetchConfig(); // Reload configuration + } else { + const error = await response.json(); + setError(error.detail || 'Update failed'); + } + } catch (err) { + setError(`Error: ${err.message}`); + } finally { + setLoading(false); + } + }; + + const handleSliderChange = (setting, value) => { + updateConfig({ [setting]: value }); + }; + + if (!config) return
+    <div>Loading configuration...</div>;
+
+  return (
+    <div>
+      <h2>Camera Configuration: {cameraName}</h2>
+
+      {/* Basic Settings */}
+      <div>
+        <h3>Basic Settings</h3>
+
+        <div>
+          <label>Exposure (ms): {config.exposure_ms}</label>
+          <input
+            type="range" min="0.1" max="1000" step="0.1"
+            value={config.exposure_ms}
+            onChange={(e) => handleSliderChange('exposure_ms', parseFloat(e.target.value))}
+          />
+        </div>
+
+        <div>
+          <label>Gain: {config.gain}</label>
+          <input
+            type="range" min="0" max="20" step="0.1"
+            value={config.gain}
+            onChange={(e) => handleSliderChange('gain', parseFloat(e.target.value))}
+          />
+        </div>
+
+        <div>
+          <label>Target FPS: {config.target_fps} (0 = maximum)</label>
+          <input
+            type="range" min="0" max="120" step="1"
+            value={config.target_fps}
+            onChange={(e) => handleSliderChange('target_fps', parseInt(e.target.value))}
+          />
+        </div>
+      </div>
+
+      {/* Image Quality Settings */}
+      <div>
+        <h3>Image Quality</h3>
+
+        <div>
+          <label>Sharpness: {config.sharpness}</label>
+          <input
+            type="range" min="0" max="200" step="1"
+            value={config.sharpness}
+            onChange={(e) => handleSliderChange('sharpness', parseInt(e.target.value))}
+          />
+        </div>
+
+        <div>
+          <label>Contrast: {config.contrast}</label>
+          <input
+            type="range" min="0" max="200" step="1"
+            value={config.contrast}
+            onChange={(e) => handleSliderChange('contrast', parseInt(e.target.value))}
+          />
+        </div>
+
+        <div>
+          <label>Gamma: {config.gamma}</label>
+          <input
+            type="range" min="0" max="300" step="1"
+            value={config.gamma}
+            onChange={(e) => handleSliderChange('gamma', parseInt(e.target.value))}
+          />
+        </div>
+      </div>
+
+      {/* Advanced Settings */}
+      <div>
+        <h3>Advanced Settings</h3>
+
+        <div>
+          <label>
+            <input
+              type="checkbox"
+              checked={config.auto_white_balance}
+              onChange={(e) => updateConfig({ auto_white_balance: e.target.checked })}
+            />
+            Auto White Balance
+          </label>
+        </div>
+
+        <div>
+          <label>
+            <input
+              type="checkbox"
+              checked={config.anti_flicker_enabled}
+              onChange={(e) => updateConfig({ anti_flicker_enabled: e.target.checked })}
+            />
+            Anti-Flicker
+          </label>
+        </div>
+
+        <div>
+          <label>
+            <input
+              type="checkbox"
+              checked={config.hdr_enabled}
+              onChange={(e) => updateConfig({ hdr_enabled: e.target.checked })}
+            />
+            HDR Enabled
+          </label>
+        </div>
+      </div>
+
+      {error && (
+        <div style={{ color: 'red' }}>
+          {error}
+        </div>
+      )}
+
+      {loading && <div>Updating configuration...</div>}
+    </div>
+ ); +}; + +export default CameraConfig; +``` + +## ๐Ÿ”„ Configuration Workflow + +### 1. Real-time Adjustments +For settings that don't require restart: +```bash +# Update settings +curl -X PUT /cameras/camera1/config -d '{"exposure_ms": 2.0}' + +# Settings take effect immediately +# Continue recording/streaming without interruption +``` + +### 2. Settings Requiring Restart +For noise reduction and system settings: +```bash +# Update settings +curl -X PUT /cameras/camera1/config -d '{"noise_filter_enabled": false}' + +# Apply configuration (restarts camera) +curl -X POST /cameras/camera1/apply-config + +# Camera reinitializes with new settings +``` + +## ๐Ÿšจ Important Notes + +### Camera State During Updates +- **Real-time settings**: Applied immediately, no interruption +- **Restart-required settings**: Saved to config, applied on next restart +- **Recording**: Continues during real-time updates +- **Streaming**: Continues during real-time updates + +### Error Handling +- Invalid ranges return HTTP 422 with validation errors +- Camera not found returns HTTP 404 +- SDK errors are logged and return HTTP 500 + +### Performance Impact +- **Image quality settings**: Minimal performance impact +- **Noise reduction**: May reduce FPS when enabled +- **HDR**: Significant processing overhead when enabled + +This comprehensive API allows you to control all camera settings programmatically, making it perfect for integration with React dashboards or automated optimization systems! diff --git a/STREAMING_GUIDE.md b/STREAMING_GUIDE.md new file mode 100644 index 0000000..ca55700 --- /dev/null +++ b/STREAMING_GUIDE.md @@ -0,0 +1,240 @@ +# ๐ŸŽฅ USDA Vision Camera Live Streaming Guide + +This guide explains how to use the new live preview streaming functionality that allows you to view camera feeds in real-time without blocking recording operations. + +## ๐ŸŒŸ Key Features + +- **Non-blocking streaming**: Live preview doesn't interfere with recording +- **Separate camera connections**: Streaming uses independent camera instances +- **MJPEG streaming**: Standard web-compatible video streaming +- **Multiple concurrent viewers**: Multiple browsers can view the same stream +- **REST API control**: Start/stop streaming via API endpoints +- **Web interface**: Ready-to-use HTML interface for live preview + +## ๐Ÿ—๏ธ Architecture + +The streaming system creates separate camera connections for preview that are independent from recording: + +``` +Camera Hardware +โ”œโ”€โ”€ Recording Connection (CameraRecorder) +โ”‚ โ”œโ”€โ”€ Used for video file recording +โ”‚ โ”œโ”€โ”€ Triggered by MQTT machine states +โ”‚ โ””โ”€โ”€ High quality, full FPS +โ””โ”€โ”€ Streaming Connection (CameraStreamer) + โ”œโ”€โ”€ Used for live preview + โ”œโ”€โ”€ Controlled via API endpoints + โ””โ”€โ”€ Optimized for web viewing (lower FPS, JPEG compression) +``` + +## ๐Ÿš€ Quick Start + +### 1. Start the System +```bash +python main.py +``` + +### 2. Open the Web Interface +Open `camera_preview.html` in your browser and click "Start Stream" for any camera. + +### 3. 
API Usage +```bash +# Start streaming for camera1 +curl -X POST http://localhost:8000/cameras/camera1/start-stream + +# View live stream (open in browser) +http://localhost:8000/cameras/camera1/stream + +# Stop streaming +curl -X POST http://localhost:8000/cameras/camera1/stop-stream +``` + +## ๐Ÿ“ก API Endpoints + +### Start Streaming +```http +POST /cameras/{camera_name}/start-stream +``` +**Response:** +```json +{ + "success": true, + "message": "Started streaming for camera camera1" +} +``` + +### Stop Streaming +```http +POST /cameras/{camera_name}/stop-stream +``` +**Response:** +```json +{ + "success": true, + "message": "Stopped streaming for camera camera1" +} +``` + +### Live Stream (MJPEG) +```http +GET /cameras/{camera_name}/stream +``` +**Response:** Multipart MJPEG stream +**Content-Type:** `multipart/x-mixed-replace; boundary=frame` + +## ๐ŸŒ Web Interface Usage + +The included `camera_preview.html` provides a complete web interface: + +1. **Camera Grid**: Shows all configured cameras +2. **Stream Controls**: Start/Stop/Refresh buttons for each camera +3. **Live Preview**: Real-time video feed display +4. **Status Information**: System and camera status +5. **Responsive Design**: Works on desktop and mobile + +### Features: +- โœ… Real-time camera status +- โœ… One-click stream start/stop +- โœ… Automatic stream refresh +- โœ… System health monitoring +- โœ… Error handling and status messages + +## ๐Ÿ”ง Technical Details + +### Camera Streamer Configuration +- **Preview FPS**: 10 FPS (configurable) +- **JPEG Quality**: 70% (configurable) +- **Frame Buffer**: 5 frames (prevents memory buildup) +- **Timeout**: 200ms per frame capture + +### Memory Management +- Automatic frame buffer cleanup +- Queue-based frame management +- Proper camera resource cleanup on stop + +### Thread Safety +- Thread-safe streaming operations +- Independent from recording threads +- Proper synchronization with locks + +## ๐Ÿงช Testing + +### Run the Test Script +```bash +python test_streaming.py +``` + +This will test: +- โœ… API endpoint functionality +- โœ… Stream start/stop operations +- โœ… Concurrent recording and streaming +- โœ… Error handling + +### Manual Testing +1. Start the system: `python main.py` +2. Open `camera_preview.html` in browser +3. Start streaming for a camera +4. Trigger recording via MQTT or manual API +5. Verify both work simultaneously + +## ๐Ÿ”„ Concurrent Operations + +The system supports these concurrent operations: + +| Operation | Recording | Streaming | Notes | +|-----------|-----------|-----------|-------| +| Recording Only | โœ… | โŒ | Normal operation | +| Streaming Only | โŒ | โœ… | Preview without recording | +| Both Concurrent | โœ… | โœ… | **Independent connections** | + +### Example: Concurrent Usage +```bash +# Start streaming +curl -X POST http://localhost:8000/cameras/camera1/start-stream + +# Start recording (while streaming continues) +curl -X POST http://localhost:8000/cameras/camera1/start-recording \ + -H "Content-Type: application/json" \ + -d '{"filename": "test_recording.avi"}' + +# Both operations run independently! 
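+
+# Optional sanity check while both are running: is_recording should report true
+# and the MJPEG stream should keep serving frames (see "Step 3" in streaming-api.http)
+curl http://localhost:8000/cameras/camera1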
+``` + +## ๐Ÿ› ๏ธ Configuration + +### Stream Settings (in CameraStreamer) +```python +self.preview_fps = 10.0 # Lower FPS for preview +self.preview_quality = 70 # JPEG quality (1-100) +self._frame_queue.maxsize = 5 # Frame buffer size +``` + +### Camera Settings +The streamer uses the same camera configuration as recording: +- Exposure time from `camera_config.exposure_ms` +- Gain from `camera_config.gain` +- Optimized trigger mode for continuous streaming + +## ๐Ÿšจ Important Notes + +### Camera Access Patterns +- **Recording**: Blocks camera during active recording +- **Streaming**: Uses separate connection, doesn't block +- **Health Checks**: Brief, non-blocking camera tests +- **Multiple Streams**: Multiple browsers can view same stream + +### Performance Considerations +- Streaming uses additional CPU/memory resources +- Lower preview FPS reduces system load +- JPEG compression reduces bandwidth usage +- Frame queue prevents memory buildup + +### Error Handling +- Automatic camera resource cleanup +- Graceful handling of camera disconnections +- Stream auto-restart capabilities +- Detailed error logging + +## ๐Ÿ” Troubleshooting + +### Stream Not Starting +1. Check camera availability: `GET /cameras` +2. Verify camera not in error state +3. Check system logs for camera initialization errors +4. Try camera reconnection: `POST /cameras/{name}/reconnect` + +### Poor Stream Quality +1. Adjust `preview_quality` setting (higher = better quality) +2. Increase `preview_fps` for smoother video +3. Check network bandwidth +4. Verify camera exposure/gain settings + +### Browser Issues +1. Try different browser (Chrome/Firefox recommended) +2. Check browser console for JavaScript errors +3. Verify CORS settings in API server +4. Clear browser cache and refresh + +## ๐Ÿ“ˆ Future Enhancements + +Potential improvements for the streaming system: + +- ๐Ÿ”„ WebRTC support for lower latency +- ๐Ÿ“ฑ Mobile app integration +- ๐ŸŽ›๏ธ Real-time camera setting adjustments +- ๐Ÿ“Š Stream analytics and monitoring +- ๐Ÿ” Authentication and access control +- ๐ŸŒ Multi-camera synchronized viewing + +## ๐Ÿ“ž Support + +For issues with streaming functionality: + +1. Check the system logs: `usda_vision_system.log` +2. Run the test script: `python test_streaming.py` +3. Verify API health: `http://localhost:8000/health` +4. Check camera status: `http://localhost:8000/cameras` + +--- + +**โœ… Live streaming is now ready for production use!** diff --git a/api-endpoints.http b/api-endpoints.http index 0476502..85c00ca 100644 --- a/api-endpoints.http +++ b/api-endpoints.http @@ -1,14 +1,74 @@ ############################################################################### # USDA Vision Camera System - Complete API Endpoints Documentation -# Base URL: http://localhost:8000 +# +# CONFIGURATION: +# - Default Base URL: http://localhost:8000 (local development) +# - Production Base URL: http://vision:8000 (when using hostname 'vision') +# - Custom hostname: Update @baseUrl variable below +# +# HOSTNAME SETUP: +# To use 'vision' hostname instead of 'localhost': +# 1. Add to /etc/hosts: 127.0.0.1 vision +# 2. Or configure DNS to point 'vision' to the server IP +# 3. 
Update camera_preview.html: API_BASE = 'http://vision:8000' ############################################################################### +# Base URL Configuration - Change this to match your setup +@baseUrl = http://vision:8000 +# Alternative configurations: +# @baseUrl = http://localhost:8000 # Local development +# @baseUrl = http://192.168.1.100:8000 # Specific IP address +# @baseUrl = http://your-server:8000 # Custom hostname + +############################################################################### +# CONFIGURATION GUIDE +############################################################################### + +### HOSTNAME CONFIGURATION OPTIONS: + +# Option 1: Using 'vision' hostname (recommended for production) +# - Requires hostname resolution setup +# - Add to /etc/hosts: 127.0.0.1 vision +# - Or configure DNS: vision -> server IP address +# - Update camera_preview.html: API_BASE = 'http://vision:8000' +# - Set @baseUrl = http://vision:8000 + +# Option 2: Using localhost (development) +# - Works immediately on local machine +# - Set @baseUrl = http://localhost:8000 +# - Update camera_preview.html: API_BASE = 'http://localhost:8000' + +# Option 3: Using specific IP address +# - Replace with actual server IP +# - Set @baseUrl = http://192.168.1.100:8000 +# - Update camera_preview.html: API_BASE = 'http://192.168.1.100:8000' + +# Option 4: Custom hostname +# - Configure DNS or /etc/hosts for custom name +# - Set @baseUrl = http://your-custom-name:8000 +# - Update camera_preview.html: API_BASE = 'http://your-custom-name:8000' + +### NETWORK CONFIGURATION: +# - Default port: 8000 +# - CORS enabled for all origins (configure for production) +# - No authentication required (add if needed) + +### CLIENT CONFIGURATION FILES TO UPDATE: +# 1. camera_preview.html - Update API_BASE constant +# 2. React projects - Update apiConfig.baseUrl +# 3. This file - Update @baseUrl variable +# 4. Any custom scripts - Update base URL + +### TESTING CONNECTIVITY: +# Test if the API is reachable: +GET {{baseUrl}}/health + ############################################################################### # SYSTEM ENDPOINTS ############################################################################### ### Root endpoint - API information -GET http://localhost:8000/ +GET {{baseUrl}}/ # Response: SuccessResponse # { # "success": true, @@ -427,3 +487,14 @@ Content-Type: application/json # - fps omitted: Uses camera config default # 6. Filenames automatically get datetime prefix: YYYYMMDD_HHMMSS_filename.avi # 7. Recovery endpoints should be used in order: test-connection โ†’ reconnect โ†’ restart-grab โ†’ full-reset โ†’ reinitialize + + + +### Start streaming for camera1 +curl -X POST http://localhost:8000/cameras/camera1/start-stream + +# View live stream (open in browser) +# http://localhost:8000/cameras/camera1/stream + +### Stop streaming +curl -X POST http://localhost:8000/cameras/camera1/stop-stream \ No newline at end of file diff --git a/camera-api.types.ts b/camera-api.types.ts new file mode 100644 index 0000000..3610ac8 --- /dev/null +++ b/camera-api.types.ts @@ -0,0 +1,367 @@ +/** + * TypeScript definitions for USDA Vision Camera System API + * + * This file provides complete type definitions for AI assistants + * to integrate the camera streaming functionality into React/TypeScript projects. 
+ */ + +// ============================================================================= +// BASE CONFIGURATION +// ============================================================================= + +export interface ApiConfig { + baseUrl: string; + timeout?: number; + refreshInterval?: number; +} + +export const defaultApiConfig: ApiConfig = { + baseUrl: 'http://vision:8000', // Production default, change to 'http://localhost:8000' for development + timeout: 10000, + refreshInterval: 30000, +}; + +// ============================================================================= +// CAMERA TYPES +// ============================================================================= + +export interface CameraDeviceInfo { + friendly_name?: string; + port_type?: string; + serial_number?: string; + device_index?: number; + error?: string; +} + +export interface CameraInfo { + name: string; + status: 'connected' | 'disconnected' | 'error' | 'not_found' | 'available'; + is_recording: boolean; + last_checked: string; // ISO date string + last_error?: string | null; + device_info?: CameraDeviceInfo; + current_recording_file?: string | null; + recording_start_time?: string | null; // ISO date string +} + +export interface CameraListResponse { + [cameraName: string]: CameraInfo; +} + +// ============================================================================= +// STREAMING TYPES +// ============================================================================= + +export interface StreamStartRequest { + // No body required - camera name is in URL path +} + +export interface StreamStartResponse { + success: boolean; + message: string; +} + +export interface StreamStopRequest { + // No body required - camera name is in URL path +} + +export interface StreamStopResponse { + success: boolean; + message: string; +} + +export interface StreamStatus { + isStreaming: boolean; + streamUrl?: string; + error?: string; +} + +// ============================================================================= +// RECORDING TYPES +// ============================================================================= + +export interface StartRecordingRequest { + filename?: string; + exposure_ms?: number; + gain?: number; + fps?: number; +} + +export interface StartRecordingResponse { + success: boolean; + message: string; + filename?: string; +} + +export interface StopRecordingResponse { + success: boolean; + message: string; +} + +// ============================================================================= +// SYSTEM TYPES +// ============================================================================= + +export interface SystemStatusResponse { + status: string; + uptime: string; + api_server_running: boolean; + camera_manager_running: boolean; + mqtt_client_connected: boolean; + total_cameras: number; + active_recordings: number; + active_streams?: number; +} + +export interface HealthResponse { + status: 'healthy' | 'unhealthy'; + timestamp: string; +} + +// ============================================================================= +// ERROR TYPES +// ============================================================================= + +export interface ApiError { + detail: string; + status_code?: number; +} + +export interface StreamError extends Error { + type: 'network' | 'api' | 'stream' | 'timeout'; + cameraName: string; + originalError?: Error; +} + +// ============================================================================= +// HOOK TYPES +// 
============================================================================= + +export interface UseCameraStreamResult { + isStreaming: boolean; + loading: boolean; + error: string | null; + startStream: () => Promise<{ success: boolean; error?: string }>; + stopStream: () => Promise<{ success: boolean; error?: string }>; + getStreamUrl: () => string; + refreshStream: () => void; +} + +export interface UseCameraListResult { + cameras: CameraListResponse; + loading: boolean; + error: string | null; + refreshCameras: () => Promise; +} + +export interface UseCameraRecordingResult { + isRecording: boolean; + loading: boolean; + error: string | null; + currentFile: string | null; + startRecording: (options?: StartRecordingRequest) => Promise<{ success: boolean; error?: string }>; + stopRecording: () => Promise<{ success: boolean; error?: string }>; +} + +// ============================================================================= +// COMPONENT PROPS TYPES +// ============================================================================= + +export interface CameraStreamProps { + cameraName: string; + apiConfig?: ApiConfig; + autoStart?: boolean; + onStreamStart?: (cameraName: string) => void; + onStreamStop?: (cameraName: string) => void; + onError?: (error: StreamError) => void; + className?: string; + style?: React.CSSProperties; +} + +export interface CameraDashboardProps { + apiConfig?: ApiConfig; + cameras?: string[]; // If provided, only show these cameras + showRecordingControls?: boolean; + showStreamingControls?: boolean; + refreshInterval?: number; + onCameraSelect?: (cameraName: string) => void; + className?: string; +} + +export interface CameraControlsProps { + cameraName: string; + apiConfig?: ApiConfig; + showRecording?: boolean; + showStreaming?: boolean; + onAction?: (action: 'start-stream' | 'stop-stream' | 'start-recording' | 'stop-recording', cameraName: string) => void; +} + +// ============================================================================= +// API CLIENT TYPES +// ============================================================================= + +export interface CameraApiClient { + // System endpoints + getHealth(): Promise; + getSystemStatus(): Promise; + + // Camera endpoints + getCameras(): Promise; + getCameraStatus(cameraName: string): Promise; + testCameraConnection(cameraName: string): Promise<{ success: boolean; message: string }>; + + // Streaming endpoints + startStream(cameraName: string): Promise; + stopStream(cameraName: string): Promise; + getStreamUrl(cameraName: string): string; + + // Recording endpoints + startRecording(cameraName: string, options?: StartRecordingRequest): Promise; + stopRecording(cameraName: string): Promise; +} + +// ============================================================================= +// UTILITY TYPES +// ============================================================================= + +export type CameraAction = 'start-stream' | 'stop-stream' | 'start-recording' | 'stop-recording' | 'test-connection'; + +export interface CameraActionResult { + success: boolean; + message: string; + error?: string; +} + +export interface StreamingState { + [cameraName: string]: { + isStreaming: boolean; + isLoading: boolean; + error: string | null; + lastStarted?: Date; + }; +} + +export interface RecordingState { + [cameraName: string]: { + isRecording: boolean; + isLoading: boolean; + error: string | null; + currentFile: string | null; + startTime?: Date; + }; +} + +// 
============================================================================= +// EVENT TYPES +// ============================================================================= + +export interface CameraEvent { + type: 'stream-started' | 'stream-stopped' | 'stream-error' | 'recording-started' | 'recording-stopped' | 'recording-error'; + cameraName: string; + timestamp: Date; + data?: any; +} + +export type CameraEventHandler = (event: CameraEvent) => void; + +// ============================================================================= +// CONFIGURATION TYPES +// ============================================================================= + +export interface StreamConfig { + fps: number; + quality: number; // 1-100 + timeout: number; + retryAttempts: number; + retryDelay: number; +} + +export interface CameraStreamConfig extends StreamConfig { + cameraName: string; + autoReconnect: boolean; + maxReconnectAttempts: number; +} + +// ============================================================================= +// CONTEXT TYPES (for React Context) +// ============================================================================= + +export interface CameraContextValue { + cameras: CameraListResponse; + streamingState: StreamingState; + recordingState: RecordingState; + apiClient: CameraApiClient; + + // Actions + startStream: (cameraName: string) => Promise; + stopStream: (cameraName: string) => Promise; + startRecording: (cameraName: string, options?: StartRecordingRequest) => Promise; + stopRecording: (cameraName: string) => Promise; + refreshCameras: () => Promise; + + // State + loading: boolean; + error: string | null; +} + +// ============================================================================= +// EXAMPLE USAGE TYPES +// ============================================================================= + +/** + * Example usage in React component: + * + * ```typescript + * import { CameraStreamProps, UseCameraStreamResult } from './camera-api.types'; + * + * const CameraStream: React.FC = ({ + * cameraName, + * apiConfig = defaultApiConfig, + * autoStart = false, + * onStreamStart, + * onStreamStop, + * onError + * }) => { + * const { + * isStreaming, + * loading, + * error, + * startStream, + * stopStream, + * getStreamUrl + * }: UseCameraStreamResult = useCameraStream(cameraName, apiConfig); + * + * // Component implementation... + * }; + * ``` + */ + +/** + * Example API client usage: + * + * ```typescript + * const apiClient: CameraApiClient = new CameraApiClientImpl(defaultApiConfig); + * + * // Start streaming + * const result = await apiClient.startStream('camera1'); + * if (result.success) { + * const streamUrl = apiClient.getStreamUrl('camera1'); + * // Use streamUrl in img tag + * } + * ``` + */ + +/** + * Example hook usage: + * + * ```typescript + * const MyComponent = () => { + * const { cameras, loading, error, refreshCameras } = useCameraList(); + * const { isStreaming, startStream, stopStream } = useCameraStream('camera1'); + * + * // Component logic... + * }; + * ``` + */ + +export default {}; diff --git a/camera_preview.html b/camera_preview.html new file mode 100644 index 0000000..99d321e --- /dev/null +++ b/camera_preview.html @@ -0,0 +1,336 @@ + + + + + + USDA Vision Camera Live Preview + + + +
+<body>
+    <div class="container">
+        <h1>🎥 USDA Vision Camera Live Preview</h1>
+
+        <!-- Camera cards (live preview plus Start/Stop/Refresh controls) are rendered here by the page script -->
+        <div id="camera-grid" class="camera-grid"></div>
+
+        <div class="info-panel">
+            <h2>📡 System Information</h2>
+            <div id="system-status">Loading system status...</div>
+
+            <h2>🔗 API Endpoints</h2>
+            <div class="endpoint-list">
+                <div><strong>Live Stream:</strong> <code>GET /cameras/{camera_name}/stream</code></div>
+                <div><strong>Start Stream:</strong> <code>POST /cameras/{camera_name}/start-stream</code></div>
+                <div><strong>Stop Stream:</strong> <code>POST /cameras/{camera_name}/stop-stream</code></div>
+                <div><strong>Camera Status:</strong> <code>GET /cameras</code></div>
+            </div>
+        </div>
+    </div>
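+    <!--
+      Minimal page-script sketch: the original script in this file is not reproduced here.
+      This version uses only the documented endpoints (GET /cameras, GET /health,
+      POST /cameras/{name}/start-stream, POST /cameras/{name}/stop-stream, and the MJPEG
+      stream URL with a cache-busting timestamp). Element ids and helper names are illustrative.
+    -->
+    <script>
+        const API_BASE = 'http://vision:8000';  // use 'http://localhost:8000' for local development
+
+        async function loadCameras() {
+            const grid = document.getElementById('camera-grid');
+            const cameras = await (await fetch(`${API_BASE}/cameras`)).json();
+            grid.innerHTML = '';
+            for (const [name, info] of Object.entries(cameras)) {
+                const card = document.createElement('div');
+                card.className = 'camera-card';
+                card.innerHTML = `
+                    <h3>${name} (${info.status})</h3>
+                    <img id="img-${name}" alt="${name} preview" style="width:100%">
+                    <button onclick="startStream('${name}')">Start Stream</button>
+                    <button onclick="stopStream('${name}')">Stop Stream</button>`;
+                grid.appendChild(card);
+            }
+        }
+
+        async function startStream(name) {
+            await fetch(`${API_BASE}/cameras/${name}/start-stream`, { method: 'POST' });
+            document.getElementById(`img-${name}`).src =
+                `${API_BASE}/cameras/${name}/stream?t=${Date.now()}`;
+        }
+
+        async function stopStream(name) {
+            await fetch(`${API_BASE}/cameras/${name}/stop-stream`, { method: 'POST' });
+            document.getElementById(`img-${name}`).removeAttribute('src');
+        }
+
+        async function loadSystemStatus() {
+            const statusEl = document.getElementById('system-status');
+            try {
+                const health = await (await fetch(`${API_BASE}/health`)).json();
+                statusEl.textContent = `API: ${health.status} (checked ${new Date().toLocaleTimeString()})`;
+            } catch (err) {
+                statusEl.textContent = `API unreachable: ${err.message}`;
+            }
+        }
+
+        loadCameras();
+        loadSystemStatus();
+        setInterval(loadSystemStatus, 30000);
+    </script>
+</body>
+</html>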
+ + + + diff --git a/streaming-api.http b/streaming-api.http new file mode 100644 index 0000000..8e06df9 --- /dev/null +++ b/streaming-api.http @@ -0,0 +1,524 @@ +### USDA Vision Camera Streaming API +### +### CONFIGURATION: +### - Production: http://vision:8000 (requires hostname setup) +### - Development: http://localhost:8000 +### - Custom: Update @baseUrl below to match your setup +### +### This file contains streaming-specific API endpoints for live camera preview +### Use with VS Code REST Client extension or similar tools. + +# Base URL - Update to match your configuration +@baseUrl = http://vision:8000 +# Alternative: @baseUrl = http://localhost:8000 + +### ============================================================================= +### STREAMING ENDPOINTS (NEW FUNCTIONALITY) +### ============================================================================= + +### Start camera streaming for live preview +### This creates a separate camera connection that doesn't interfere with recording +POST {{baseUrl}}/cameras/camera1/start-stream +Content-Type: application/json + +### Expected Response: +# { +# "success": true, +# "message": "Started streaming for camera camera1" +# } + +### + +### Stop camera streaming +POST {{baseUrl}}/cameras/camera1/stop-stream +Content-Type: application/json + +### Expected Response: +# { +# "success": true, +# "message": "Stopped streaming for camera camera1" +# } + +### + +### Get live MJPEG stream (open in browser or use as img src) +### This endpoint returns a continuous MJPEG stream +### Content-Type: multipart/x-mixed-replace; boundary=frame +GET {{baseUrl}}/cameras/camera1/stream + +### Usage in HTML: +# Live Stream + +### Usage in React: +# + +### + +### Start streaming for camera2 +POST {{baseUrl}}/cameras/camera2/start-stream +Content-Type: application/json + +### + +### Get live stream for camera2 +GET {{baseUrl}}/cameras/camera2/stream + +### + +### Stop streaming for camera2 +POST {{baseUrl}}/cameras/camera2/stop-stream +Content-Type: application/json + +### ============================================================================= +### CONCURRENT OPERATIONS TESTING +### ============================================================================= + +### Test Scenario: Streaming + Recording Simultaneously +### This demonstrates that streaming doesn't block recording + +### Step 1: Start streaming first +POST {{baseUrl}}/cameras/camera1/start-stream +Content-Type: application/json + +### + +### Step 2: Start recording (while streaming continues) +POST {{baseUrl}}/cameras/camera1/start-recording +Content-Type: application/json + +{ + "filename": "concurrent_test.avi" +} + +### + +### Step 3: Check both are running +GET {{baseUrl}}/cameras/camera1 + +### Expected Response shows both recording and streaming active: +# { +# "camera1": { +# "name": "camera1", +# "status": "connected", +# "is_recording": true, +# "current_recording_file": "concurrent_test.avi", +# "recording_start_time": "2025-01-28T10:30:00.000Z" +# } +# } + +### + +### Step 4: Stop recording (streaming continues) +POST {{baseUrl}}/cameras/camera1/stop-recording +Content-Type: application/json + +### + +### Step 5: Verify streaming still works +GET {{baseUrl}}/cameras/camera1/stream + +### + +### Step 6: Stop streaming +POST {{baseUrl}}/cameras/camera1/stop-stream +Content-Type: application/json + +### ============================================================================= +### MULTIPLE CAMERA STREAMING +### 
============================================================================= + +### Start streaming on multiple cameras simultaneously +POST {{baseUrl}}/cameras/camera1/start-stream +Content-Type: application/json + +### + +POST {{baseUrl}}/cameras/camera2/start-stream +Content-Type: application/json + +### + +### Check status of all cameras +GET {{baseUrl}}/cameras + +### + +### Access multiple streams (open in separate browser tabs) +GET {{baseUrl}}/cameras/camera1/stream + +### + +GET {{baseUrl}}/cameras/camera2/stream + +### + +### Stop all streaming +POST {{baseUrl}}/cameras/camera1/stop-stream +Content-Type: application/json + +### + +POST {{baseUrl}}/cameras/camera2/stop-stream +Content-Type: application/json + +### ============================================================================= +### ERROR TESTING +### ============================================================================= + +### Test with invalid camera name +POST {{baseUrl}}/cameras/invalid_camera/start-stream +Content-Type: application/json + +### Expected Response: +# { +# "detail": "Camera streamer not found: invalid_camera" +# } + +### + +### Test stream endpoint without starting stream first +GET {{baseUrl}}/cameras/camera1/stream + +### Expected: May return error or empty stream depending on camera state + +### + +### Test starting stream when camera is in error state +POST {{baseUrl}}/cameras/camera1/start-stream +Content-Type: application/json + +### If camera has issues, expected response: +# { +# "success": false, +# "message": "Failed to start streaming for camera camera1" +# } + +### ============================================================================= +### INTEGRATION EXAMPLES FOR AI ASSISTANTS +### ============================================================================= + +### React Component Integration: +# const CameraStream = ({ cameraName }) => { +# const [isStreaming, setIsStreaming] = useState(false); +# +# const startStream = async () => { +# const response = await fetch(`${baseUrl}/cameras/${cameraName}/start-stream`, { +# method: 'POST' +# }); +# if (response.ok) { +# setIsStreaming(true); +# } +# }; +# +# return ( +#
+#     <div>
+#       <button onClick={startStream} disabled={isStreaming}>Start Stream</button>
+#       {isStreaming && (
+#         <img src={`${baseUrl}/cameras/${cameraName}/stream?t=${Date.now()}`} alt="Live stream" />
+#       )}
+#     </div>
+# ); +# }; + +### JavaScript Fetch Example: +# const streamAPI = { +# async startStream(cameraName) { +# const response = await fetch(`${baseUrl}/cameras/${cameraName}/start-stream`, { +# method: 'POST', +# headers: { 'Content-Type': 'application/json' } +# }); +# return response.json(); +# }, +# +# async stopStream(cameraName) { +# const response = await fetch(`${baseUrl}/cameras/${cameraName}/stop-stream`, { +# method: 'POST', +# headers: { 'Content-Type': 'application/json' } +# }); +# return response.json(); +# }, +# +# getStreamUrl(cameraName) { +# return `${baseUrl}/cameras/${cameraName}/stream?t=${Date.now()}`; +# } +# }; + +### Vue.js Integration: +# +# +# + +### ============================================================================= +### TROUBLESHOOTING +### ============================================================================= + +### If streams don't start: +# 1. Check camera status: GET /cameras +# 2. Verify system health: GET /health +# 3. Test camera connection: POST /cameras/{name}/test-connection +# 4. Check if camera is already recording (shouldn't matter, but good to know) + +### If stream image doesn't load: +# 1. Verify stream was started: POST /cameras/{name}/start-stream +# 2. Check browser console for CORS errors +# 3. Try accessing stream URL directly in browser +# 4. Add timestamp to prevent caching: ?t=${Date.now()} + +### If concurrent operations fail: +# 1. This should work - streaming and recording use separate connections +# 2. Check system logs for resource conflicts +# 3. Verify sufficient system resources (CPU/Memory) +# 4. Test with one camera first, then multiple + +### Performance Notes: +# - Streaming uses ~10 FPS by default (configurable) +# - JPEG quality set to 70% (configurable) +# - Each stream uses additional CPU/memory +# - Multiple concurrent streams may impact performance + +### ============================================================================= +### CAMERA CONFIGURATION ENDPOINTS (NEW) +### ============================================================================= + +### Get camera configuration +GET {{baseUrl}}/cameras/camera1/config + +### Expected Response: +# { +# "name": "camera1", +# "machine_topic": "vibratory_conveyor", +# "storage_path": "/storage/camera1", +# "enabled": true, +# "exposure_ms": 1.0, +# "gain": 3.5, +# "target_fps": 0, +# "sharpness": 120, +# "contrast": 110, +# "saturation": 100, +# "gamma": 100, +# "noise_filter_enabled": true, +# "denoise_3d_enabled": false, +# "auto_white_balance": true, +# "color_temperature_preset": 0, +# "anti_flicker_enabled": true, +# "light_frequency": 1, +# "bit_depth": 8, +# "hdr_enabled": false, +# "hdr_gain_mode": 0 +# } + +### + +### Update basic camera settings (real-time, no restart required) +PUT {{baseUrl}}/cameras/camera1/config +Content-Type: application/json + +{ + "exposure_ms": 2.0, + "gain": 4.0, + "target_fps": 10.0 +} + +### + +### Update image quality settings +PUT {{baseUrl}}/cameras/camera1/config +Content-Type: application/json + +{ + "sharpness": 150, + "contrast": 120, + "saturation": 110, + "gamma": 90 +} + +### + +### Update advanced settings +PUT {{baseUrl}}/cameras/camera1/config +Content-Type: application/json + +{ + "anti_flicker_enabled": true, + "light_frequency": 1, + "auto_white_balance": false, + "color_temperature_preset": 2 +} + +### + +### Enable HDR mode +PUT {{baseUrl}}/cameras/camera1/config +Content-Type: application/json + +{ + "hdr_enabled": true, + "hdr_gain_mode": 1 +} + +### + +### Update noise reduction settings 
(requires restart) +PUT {{baseUrl}}/cameras/camera1/config +Content-Type: application/json + +{ + "noise_filter_enabled": false, + "denoise_3d_enabled": true +} + +### + +### Apply configuration (restart camera with new settings) +POST {{baseUrl}}/cameras/camera1/apply-config + +### Expected Response: +# { +# "success": true, +# "message": "Configuration applied to camera camera1" +# } + +### + +### Get camera2 configuration +GET {{baseUrl}}/cameras/camera2/config + +### + +### Update camera2 for outdoor lighting +PUT {{baseUrl}}/cameras/camera2/config +Content-Type: application/json + +{ + "exposure_ms": 0.5, + "gain": 2.0, + "sharpness": 130, + "contrast": 115, + "anti_flicker_enabled": true, + "light_frequency": 1 +} + +### ============================================================================= +### CONFIGURATION TESTING SCENARIOS +### ============================================================================= + +### Scenario 1: Low light optimization +PUT {{baseUrl}}/cameras/camera1/config +Content-Type: application/json + +{ + "exposure_ms": 5.0, + "gain": 8.0, + "noise_filter_enabled": true, + "denoise_3d_enabled": true +} + +### + +### Scenario 2: High speed capture +PUT {{baseUrl}}/cameras/camera1/config +Content-Type: application/json + +{ + "exposure_ms": 0.2, + "gain": 1.0, + "target_fps": 30.0, + "sharpness": 180 +} + +### + +### Scenario 3: Color accuracy for food inspection +PUT {{baseUrl}}/cameras/camera1/config +Content-Type: application/json + +{ + "auto_white_balance": false, + "color_temperature_preset": 1, + "saturation": 120, + "contrast": 105, + "gamma": 95 +} + +### + +### Scenario 4: HDR for high contrast scenes +PUT {{baseUrl}}/cameras/camera1/config +Content-Type: application/json + +{ + "hdr_enabled": true, + "hdr_gain_mode": 2, + "exposure_ms": 1.0, + "gain": 3.0 +} + +### ============================================================================= +### ERROR TESTING FOR CONFIGURATION +### ============================================================================= + +### Test invalid camera name +GET {{baseUrl}}/cameras/invalid_camera/config + +### + +### Test invalid exposure range +PUT {{baseUrl}}/cameras/camera1/config +Content-Type: application/json + +{ + "exposure_ms": 2000.0 +} + +### Expected: HTTP 422 validation error + +### + +### Test invalid gain range +PUT {{baseUrl}}/cameras/camera1/config +Content-Type: application/json + +{ + "gain": 50.0 +} + +### Expected: HTTP 422 validation error + +### + +### Test empty configuration update +PUT {{baseUrl}}/cameras/camera1/config +Content-Type: application/json + +{} + +### Expected: HTTP 400 "No configuration updates provided" diff --git a/test_frame_conversion.py b/test_frame_conversion.py new file mode 100644 index 0000000..3f25385 --- /dev/null +++ b/test_frame_conversion.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python3 +""" +Test script to verify the frame conversion fix works correctly. 
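+
+Run with:  python test_frame_conversion.py
+
+This only exercises the NumPy buffer/reshape logic with mock frame data; it needs
+the bundled camera SDK (mvsdk) importable from ./camera_sdk, but no physical camera.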
+""" + +import sys +import os +import numpy as np + +# Add the current directory to Python path +sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) + +# Add camera SDK to path +sys.path.append(os.path.join(os.path.dirname(__file__), "camera_sdk")) + +try: + import mvsdk + print("โœ… mvsdk imported successfully") +except ImportError as e: + print(f"โŒ Failed to import mvsdk: {e}") + sys.exit(1) + +def test_frame_conversion(): + """Test the frame conversion logic""" + print("๐Ÿงช Testing frame conversion logic...") + + # Simulate frame data + width, height = 640, 480 + frame_size = width * height * 3 # RGB + + # Create mock frame data + mock_frame_data = np.random.randint(0, 255, frame_size, dtype=np.uint8) + + # Create a mock frame buffer (simulate memory address) + frame_buffer = mock_frame_data.ctypes.data + + # Create mock FrameHead + class MockFrameHead: + def __init__(self): + self.iWidth = width + self.iHeight = height + self.uBytes = frame_size + + frame_head = MockFrameHead() + + try: + # Test the conversion logic (similar to what's in streamer.py) + frame_data_buffer = (mvsdk.c_ubyte * frame_head.uBytes).from_address(frame_buffer) + frame_data = np.frombuffer(frame_data_buffer, dtype=np.uint8) + frame = frame_data.reshape((frame_head.iHeight, frame_head.iWidth, 3)) + + print(f"โœ… Frame conversion successful!") + print(f" Frame shape: {frame.shape}") + print(f" Frame dtype: {frame.dtype}") + print(f" Frame size: {frame.size} bytes") + + return True + + except Exception as e: + print(f"โŒ Frame conversion failed: {e}") + return False + +def main(): + print("๐Ÿ”ง Frame Conversion Test") + print("=" * 40) + + success = test_frame_conversion() + + if success: + print("\nโœ… Frame conversion fix is working correctly!") + print("๐Ÿ“‹ The streaming issue should be resolved after system restart.") + else: + print("\nโŒ Frame conversion fix needs more work.") + + print("\n๐Ÿ’ก To apply the fix:") + print("1. Restart the USDA vision system") + print("2. Test streaming again") + +if __name__ == "__main__": + main() diff --git a/test_streaming.py b/test_streaming.py new file mode 100644 index 0000000..47672ec --- /dev/null +++ b/test_streaming.py @@ -0,0 +1,199 @@ +#!/usr/bin/env python3 +""" +Test script for camera streaming functionality. + +This script tests the new streaming capabilities without interfering with recording. 
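+
+Run it with the API server already listening (the script assumes
+http://localhost:8000):
+
+    python test_streaming.py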
+""" + +import sys +import os +import time +import requests +import threading +from datetime import datetime + +# Add the current directory to Python path +sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) + +def test_api_endpoints(): + """Test the streaming API endpoints""" + base_url = "http://localhost:8000" + + print("๐Ÿงช Testing Camera Streaming API Endpoints") + print("=" * 50) + + # Test system status + try: + response = requests.get(f"{base_url}/system/status", timeout=5) + if response.status_code == 200: + print("โœ… System status endpoint working") + data = response.json() + print(f" System: {data.get('status', 'Unknown')}") + print(f" Camera Manager: {'Running' if data.get('camera_manager_running') else 'Stopped'}") + else: + print(f"โŒ System status endpoint failed: {response.status_code}") + except Exception as e: + print(f"โŒ System status endpoint error: {e}") + + # Test camera list + try: + response = requests.get(f"{base_url}/cameras", timeout=5) + if response.status_code == 200: + print("โœ… Camera list endpoint working") + cameras = response.json() + print(f" Found {len(cameras)} cameras: {list(cameras.keys())}") + + # Test streaming for each camera + for camera_name in cameras.keys(): + test_camera_streaming(base_url, camera_name) + + else: + print(f"โŒ Camera list endpoint failed: {response.status_code}") + except Exception as e: + print(f"โŒ Camera list endpoint error: {e}") + +def test_camera_streaming(base_url, camera_name): + """Test streaming for a specific camera""" + print(f"\n๐ŸŽฅ Testing streaming for {camera_name}") + print("-" * 30) + + # Test start streaming + try: + response = requests.post(f"{base_url}/cameras/{camera_name}/start-stream", timeout=10) + if response.status_code == 200: + print(f"โœ… Start stream endpoint working for {camera_name}") + data = response.json() + print(f" Response: {data.get('message', 'No message')}") + else: + print(f"โŒ Start stream failed for {camera_name}: {response.status_code}") + print(f" Error: {response.text}") + return + except Exception as e: + print(f"โŒ Start stream error for {camera_name}: {e}") + return + + # Wait a moment for stream to initialize + time.sleep(2) + + # Test stream endpoint (just check if it responds) + try: + response = requests.get(f"{base_url}/cameras/{camera_name}/stream", timeout=5, stream=True) + if response.status_code == 200: + print(f"โœ… Stream endpoint responding for {camera_name}") + print(f" Content-Type: {response.headers.get('content-type', 'Unknown')}") + + # Read a small amount of data to verify it's working + chunk_count = 0 + for chunk in response.iter_content(chunk_size=1024): + chunk_count += 1 + if chunk_count >= 3: # Read a few chunks then stop + break + + print(f" Received {chunk_count} data chunks") + else: + print(f"โŒ Stream endpoint failed for {camera_name}: {response.status_code}") + except Exception as e: + print(f"โŒ Stream endpoint error for {camera_name}: {e}") + + # Test stop streaming + try: + response = requests.post(f"{base_url}/cameras/{camera_name}/stop-stream", timeout=5) + if response.status_code == 200: + print(f"โœ… Stop stream endpoint working for {camera_name}") + data = response.json() + print(f" Response: {data.get('message', 'No message')}") + else: + print(f"โŒ Stop stream failed for {camera_name}: {response.status_code}") + except Exception as e: + print(f"โŒ Stop stream error for {camera_name}: {e}") + +def test_concurrent_recording_and_streaming(): + """Test that streaming doesn't interfere with recording""" + base_url = 
"http://localhost:8000" + + print("\n๐Ÿ”„ Testing Concurrent Recording and Streaming") + print("=" * 50) + + try: + # Get available cameras + response = requests.get(f"{base_url}/cameras", timeout=5) + if response.status_code != 200: + print("โŒ Cannot get camera list for concurrent test") + return + + cameras = response.json() + if not cameras: + print("โŒ No cameras available for concurrent test") + return + + camera_name = list(cameras.keys())[0] # Use first camera + print(f"Using camera: {camera_name}") + + # Start streaming + print("1. Starting streaming...") + response = requests.post(f"{base_url}/cameras/{camera_name}/start-stream", timeout=10) + if response.status_code != 200: + print(f"โŒ Failed to start streaming: {response.text}") + return + + time.sleep(2) + + # Start recording + print("2. Starting recording...") + response = requests.post(f"{base_url}/cameras/{camera_name}/start-recording", + json={"filename": "test_concurrent_recording.avi"}, timeout=10) + if response.status_code == 200: + print("โœ… Recording started successfully while streaming") + else: + print(f"โŒ Failed to start recording while streaming: {response.text}") + + # Let both run for a few seconds + print("3. Running both streaming and recording for 5 seconds...") + time.sleep(5) + + # Stop recording + print("4. Stopping recording...") + response = requests.post(f"{base_url}/cameras/{camera_name}/stop-recording", timeout=5) + if response.status_code == 200: + print("โœ… Recording stopped successfully") + else: + print(f"โŒ Failed to stop recording: {response.text}") + + # Stop streaming + print("5. Stopping streaming...") + response = requests.post(f"{base_url}/cameras/{camera_name}/stop-stream", timeout=5) + if response.status_code == 200: + print("โœ… Streaming stopped successfully") + else: + print(f"โŒ Failed to stop streaming: {response.text}") + + print("โœ… Concurrent test completed successfully!") + + except Exception as e: + print(f"โŒ Concurrent test error: {e}") + +def main(): + """Main test function""" + print("๐Ÿš€ USDA Vision Camera Streaming Test") + print("=" * 50) + print(f"Test started at: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}") + print() + + # Wait for system to be ready + print("โณ Waiting for system to be ready...") + time.sleep(3) + + # Run tests + test_api_endpoints() + test_concurrent_recording_and_streaming() + + print("\n" + "=" * 50) + print("๐Ÿ Test completed!") + print("\n๐Ÿ“‹ Next Steps:") + print("1. Open camera_preview.html in your browser") + print("2. Click 'Start Stream' for any camera") + print("3. Verify live preview works without blocking recording") + print("4. 
Test concurrent recording and streaming")
+
+if __name__ == "__main__":
+    main()
diff --git a/usda_vision_system/api/__pycache__/models.cpython-311.pyc b/usda_vision_system/api/__pycache__/models.cpython-311.pyc
index 99a7af96cb3b2536d1a678feb3e383574d886f80..e7d53f0ff76988df8a641c938d506fb9a37619a0 100644
Binary files a/usda_vision_system/api/__pycache__/models.cpython-311.pyc and b/usda_vision_system/api/__pycache__/models.cpython-311.pyc differ
diff --git a/usda_vision_system/camera/__pycache__/manager.cpython-311.pyc b/usda_vision_system/camera/__pycache__/manager.cpython-311.pyc
index 22c54b789a996532dd305000c921be8754b8b4de..4755cf03153e219375e0fcc7fa3659481d8c3222 100644
Binary files a/usda_vision_system/camera/__pycache__/manager.cpython-311.pyc and b/usda_vision_system/camera/__pycache__/manager.cpython-311.pyc differ
diff --git a/usda_vision_system/camera/__pycache__/recorder.cpython-311.pyc b/usda_vision_system/camera/__pycache__/recorder.cpython-311.pyc
index 2ca6a05d187ab8b35793414830c453ca7250394c..61c2c78970652b3b815542fa7532f9b956874d2b 100644
Binary files a/usda_vision_system/camera/__pycache__/recorder.cpython-311.pyc and b/usda_vision_system/camera/__pycache__/recorder.cpython-311.pyc differ
diff --git a/usda_vision_system/camera/manager.py b/usda_vision_system/camera/manager.py
index 84def7b..b0c4b9d 100644
--- a/usda_vision_system/camera/manager.py
+++ b/usda_vision_system/camera/manager.py
@@ -22,6 +22,7 @@ from ..core.events import EventSystem, EventType, Event, publish_camera_status_c
 from ..core.timezone_utils import format_filename_timestamp
 from .recorder import CameraRecorder
 from .monitor import CameraMonitor
+from .streamer import CameraStreamer
 from .sdk_config import initialize_sdk_with_suppression
 
 
@@ -40,6 +41,7 @@ class CameraManager:
         # Camera management
         self.available_cameras: List[Any] = []  # mvsdk camera device info
         self.camera_recorders: Dict[str, CameraRecorder] = {}  # camera_name -> recorder
+        self.camera_streamers: Dict[str, CameraStreamer] = {}  # camera_name -> streamer
         self.camera_monitor: Optional[CameraMonitor] = None
 
         # Threading
@@ -71,6 +73,9 @@ class CameraManager:
             # Initialize camera recorders
             self._initialize_recorders()
 
+            # Initialize camera streamers
+            self._initialize_streamers()
+
             self.logger.info("Camera manager started successfully")
             return True
 
@@ -93,6 +98,12 @@ class CameraManager:
                     recorder.stop_recording()
                 recorder.cleanup()
 
+        # Stop all active streaming
+        with self._lock:
+            for streamer in self.camera_streamers.values():
+                if streamer.is_streaming():
+                    streamer.stop_streaming()
+
         self.logger.info("Camera manager stopped")
 
     def _discover_cameras(self) -> None:
@@ -427,3 +438,104 @@ class CameraManager:
             self.logger.error(f"Error reinitializing camera {camera_name}: {e}")
             self.state_manager.update_camera_status(name=camera_name, status="error", device_info={"error": str(e)})
             return False
+
+    def _initialize_streamers(self) -> None:
+        """Initialize camera streamers for configured cameras"""
+        with self._lock:
+            for camera_config in self.config.cameras:
+                if not camera_config.enabled:
+                    continue
+
+                try:
+                    # Find matching physical camera
+                    device_info = self._find_camera_device(camera_config.name)
+                    if device_info is None:
+                        self.logger.warning(f"No physical camera found for streaming: {camera_config.name}")
+                        continue
+
+                    # Create streamer
+                    streamer = CameraStreamer(camera_config=camera_config, device_info=device_info, state_manager=self.state_manager, event_system=self.event_system)
+
+                    # Register the streamer for this camera
+                    self.camera_streamers[camera_config.name] = streamer
+                    self.logger.info(f"Successfully created streamer for camera: {camera_config.name}")
+
+                except Exception as e:
+                    self.logger.error(f"Error initializing streamer for {camera_config.name}: {e}")
+
+    def get_camera_streamer(self, camera_name: str) -> Optional[CameraStreamer]:
+        """Get camera streamer for a specific camera"""
+        return self.camera_streamers.get(camera_name)
+
+    def start_camera_streaming(self, camera_name: str) -> bool:
+        """Start streaming for a specific camera"""
+        streamer = self.camera_streamers.get(camera_name)
+        if not streamer:
+            self.logger.error(f"Camera streamer not found: {camera_name}")
+            return False
+
+        return streamer.start_streaming()
+
+    def stop_camera_streaming(self, camera_name: str) -> bool:
+        """Stop streaming for a specific camera"""
+        streamer = self.camera_streamers.get(camera_name)
+        if not streamer:
+            self.logger.error(f"Camera streamer not found: {camera_name}")
+            return False
+
+        return streamer.stop_streaming()
+
+    def is_camera_streaming(self, camera_name: str) -> bool:
+        """Check if a 
camera is currently streaming""" + streamer = self.camera_streamers.get(camera_name) + if not streamer: + return False + + return streamer.is_streaming() + + def get_camera_config(self, camera_name: str) -> Optional[CameraConfig]: + """Get camera configuration""" + return self.config.get_camera_by_name(camera_name) + + def update_camera_config(self, camera_name: str, **kwargs) -> bool: + """Update camera configuration and save to config file""" + try: + # Update the configuration + success = self.config.update_camera_config(camera_name, **kwargs) + if success: + self.logger.info(f"Updated configuration for camera {camera_name}: {kwargs}") + return True + else: + self.logger.error(f"Failed to update configuration for camera {camera_name}") + return False + except Exception as e: + self.logger.error(f"Error updating camera configuration: {e}") + return False + + def apply_camera_config(self, camera_name: str) -> bool: + """Apply current configuration to active camera (requires camera restart)""" + try: + # Get the recorder for this camera + recorder = self.camera_recorders.get(camera_name) + if not recorder: + self.logger.error(f"Camera recorder not found: {camera_name}") + return False + + # Stop recording if active + was_recording = recorder.is_recording() + if was_recording: + recorder.stop_recording() + + # Reinitialize the camera with new settings + success = self.reinitialize_failed_camera(camera_name) + + if success: + self.logger.info(f"Successfully applied configuration to camera {camera_name}") + return True + else: + self.logger.error(f"Failed to apply configuration to camera {camera_name}") + return False + + except Exception as e: + self.logger.error(f"Error applying camera configuration: {e}") + return False diff --git a/usda_vision_system/camera/recorder.py b/usda_vision_system/camera/recorder.py index 187754f..ea91753 100644 --- a/usda_vision_system/camera/recorder.py +++ b/usda_vision_system/camera/recorder.py @@ -328,6 +328,117 @@ class CameraRecorder: self.logger.error(f"Error updating camera settings: {e}") return False + def update_advanced_camera_settings(self, **kwargs) -> bool: + """Update advanced camera settings dynamically""" + if not self.hCamera: + self.logger.error("Camera not initialized") + return False + + try: + settings_updated = False + + # Update basic settings + if "exposure_ms" in kwargs and kwargs["exposure_ms"] is not None: + mvsdk.CameraSetAeState(self.hCamera, 0) + exposure_us = int(kwargs["exposure_ms"] * 1000) + mvsdk.CameraSetExposureTime(self.hCamera, exposure_us) + self.camera_config.exposure_ms = kwargs["exposure_ms"] + settings_updated = True + + if "gain" in kwargs and kwargs["gain"] is not None: + gain_value = int(kwargs["gain"] * 100) + mvsdk.CameraSetAnalogGain(self.hCamera, gain_value) + self.camera_config.gain = kwargs["gain"] + settings_updated = True + + if "target_fps" in kwargs and kwargs["target_fps"] is not None: + self.camera_config.target_fps = kwargs["target_fps"] + settings_updated = True + + # Update image quality settings + if "sharpness" in kwargs and kwargs["sharpness"] is not None: + mvsdk.CameraSetSharpness(self.hCamera, kwargs["sharpness"]) + self.camera_config.sharpness = kwargs["sharpness"] + settings_updated = True + + if "contrast" in kwargs and kwargs["contrast"] is not None: + mvsdk.CameraSetContrast(self.hCamera, kwargs["contrast"]) + self.camera_config.contrast = kwargs["contrast"] + settings_updated = True + + if "gamma" in kwargs and kwargs["gamma"] is not None: + mvsdk.CameraSetGamma(self.hCamera, 
kwargs["gamma"]) + self.camera_config.gamma = kwargs["gamma"] + settings_updated = True + + if "saturation" in kwargs and kwargs["saturation"] is not None and not self.monoCamera: + mvsdk.CameraSetSaturation(self.hCamera, kwargs["saturation"]) + self.camera_config.saturation = kwargs["saturation"] + settings_updated = True + + # Update noise reduction settings + if "noise_filter_enabled" in kwargs and kwargs["noise_filter_enabled"] is not None: + # Note: Noise filter settings may require camera restart to take effect + self.camera_config.noise_filter_enabled = kwargs["noise_filter_enabled"] + settings_updated = True + + if "denoise_3d_enabled" in kwargs and kwargs["denoise_3d_enabled"] is not None: + # Note: 3D denoise settings may require camera restart to take effect + self.camera_config.denoise_3d_enabled = kwargs["denoise_3d_enabled"] + settings_updated = True + + # Update color settings (for color cameras) + if not self.monoCamera: + if "auto_white_balance" in kwargs and kwargs["auto_white_balance"] is not None: + mvsdk.CameraSetWbMode(self.hCamera, kwargs["auto_white_balance"]) + self.camera_config.auto_white_balance = kwargs["auto_white_balance"] + settings_updated = True + + if "color_temperature_preset" in kwargs and kwargs["color_temperature_preset"] is not None: + if not self.camera_config.auto_white_balance: + mvsdk.CameraSetPresetClrTemp(self.hCamera, kwargs["color_temperature_preset"]) + self.camera_config.color_temperature_preset = kwargs["color_temperature_preset"] + settings_updated = True + + # Update advanced settings + if "anti_flicker_enabled" in kwargs and kwargs["anti_flicker_enabled"] is not None: + mvsdk.CameraSetAntiFlick(self.hCamera, kwargs["anti_flicker_enabled"]) + self.camera_config.anti_flicker_enabled = kwargs["anti_flicker_enabled"] + settings_updated = True + + if "light_frequency" in kwargs and kwargs["light_frequency"] is not None: + mvsdk.CameraSetLightFrequency(self.hCamera, kwargs["light_frequency"]) + self.camera_config.light_frequency = kwargs["light_frequency"] + settings_updated = True + + # Update HDR settings (if supported) + if "hdr_enabled" in kwargs and kwargs["hdr_enabled"] is not None: + try: + mvsdk.CameraSetHDR(self.hCamera, 1 if kwargs["hdr_enabled"] else 0) + self.camera_config.hdr_enabled = kwargs["hdr_enabled"] + settings_updated = True + except AttributeError: + self.logger.warning("HDR functions not available in this SDK version") + + if "hdr_gain_mode" in kwargs and kwargs["hdr_gain_mode"] is not None: + try: + if self.camera_config.hdr_enabled: + mvsdk.CameraSetHDRGainMode(self.hCamera, kwargs["hdr_gain_mode"]) + self.camera_config.hdr_gain_mode = kwargs["hdr_gain_mode"] + settings_updated = True + except AttributeError: + self.logger.warning("HDR gain mode functions not available in this SDK version") + + if settings_updated: + updated_settings = [k for k, v in kwargs.items() if v is not None] + self.logger.info(f"Updated camera settings: {updated_settings}") + + return settings_updated + + except Exception as e: + self.logger.error(f"Error updating advanced camera settings: {e}") + return False + def start_recording(self, filename: str) -> bool: """Start video recording""" with self._lock: diff --git a/usda_vision_system/camera/streamer.py b/usda_vision_system/camera/streamer.py new file mode 100644 index 0000000..6bfcadc --- /dev/null +++ b/usda_vision_system/camera/streamer.py @@ -0,0 +1,320 @@ +""" +Camera Streamer for the USDA Vision Camera System. 
+ +This module provides live preview streaming from GigE cameras without blocking recording. +It creates a separate camera connection for streaming that doesn't interfere with recording. +""" + +import sys +import os +import threading +import time +import logging +import cv2 +import numpy as np +import contextlib +from typing import Optional, Dict, Any, Generator +from datetime import datetime +import queue + +# Add camera SDK to path +sys.path.append(os.path.join(os.path.dirname(__file__), "..", "..", "camera_sdk")) +import mvsdk + +from ..core.config import CameraConfig +from ..core.state_manager import StateManager +from ..core.events import EventSystem +from .sdk_config import ensure_sdk_initialized + + +@contextlib.contextmanager +def suppress_camera_errors(): + """Context manager to temporarily suppress camera SDK error output""" + # Save original file descriptors + original_stderr = os.dup(2) + original_stdout = os.dup(1) + + try: + # Redirect stderr and stdout to devnull + devnull = os.open(os.devnull, os.O_WRONLY) + os.dup2(devnull, 2) # stderr + os.dup2(devnull, 1) # stdout (in case SDK uses stdout) + os.close(devnull) + + yield + + finally: + # Restore original file descriptors + os.dup2(original_stderr, 2) + os.dup2(original_stdout, 1) + os.close(original_stderr) + os.close(original_stdout) + + +class CameraStreamer: + """Provides live preview streaming from cameras without blocking recording""" + + def __init__(self, camera_config: CameraConfig, device_info: Any, state_manager: StateManager, event_system: EventSystem): + self.camera_config = camera_config + self.device_info = device_info + self.state_manager = state_manager + self.event_system = event_system + self.logger = logging.getLogger(f"{__name__}.{camera_config.name}") + + # Camera handle and properties (separate from recorder) + self.hCamera: Optional[int] = None + self.cap = None + self.monoCamera = False + self.frame_buffer = None + self.frame_buffer_size = 0 + + # Streaming state + self.streaming = False + self._streaming_thread: Optional[threading.Thread] = None + self._stop_streaming_event = threading.Event() + self._frame_queue = queue.Queue(maxsize=5) # Buffer for latest frames + self._lock = threading.RLock() + + # Stream settings (optimized for preview) + self.preview_fps = 10.0 # Lower FPS for preview to reduce load + self.preview_quality = 70 # JPEG quality for streaming + + def start_streaming(self) -> bool: + """Start streaming preview frames""" + with self._lock: + if self.streaming: + self.logger.warning("Streaming already active") + return True + + try: + # Initialize camera for streaming + if not self._initialize_camera(): + return False + + # Start streaming thread + self._stop_streaming_event.clear() + self._streaming_thread = threading.Thread(target=self._streaming_loop, daemon=True) + self._streaming_thread.start() + + self.streaming = True + self.logger.info(f"Started streaming for camera: {self.camera_config.name}") + return True + + except Exception as e: + self.logger.error(f"Error starting streaming: {e}") + self._cleanup_camera() + return False + + def stop_streaming(self) -> bool: + """Stop streaming preview frames""" + with self._lock: + if not self.streaming: + return True + + try: + # Signal streaming thread to stop + self._stop_streaming_event.set() + + # Wait for thread to finish + if self._streaming_thread and self._streaming_thread.is_alive(): + self._streaming_thread.join(timeout=5.0) + + # Cleanup camera resources + self._cleanup_camera() + + self.streaming = False + 
self.logger.info(f"Stopped streaming for camera: {self.camera_config.name}") + return True + + except Exception as e: + self.logger.error(f"Error stopping streaming: {e}") + return False + + def get_latest_frame(self) -> Optional[bytes]: + """Get the latest frame as JPEG bytes for streaming""" + try: + # Get latest frame from queue (non-blocking) + frame = self._frame_queue.get_nowait() + + # Encode as JPEG + _, buffer = cv2.imencode(".jpg", frame, [cv2.IMWRITE_JPEG_QUALITY, self.preview_quality]) + return buffer.tobytes() + + except queue.Empty: + return None + except Exception as e: + self.logger.error(f"Error getting latest frame: {e}") + return None + + def get_frame_generator(self) -> Generator[bytes, None, None]: + """Generator for MJPEG streaming""" + while self.streaming: + frame_bytes = self.get_latest_frame() + if frame_bytes: + yield (b"--frame\r\n" b"Content-Type: image/jpeg\r\n\r\n" + frame_bytes + b"\r\n") + else: + time.sleep(0.1) # Wait a bit if no frame available + + def _initialize_camera(self) -> bool: + """Initialize camera for streaming (separate from recording)""" + try: + self.logger.info(f"Initializing camera for streaming: {self.camera_config.name}") + + # Ensure SDK is initialized + ensure_sdk_initialized() + + # Check if device_info is valid + if self.device_info is None: + self.logger.error("No device info provided for camera initialization") + return False + + # Initialize camera (suppress output to avoid MVCAMAPI error messages) + with suppress_camera_errors(): + self.hCamera = mvsdk.CameraInit(self.device_info, -1, -1) + self.logger.info("Camera initialized successfully for streaming") + + # Get camera capabilities + self.cap = mvsdk.CameraGetCapability(self.hCamera) + + # Determine if camera is monochrome + self.monoCamera = self.cap.sIspCapacity.bMonoSensor != 0 + + # Set output format based on camera type and bit depth + if self.monoCamera: + mvsdk.CameraSetIspOutFormat(self.hCamera, mvsdk.CAMERA_MEDIA_TYPE_MONO8) + else: + mvsdk.CameraSetIspOutFormat(self.hCamera, mvsdk.CAMERA_MEDIA_TYPE_BGR8) + + # Configure camera settings for streaming (optimized for preview) + self._configure_streaming_settings() + + # Allocate frame buffer + bytes_per_pixel = 1 if self.monoCamera else 3 + self.frame_buffer_size = self.cap.sResolutionRange.iWidthMax * self.cap.sResolutionRange.iHeightMax * bytes_per_pixel + self.frame_buffer = mvsdk.CameraAlignMalloc(self.frame_buffer_size, 16) + + # Start camera + mvsdk.CameraPlay(self.hCamera) + self.logger.info("Camera started successfully for streaming") + + return True + + except Exception as e: + self.logger.error(f"Error initializing camera for streaming: {e}") + self._cleanup_camera() + return False + + def _configure_streaming_settings(self): + """Configure camera settings optimized for streaming""" + try: + # Set trigger mode to free run for continuous streaming + mvsdk.CameraSetTriggerMode(self.hCamera, 0) + + # Set exposure (use a reasonable default for preview) + exposure_us = int(self.camera_config.exposure_ms * 1000) + mvsdk.CameraSetExposureTime(self.hCamera, exposure_us) + + # Set gain + mvsdk.CameraSetAnalogGain(self.hCamera, int(self.camera_config.gain)) + + # Set frame rate for streaming (lower than recording) + if hasattr(mvsdk, "CameraSetFrameSpeed"): + mvsdk.CameraSetFrameSpeed(self.hCamera, int(self.preview_fps)) + + self.logger.info(f"Streaming settings configured: exposure={self.camera_config.exposure_ms}ms, gain={self.camera_config.gain}, fps={self.preview_fps}") + + except Exception as e: + 
self.logger.warning(f"Could not configure some streaming settings: {e}") + + def _streaming_loop(self): + """Main streaming loop that captures frames continuously""" + self.logger.info("Starting streaming loop") + + try: + while not self._stop_streaming_event.is_set(): + try: + # Capture frame with timeout + pRawData, FrameHead = mvsdk.CameraGetImageBuffer(self.hCamera, 200) # 200ms timeout + + # Process frame + mvsdk.CameraImageProcess(self.hCamera, pRawData, self.frame_buffer, FrameHead) + + # Convert to OpenCV format + frame = self._convert_frame_to_opencv(FrameHead) + + if frame is not None: + # Add frame to queue (replace oldest if queue is full) + try: + self._frame_queue.put_nowait(frame) + except queue.Full: + # Remove oldest frame and add new one + try: + self._frame_queue.get_nowait() + self._frame_queue.put_nowait(frame) + except queue.Empty: + pass + + # Release buffer + mvsdk.CameraReleaseImageBuffer(self.hCamera, pRawData) + + # Control frame rate + time.sleep(1.0 / self.preview_fps) + + except Exception as e: + if not self._stop_streaming_event.is_set(): + self.logger.error(f"Error in streaming loop: {e}") + time.sleep(0.1) # Brief pause before retrying + + except Exception as e: + self.logger.error(f"Fatal error in streaming loop: {e}") + finally: + self.logger.info("Streaming loop ended") + + def _convert_frame_to_opencv(self, FrameHead) -> Optional[np.ndarray]: + """Convert camera frame to OpenCV format""" + try: + # Convert the frame buffer memory address to a proper buffer + # that numpy can work with using mvsdk.c_ubyte + frame_data_buffer = (mvsdk.c_ubyte * FrameHead.uBytes).from_address(self.frame_buffer) + + if self.monoCamera: + # Monochrome camera + frame_data = np.frombuffer(frame_data_buffer, dtype=np.uint8) + frame = frame_data.reshape((FrameHead.iHeight, FrameHead.iWidth)) + # Convert to 3-channel for consistency + frame = cv2.cvtColor(frame, cv2.COLOR_GRAY2BGR) + else: + # Color camera (BGR format) + frame_data = np.frombuffer(frame_data_buffer, dtype=np.uint8) + frame = frame_data.reshape((FrameHead.iHeight, FrameHead.iWidth, 3)) + + return frame + + except Exception as e: + self.logger.error(f"Error converting frame: {e}") + return None + + def _cleanup_camera(self): + """Clean up camera resources""" + try: + if self.frame_buffer: + mvsdk.CameraAlignFree(self.frame_buffer) + self.frame_buffer = None + + if self.hCamera is not None: + mvsdk.CameraUnInit(self.hCamera) + self.hCamera = None + + self.logger.info("Camera resources cleaned up for streaming") + + except Exception as e: + self.logger.error(f"Error cleaning up camera resources: {e}") + + def is_streaming(self) -> bool: + """Check if streaming is active""" + return self.streaming + + def __del__(self): + """Destructor to ensure cleanup""" + if self.streaming: + self.stop_streaming()