Add MQTT publish request and response models, and implement publish route
- Introduced MQTTPublishRequest and MQTTPublishResponse models for handling MQTT message publishing.
- Implemented a new POST route for publishing MQTT messages, including error handling and logging.
- Enhanced the StandaloneAutoRecorder with improved logging during manual recording start.
- Updated the frontend to include an MQTT Debug Panel for better monitoring and debugging capabilities.
@@ -334,3 +334,21 @@ class SuccessResponse(BaseModel):
    message: str
    data: Optional[Dict[str, Any]] = None
    timestamp: str = Field(default_factory=lambda: datetime.now().isoformat())


class MQTTPublishRequest(BaseModel):
    """MQTT publish request model"""

    topic: str = Field(..., description="MQTT topic to publish to")
    payload: str = Field(..., description="Message payload")
    qos: int = Field(default=0, ge=0, le=2, description="Quality of Service level (0-2)")
    retain: bool = Field(default=False, description="Whether to retain the message")


class MQTTPublishResponse(BaseModel):
    """MQTT publish response model"""

    success: bool
    message: str
    topic: str
    payload: str

@@ -7,7 +7,7 @@ from typing import Dict
from fastapi import FastAPI, HTTPException, Query
from ...core.state_manager import StateManager
from ...mqtt.client import MQTTClient
from ..models import MQTTStatusResponse, MQTTEventsHistoryResponse, MQTTEventResponse
from ..models import MQTTStatusResponse, MQTTEventsHistoryResponse, MQTTEventResponse, MQTTPublishRequest, MQTTPublishResponse


def register_mqtt_routes(
@@ -70,3 +70,33 @@ def register_mqtt_routes(
            logger.error(f"Error getting MQTT events: {e}")
            raise HTTPException(status_code=500, detail=str(e))

    @app.post("/mqtt/publish", response_model=MQTTPublishResponse)
    async def publish_mqtt_message(request: MQTTPublishRequest):
        """Publish an MQTT message (for testing/debugging)"""
        try:
            if not mqtt_client.is_connected():
                raise HTTPException(status_code=503, detail="MQTT client is not connected")

            success = mqtt_client.publish_message(
                topic=request.topic,
                payload=request.payload,
                qos=request.qos,
                retain=request.retain
            )

            if success:
                logger.info(f"Published MQTT message: {request.topic} -> {request.payload}")
                return MQTTPublishResponse(
                    success=True,
                    message=f"Message published successfully to {request.topic}",
                    topic=request.topic,
                    payload=request.payload
                )
            else:
                raise HTTPException(status_code=500, detail="Failed to publish MQTT message")
        except HTTPException:
            raise
        except Exception as e:
            logger.error(f"Error publishing MQTT message: {e}")
            raise HTTPException(status_code=500, detail=str(e))

@@ -249,19 +249,30 @@ class StandaloneAutoRecorder:
            self.logger.info(f"📹 AUTO-RECORDER: Starting recording with settings - Exposure: {camera_config.exposure_ms}ms, Gain: {camera_config.gain}, FPS: {camera_config.target_fps}")

            # Use camera manager to start recording with camera's default settings
            success = self.camera_manager.manual_start_recording(
                camera_name=camera_name,
                filename=filename,
                exposure_ms=camera_config.exposure_ms if camera_config else None,
                gain=camera_config.gain if camera_config else None,
                fps=camera_config.target_fps if camera_config else None
            )
            self.logger.info(f"📹 AUTO-RECORDER: Calling manual_start_recording for {camera_name} with filename: {filename}")
            try:
                success = self.camera_manager.manual_start_recording(
                    camera_name=camera_name,
                    filename=filename,
                    exposure_ms=camera_config.exposure_ms if camera_config else None,
                    gain=camera_config.gain if camera_config else None,
                    fps=camera_config.target_fps if camera_config else None
                )
                self.logger.info(f"📹 AUTO-RECORDER: manual_start_recording returned: {success}")
            except Exception as e:
                self.logger.error(f"❌ AUTO-RECORDER: Exception calling manual_start_recording: {e}", exc_info=True)
                success = False

            if success:
                self.logger.info(f"✅ AUTO-RECORDER: Successfully started auto-recording: {camera_name} -> {filename}")
                self.active_recordings[camera_name] = filename
            else:
                self.logger.error(f"❌ AUTO-RECORDER: Failed to start auto-recording for camera {camera_name} (manual_start_recording returned False)")
                # Check camera status for more details
                if self.state_manager:
                    camera_info = self.state_manager.get_camera_status(camera_name)
                    if camera_info:
                        self.logger.error(f"❌ AUTO-RECORDER: Camera status - is_recording: {camera_info.is_recording}, state: {camera_info.state}")
        else:
            # Standalone mode - use own recorder
            recorder = self._get_camera_recorder(camera_name)

docs/DESIGN_RECOMMENDATION_SUMMARY.md (new file, 94 lines)
@@ -0,0 +1,94 @@
# Database Design Recommendation Summary

## Critical Issue Identified

Your current design has a **fundamental flaw** that prevents it from working correctly with repetitions:

### The Problem

The phase tables (`soaking`, `airdrying`, `cracking`, `shelling`) have this constraint:
```sql
CONSTRAINT unique_soaking_per_experiment UNIQUE (experiment_id)
```

This means you can **only have ONE soaking record per experiment**, even if you have 3 repetitions! This breaks your entire repetition system.

### Why This Happens

When you create an experiment with 3 repetitions:
1. ✅ 3 rows are created in `experiment_repetitions`
2. ❌ But you can only create 1 row in `soaking` (due to UNIQUE constraint)
3. ❌ The other 2 repetitions cannot have soaking data!
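
A minimal sketch of the failure (illustrative placeholder UUIDs, non-essential columns omitted):

```sql
-- Repetition 1: the first soaking row is accepted
INSERT INTO public.soaking (experiment_id, repetition_id, scheduled_start_time)
VALUES ('<experiment-uuid>', '<repetition-1-uuid>', NOW());

-- Repetition 2: rejected, because unique_soaking_per_experiment allows only
-- one soaking row per experiment_id
INSERT INTO public.soaking (experiment_id, repetition_id, scheduled_start_time)
VALUES ('<experiment-uuid>', '<repetition-2-uuid>', NOW());
-- ERROR: duplicate key value violates unique constraint "unique_soaking_per_experiment"
```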

## Design Assessment

### Current Approach: Separate Tables (❌ Not Recommended)

**Problems:**
- ❌ UNIQUE constraint breaks repetitions
- ❌ Schema duplication (same structure 4 times)
- ❌ Hard to query "all phases for a repetition"
- ❌ Sequential timing calculations are complex and error-prone
- ❌ No automatic phase creation when repetitions are created

### Recommended Approach: Unified Table (✅ Best Practice)

**Benefits:**
- ✅ Properly supports repetitions (one phase per repetition)
- ✅ Automatic phase creation via database trigger
- ✅ Simple sequential time calculations
- ✅ Easy to query all phases for a repetition
- ✅ Single source of truth
- ✅ Easier to maintain and extend
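
For example, "all phases for a repetition" becomes a single query against the proposed table (a sketch; `'<repetition-uuid>'` is a placeholder):

```sql
SELECT phase_type, status, scheduled_start_time, scheduled_end_time
FROM public.experiment_phase_executions
WHERE repetition_id = '<repetition-uuid>'
ORDER BY scheduled_start_time;
```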

## Recommended Solution

I've created a migration file that implements a **unified `experiment_phase_executions` table**:

### Key Features:

1. **Single Table for All Phases**
   - Uses `phase_type` enum to distinguish phases
   - One row per phase per repetition
   - Proper UNIQUE constraint: `(repetition_id, phase_type)`

2. **Automatic Phase Creation**
   - Database trigger automatically creates phase executions when a repetition is created
   - Based on the experiment's phase configuration (`has_soaking`, `has_airdrying`, etc.)

3. **Automatic Sequential Timing**
   - Database trigger calculates sequential start times
   - If soaking ends at 5pm, airdrying automatically starts at 5pm
   - If airdrying ends at 5:12pm, cracking automatically starts at 5:12pm

4. **Backward Compatibility Views**
   - Views created for `soaking_view`, `airdrying_view`, etc.
   - Existing code can continue to work (with minor updates)
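
To illustrate the sequential timing described in point 3, here is a sketch of how the trigger behaves once durations are filled in (placeholder UUID, other columns omitted; the trigger itself lives in the migration):

```sql
-- Soaking: 15:00 start + 120 minutes -> trigger sets scheduled_end_time to 17:00
UPDATE public.experiment_phase_executions
SET scheduled_start_time = '2024-05-01 15:00+00', soaking_duration_minutes = 120
WHERE repetition_id = '<repetition-uuid>' AND phase_type = 'soaking';

-- Airdrying: the same trigger copies soaking's end time (17:00) into
-- scheduled_start_time and, with a 12-minute duration, sets the end to 17:12
UPDATE public.experiment_phase_executions
SET duration_minutes = 12
WHERE repetition_id = '<repetition-uuid>' AND phase_type = 'airdrying';
```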

## Files Created

1. **`docs/database_design_analysis.md`** - Detailed analysis with comparison matrix
2. **`management-dashboard-web-app/supabase/migrations/00012_unified_phase_executions.sql`** - Complete migration implementation

## Migration Path

1. Review the analysis document
2. Test the migration on a development database
3. Update application code to use the new table structure
4. Migrate existing data (if any)
5. Drop old phase tables after verification

## Alternative: Fix Current Design

If you prefer to keep separate tables, you MUST:
1. Remove `UNIQUE (experiment_id)` constraints from all phase tables
2. Keep only `UNIQUE (repetition_id)` constraints
3. Add trigger to auto-create phase entries when repetitions are created
4. Fix sequential timing calculations to use `repetition_id` instead of `experiment_id`

However, this still has the drawbacks of schema duplication and complexity.

## Recommendation

**Use the unified table approach** - it's cleaner, more maintainable, and properly supports your repetition model.

docs/database_design_analysis.md (new file, 307 lines)
@@ -0,0 +1,307 @@
# Database Design Analysis: Experiment Phases & Repetitions

## Current Design Issues

### 1. **Critical UNIQUE Constraint Problem**

The phase tables (`soaking`, `airdrying`, `cracking`, `shelling`) have a fundamental flaw:

```sql
-- Current problematic constraints
CONSTRAINT unique_soaking_per_experiment UNIQUE (experiment_id),
CONSTRAINT unique_soaking_per_repetition UNIQUE (repetition_id)
```

**Problem**: The `UNIQUE (experiment_id)` constraint prevents having multiple phase records for different repetitions of the same experiment. If an experiment has 3 repetitions, you can only store phase data for ONE repetition!

**Impact**: This completely breaks the repetition system - you cannot have separate phase executions for each repetition.

### 2. **Ambiguous Data Model**

The phase tables reference both `experiment_id` AND `repetition_id`, creating confusion:
- If data belongs to the experiment, why have `repetition_id`?
- If data belongs to the repetition, why have `UNIQUE (experiment_id)`?

### 3. **Missing Automatic Phase Creation**

When repetitions are created, corresponding phase entries are NOT automatically created. This leads to:
- Incomplete data model
- Manual work required
- Potential data inconsistencies

### 4. **Sequential Timing Calculation Issues**

The triggers that calculate sequential start times look up by `experiment_id`:
```sql
SELECT s.scheduled_end_time INTO NEW.scheduled_start_time
FROM public.soaking s
WHERE s.experiment_id = NEW.experiment_id  -- WRONG! Should be repetition_id
```

This will fail when you have multiple repetitions because:
- It can't determine which repetition's soaking to use
- It might pick the wrong repetition's data

## Design Assessment: Separate Tables vs Unified Table

### Current Approach: Separate Tables (❌ Not Recommended)

**Pros:**
- Clear separation of concerns
- Type-specific columns can be different
- Easier to understand at a glance

**Cons:**
- ❌ Schema duplication (same structure repeated 4 times)
- ❌ Harder to query across phases
- ❌ More complex to maintain
- ❌ Difficult to enforce sequential relationships
- ❌ More tables to manage
- ❌ Harder to get "all phases for a repetition" queries
- ❌ Current UNIQUE constraints break the repetition model

### Recommended Approach: Unified Phase Execution Table (✅ Best Practice)

**Pros:**
- ✅ Single source of truth for phase executions
- ✅ Easy to query all phases for a repetition
- ✅ Simple sequential phase calculations
- ✅ Easier to enforce business rules (one phase per repetition)
- ✅ Less schema duplication
- ✅ Better for reporting and analytics
- ✅ Easier to add new phases in the future
- ✅ Can use database views for phase-specific access

**Cons:**
- Requires phase-specific columns to be nullable (or use JSONB)
- Slightly more complex queries for phase-specific data
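
If the growing set of nullable phase-specific columns ever becomes a concern, the JSONB option mentioned above is one hedge (a sketch only, not part of the recommended migration):

```sql
-- Keep phase-specific parameters in a single JSONB column instead of
-- per-phase nullable columns, e.g. {"soaking_duration_minutes": 120}
ALTER TABLE public.experiment_phase_executions
    ADD COLUMN phase_params JSONB NOT NULL DEFAULT '{}'::jsonb;

-- Phase-specific filtering stays reasonably simple
SELECT *
FROM public.experiment_phase_executions
WHERE phase_type = 'soaking'
  AND (phase_params->>'soaking_duration_minutes')::int > 60;
```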

## Recommended Solution: Unified Phase Execution Table

### Option 1: Single Table with Phase Type Enum (Recommended)

```sql
CREATE TABLE public.experiment_phase_executions (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    repetition_id UUID NOT NULL REFERENCES public.experiment_repetitions(id) ON DELETE CASCADE,
    phase_type TEXT NOT NULL CHECK (phase_type IN ('soaking', 'airdrying', 'cracking', 'shelling')),

    -- Scheduling fields (common to all phases)
    scheduled_start_time TIMESTAMP WITH TIME ZONE NOT NULL,
    scheduled_end_time TIMESTAMP WITH TIME ZONE,
    actual_start_time TIMESTAMP WITH TIME ZONE,
    actual_end_time TIMESTAMP WITH TIME ZONE,

    -- Phase-specific parameters (nullable, only relevant for specific phases)
    -- Soaking
    soaking_duration_minutes INTEGER CHECK (soaking_duration_minutes > 0),

    -- Airdrying
    duration_minutes INTEGER CHECK (duration_minutes > 0),

    -- Cracking
    machine_type_id UUID REFERENCES public.machine_types(id),

    -- Status tracking
    status TEXT NOT NULL DEFAULT 'pending'
        CHECK (status IN ('pending', 'scheduled', 'in_progress', 'completed', 'cancelled')),

    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    created_by UUID NOT NULL REFERENCES public.user_profiles(id),

    -- Ensure one execution per phase type per repetition
    CONSTRAINT unique_phase_per_repetition UNIQUE (repetition_id, phase_type)
);

-- Indexes
CREATE INDEX idx_phase_executions_repetition_id ON public.experiment_phase_executions(repetition_id);
CREATE INDEX idx_phase_executions_phase_type ON public.experiment_phase_executions(phase_type);
CREATE INDEX idx_phase_executions_status ON public.experiment_phase_executions(status);

-- Function to automatically calculate sequential start times
CREATE OR REPLACE FUNCTION calculate_sequential_phase_start_time()
RETURNS TRIGGER AS $$
DECLARE
    prev_phase_end_time TIMESTAMP WITH TIME ZONE;
    experiment_phase_config UUID;
    phase_order TEXT[] := ARRAY['soaking', 'airdrying', 'cracking', 'shelling'];
    current_phase_index INT;
    prev_phase_name TEXT;
BEGIN
    -- Get the experiment phase configuration
    SELECT e.phase_id INTO experiment_phase_config
    FROM public.experiments e
    JOIN public.experiment_repetitions er ON e.id = er.experiment_id
    WHERE er.id = NEW.repetition_id;

    -- Find current phase index
    SELECT array_position(phase_order, NEW.phase_type) INTO current_phase_index;

    -- If not the first phase, get previous phase's end time
    IF current_phase_index > 1 THEN
        prev_phase_name := phase_order[current_phase_index - 1];

        SELECT scheduled_end_time INTO prev_phase_end_time
        FROM public.experiment_phase_executions
        WHERE repetition_id = NEW.repetition_id
          AND phase_type = prev_phase_name
        ORDER BY created_at DESC
        LIMIT 1;

        -- If previous phase exists, use its end time as start time
        IF prev_phase_end_time IS NOT NULL THEN
            NEW.scheduled_start_time := prev_phase_end_time;
        END IF;
    END IF;

    -- Calculate end time based on duration
    IF NEW.phase_type = 'soaking' AND NEW.soaking_duration_minutes IS NOT NULL THEN
        NEW.scheduled_end_time := NEW.scheduled_start_time + (NEW.soaking_duration_minutes || ' minutes')::INTERVAL;
    ELSIF NEW.phase_type = 'airdrying' AND NEW.duration_minutes IS NOT NULL THEN
        NEW.scheduled_end_time := NEW.scheduled_start_time + (NEW.duration_minutes || ' minutes')::INTERVAL;
    END IF;

    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Trigger to automatically calculate sequential times
CREATE TRIGGER trigger_calculate_sequential_phase_times
    BEFORE INSERT OR UPDATE ON public.experiment_phase_executions
    FOR EACH ROW
    EXECUTE FUNCTION calculate_sequential_phase_start_time();

-- Function to automatically create phase executions when repetition is created
CREATE OR REPLACE FUNCTION create_phase_executions_for_repetition()
RETURNS TRIGGER AS $$
DECLARE
    exp_phase_config RECORD;
    phase_type_list TEXT[] := ARRAY[]::TEXT[];
    phase_name TEXT;
    soaking_start_time TIMESTAMP WITH TIME ZONE;
BEGIN
    -- Get experiment phase configuration
    SELECT ep.*, e.soaking_duration_hr, e.air_drying_time_min
    INTO exp_phase_config
    FROM public.experiments e
    JOIN public.experiment_phases ep ON e.phase_id = ep.id
    JOIN public.experiment_repetitions er ON e.id = er.experiment_id
    WHERE er.id = NEW.id;

    -- Build list of phases to create based on experiment configuration
    IF exp_phase_config.has_soaking THEN
        phase_type_list := array_append(phase_type_list, 'soaking');
    END IF;
    IF exp_phase_config.has_airdrying THEN
        phase_type_list := array_append(phase_type_list, 'airdrying');
    END IF;
    IF exp_phase_config.has_cracking THEN
        phase_type_list := array_append(phase_type_list, 'cracking');
    END IF;
    IF exp_phase_config.has_shelling THEN
        phase_type_list := array_append(phase_type_list, 'shelling');
    END IF;

    -- Create phase executions for each required phase
    FOREACH phase_name IN ARRAY phase_type_list
    LOOP
        INSERT INTO public.experiment_phase_executions (
            repetition_id,
            phase_type,
            scheduled_start_time,
            scheduled_end_time,
            status,
            created_by,
            -- Phase-specific parameters
            soaking_duration_minutes,
            duration_minutes
        )
        VALUES (
            NEW.id,
            phase_name,
            NOW(), -- Default start time, will be updated when scheduled
            NULL,  -- Will be calculated by trigger
            'pending',
            NEW.created_by,
            CASE WHEN phase_name = 'soaking'
                 THEN (exp_phase_config.soaking_duration_hr * 60)::INTEGER
                 ELSE NULL END,
            CASE WHEN phase_name = 'airdrying'
                 THEN exp_phase_config.air_drying_time_min
                 ELSE NULL END
        );
    END LOOP;

    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Trigger to auto-create phases when repetition is created
CREATE TRIGGER trigger_create_phase_executions
    AFTER INSERT ON public.experiment_repetitions
    FOR EACH ROW
    EXECUTE FUNCTION create_phase_executions_for_repetition();
```

### Option 2: Keep Separate Tables but Fix Constraints (Alternative)

If you prefer to keep separate tables, you MUST fix the constraints:

```sql
-- Remove experiment_id UNIQUE constraints
ALTER TABLE public.soaking DROP CONSTRAINT IF EXISTS unique_soaking_per_experiment;
ALTER TABLE public.airdrying DROP CONSTRAINT IF EXISTS unique_airdrying_per_experiment;
ALTER TABLE public.cracking DROP CONSTRAINT IF EXISTS unique_cracking_per_experiment;
ALTER TABLE public.shelling DROP CONSTRAINT IF EXISTS unique_shelling_per_experiment;

-- Keep only repetition_id UNIQUE constraints (one phase per repetition)
-- These already exist, which is good

-- Optionally, remove experiment_id if not needed
-- OR keep it for easier querying but without UNIQUE constraint
```

Then add triggers to auto-create phase entries when repetitions are created (similar to Option 1's trigger).
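
A sketch of what that auto-creation trigger could look like for the separate-table layout (column names are assumed from the tables above; only two phases shown, the rest follow the same pattern):

```sql
CREATE OR REPLACE FUNCTION create_phase_rows_for_repetition()
RETURNS TRIGGER AS $$
DECLARE
    cfg RECORD;
BEGIN
    -- Look up which phases this experiment uses
    SELECT ep.has_soaking, ep.has_airdrying
    INTO cfg
    FROM public.experiments e
    JOIN public.experiment_phases ep ON e.phase_id = ep.id
    WHERE e.id = NEW.experiment_id;

    IF cfg.has_soaking THEN
        INSERT INTO public.soaking (experiment_id, repetition_id, scheduled_start_time, created_by)
        VALUES (NEW.experiment_id, NEW.id, NOW(), NEW.created_by);
    END IF;

    IF cfg.has_airdrying THEN
        INSERT INTO public.airdrying (experiment_id, repetition_id, scheduled_start_time, created_by)
        VALUES (NEW.experiment_id, NEW.id, NOW(), NEW.created_by);
    END IF;

    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER trigger_create_phase_rows
    AFTER INSERT ON public.experiment_repetitions
    FOR EACH ROW
    EXECUTE FUNCTION create_phase_rows_for_repetition();
```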

## Comparison Matrix

| Aspect | Separate Tables (Current) | Unified Table (Recommended) |
|--------|---------------------------|------------------------------|
| **Repetition Support** | ❌ Broken (UNIQUE constraint) | ✅ Works correctly |
| **Query Complexity** | ❌ Requires UNION or multiple queries | ✅ Simple single query |
| **Sequential Calculations** | ❌ Complex, error-prone | ✅ Simple, reliable |
| **Schema Maintenance** | ❌ 4x duplication | ✅ Single source |
| **Auto-creation** | ❌ Manual or missing | ✅ Automatic via trigger |
| **Adding New Phases** | ❌ Requires new table | ✅ Just add enum value |
| **Reporting** | ❌ Complex joins/unions | ✅ Straightforward |
| **Data Integrity** | ❌ Can have inconsistencies | ✅ Easier to enforce |

## Recommendation

**Use Option 1 (Unified Table)** because:
1. ✅ Fixes the repetition constraint issue
2. ✅ Simplifies the data model
3. ✅ Makes sequential phase calculations straightforward
4. ✅ Automatically creates phases when repetitions are created
5. ✅ Better for future scalability
6. ✅ Easier to maintain and query

You can still create database **views** for phase-specific access if needed:
```sql
CREATE VIEW soaking_executions AS
SELECT * FROM experiment_phase_executions WHERE phase_type = 'soaking';

CREATE VIEW airdrying_executions AS
SELECT * FROM experiment_phase_executions WHERE phase_type = 'airdrying';
-- etc.
```

This gives you the benefits of a unified table while maintaining phase-specific interfaces for your application code.

## Migration Path

1. Create new `experiment_phase_executions` table
2. Migrate existing data from separate tables
3. Create views for backward compatibility (if needed)
4. Update application code to use new table
5. Drop old tables after verification
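
For step 2, the data move might look roughly like this for `soaking` (a sketch; it assumes the old table already carries `repetition_id` and compatible column names, and the analogous statement is repeated for the other phase tables):

```sql
INSERT INTO public.experiment_phase_executions (
    repetition_id, phase_type, scheduled_start_time, scheduled_end_time,
    actual_start_time, actual_end_time, soaking_duration_minutes, created_by
)
SELECT
    s.repetition_id, 'soaking', s.scheduled_start_time, s.scheduled_end_time,
    s.actual_start_time, s.actual_end_time, s.soaking_duration_minutes, s.created_by
FROM public.soaking s
-- Rows auto-created by the trigger for existing repetitions win;
-- switch to DO UPDATE if the migrated values should take precedence
ON CONFLICT (repetition_id, phase_type) DO NOTHING;
```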

management-dashboard-web-app/supabase/migrations/00012_unified_phase_executions.sql (new file, 300 lines)
@@ -0,0 +1,300 @@
-- Unified Phase Executions Table
-- This migration replaces the separate phase tables (soaking, airdrying, cracking, shelling)
-- with a unified table that properly supports repetitions

-- =============================================
-- 1. CREATE UNIFIED PHASE EXECUTIONS TABLE
-- =============================================

CREATE TABLE IF NOT EXISTS public.experiment_phase_executions (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    repetition_id UUID NOT NULL REFERENCES public.experiment_repetitions(id) ON DELETE CASCADE,
    phase_type TEXT NOT NULL CHECK (phase_type IN ('soaking', 'airdrying', 'cracking', 'shelling')),

    -- Scheduling fields (common to all phases)
    scheduled_start_time TIMESTAMP WITH TIME ZONE NOT NULL,
    scheduled_end_time TIMESTAMP WITH TIME ZONE,
    actual_start_time TIMESTAMP WITH TIME ZONE,
    actual_end_time TIMESTAMP WITH TIME ZONE,

    -- Phase-specific parameters (nullable, only relevant for specific phases)
    -- Soaking
    soaking_duration_minutes INTEGER CHECK (soaking_duration_minutes > 0),

    -- Airdrying
    duration_minutes INTEGER CHECK (duration_minutes > 0),

    -- Cracking
    machine_type_id UUID REFERENCES public.machine_types(id),

    -- Status tracking
    status TEXT NOT NULL DEFAULT 'pending'
        CHECK (status IN ('pending', 'scheduled', 'in_progress', 'completed', 'cancelled')),

    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    created_by UUID NOT NULL REFERENCES public.user_profiles(id),

    -- Ensure one execution per phase type per repetition
    CONSTRAINT unique_phase_per_repetition UNIQUE (repetition_id, phase_type)
);

-- =============================================
-- 2. INDEXES FOR PERFORMANCE
-- =============================================

CREATE INDEX IF NOT EXISTS idx_phase_executions_repetition_id
    ON public.experiment_phase_executions(repetition_id);
CREATE INDEX IF NOT EXISTS idx_phase_executions_phase_type
    ON public.experiment_phase_executions(phase_type);
CREATE INDEX IF NOT EXISTS idx_phase_executions_status
    ON public.experiment_phase_executions(status);
CREATE INDEX IF NOT EXISTS idx_phase_executions_scheduled_start_time
    ON public.experiment_phase_executions(scheduled_start_time);
CREATE INDEX IF NOT EXISTS idx_phase_executions_machine_type_id
    ON public.experiment_phase_executions(machine_type_id);
CREATE INDEX IF NOT EXISTS idx_phase_executions_created_by
    ON public.experiment_phase_executions(created_by);

-- =============================================
-- 3. FUNCTION: Calculate Sequential Phase Start Times
-- =============================================

CREATE OR REPLACE FUNCTION calculate_sequential_phase_start_time()
RETURNS TRIGGER AS $$
DECLARE
    prev_phase_end_time TIMESTAMP WITH TIME ZONE;
    phase_order TEXT[] := ARRAY['soaking', 'airdrying', 'cracking', 'shelling'];
    current_phase_index INT;
    prev_phase_name TEXT;
BEGIN
    -- Find current phase index in the sequence
    SELECT array_position(phase_order, NEW.phase_type) INTO current_phase_index;

    -- If not the first phase, get previous phase's end time from the same repetition
    IF current_phase_index > 1 THEN
        prev_phase_name := phase_order[current_phase_index - 1];

        SELECT scheduled_end_time INTO prev_phase_end_time
        FROM public.experiment_phase_executions
        WHERE repetition_id = NEW.repetition_id
          AND phase_type = prev_phase_name
        ORDER BY created_at DESC
        LIMIT 1;

        -- If previous phase exists and has an end time, use it as start time
        IF prev_phase_end_time IS NOT NULL THEN
            NEW.scheduled_start_time := prev_phase_end_time;
        END IF;
    END IF;

    -- Calculate end time based on duration (for phases with duration)
    IF NEW.phase_type = 'soaking' AND NEW.soaking_duration_minutes IS NOT NULL THEN
        NEW.scheduled_end_time := NEW.scheduled_start_time +
            (NEW.soaking_duration_minutes || ' minutes')::INTERVAL;
    ELSIF NEW.phase_type = 'airdrying' AND NEW.duration_minutes IS NOT NULL THEN
        NEW.scheduled_end_time := NEW.scheduled_start_time +
            (NEW.duration_minutes || ' minutes')::INTERVAL;
    END IF;

    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- =============================================
-- 4. FUNCTION: Auto-create Phase Executions for New Repetition
-- =============================================

CREATE OR REPLACE FUNCTION create_phase_executions_for_repetition()
RETURNS TRIGGER AS $$
DECLARE
    exp_phase_config RECORD;
    phase_type_list TEXT[] := ARRAY[]::TEXT[];
    phase_name TEXT;
BEGIN
    -- Get experiment phase configuration
    -- Note: Phase durations may need to be set later when scheduling
    SELECT
        ep.has_soaking,
        ep.has_airdrying,
        ep.has_cracking,
        ep.has_shelling,
        ep.cracking_machine_type_id
    INTO exp_phase_config
    FROM public.experiments e
    JOIN public.experiment_phases ep ON e.phase_id = ep.id
    WHERE e.id = NEW.experiment_id;

    -- Build list of phases to create based on experiment configuration
    IF exp_phase_config.has_soaking THEN
        phase_type_list := array_append(phase_type_list, 'soaking');
    END IF;
    IF exp_phase_config.has_airdrying THEN
        phase_type_list := array_append(phase_type_list, 'airdrying');
    END IF;
    IF exp_phase_config.has_cracking THEN
        phase_type_list := array_append(phase_type_list, 'cracking');
    END IF;
    IF exp_phase_config.has_shelling THEN
        phase_type_list := array_append(phase_type_list, 'shelling');
    END IF;

    -- Create phase executions for each required phase
    FOREACH phase_name IN ARRAY phase_type_list
    LOOP
        INSERT INTO public.experiment_phase_executions (
            repetition_id,
            phase_type,
            scheduled_start_time,
            status,
            created_by,
            -- Phase-specific parameters
            soaking_duration_minutes,
            duration_minutes,
            machine_type_id
        )
        VALUES (
            NEW.id,
            phase_name,
            NOW(), -- Default start time, will be updated when scheduled or by sequential calculation
            'pending',
            NEW.created_by,
            -- Set phase-specific parameters
            -- Note: Durations will be set when the repetition is scheduled
            -- These can be NULL initially and updated later
            NULL, -- soaking_duration_minutes (set when scheduled)
            NULL, -- duration_minutes (set when scheduled)
            CASE WHEN phase_name = 'cracking'
                 THEN exp_phase_config.cracking_machine_type_id
                 ELSE NULL END
        );
    END LOOP;

    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- =============================================
-- 5. TRIGGERS
-- =============================================

-- Trigger to automatically calculate sequential times and durations
CREATE TRIGGER trigger_calculate_sequential_phase_times
    BEFORE INSERT OR UPDATE ON public.experiment_phase_executions
    FOR EACH ROW
    EXECUTE FUNCTION calculate_sequential_phase_start_time();

-- Trigger to auto-create phases when repetition is created
CREATE TRIGGER trigger_create_phase_executions
    AFTER INSERT ON public.experiment_repetitions
    FOR EACH ROW
    EXECUTE FUNCTION create_phase_executions_for_repetition();

-- Trigger for updated_at
CREATE TRIGGER set_updated_at_phase_executions
    BEFORE UPDATE ON public.experiment_phase_executions
    FOR EACH ROW
    EXECUTE FUNCTION public.handle_updated_at();

-- =============================================
-- 6. CREATE VIEWS FOR PHASE-SPECIFIC ACCESS (Backward Compatibility)
-- =============================================

-- These views allow existing code to work with phase-specific "tables"
CREATE OR REPLACE VIEW public.soaking_view AS
SELECT
    id,
    (SELECT experiment_id FROM experiment_repetitions WHERE id = repetition_id) as experiment_id,
    repetition_id,
    scheduled_start_time,
    actual_start_time,
    soaking_duration_minutes,
    scheduled_end_time,
    actual_end_time,
    created_at,
    updated_at,
    created_by
FROM public.experiment_phase_executions
WHERE phase_type = 'soaking';

CREATE OR REPLACE VIEW public.airdrying_view AS
SELECT
    id,
    (SELECT experiment_id FROM experiment_repetitions WHERE id = repetition_id) as experiment_id,
    repetition_id,
    scheduled_start_time,
    actual_start_time,
    duration_minutes,
    scheduled_end_time,
    actual_end_time,
    created_at,
    updated_at,
    created_by
FROM public.experiment_phase_executions
WHERE phase_type = 'airdrying';

CREATE OR REPLACE VIEW public.cracking_view AS
SELECT
    id,
    (SELECT experiment_id FROM experiment_repetitions WHERE id = repetition_id) as experiment_id,
    repetition_id,
    machine_type_id,
    scheduled_start_time,
    actual_start_time,
    actual_end_time,
    created_at,
    updated_at,
    created_by
FROM public.experiment_phase_executions
WHERE phase_type = 'cracking';

CREATE OR REPLACE VIEW public.shelling_view AS
SELECT
    id,
    (SELECT experiment_id FROM experiment_repetitions WHERE id = repetition_id) as experiment_id,
    repetition_id,
    scheduled_start_time,
    actual_start_time,
    actual_end_time,
    created_at,
    updated_at,
    created_by
FROM public.experiment_phase_executions
WHERE phase_type = 'shelling';

-- =============================================
-- 7. GRANT PERMISSIONS
-- =============================================

GRANT ALL ON public.experiment_phase_executions TO authenticated;
GRANT SELECT ON public.soaking_view TO authenticated;
GRANT SELECT ON public.airdrying_view TO authenticated;
GRANT SELECT ON public.cracking_view TO authenticated;
GRANT SELECT ON public.shelling_view TO authenticated;

-- =============================================
-- 8. ENABLE ROW LEVEL SECURITY
-- =============================================

ALTER TABLE public.experiment_phase_executions ENABLE ROW LEVEL SECURITY;

-- =============================================
-- 9. CREATE RLS POLICIES
-- =============================================

CREATE POLICY "Phase executions are viewable by authenticated users"
    ON public.experiment_phase_executions
    FOR SELECT USING (auth.role() = 'authenticated');

CREATE POLICY "Phase executions are insertable by authenticated users"
    ON public.experiment_phase_executions
    FOR INSERT WITH CHECK (auth.role() = 'authenticated');

CREATE POLICY "Phase executions are updatable by authenticated users"
    ON public.experiment_phase_executions
    FOR UPDATE USING (auth.role() = 'authenticated');

CREATE POLICY "Phase executions are deletable by authenticated users"
    ON public.experiment_phase_executions
    FOR DELETE USING (auth.role() = 'authenticated');

@@ -40,6 +40,12 @@ export default defineConfig({
        changeOrigin: true,
        rewrite: (path) => path.replace(/^\/api/, ''),
      },
      '/media-api': {
        // Proxy media-api requests to the media-api container
        target: 'http://host.docker.internal:8090',
        changeOrigin: true,
        rewrite: (path) => path.replace(/^\/media-api/, ''),
      },
    },
  },
})

@@ -20,10 +20,10 @@ transcoding_semaphore = threading.Semaphore(MAX_CONCURRENT_TRANSCODING)

app = FastAPI(title="Media API", version="0.1.0")

# CORS for dashboard at exp-dash:8080 (and localhost for convenience)
# CORS for dashboard - allow all origins to support access from different IPs/hostnames
app.add_middleware(
    CORSMiddleware,
    allow_origins=["http://exp-dash:8080", "http://localhost:8080"],
    allow_origins=["*"],  # Allow all origins for flexibility
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"]
@@ -143,15 +143,25 @@ def get_video_mime_type(path: pathlib.Path) -> str:
    return mime_types.get(ext, "video/mp4")


def open_file_range(path: pathlib.Path, start: int, end: Optional[int]):
def generate_file_range(path: pathlib.Path, start: int, end: Optional[int], chunk_size: int = 8192):
    """
    Generator that yields file chunks in a memory-efficient way.
    This prevents MemoryError when streaming large video files.
    """
    file_size = path.stat().st_size
    if end is None or end >= file_size:
        end = file_size - 1
    length = end - start + 1

    remaining = end - start + 1
    with open(path, 'rb') as f:
        f.seek(start)
        chunk = f.read(length)
        return chunk, file_size, start, end
        while remaining > 0:
            read_size = min(chunk_size, remaining)
            chunk = f.read(read_size)
            if not chunk:
                break
            yield chunk
            remaining -= len(chunk)


@app.head("/videos/{file_id:path}/stream")
@@ -212,14 +222,22 @@ def stream_file(request: Request, file_id: str):
    if start > end:
        raise HTTPException(status_code=416, detail="Range Not Satisfiable")

    chunk, size, actual_start, actual_end = open_file_range(p, start, end)
    # Calculate content length
    content_length = end - start + 1

    # Use streaming response to avoid loading entire chunk into memory
    # This prevents MemoryError for large video files
    headers = {
        **base_headers,
        "Content-Range": f"bytes {actual_start}-{actual_end}/{size}",
        "Content-Length": str(len(chunk)),
        "Content-Range": f"bytes {start}-{end}/{file_size}",
        "Content-Length": str(content_length),
    }
    return Response(content=chunk, status_code=206, headers=headers)
    return StreamingResponse(
        generate_file_range(p, start, end),
        media_type=content_type,
        headers=headers,
        status_code=206
    )

# Convenience endpoint: pass file_id via query instead of path (accepts raw or URL-encoded)
@app.head("/videos/stream")

@@ -8,6 +8,7 @@ import { CameraCountWidget } from './widgets/CameraCountWidget'
import { CameraCard } from './components/CameraCard'
import { CameraPreviewModal } from './components/CameraPreviewModal'
import { CameraConfigModal } from './components/CameraConfigModal'
import { MqttDebugPanel } from './components/MqttDebugPanel'

// Get WebSocket URL from environment or construct it
const getWebSocketUrl = () => {
@@ -38,6 +39,7 @@ export default function App() {
  const [previewCamera, setPreviewCamera] = useState<string | null>(null)
  const [configModalOpen, setConfigModalOpen] = useState(false)
  const [selectedCamera, setSelectedCamera] = useState<string | null>(null)
  const [debugPanelOpen, setDebugPanelOpen] = useState(false)

  // WebSocket connection
  const { isConnected, subscribe } = useWebSocket(getWebSocketUrl())
@@ -513,7 +515,25 @@ export default function App() {
            setNotification({ type: 'error', message: error })
          }}
        />
      )}

      {/* MQTT Debug Panel */}
      <MqttDebugPanel
        isOpen={debugPanelOpen}
        onClose={() => setDebugPanelOpen(false)}
      />

      {/* Debug Button - Bottom Right */}
      <button
        onClick={() => setDebugPanelOpen(true)}
        className="fixed bottom-6 right-6 z-40 bg-purple-600 text-white px-4 py-2 rounded-lg shadow-lg hover:bg-purple-700 transition-colors flex items-center gap-2"
        title="Open MQTT Debug Panel"
      >
        <svg className="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
          <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M10.325 4.317c.426-1.756 2.924-1.756 3.35 0a1.724 1.724 0 002.573 1.066c1.543-.94 3.31.826 2.37 2.37a1.724 1.724 0 001.065 2.572c1.756.426 1.756 2.924 0 3.35a1.724 1.724 0 00-1.066 2.573c.94 1.543-.826 3.31-2.37 2.37a1.724 1.724 0 00-2.572 1.065c-.426 1.756-2.924 1.756-3.35 0a1.724 1.724 0 00-2.573-1.066c-1.543.94-3.31-.826-2.37-2.37a1.724 1.724 0 00-1.065-2.572c-1.756-.426-1.756-2.924 0-3.35a1.724 1.724 0 001.066-2.573c-.94-1.543.826-3.31 2.37-2.37.996.608 2.296.07 2.572-1.065z" />
          <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M15 12a3 3 0 11-6 0 3 3 0 016 0z" />
        </svg>
        Debug
      </button>
    </div>
  )
}

vision-system-remote/src/components/MqttDebugPanel.tsx (new file, 120 lines)
@@ -0,0 +1,120 @@
import React, { useState } from 'react'
import { visionApi } from '../services/api'

interface MqttDebugPanelProps {
  isOpen: boolean
  onClose: () => void
}

export const MqttDebugPanel: React.FC<MqttDebugPanelProps> = ({ isOpen, onClose }) => {
  const [loading, setLoading] = useState<string | null>(null)
  const [message, setMessage] = useState<{ type: 'success' | 'error'; text: string } | null>(null)

  const publishMessage = async (topic: string, payload: string) => {
    const action = `${topic.split('/').pop()} → ${payload}`
    setLoading(action)
    setMessage(null)

    try {
      const result = await visionApi.publishMqttMessage(topic, payload, 0, false)
      if (result.success) {
        setMessage({ type: 'success', text: `Published: ${action}` })
        setTimeout(() => setMessage(null), 3000)
      } else {
        setMessage({ type: 'error', text: `Failed: ${result.message}` })
      }
    } catch (error: any) {
      setMessage({ type: 'error', text: `Error: ${error.message || 'Failed to publish message'}` })
    } finally {
      setLoading(null)
    }
  }

  if (!isOpen) return null

  return (
    <div className="fixed inset-0 bg-black bg-opacity-50 z-50 flex items-center justify-center p-4">
      <div className="bg-white rounded-lg shadow-xl max-w-2xl w-full max-h-[90vh] overflow-y-auto">
        {/* Header */}
        <div className="px-6 py-4 border-b border-gray-200 flex items-center justify-between">
          <h2 className="text-xl font-semibold text-gray-900">MQTT Debug Panel</h2>
          <button
            onClick={onClose}
            className="text-gray-400 hover:text-gray-600 transition-colors"
          >
            <svg className="w-6 h-6" fill="none" stroke="currentColor" viewBox="0 0 24 24">
              <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M6 18L18 6M6 6l12 12" />
            </svg>
          </button>
        </div>

        {/* Content */}
        <div className="p-6 space-y-6">
          {/* Message Status */}
          {message && (
            <div className={`p-3 rounded-md ${
              message.type === 'success'
                ? 'bg-green-50 text-green-800 border border-green-200'
                : 'bg-red-50 text-red-800 border border-red-200'
            }`}>
              {message.text}
            </div>
          )}

          {/* Vibratory Conveyor Section */}
          <div className="border border-gray-200 rounded-lg p-4">
            <h3 className="text-lg font-medium text-gray-900 mb-4">Vibratory Conveyor</h3>
            <p className="text-sm text-gray-600 mb-4">Topic: <code className="bg-gray-100 px-2 py-1 rounded">vision/vibratory_conveyor/state</code></p>
            <div className="flex gap-3">
              <button
                onClick={() => publishMessage('vision/vibratory_conveyor/state', 'on')}
                disabled={loading !== null}
                className="flex-1 px-4 py-2 bg-green-600 text-white rounded-md hover:bg-green-700 disabled:opacity-50 disabled:cursor-not-allowed transition-colors"
              >
                {loading === 'vibratory_conveyor → on' ? 'Publishing...' : 'Turn ON'}
              </button>
              <button
                onClick={() => publishMessage('vision/vibratory_conveyor/state', 'off')}
                disabled={loading !== null}
                className="flex-1 px-4 py-2 bg-red-600 text-white rounded-md hover:bg-red-700 disabled:opacity-50 disabled:cursor-not-allowed transition-colors"
              >
                {loading === 'vibratory_conveyor → off' ? 'Publishing...' : 'Turn OFF'}
              </button>
            </div>
          </div>

          {/* Blower Separator Section */}
          <div className="border border-gray-200 rounded-lg p-4">
            <h3 className="text-lg font-medium text-gray-900 mb-4">Blower Separator</h3>
            <p className="text-sm text-gray-600 mb-4">Topic: <code className="bg-gray-100 px-2 py-1 rounded">vision/blower_separator/state</code></p>
            <div className="flex gap-3">
              <button
                onClick={() => publishMessage('vision/blower_separator/state', 'on')}
                disabled={loading !== null}
                className="flex-1 px-4 py-2 bg-green-600 text-white rounded-md hover:bg-green-700 disabled:opacity-50 disabled:cursor-not-allowed transition-colors"
              >
                {loading === 'blower_separator → on' ? 'Publishing...' : 'Turn ON'}
              </button>
              <button
                onClick={() => publishMessage('vision/blower_separator/state', 'off')}
                disabled={loading !== null}
                className="flex-1 px-4 py-2 bg-red-600 text-white rounded-md hover:bg-red-700 disabled:opacity-50 disabled:cursor-not-allowed transition-colors"
              >
                {loading === 'blower_separator → off' ? 'Publishing...' : 'Turn OFF'}
              </button>
            </div>
          </div>

          {/* Info */}
          <div className="bg-blue-50 border border-blue-200 rounded-lg p-4">
            <p className="text-sm text-blue-800">
              <strong>Note:</strong> These buttons publish MQTT messages to test auto-recording.
              Check the logs to see if cameras start/stop recording automatically.
            </p>
          </div>
        </div>
      </div>
    </div>
  )
}

@@ -239,6 +239,13 @@ class VisionApiClient {
    return this.request(`/mqtt/events?limit=${limit}`)
  }

  async publishMqttMessage(topic: string, payload: string, qos: number = 0, retain: boolean = false): Promise<{ success: boolean; message: string; topic: string; payload: string }> {
    return this.request('/mqtt/publish', {
      method: 'POST',
      body: JSON.stringify({ topic, payload, qos, retain }),
    })
  }

  async startRecording(cameraName: string, filename?: string): Promise<StartRecordingResponse> {
    return this.request(`/cameras/${cameraName}/start-recording`, {
      method: 'POST',