diff --git a/.gitignore b/.gitignore index 293689c..86cb65d 100644 --- a/.gitignore +++ b/.gitignore @@ -39,3 +39,49 @@ management-dashboard-web-app/users.txt result result-* .direnv/ +# Python +__pycache__/ +*.py[cod] +*.egg-info/ +.venv/ +.uv/ +*.env +.env.*.local +.host-ip +.pytest_cache/ +.mypy_cache/ + +# Node / Vite +node_modules/ +dist/ +.build/ +.vite/ +*.log + +# Editor/OS +.DS_Store +Thumbs.db +.vscode/ +.idea/ + +# API storage & logs +camera-management-api/storage/ +camera-management-api/usda_vision_system.log + +# Docker +*.pid + +camera-management-api/camera_sdk/ +camera-management-api/core +management-dashboard-web-app/users.txt + +# Jupyter Notebooks +*.ipynb +supabase/.temp/cli-latest + +# Archive env backups (may contain secrets) +archive/management-dashboard-web-app/env-backups/ +# Nix +result +result-* +.direnv/ diff --git a/README.md b/README.md index 8ae816f..a7a2b06 100644 --- a/README.md +++ b/README.md @@ -7,6 +7,13 @@ A unified monorepo combining the camera API service and the web dashboard for US - `camera-management-api/` - Python API service for camera management (USDA-Vision-Cameras) - `management-dashboard-web-app/` - React web dashboard for experiment management (pecan_experiments) - `supabase/` - Database configuration, migrations, and seed data (shared infrastructure) +- `media-api/` - Python service for video/thumbnail serving (port 8090) +- `video-remote/` - Frontend for video browsing (port 3001) +- `vision-system-remote/` - Camera/vision UI (port 3002) +- `scheduling-remote/` - Scheduling/availability UI (port 3003) +- `scripts/` - Host IP, RTSP checks, env helpers (see [scripts/README.md](scripts/README.md)) +- `docs/` - Setup, Supabase, RTSP, design docs +- `mediamtx.yml` - RTSP/WebRTC config for MediaMTX streaming ## Quick Start @@ -28,15 +35,15 @@ The wrapper script automatically: For more details, see [Docker Compose Environment Setup](docs/DOCKER_COMPOSE_ENV_SETUP.md). 
-- Web: +- Web: - API: -- MQTT broker: localhost:1883 +- MQTT is optional; configure in API config if used (see `.env.example`). To stop: `docker compose down` ### Development Mode (Recommended for Development) -For development with live logging, debugging, and hot reloading: +For development, use the same Docker Compose stack as production. The web app runs with the Vite dev server on port 8080 (hot reload); the API runs on port 8000. 1) Copy env template and set values (for web/Supabase): @@ -45,36 +52,25 @@ cp .env.example .env # set VITE_SUPABASE_URL and VITE_SUPABASE_ANON_KEY in .env ``` -2) Start the development environment: +2) Start the stack (with logs in the foreground, or add `-d` for detached): ```bash -./dev-start.sh +./docker-compose.sh up --build ``` -This will: -- Start containers with debug logging enabled -- Enable hot reloading for both API and web services -- Show all logs in real-time -- Keep containers running for debugging - **Development URLs:** -- Web: (with hot reloading) -- API: (with debug logging) +- Web: (Vite dev server with hot reload) +- API: **Development Commands:** -- `./dev-start.sh` - Start development environment -- `./dev-stop.sh` - Stop development environment -- `./dev-logs.sh` - View logs (use `-f` to follow, `-t N` for last N lines) -- `./dev-logs.sh -f api` - Follow API logs only -- `./dev-logs.sh -f web` - Follow web logs only -- `./dev-shell.sh` - Open shell in API container -- `./dev-shell.sh web` - Open shell in web container - -**Debug Features:** -- API runs with `--debug --verbose` flags for maximum logging -- Web runs with Vite dev server for hot reloading -- All containers have `stdin_open: true` and `tty: true` for debugging -- Environment variables set for development mode +- `./docker-compose.sh up --build` - Start stack (omit `-d` to see logs) +- `./docker-compose.sh up --build -d` - Start stack in background +- `docker compose down` - Stop all services +- `docker compose logs -f` - Follow all logs +- 
`docker compose logs -f api` - Follow API logs only +- `docker compose logs -f web` - Follow web logs only +- `docker compose exec api sh` - Open shell in API container +- `docker compose exec web sh` - Open shell in web container ## Services @@ -84,16 +80,34 @@ This will: - Video recording controls - File management -### Web Dashboard (Port 5173) +### Web Dashboard (Port 8080) - User authentication via Supabase - Experiment definition and management - Camera control interface - Video playback and analysis -### MQTT Broker (Port 1883) +### Media API (Port 8090) -- Local Mosquitto broker for development and integration testing +- Video listing, thumbnails, transcoding + +### Video Remote (Port 3001) + +- Video browser UI + +### Vision System Remote (Port 3002) + +- Camera/vision control UI + +### Scheduling Remote (Port 3003) + +- Scheduling/availability UI + +### MediaMTX (Ports 8554, 8889, 8189) + +- RTSP and WebRTC streaming (config: [mediamtx.yml](mediamtx.yml)) + +Supabase services are currently commented out in `docker-compose.yml` and can be run via Supabase CLI (e.g. from `management-dashboard-web-app`). See [docs](docs/) for setup. ## Git Subtree Workflow @@ -138,7 +152,7 @@ Notes: - Storage (recordings) is mapped to `camera-management-api/storage/` and ignored by git. - Web - - Code lives under `management-dashboard-web-app/` with a Vite dev server on port 5173. + - Code lives under `management-dashboard-web-app/` with a Vite dev server on port 8080 when run via Docker. - Environment: set `VITE_SUPABASE_URL` and `VITE_SUPABASE_ANON_KEY` in `.env` (not committed). - Common scripts: `npm run dev`, `npm run build` (executed inside the container by compose). 
diff --git a/archive/management-dashboard-web-app/README.md b/archive/management-dashboard-web-app/README.md new file mode 100644 index 0000000..b336d0e --- /dev/null +++ b/archive/management-dashboard-web-app/README.md @@ -0,0 +1,7 @@ +# Archive: management-dashboard-web-app legacy/backup files + +Moved from `management-dashboard-web-app/` so the app directory only contains active code and config. + +- **env-backups/** โ€“ Old `.env.backup` and timestamped backup (Supabase URL/key). Keep out of version control if they contain secrets. +- **experiment-data/** โ€“ CSV run sheets: `phase_2_JC_experimental_run_sheet.csv`, `post_workshop_meyer_experiments.csv`. Source/reference data for experiments. +- **test-api-fix.js** โ€“ One-off test script for camera config API; not part of the app build. diff --git a/archive/management-dashboard-web-app/experiment-data/phase_2_JC_experimental_run_sheet.csv b/archive/management-dashboard-web-app/experiment-data/phase_2_JC_experimental_run_sheet.csv new file mode 100644 index 0000000..d3b34c9 --- /dev/null +++ b/archive/management-dashboard-web-app/experiment-data/phase_2_JC_experimental_run_sheet.csv @@ -0,0 +1,61 @@ +experiment_number,soaking_duration_hr,air_drying_duration_min,plate_contact_frequency_hz,throughput_rate_pecans_sec,crush_amount_in,entry_exit_height_diff_in +0,34,19,53,28,0.05,-0.09 +1,24,27,34,29,0.03,0.01 +12,28,59,37,23,0.06,-0.08 +15,16,60,30,24,0.07,0.02 +4,13,41,41,38,0.05,0.03 +18,18,49,38,35,0.07,-0.08 +11,24,59,42,25,0.07,-0.05 +16,20,59,41,14,0.07,0.04 +4,13,41,41,38,0.05,0.03 +19,11,25,56,34,0.06,-0.09 +15,16,60,30,24,0.07,0.02 +16,20,59,41,14,0.07,0.04 +10,26,60,44,12,0.08,-0.1 +1,24,27,34,29,0.03,0.01 +17,34,60,34,29,0.07,-0.09 +5,30,33,30,36,0.05,-0.04 +2,38,10,60,28,0.06,-0.1 +2,38,10,60,28,0.06,-0.1 +13,21,59,41,21,0.06,-0.09 +1,24,27,34,29,0.03,0.01 +14,22,59,45,17,0.07,-0.08 +6,10,22,37,30,0.06,0.02 +11,24,59,42,25,0.07,-0.05 +19,11,25,56,34,0.06,-0.09 +8,27,12,55,24,0.04,0.04 
+18,18,49,38,35,0.07,-0.08 +5,30,33,30,36,0.05,-0.04 +9,32,26,47,26,0.07,0.03 +3,11,36,42,13,0.07,-0.07 +10,26,60,44,12,0.08,-0.1 +8,27,12,55,24,0.04,0.04 +5,30,33,30,36,0.05,-0.04 +8,27,12,55,24,0.04,0.04 +18,18,49,38,35,0.07,-0.08 +3,11,36,42,13,0.07,-0.07 +10,26,60,44,12,0.08,-0.1 +17,34,60,34,29,0.07,-0.09 +13,21,59,41,21,0.06,-0.09 +12,28,59,37,23,0.06,-0.08 +9,32,26,47,26,0.07,0.03 +14,22,59,45,17,0.07,-0.08 +0,34,19,53,28,0.05,-0.09 +7,15,30,35,32,0.05,-0.07 +0,34,19,53,28,0.05,-0.09 +15,16,60,30,24,0.07,0.02 +13,21,59,41,21,0.06,-0.09 +11,24,59,42,25,0.07,-0.05 +7,15,30,35,32,0.05,-0.07 +16,20,59,41,14,0.07,0.04 +3,11,36,42,13,0.07,-0.07 +7,15,30,35,32,0.05,-0.07 +6,10,22,37,30,0.06,0.02 +19,11,25,56,34,0.06,-0.09 +6,10,22,37,30,0.06,0.02 +2,38,10,60,28,0.06,-0.1 +14,22,59,45,17,0.07,-0.08 +4,13,41,41,38,0.05,0.03 +9,32,26,47,26,0.07,0.03 +17,34,60,34,29,0.07,-0.09 +12,28,59,37,23,0.06,-0.08 \ No newline at end of file diff --git a/archive/management-dashboard-web-app/experiment-data/post_workshop_meyer_experiments.csv b/archive/management-dashboard-web-app/experiment-data/post_workshop_meyer_experiments.csv new file mode 100644 index 0000000..897f55a --- /dev/null +++ b/archive/management-dashboard-web-app/experiment-data/post_workshop_meyer_experiments.csv @@ -0,0 +1,41 @@ +phase_name,machine_type,Motor Speed (Hz),soaking_duration_hr,air_drying_duration_min,jig Displacement (in),Spring Stiffness (N/m),reps_required,rep +"Post Workshop Meyer Experiments","Meyer Cracker",33,27,28,-0.307,1800,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",30,37,17,-0.311,2000,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",47,36,50,-0.291,1800,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",42,12,30,-0.314,2000,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",53,34,19,-0.302,1800,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",37,18,40,-0.301,2200,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",40,14,59,-0.286,2000,1,1 +"Post 
Workshop Meyer Experiments","Meyer Cracker",39,18,32,-0.309,1800,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",49,11,31,-0.299,2200,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",47,33,12,-0.295,2000,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",52,23,36,-0.302,2000,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",59,37,35,-0.299,1800,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",41,15,15,-0.312,2000,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",46,24,22,-0.303,1800,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",50,36,15,-0.308,1800,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",36,32,48,-0.306,2200,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",33,28,38,-0.308,2200,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",35,31,51,-0.311,1800,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",55,20,57,-0.304,2000,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",44,10,27,-0.313,2200,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",37,16,43,-0.294,2000,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",56,25,42,-0.31,2200,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",30,13,21,-0.292,2200,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",60,29,46,-0.294,2200,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",41,21,54,-0.306,2000,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",55,29,54,-0.296,1800,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",39,30,48,-0.293,2200,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",34,35,53,-0.285,2200,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",57,32,39,-0.291,1800,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",45,27,38,-0.296,2200,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",52,17,25,-0.297,1800,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",51,13,22,-0.288,2200,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",36,19,11,-0.29,2000,1,1 +"Post Workshop Meyer 
Experiments","Meyer Cracker",44,38,32,-0.315,1800,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",58,26,18,-0.289,1800,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",32,22,52,-0.288,1800,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",43,12,56,-0.287,2200,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",60,16,45,-0.298,2200,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",54,22,25,-0.301,2000,1,1 +"Post Workshop Meyer Experiments","Meyer Cracker",48,24,13,-0.305,2000,1,1 \ No newline at end of file diff --git a/archive/management-dashboard-web-app/test-api-fix.js b/archive/management-dashboard-web-app/test-api-fix.js new file mode 100644 index 0000000..267d472 --- /dev/null +++ b/archive/management-dashboard-web-app/test-api-fix.js @@ -0,0 +1,132 @@ +// Test script to verify the camera configuration API fix +// This simulates the VisionApiClient.getCameraConfig method + +class TestVisionApiClient { + constructor() { + this.baseUrl = 'http://localhost:8000' + } + + async request(endpoint) { + const response = await fetch(`${this.baseUrl}${endpoint}`, { + headers: { + 'Content-Type': 'application/json', + } + }) + + if (!response.ok) { + const errorText = await response.text() + throw new Error(`HTTP ${response.status}: ${response.statusText}\n${errorText}`) + } + + return response.json() + } + + // This is our fixed getCameraConfig method + async getCameraConfig(cameraName) { + try { + const config = await this.request(`/cameras/${cameraName}/config`) + + // Ensure auto-recording fields have default values if missing + return { + ...config, + auto_start_recording_enabled: config.auto_start_recording_enabled ?? false, + auto_recording_max_retries: config.auto_recording_max_retries ?? 3, + auto_recording_retry_delay_seconds: config.auto_recording_retry_delay_seconds ?? 
5 + } + } catch (error) { + // If the error is related to missing auto-recording fields, try to handle it gracefully + if (error.message?.includes('auto_start_recording_enabled') || + error.message?.includes('auto_recording_max_retries') || + error.message?.includes('auto_recording_retry_delay_seconds')) { + + // Try to get the raw camera data and add default auto-recording fields + try { + const response = await fetch(`${this.baseUrl}/cameras/${cameraName}/config`, { + headers: { + 'Content-Type': 'application/json', + } + }) + + if (!response.ok) { + throw new Error(`HTTP ${response.status}: ${response.statusText}`) + } + + const rawConfig = await response.json() + + // Add missing auto-recording fields with defaults + return { + ...rawConfig, + auto_start_recording_enabled: false, + auto_recording_max_retries: 3, + auto_recording_retry_delay_seconds: 5 + } + } catch (fallbackError) { + throw new Error(`Failed to load camera configuration: ${error.message}`) + } + } + + throw error + } + } + + async getCameras() { + return this.request('/cameras') + } +} + +// Test function +async function testCameraConfigFix() { + console.log('๐Ÿงช Testing Camera Configuration API Fix') + console.log('=' * 50) + + const api = new TestVisionApiClient() + + try { + // First get available cameras + console.log('๐Ÿ“‹ Getting camera list...') + const cameras = await api.getCameras() + const cameraNames = Object.keys(cameras) + + if (cameraNames.length === 0) { + console.log('โŒ No cameras found') + return + } + + console.log(`โœ… Found ${cameraNames.length} cameras: ${cameraNames.join(', ')}`) + + // Test configuration for each camera + for (const cameraName of cameraNames) { + console.log(`\n๐ŸŽฅ Testing configuration for ${cameraName}...`) + + try { + const config = await api.getCameraConfig(cameraName) + + console.log(`โœ… Configuration loaded successfully for ${cameraName}`) + console.log(` - auto_start_recording_enabled: ${config.auto_start_recording_enabled}`) + console.log(` 
- auto_recording_max_retries: ${config.auto_recording_max_retries}`) + console.log(` - auto_recording_retry_delay_seconds: ${config.auto_recording_retry_delay_seconds}`) + console.log(` - exposure_ms: ${config.exposure_ms}`) + console.log(` - gain: ${config.gain}`) + + } catch (error) { + console.log(`โŒ Configuration failed for ${cameraName}: ${error.message}`) + } + } + + console.log('\n๐ŸŽ‰ Camera configuration API test completed!') + + } catch (error) { + console.log(`โŒ Test failed: ${error.message}`) + } +} + +// Export for use in browser console or Node.js +if (typeof module !== 'undefined' && module.exports) { + module.exports = { TestVisionApiClient, testCameraConfigFix } +} else { + // Browser environment + window.TestVisionApiClient = TestVisionApiClient + window.testCameraConfigFix = testCameraConfigFix +} + +console.log('๐Ÿ“ Test script loaded. Run testCameraConfigFix() to test the fix.') diff --git a/docker-compose.yml b/docker-compose.yml index 5692326..9f37c32 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -2,10 +2,10 @@ networks: usda-vision-network: driver: bridge -volumes: - supabase-db: - driver: local - supabase-storage: +# volumes: +# supabase-db: +# driver: local +# supabase-storage: services: # ============================================================================ @@ -17,7 +17,7 @@ services: # - Filter by label: docker compose ps --filter "label=com.usda-vision.service=supabase" # - Or use service names: docker compose ps supabase-* # - # NOTE: Currently commented out to test Supabase CLI setup from management-dashboard-web-app + # NOTE: Supabase CLI and docker-compose use root supabase/ # # # Supabase Database # # supabase-db: @@ -166,7 +166,7 @@ services: # supabase-rest: # condition: service_started # environment: - # ANON_KEY: ${ANON_KEY:-[REDACTED]} + # ANON_KEY: 
${ANON_KEY:-eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0} # SERVICE_KEY: ${SERVICE_KEY:-eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6InNlcnZpY2Vfcm9sZSIsImV4cCI6MTk4MzgxMjk5Nn0.EGIM96RAZx35lJzdJsyH-qQwv8Hdp7fsn3W0YpN81IU} # POSTGREST_URL: http://supabase-rest:3000 # PGRST_JWT_SECRET: ${JWT_SECRET:-super-secret-jwt-token-with-at-least-32-characters-long} @@ -205,7 +205,7 @@ services: # DEFAULT_PROJECT_NAME: Default Project # SUPABASE_URL: http://supabase-rest:3000 # SUPABASE_PUBLIC_URL: http://localhost:54321 - # SUPABASE_ANON_KEY: ${ANON_KEY:-[REDACTED]} + # SUPABASE_ANON_KEY: ${ANON_KEY:-eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0} # SUPABASE_SERVICE_KEY: ${SERVICE_KEY:-eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6InNlcnZpY2Vfcm9sZSIsImV4cCI6MTk4MzgxMjk5Nn0.EGIM96RAZx35lJzdJsyH-qQwv8Hdp7fsn3W0YpN81IU} # ports: # - "54323:3000" @@ -400,6 +400,8 @@ services: video-remote: container_name: usda-vision-video-remote image: node:20-alpine + tty: true + stdin_open: true working_dir: /app environment: - CHOKIDAR_USEPOLLING=true @@ -424,6 +426,8 @@ services: vision-system-remote: container_name: usda-vision-vision-system-remote image: node:20-alpine + tty: true + stdin_open: true working_dir: /app environment: - CHOKIDAR_USEPOLLING=true @@ -447,6 +451,8 @@ services: scheduling-remote: container_name: usda-vision-scheduling-remote image: node:20-alpine + tty: true + stdin_open: true working_dir: /app env_file: - ./management-dashboard-web-app/.env @@ -466,6 +472,14 @@ services: - "3003:3003" networks: - usda-vision-network + develop: + watch: + - path: ./scheduling-remote + action: restart + ignore: + - node_modules/ + - dist/ + - .git/ media-api: container_name: usda-vision-media-api diff --git 
a/docs/DESIGN_RECOMMENDATION_SUMMARY.md b/docs/DESIGN_RECOMMENDATION_SUMMARY.md index d62445d..fc6a843 100644 --- a/docs/DESIGN_RECOMMENDATION_SUMMARY.md +++ b/docs/DESIGN_RECOMMENDATION_SUMMARY.md @@ -7,6 +7,7 @@ Your current design has a **fundamental flaw** that prevents it from working cor ### The Problem The phase tables (`soaking`, `airdrying`, `cracking`, `shelling`) have this constraint: + ```sql CONSTRAINT unique_soaking_per_experiment UNIQUE (experiment_id) ``` @@ -16,7 +17,8 @@ This means you can **only have ONE soaking record per experiment**, even if you ### Why This Happens When you create an experiment with 3 repetitions: -1. โœ… 3 rows are created in `experiment_repetitions` + +1. โœ… 3 rows are created in `experiment_repetitions` 2. โŒ But you can only create 1 row in `soaking` (due to UNIQUE constraint) 3. โŒ The other 2 repetitions cannot have soaking data! @@ -25,6 +27,7 @@ When you create an experiment with 3 repetitions: ### Current Approach: Separate Tables (โŒ Not Recommended) **Problems:** + - โŒ UNIQUE constraint breaks repetitions - โŒ Schema duplication (same structure 4 times) - โŒ Hard to query "all phases for a repetition" @@ -34,6 +37,7 @@ When you create an experiment with 3 repetitions: ### Recommended Approach: Unified Table (โœ… Best Practice) **Benefits:** + - โœ… Properly supports repetitions (one phase per repetition) - โœ… Automatic phase creation via database trigger - โœ… Simple sequential time calculations @@ -45,7 +49,7 @@ When you create an experiment with 3 repetitions: I've created a migration file that implements a **unified `experiment_phase_executions` table**: -### Key Features: +### Key Features 1. **Single Table for All Phases** - Uses `phase_type` enum to distinguish phases @@ -68,7 +72,8 @@ I've created a migration file that implements a **unified `experiment_phase_exec ## Files Created 1. **`docs/database_design_analysis.md`** - Detailed analysis with comparison matrix -2. 
**`management-dashboard-web-app/supabase/migrations/00012_unified_phase_executions.sql`** - Complete migration implementation +3. **`supabase/migrations/00012_unified_phase_executions.sql`** - Complete migration implementation + ## Migration Path @@ -81,6 +86,7 @@ I've created a migration file that implements a **unified `experiment_phase_exec ## Alternative: Fix Current Design If you prefer to keep separate tables, you MUST: + 1. Remove `UNIQUE (experiment_id)` constraints from all phase tables 2. Keep only `UNIQUE (repetition_id)` constraints 3. Add trigger to auto-create phase entries when repetitions are created @@ -91,4 +97,3 @@ However, this still has the drawbacks of schema duplication and complexity. ## Recommendation **Use the unified table approach** - it's cleaner, more maintainable, and properly supports your repetition model. - diff --git a/docs/SUPABASE_DOCKER_COMPOSE.md b/docs/SUPABASE_DOCKER_COMPOSE.md index c90e099..f9a073a 100644 --- a/docs/SUPABASE_DOCKER_COMPOSE.md +++ b/docs/SUPABASE_DOCKER_COMPOSE.md @@ -5,6 +5,7 @@ The Supabase containers are now integrated into the main `docker-compose.yml` fi ## What Changed All Supabase services are now defined in the root `docker-compose.yml`: + - **supabase-db**: PostgreSQL database (port 54322) - **supabase-rest**: PostgREST API (port 54321) - **supabase-auth**: GoTrue authentication service (port 9999) @@ -48,6 +49,7 @@ VITE_SUPABASE_ANON_KEY= ``` The default anon key for local development is: + ``` [REDACTED] ``` @@ -55,20 +57,22 @@ The default anon key for local development is: ### Migrations Migrations are automatically run on first startup via the `supabase-migrate` service. The service: + 1. Waits for the database to be ready 2. Runs all migrations from `supabase/migrations/` in alphabetical order 3. Runs seed files (`seed_01_users.sql` and `seed_02_phase2_experiments.sql`) If you need to re-run migrations, you can: + 1. Stop the containers: `docker compose down` 2. 
Remove the database volume: `docker volume rm usda-vision_supabase-db` 3. Start again: `docker compose up -d` ### Accessing Services -- **Supabase API**: http://localhost:54321 -- **Supabase Studio**: http://localhost:54323 -- **Email Testing (Inbucket)**: http://localhost:54324 +- **Supabase API**: +- **Supabase Studio**: +- **Email Testing (Inbucket)**: - **Database (direct)**: localhost:54322 ### Network @@ -88,12 +92,14 @@ If you were previously using `supabase start` from the `management-dashboard-web ### Port Conflicts If you get port conflicts, make sure: + - No other Supabase instances are running - The Supabase CLI isn't running containers (`supabase stop` if needed) ### Migration Issues If migrations fail: + 1. Check the logs: `docker compose logs supabase-migrate` 2. Ensure migration files are valid SQL 3. You may need to manually connect to the database and fix issues @@ -101,7 +107,7 @@ If migrations fail: ### Database Connection Issues If services can't connect to the database: + 1. Check database is healthy: `docker compose ps supabase-db` 2. Check logs: `docker compose logs supabase-db` 3. Ensure the database password matches across all services - diff --git a/docs/SUPABASE_MIGRATION.md b/docs/SUPABASE_MIGRATION.md index 974ca29..744eafd 100644 --- a/docs/SUPABASE_MIGRATION.md +++ b/docs/SUPABASE_MIGRATION.md @@ -25,6 +25,7 @@ docker compose up -d ### For Supabase CLI Users **Before** (old way): + ```bash cd management-dashboard-web-app supabase start @@ -32,6 +33,7 @@ supabase db reset ``` **After** (new way): + ```bash # From project root - no need to cd! supabase start @@ -50,23 +52,22 @@ If you have scripts or documentation that reference the old path, update them: - โŒ `management-dashboard-web-app/supabase/config.toml` - โœ… `supabase/config.toml` -## Backward Compatibility -The old directory (`management-dashboard-web-app/supabase/`) can be kept for reference, but it's no longer used by docker-compose or the Supabase CLI. 
You can safely remove it after verifying everything works: +## Current State -```bash -# After verifying everything works with the new location -rm -rf management-dashboard-web-app/supabase -``` +The old directory (`management-dashboard-web-app/supabase/`) has been removed. All Supabase and DB configuration, migrations, and seeds now live only under the project root `supabase/` directory. Docker Compose and the Supabase CLI use root `supabase/` exclusively. ## Verification -To verify the migration worked: +To verify the migration: + +1. **Check docker-compose paths** (only root supabase should be referenced): -1. **Check docker-compose paths**: ```bash - grep -r "supabase" docker-compose.yml - # Should show: ./supabase/ (not ./management-dashboard-web-app/supabase/) + grep "supabase" docker-compose.yml + # Should show only ./supabase/ (no management-dashboard-web-app/supabase/) + + ``` 2. **Test Supabase CLI**: @@ -76,7 +77,8 @@ To verify the migration worked: # Should work without needing to cd into management-dashboard-web-app ``` -3. **Test migrations**: +1. **Test migrations**: + ```bash docker compose up -d docker compose logs supabase-migrate @@ -90,4 +92,3 @@ To verify the migration worked: โœ… Easier to share database across services โœ… Better alignment with monorepo best practices โœ… Infrastructure separated from application code - diff --git a/docs/database_entities.md b/docs/database_entities.md new file mode 100644 index 0000000..911359c --- /dev/null +++ b/docs/database_entities.md @@ -0,0 +1,303 @@ +# Database Entities Documentation + +This document describes the core entities in the USDA Vision database schema, focusing on entity-specific attributes (excluding generic fields like `id`, `created_at`, `updated_at`, `created_by`). + +## Entity Relationships Overview + +``` +Experiment Phase (Template) + โ†“ +Experiment + โ†“ +Experiment Repetition + โ†“ +Experiment Phase Execution (Soaking, Airdrying, Cracking, Shelling) +``` + +--- + +## 1. 
Experiment Phase + +**Table:** `experiment_phases` + +**Purpose:** Defines a template/blueprint for experiments that specifies which processing phases are included and their configuration. + +### Attributes + +- **name** (TEXT, UNIQUE, NOT NULL) + - Unique name identifying the experiment phase template + - Example: "Phase 2 - Standard Processing" + +- **description** (TEXT, nullable) + - Optional description providing details about the experiment phase + +- **has_soaking** (BOOLEAN, NOT NULL, DEFAULT false) + - Indicates whether soaking phase is included in experiments using this template + +- **has_airdrying** (BOOLEAN, NOT NULL, DEFAULT false) + - Indicates whether airdrying phase is included in experiments using this template + +- **has_cracking** (BOOLEAN, NOT NULL, DEFAULT false) + - Indicates whether cracking phase is included in experiments using this template + +- **has_shelling** (BOOLEAN, NOT NULL, DEFAULT false) + - Indicates whether shelling phase is included in experiments using this template + +- **cracking_machine_type_id** (UUID, nullable) + - References the machine type to be used for cracking (required if `has_cracking` is true) + - Links to `machine_types` table + +### Constraints + +- At least one phase (soaking, airdrying, cracking, or shelling) must be enabled +- If `has_cracking` is true, `cracking_machine_type_id` must be provided + +--- + +## 2. Experiment + +**Table:** `experiments` + +**Purpose:** Defines an experiment blueprint that specifies the parameters and requirements for conducting pecan processing experiments. 
+ +### Attributes + +- **experiment_number** (INTEGER, NOT NULL) + - Unique number identifying the experiment + - Combined with `phase_id` must be unique + +- **reps_required** (INTEGER, NOT NULL, CHECK > 0) + - Number of repetitions required for this experiment + - Must be greater than zero + +- **weight_per_repetition_lbs** (DOUBLE PRECISION, NOT NULL, DEFAULT 5.0, CHECK > 0) + - Weight in pounds required for each repetition of the experiment + +- **results_status** (TEXT, NOT NULL, DEFAULT 'valid', CHECK IN ('valid', 'invalid')) + - Status indicating whether the experiment results are considered valid or invalid + +- **completion_status** (BOOLEAN, NOT NULL, DEFAULT false) + - Indicates whether the experiment has been completed + +- **phase_id** (UUID, NOT NULL) + - References the experiment phase template this experiment belongs to + - Links to `experiment_phases` table + +### Constraints + +- Combination of `experiment_number` and `phase_id` must be unique + +--- + +## 3. Experiment Repetition + +**Table:** `experiment_repetitions` + +**Purpose:** Represents a single execution instance of an experiment that can be scheduled and tracked. + +### Attributes + +- **experiment_id** (UUID, NOT NULL) + - References the parent experiment blueprint + - Links to `experiments` table + +- **repetition_number** (INTEGER, NOT NULL, CHECK > 0) + - Sequential number identifying this repetition within the experiment + - Must be unique per experiment + +- **scheduled_date** (TIMESTAMP WITH TIME ZONE, nullable) + - Date and time when the repetition is scheduled to be executed + +- **status** (TEXT, NOT NULL, DEFAULT 'pending', CHECK IN ('pending', 'in_progress', 'completed', 'cancelled')) + - Current status of the repetition execution + - Values: `pending`, `in_progress`, `completed`, `cancelled` + +### Constraints + +- Combination of `experiment_id` and `repetition_number` must be unique + +--- + +## 4. 
Experiment Phase Executions + +**Table:** `experiment_phase_executions` + +**Purpose:** Unified table storing execution data for all phase types (soaking, airdrying, cracking, shelling) associated with experiment repetitions. + +### Common Attributes (All Phase Types) + +- **repetition_id** (UUID, NOT NULL) + - References the experiment repetition this phase execution belongs to + - Links to `experiment_repetitions` table + +- **phase_type** (TEXT, NOT NULL, CHECK IN ('soaking', 'airdrying', 'cracking', 'shelling')) + - Type of phase being executed + - Must be one of: `soaking`, `airdrying`, `cracking`, `shelling` + +- **scheduled_start_time** (TIMESTAMP WITH TIME ZONE, NOT NULL) + - Planned start time for the phase execution + +- **scheduled_end_time** (TIMESTAMP WITH TIME ZONE, nullable) + - Planned end time for the phase execution + - Automatically calculated for soaking and airdrying based on duration + +- **actual_start_time** (TIMESTAMP WITH TIME ZONE, nullable) + - Actual time when the phase execution started + +- **actual_end_time** (TIMESTAMP WITH TIME ZONE, nullable) + - Actual time when the phase execution ended + +- **status** (TEXT, NOT NULL, DEFAULT 'pending', CHECK IN ('pending', 'scheduled', 'in_progress', 'completed', 'cancelled')) + - Current status of the phase execution + +### Phase-Specific Concepts: Independent & Dependent Variables + +> **Note:** This section describes the conceptual variables for each phase (what we measure or control), not necessarily the current physical columns in the database. Some of these variables will be added to the schema in future migrations. + +#### Soaking Phase + +- **Independent variables (IV)** + - **Pre-soaking shell moisture percentage** + - Moisture percentage of the shell **before soaking**. + - **Pre-soaking kernel moisture percentage** + - Moisture percentage of the kernel **before soaking**. + - **Average pecan diameter (inches)** + - Average diameter of pecans in the batch, measured in inches. 
+ - **Batch weight** + - Total weight of the batch being soaked. + - **Soaking duration (minutes)** + - Duration of the soaking process in minutes (currently represented as `soaking_duration_minutes`). + +- **Dependent variables (DV)** + - **Post-soaking shell moisture percentage** + - Moisture percentage of the shell **after soaking**. + - **Post-soaking kernel moisture percentage** + - Moisture percentage of the kernel **after soaking**. + +#### Airdrying Phase + +- **Independent variables (IV)** + - **Airdrying duration (minutes)** + - Duration of the airdrying process in minutes (currently represented as `duration_minutes`). + +- **Dependent variables (DV)** + - *(TBD โ€” moisture/other measurements after airdrying can be added here when finalized.)* + +#### Cracking Phase + +- **Independent variables (IV)** + - **Cracking machine type** + - The type of cracking machine used (linked via `machine_type_id` and `experiment_phases.cracking_machine_type_id`). + +- **Dependent variables (DV)** + - *None defined yet for cracking.* + Business/analysis metrics for cracking can be added later (e.g., crack quality, breakage rates). + +#### Shelling Phase + +- **Independent variables (IV)** + - **Shelling machine configuration parameters** (not yet present in the DB schema) + - **Ring gap (inches)** + - Radial gap setting of the shelling ring (e.g., `0.34` inches). + - **Paddle RPM** + - Rotational speed of the paddles (integer RPM value). + - **Third machine parameter (TBD)** + - The shelling machine expects a third configuration parameter that will be defined and added to the schema later. + +- **Dependent variables (DV)** + - **Half-yield ratio (percentage)** + - Percentage of the shelled product that is composed of half kernels, relative to total yield. 
+ +### Constraints + +- Combination of `repetition_id` and `phase_type` must be unique (one execution per phase type per repetition) + +### Notes + +- Phase executions are automatically created when an experiment repetition is created, based on the experiment phase template configuration +- Sequential phases (soaking → airdrying → cracking → shelling) automatically calculate their `scheduled_start_time` based on the previous phase's `scheduled_end_time` +- For soaking and airdrying phases, `scheduled_end_time` is automatically calculated from `scheduled_start_time` + duration + +--- + +## 5. Machine Types + +**Table:** `machine_types` + +**Purpose:** Defines the types of machines available for cracking operations. + +### Attributes + +- **name** (TEXT, UNIQUE, NOT NULL) + - Unique name identifying the machine type + - Example: "JC Cracker", "Meyer Cracker" + +- **description** (TEXT, nullable) + - Optional description of the machine type + +### Related Tables + +Machine types are referenced by: +- `experiment_phases.cracking_machine_type_id` - Defines which machine type to use for cracking in an experiment phase template +- `experiment_phase_executions.machine_type_id` - Specifies which machine was used for a specific cracking execution + +--- + +## 6. Cracker Parameters (Machine-Specific) + +### JC Cracker Parameters + +**Table:** `jc_cracker_parameters` + +**Purpose:** Stores parameters specific to JC Cracker machines. 
+ +#### Attributes + +- **plate_contact_frequency_hz** (DOUBLE PRECISION, NOT NULL, CHECK > 0) + - Frequency of plate contact in Hertz + +- **throughput_rate_pecans_sec** (DOUBLE PRECISION, NOT NULL, CHECK > 0) + - Rate of pecan processing in pecans per second + +- **crush_amount_in** (DOUBLE PRECISION, NOT NULL, CHECK >= 0) + - Amount of crushing in inches + +- **entry_exit_height_diff_in** (DOUBLE PRECISION, NOT NULL) + - Difference in height between entry and exit points in inches + +### Meyer Cracker Parameters + +**Table:** `meyer_cracker_parameters` + +**Purpose:** Stores parameters specific to Meyer Cracker machines. + +#### Attributes + +- **motor_speed_hz** (DOUBLE PRECISION, NOT NULL, CHECK > 0) + - Motor speed in Hertz + +- **jig_displacement_inches** (DOUBLE PRECISION, NOT NULL) + - Jig displacement in inches + +- **spring_stiffness_nm** (DOUBLE PRECISION, NOT NULL, CHECK > 0) + - Spring stiffness in newtons per metre (N/m) + +--- + +## Summary of Entity Relationships + +1. **Experiment Phase** → Defines which phases are included and machine type for cracking +2. **Experiment** → Belongs to an Experiment Phase, defines repetition requirements and weight per repetition +3. **Experiment Repetition** → Instance of an Experiment, can be scheduled and tracked +4. **Experiment Phase Execution** → Execution record for each phase (soaking, airdrying, cracking, shelling) within a repetition +5. **Machine Types** → Defines available cracking machines +6. 
**Cracker Parameters** โ†’ Machine-specific operational parameters (JC or Meyer) + +### Key Relationships + +- One Experiment Phase can have many Experiments +- One Experiment can have many Experiment Repetitions +- One Experiment Repetition can have multiple Phase Executions (one per phase type) +- Phase Executions are automatically created based on the Experiment Phase template configuration +- Cracking Phase Executions reference a Machine Type +- Machine Types can have associated Cracker Parameters (JC or Meyer specific) diff --git a/management-dashboard-web-app/src/components/DashboardLayout.tsx b/management-dashboard-web-app/src/components/DashboardLayout.tsx index 3273303..adac4f1 100755 --- a/management-dashboard-web-app/src/components/DashboardLayout.tsx +++ b/management-dashboard-web-app/src/components/DashboardLayout.tsx @@ -26,9 +26,8 @@ export function DashboardLayout({ onLogout, currentRoute }: DashboardLayoutProps const [loading, setLoading] = useState(true) const [error, setError] = useState(null) const [currentView, setCurrentView] = useState('dashboard') - const [isExpanded, setIsExpanded] = useState(true) + const [isExpanded, setIsExpanded] = useState(false) const [isMobileOpen, setIsMobileOpen] = useState(false) - const [isHovered, setIsHovered] = useState(false) // Valid dashboard views const validViews = ['dashboard', 'user-management', 'experiments', 'analytics', 'data-entry', 'vision-system', 'scheduling', 'video-library', 'profile'] @@ -53,6 +52,26 @@ export function DashboardLayout({ onLogout, currentRoute }: DashboardLayoutProps } } + // Save sidebar expanded state to localStorage + const saveSidebarState = (expanded: boolean) => { + try { + localStorage.setItem('sidebar-expanded', String(expanded)) + } catch (error) { + console.warn('Failed to save sidebar state to localStorage:', error) + } + } + + // Get saved sidebar state from localStorage + const getSavedSidebarState = (): boolean => { + try { + const saved = 
localStorage.getItem('sidebar-expanded') + return saved === 'true' + } catch (error) { + console.warn('Failed to get saved sidebar state from localStorage:', error) + return false + } + } + // Check if user has access to a specific view const hasAccessToView = (view: string): boolean => { if (!user) return false @@ -80,6 +99,9 @@ export function DashboardLayout({ onLogout, currentRoute }: DashboardLayoutProps useEffect(() => { fetchUserProfile() + // Load saved sidebar state + const savedSidebarState = getSavedSidebarState() + setIsExpanded(savedSidebarState) }, []) // Restore saved view when user is loaded @@ -144,7 +166,9 @@ export function DashboardLayout({ onLogout, currentRoute }: DashboardLayoutProps } const toggleSidebar = () => { - setIsExpanded(!isExpanded) + const newState = !isExpanded + setIsExpanded(newState) + saveSidebarState(newState) } const toggleMobileSidebar = () => { @@ -225,7 +249,7 @@ export function DashboardLayout({ onLogout, currentRoute }: DashboardLayoutProps ) case 'scheduling': return ( - + Loading scheduling module...}> @@ -300,8 +324,6 @@ export function DashboardLayout({ onLogout, currentRoute }: DashboardLayoutProps onViewChange={handleViewChange} isExpanded={isExpanded} isMobileOpen={isMobileOpen} - isHovered={isHovered} - setIsHovered={setIsHovered} /> {/* Backdrop for mobile */} {isMobileOpen && ( @@ -312,7 +334,7 @@ export function DashboardLayout({ onLogout, currentRoute }: DashboardLayoutProps )}
void showRetry?: boolean + autoRetry?: boolean + retryDelay?: number + maxRetries?: number } -type State = { hasError: boolean } +type State = { hasError: boolean; retryCount: number } export class ErrorBoundary extends Component { - state: State = { hasError: false } + private retryTimeoutId?: NodeJS.Timeout + + state: State = { hasError: false, retryCount: 0 } static getDerivedStateFromError() { return { hasError: true } } - componentDidCatch() {} + componentDidCatch(error: Error, errorInfo: React.ErrorInfo) { + // Auto-retry logic for module federation loading issues + const maxRetries = this.props.maxRetries || 3 + if (this.props.autoRetry !== false && this.state.retryCount < maxRetries) { + const delay = this.props.retryDelay || 2000 + this.retryTimeoutId = setTimeout(() => { + this.setState(prevState => ({ + hasError: false, + retryCount: prevState.retryCount + 1 + })) + if (this.props.onRetry) { + this.props.onRetry() + } + }, delay) + } + } + + componentDidUpdate(prevProps: Props, prevState: State) { + // Reset retry count if error is cleared and component successfully rendered + if (prevState.hasError && !this.state.hasError && this.state.retryCount > 0) { + // Give it a moment to see if it stays error-free + setTimeout(() => { + if (!this.state.hasError) { + this.setState({ retryCount: 0 }) + } + }, 1000) + } + } + + componentWillUnmount() { + if (this.retryTimeoutId) { + clearTimeout(this.retryTimeoutId) + } + } handleRetry = () => { - this.setState({ hasError: false }) + if (this.retryTimeoutId) { + clearTimeout(this.retryTimeoutId) + } + this.setState({ hasError: false, retryCount: 0 }) if (this.props.onRetry) { this.props.onRetry() } @@ -43,6 +84,11 @@ export class ErrorBoundary extends Component {

Something went wrong loading this section

An error occurred while loading this component. Please try reloading it.

+ {this.props.autoRetry !== false && this.state.retryCount < (this.props.maxRetries || 3) && ( +

+ Retrying automatically... (Attempt {this.state.retryCount + 1} of {(this.props.maxRetries || 3) + 1}) +

+ )}
{(this.props.showRetry !== false) && (
diff --git a/management-dashboard-web-app/src/components/ExperimentForm.tsx b/management-dashboard-web-app/src/components/ExperimentForm.tsx index 9e32b60..8329411 100755 --- a/management-dashboard-web-app/src/components/ExperimentForm.tsx +++ b/management-dashboard-web-app/src/components/ExperimentForm.tsx @@ -3,7 +3,7 @@ import type { CreateExperimentRequest, UpdateExperimentRequest, ScheduleStatus, import { experimentPhaseManagement, machineTypeManagement } from '../lib/supabase' interface ExperimentFormProps { - initialData?: Partial + initialData?: Partial & { phase_id?: string | null } onSubmit: (data: CreateExperimentRequest | UpdateExperimentRequest) => Promise onCancel: () => void isEditing?: boolean @@ -12,31 +12,41 @@ interface ExperimentFormProps { } export function ExperimentForm({ initialData, onSubmit, onCancel, isEditing = false, loading = false, phaseId }: ExperimentFormProps) { - const [formData, setFormData] = useState({ - experiment_number: initialData?.experiment_number || 0, - reps_required: initialData?.reps_required || 1, - weight_per_repetition_lbs: (initialData as any)?.weight_per_repetition_lbs || 1, - soaking_duration_hr: initialData?.soaking_duration_hr || 0, - air_drying_time_min: initialData?.air_drying_time_min || 0, - plate_contact_frequency_hz: initialData?.plate_contact_frequency_hz || 1, - throughput_rate_pecans_sec: initialData?.throughput_rate_pecans_sec || 1, - crush_amount_in: initialData?.crush_amount_in || 0, - entry_exit_height_diff_in: initialData?.entry_exit_height_diff_in || 0, - // Meyer-specific (UI only) - motor_speed_hz: (initialData as any)?.motor_speed_hz || 1, - jig_displacement_inches: (initialData as any)?.jig_displacement_inches || 0, - spring_stiffness_nm: (initialData as any)?.spring_stiffness_nm || 1, - schedule_status: initialData?.schedule_status || 'pending schedule', - results_status: initialData?.results_status || 'valid', - completion_status: initialData?.completion_status || false, - phase_id: 
initialData?.phase_id || phaseId + const getInitialFormState = (d: any) => ({ + experiment_number: d?.experiment_number ?? 0, + reps_required: d?.reps_required ?? 1, + weight_per_repetition_lbs: d?.weight_per_repetition_lbs ?? 1, + soaking_duration_hr: d?.soaking?.soaking_duration_hr ?? d?.soaking_duration_hr ?? 0, + air_drying_time_min: d?.airdrying?.duration_minutes ?? d?.air_drying_time_min ?? 0, + plate_contact_frequency_hz: d?.cracking?.plate_contact_frequency_hz ?? d?.plate_contact_frequency_hz ?? 1, + throughput_rate_pecans_sec: d?.cracking?.throughput_rate_pecans_sec ?? d?.throughput_rate_pecans_sec ?? 1, + crush_amount_in: d?.cracking?.crush_amount_in ?? d?.crush_amount_in ?? 0, + entry_exit_height_diff_in: d?.cracking?.entry_exit_height_diff_in ?? d?.entry_exit_height_diff_in ?? 0, + motor_speed_hz: d?.cracking?.motor_speed_hz ?? d?.motor_speed_hz ?? 1, + jig_displacement_inches: d?.cracking?.jig_displacement_inches ?? d?.jig_displacement_inches ?? 0, + spring_stiffness_nm: d?.cracking?.spring_stiffness_nm ?? d?.spring_stiffness_nm ?? 1, + schedule_status: d?.schedule_status ?? 'pending schedule', + results_status: d?.results_status ?? 'valid', + completion_status: d?.completion_status ?? false, + phase_id: d?.phase_id ?? phaseId, + ring_gap_inches: d?.shelling?.ring_gap_inches ?? d?.ring_gap_inches ?? null, + drum_rpm: d?.shelling?.drum_rpm ?? d?.drum_rpm ?? 
null }) + const [formData, setFormData] = useState(() => getInitialFormState(initialData)) + const [errors, setErrors] = useState>({}) const [phase, setPhase] = useState(null) const [crackingMachine, setCrackingMachine] = useState(null) const [metaLoading, setMetaLoading] = useState(false) + // When initialData loads with phase config (edit mode), sync form state + useEffect(() => { + if ((initialData as any)?.id) { + setFormData(prev => ({ ...prev, ...getInitialFormState(initialData) })) + } + }, [initialData]) + useEffect(() => { const loadMeta = async () => { if (!phaseId) return @@ -76,11 +86,11 @@ export function ExperimentForm({ initialData, onSubmit, onCancel, isEditing = fa } - if (formData.soaking_duration_hr < 0) { + if ((formData.soaking_duration_hr ?? 0) < 0) { newErrors.soaking_duration_hr = 'Soaking duration cannot be negative' } - if (formData.air_drying_time_min < 0) { + if ((formData.air_drying_time_min ?? 0) < 0) { newErrors.air_drying_time_min = 'Air drying time cannot be negative' } @@ -93,7 +103,7 @@ export function ExperimentForm({ initialData, onSubmit, onCancel, isEditing = fa if (!formData.throughput_rate_pecans_sec || formData.throughput_rate_pecans_sec <= 0) { newErrors.throughput_rate_pecans_sec = 'Throughput rate must be positive' } - if (formData.crush_amount_in < 0) { + if ((formData.crush_amount_in ?? 
0) < 0) { newErrors.crush_amount_in = 'Crush amount cannot be negative' } } @@ -110,6 +120,16 @@ export function ExperimentForm({ initialData, onSubmit, onCancel, isEditing = fa } } + // Shelling: if provided, must be positive + if (phase?.has_shelling) { + if (formData.ring_gap_inches != null && (typeof formData.ring_gap_inches !== 'number' || formData.ring_gap_inches <= 0)) { + newErrors.ring_gap_inches = 'Ring gap must be positive' + } + if (formData.drum_rpm != null && (typeof formData.drum_rpm !== 'number' || formData.drum_rpm <= 0)) { + newErrors.drum_rpm = 'Drum RPM must be positive' + } + } + setErrors(newErrors) return Object.keys(newErrors).length === 0 } @@ -122,14 +142,25 @@ export function ExperimentForm({ initialData, onSubmit, onCancel, isEditing = fa } try { - // Prepare data for submission + // Prepare data: include all phase params so they are stored in experiment_soaking, experiment_airdrying, experiment_cracking, experiment_shelling const submitData = isEditing ? formData : { experiment_number: formData.experiment_number, reps_required: formData.reps_required, weight_per_repetition_lbs: formData.weight_per_repetition_lbs, results_status: formData.results_status, completion_status: formData.completion_status, - phase_id: formData.phase_id + phase_id: formData.phase_id, + soaking_duration_hr: formData.soaking_duration_hr, + air_drying_time_min: formData.air_drying_time_min, + plate_contact_frequency_hz: formData.plate_contact_frequency_hz, + throughput_rate_pecans_sec: formData.throughput_rate_pecans_sec, + crush_amount_in: formData.crush_amount_in, + entry_exit_height_diff_in: formData.entry_exit_height_diff_in, + motor_speed_hz: (formData as any).motor_speed_hz, + jig_displacement_inches: (formData as any).jig_displacement_inches, + spring_stiffness_nm: (formData as any).spring_stiffness_nm, + ring_gap_inches: formData.ring_gap_inches ?? undefined, + drum_rpm: formData.drum_rpm ?? 
undefined } await onSubmit(submitData) @@ -138,7 +169,7 @@ export function ExperimentForm({ initialData, onSubmit, onCancel, isEditing = fa } } - const handleInputChange = (field: keyof typeof formData, value: string | number | boolean) => { + const handleInputChange = (field: keyof typeof formData, value: string | number | boolean | null | undefined) => { setFormData(prev => ({ ...prev, [field]: value @@ -441,18 +472,40 @@ export function ExperimentForm({ initialData, onSubmit, onCancel, isEditing = fa

Shelling

-
+
+ + handleInputChange('drum_rpm' as any, e.target.value === '' ? null : parseInt(e.target.value, 10))} + className={`max-w-xs px-3 py-2 border rounded-lg focus:ring-2 focus:ring-blue-500 focus:border-blue-500 transition-colors text-sm ${errors.drum_rpm ? 'border-red-300' : 'border-gray-300'}`} + placeholder="e.g. 300" + min="1" step="1" /> + {errors.drum_rpm && ( +

{errors.drum_rpm}

+ )}
diff --git a/management-dashboard-web-app/src/components/ExperimentModal.tsx b/management-dashboard-web-app/src/components/ExperimentModal.tsx index b4d9a82..34797b2 100755 --- a/management-dashboard-web-app/src/components/ExperimentModal.tsx +++ b/management-dashboard-web-app/src/components/ExperimentModal.tsx @@ -1,4 +1,4 @@ -import { useState } from 'react' +import { useState, useEffect } from 'react' import { ExperimentForm } from './ExperimentForm' import { experimentManagement } from '../lib/supabase' import type { Experiment, CreateExperimentRequest, UpdateExperimentRequest } from '../lib/supabase' @@ -13,9 +13,20 @@ interface ExperimentModalProps { export function ExperimentModal({ experiment, onClose, onExperimentSaved, phaseId }: ExperimentModalProps) { const [loading, setLoading] = useState(false) const [error, setError] = useState(null) + const [initialData, setInitialData] = useState(experiment ?? undefined) const isEditing = !!experiment + useEffect(() => { + if (experiment) { + experimentManagement.getExperimentWithPhaseConfig(experiment.id) + .then((data) => setInitialData(data ?? experiment)) + .catch(() => setInitialData(experiment)) + } else { + setInitialData(undefined) + } + }, [experiment?.id]) + const handleSubmit = async (data: CreateExperimentRequest | UpdateExperimentRequest) => { setError(null) setLoading(true) @@ -24,22 +35,24 @@ export function ExperimentModal({ experiment, onClose, onExperimentSaved, phaseI let savedExperiment: Experiment if (isEditing && experiment) { - // Check if experiment number is unique (excluding current experiment) + // Check if experiment number is unique within this phase (excluding current experiment) if ('experiment_number' in data && data.experiment_number !== undefined && data.experiment_number !== experiment.experiment_number) { - const isUnique = await experimentManagement.isExperimentNumberUnique(data.experiment_number, experiment.id) + const phaseIdToCheck = data.phase_id ?? experiment.phase_id ?? 
phaseId + const isUnique = await experimentManagement.isExperimentNumberUnique(data.experiment_number, phaseIdToCheck ?? undefined, experiment.id) if (!isUnique) { - setError('Experiment number already exists. Please choose a different number.') + setError('Experiment number already exists in this phase. Please choose a different number.') return } } savedExperiment = await experimentManagement.updateExperiment(experiment.id, data) } else { - // Check if experiment number is unique for new experiments + // Check if experiment number is unique within this phase for new experiments const createData = data as CreateExperimentRequest - const isUnique = await experimentManagement.isExperimentNumberUnique(createData.experiment_number) + const phaseIdToCheck = createData.phase_id ?? phaseId + const isUnique = await experimentManagement.isExperimentNumberUnique(createData.experiment_number, phaseIdToCheck ?? undefined) if (!isUnique) { - setError('Experiment number already exists. Please choose a different number.') + setError('Experiment number already exists in this phase. Please choose a different number.') return } @@ -115,7 +128,7 @@ export function ExperimentModal({ experiment, onClose, onExperimentSaved, phaseI {/* Form */}
-

Experiment Phases

-

Select an experiment phase to view and manage its experiments

-

Experiment phases help organize experiments into logical groups for easier navigation and management.

+

Experiment Books

+

Select an experiment book to view and manage its experiments

+

Experiment books help organize experiments into logical groups for easier navigation and management.

{canManagePhases && ( )}
@@ -162,9 +162,9 @@ export function ExperimentPhases({ onPhaseSelect }: ExperimentPhasesProps) { -

No experiment phases found

+

No experiment books found

- Get started by creating your first experiment phase. + Get started by creating your first experiment book.

{canManagePhases && (
@@ -172,7 +172,7 @@ export function ExperimentPhases({ onPhaseSelect }: ExperimentPhasesProps) { onClick={() => setShowCreateModal(true)} className="inline-flex items-center px-4 py-2 border border-transparent shadow-sm text-sm font-medium rounded-md text-white bg-blue-600 hover:bg-blue-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-blue-500" > - โž• Create First Phase + โž• Create First Book
)} diff --git a/management-dashboard-web-app/src/components/PhaseExperiments.tsx b/management-dashboard-web-app/src/components/PhaseExperiments.tsx index 624d7a8..c2ae719 100644 --- a/management-dashboard-web-app/src/components/PhaseExperiments.tsx +++ b/management-dashboard-web-app/src/components/PhaseExperiments.tsx @@ -193,7 +193,7 @@ export function PhaseExperiments({ phase, onBack }: PhaseExperimentsProps) { - Back to Phases + Back to Books
@@ -203,7 +203,7 @@ export function PhaseExperiments({ phase, onBack }: PhaseExperimentsProps) { {phase.description && (

{phase.description}

)} -

Manage experiments within this phase

+

Manage experiments within this book

{canManageExperiments && ( )} - {nav.subItems && (isExpanded || isHovered || isMobileOpen) && ( + {nav.subItems && (isExpanded || isMobileOpen) && (
{ subMenuRefs.current[`submenu-${index}`] = el @@ -265,21 +261,17 @@ export function Sidebar({ className={`fixed mt-16 flex flex-col lg:mt-0 top-0 px-5 left-0 bg-white dark:bg-gray-900 dark:border-gray-800 text-gray-900 h-screen transition-all duration-300 ease-in-out z-50 border-r border-gray-200 ${isExpanded || isMobileOpen ? "w-[290px]" - : isHovered - ? "w-[290px]" - : "w-[90px]" + : "w-[90px]" } ${isMobileOpen ? "translate-x-0" : "-translate-x-full"} lg:translate-x-0`} - onMouseEnter={() => !isExpanded && setIsHovered && setIsHovered(true)} - onMouseLeave={() => setIsHovered && setIsHovered(false)} >
- {isExpanded || isHovered || isMobileOpen ? ( + {isExpanded || isMobileOpen ? ( <>

Pecan Experiments

Research Dashboard

@@ -297,12 +289,12 @@ export function Sidebar({

- {isExpanded || isHovered || isMobileOpen ? ( + {isExpanded || isMobileOpen ? ( "Menu" ) : ( diff --git a/management-dashboard-web-app/src/lib/supabase.ts b/management-dashboard-web-app/src/lib/supabase.ts index af5cebc..70a88e2 100755 --- a/management-dashboard-web-app/src/lib/supabase.ts +++ b/management-dashboard-web-app/src/lib/supabase.ts @@ -60,6 +60,8 @@ export interface Experiment { airdrying_id?: string | null cracking_id?: string | null shelling_id?: string | null + ring_gap_inches?: number | null + drum_rpm?: number | null created_at: string updated_at: string created_by: string @@ -170,6 +172,51 @@ export interface UpdateExperimentPhaseRequest { has_shelling?: boolean } +// Experiment-level phase config (one row per experiment per phase; stored in experiment_soaking, experiment_airdrying, experiment_cracking, experiment_shelling) +export interface ExperimentSoakingConfig { + id: string + experiment_id: string + soaking_duration_hr: number + created_at: string + updated_at: string + created_by: string +} + +export interface ExperimentAirdryingConfig { + id: string + experiment_id: string + duration_minutes: number + created_at: string + updated_at: string + created_by: string +} + +export interface ExperimentCrackingConfig { + id: string + experiment_id: string + machine_type_id: string + plate_contact_frequency_hz?: number | null + throughput_rate_pecans_sec?: number | null + crush_amount_in?: number | null + entry_exit_height_diff_in?: number | null + motor_speed_hz?: number | null + jig_displacement_inches?: number | null + spring_stiffness_nm?: number | null + created_at: string + updated_at: string + created_by: string +} + +export interface ExperimentShellingConfig { + id: string + experiment_id: string + ring_gap_inches?: number | null + drum_rpm?: number | null + created_at: string + updated_at: string + created_by: string +} + export interface CreateExperimentRequest { experiment_number: number reps_required: number @@ -177,6 +224,19 @@ export 
interface CreateExperimentRequest { results_status?: ResultsStatus completion_status?: boolean phase_id?: string + // Phase config (stored in experiment_soaking, experiment_airdrying, experiment_cracking, experiment_shelling) + soaking_duration_hr?: number + air_drying_time_min?: number + // Cracking: machine_type comes from book; params below are JC or Meyer specific + plate_contact_frequency_hz?: number + throughput_rate_pecans_sec?: number + crush_amount_in?: number + entry_exit_height_diff_in?: number + motor_speed_hz?: number + jig_displacement_inches?: number + spring_stiffness_nm?: number + ring_gap_inches?: number | null + drum_rpm?: number | null } export interface UpdateExperimentRequest { @@ -186,6 +246,17 @@ export interface UpdateExperimentRequest { results_status?: ResultsStatus completion_status?: boolean phase_id?: string + soaking_duration_hr?: number + air_drying_time_min?: number + plate_contact_frequency_hz?: number + throughput_rate_pecans_sec?: number + crush_amount_in?: number + entry_exit_height_diff_in?: number + motor_speed_hz?: number + jig_displacement_inches?: number + spring_stiffness_nm?: number + ring_gap_inches?: number | null + drum_rpm?: number | null } export interface CreateRepetitionRequest { @@ -614,12 +685,12 @@ export const userManagement = { } } -// Experiment phase management utility functions +// Experiment book management (table: experiment_books) export const experimentPhaseManagement = { - // Get all experiment phases + // Get all experiment books async getAllExperimentPhases(): Promise { const { data, error } = await supabase - .from('experiment_phases') + .from('experiment_books') .select('*') .order('created_at', { ascending: false }) @@ -627,10 +698,10 @@ export const experimentPhaseManagement = { return data }, - // Get experiment phase by ID + // Get experiment book by ID async getExperimentPhaseById(id: string): Promise { const { data, error } = await supabase - .from('experiment_phases') + 
.from('experiment_books') .select('*') .eq('id', id) .single() @@ -642,13 +713,13 @@ export const experimentPhaseManagement = { return data }, - // Create a new experiment phase + // Create a new experiment book async createExperimentPhase(phaseData: CreateExperimentPhaseRequest): Promise { const { data: { user }, error: authError } = await supabase.auth.getUser() if (authError || !user) throw new Error('User not authenticated') const { data, error } = await supabase - .from('experiment_phases') + .from('experiment_books') .insert({ ...phaseData, created_by: user.id @@ -660,10 +731,10 @@ export const experimentPhaseManagement = { return data }, - // Update an experiment phase + // Update an experiment book async updateExperimentPhase(id: string, updates: UpdateExperimentPhaseRequest): Promise { const { data, error } = await supabase - .from('experiment_phases') + .from('experiment_books') .update(updates) .eq('id', id) .select() @@ -673,10 +744,10 @@ export const experimentPhaseManagement = { return data }, - // Delete an experiment phase + // Delete an experiment book async deleteExperimentPhase(id: string): Promise { const { error } = await supabase - .from('experiment_phases') + .from('experiment_books') .delete() .eq('id', id) @@ -724,33 +795,170 @@ export const experimentManagement = { return data }, - // Create a new experiment + // Get experiment with its phase config (soaking, airdrying, cracking, shelling) for edit form + async getExperimentWithPhaseConfig(id: string): Promise<(Experiment & { + soaking?: ExperimentSoakingConfig | null + airdrying?: ExperimentAirdryingConfig | null + cracking?: ExperimentCrackingConfig | null + shelling?: ExperimentShellingConfig | null + }) | null> { + const experiment = await this.getExperimentById(id) + if (!experiment) return null + + const [soakingRes, airdryingRes, crackingRes, shellingRes] = await Promise.all([ + supabase.from('experiment_soaking').select('*').eq('experiment_id', id).maybeSingle(), + 
supabase.from('experiment_airdrying').select('*').eq('experiment_id', id).maybeSingle(), + supabase.from('experiment_cracking').select('*').eq('experiment_id', id).maybeSingle(), + supabase.from('experiment_shelling').select('*').eq('experiment_id', id).maybeSingle() + ]) + if (soakingRes.error) throw soakingRes.error + if (airdryingRes.error) throw airdryingRes.error + if (crackingRes.error) throw crackingRes.error + if (shellingRes.error) throw shellingRes.error + + return { + ...experiment, + soaking: soakingRes.data ?? null, + airdrying: airdryingRes.data ?? null, + cracking: crackingRes.data ?? null, + shelling: shellingRes.data ?? null + } + }, + + // Create a new experiment and its phase config rows (experiment_soaking, experiment_airdrying, experiment_cracking, experiment_shelling) async createExperiment(experimentData: CreateExperimentRequest): Promise { const { data: { user }, error: authError } = await supabase.auth.getUser() if (authError || !user) throw new Error('User not authenticated') - const { data, error } = await supabase + const phaseId = experimentData.phase_id + const corePayload = { + experiment_number: experimentData.experiment_number, + reps_required: experimentData.reps_required, + weight_per_repetition_lbs: experimentData.weight_per_repetition_lbs, + results_status: experimentData.results_status ?? 'valid', + completion_status: experimentData.completion_status ?? 
false, + phase_id: phaseId, + created_by: user.id + } + // phase_id required for phase configs + if (!phaseId) { + const { data, error } = await supabase.from('experiments').insert(corePayload).select().single() + if (error) throw error + return data + } + + const { data: experiment, error } = await supabase .from('experiments') - .insert({ - ...experimentData, - created_by: user.id - }) + .insert(corePayload) .select() .single() if (error) throw error - return data + + const book = await experimentPhaseManagement.getExperimentPhaseById(phaseId) + if (!book) return experiment + + if (book.has_soaking && experimentData.soaking_duration_hr != null) { + await supabase.from('experiment_soaking').insert({ + experiment_id: experiment.id, + soaking_duration_hr: experimentData.soaking_duration_hr, + created_by: user.id + }) + } + if (book.has_airdrying && experimentData.air_drying_time_min != null) { + await supabase.from('experiment_airdrying').insert({ + experiment_id: experiment.id, + duration_minutes: experimentData.air_drying_time_min, + created_by: user.id + }) + } + if (book.has_cracking && book.cracking_machine_type_id) { + const crackPayload: Record = { + experiment_id: experiment.id, + machine_type_id: book.cracking_machine_type_id, + created_by: user.id + } + if (experimentData.plate_contact_frequency_hz != null) crackPayload.plate_contact_frequency_hz = experimentData.plate_contact_frequency_hz + if (experimentData.throughput_rate_pecans_sec != null) crackPayload.throughput_rate_pecans_sec = experimentData.throughput_rate_pecans_sec + if (experimentData.crush_amount_in != null) crackPayload.crush_amount_in = experimentData.crush_amount_in + if (experimentData.entry_exit_height_diff_in != null) crackPayload.entry_exit_height_diff_in = experimentData.entry_exit_height_diff_in + if (experimentData.motor_speed_hz != null) crackPayload.motor_speed_hz = experimentData.motor_speed_hz + if (experimentData.jig_displacement_inches != null) 
crackPayload.jig_displacement_inches = experimentData.jig_displacement_inches + if (experimentData.spring_stiffness_nm != null) crackPayload.spring_stiffness_nm = experimentData.spring_stiffness_nm + await supabase.from('experiment_cracking').insert(crackPayload) + } + if (book.has_shelling && (experimentData.ring_gap_inches != null || experimentData.drum_rpm != null)) { + await supabase.from('experiment_shelling').insert({ + experiment_id: experiment.id, + ring_gap_inches: experimentData.ring_gap_inches ?? null, + drum_rpm: experimentData.drum_rpm ?? null, + created_by: user.id + }) + } + + return experiment }, - // Update an experiment + // Update an experiment and upsert its phase config rows async updateExperiment(id: string, updates: UpdateExperimentRequest): Promise { - const { data, error } = await supabase - .from('experiments') - .update(updates) - .eq('id', id) - .select() - .single() + const { data: { user }, error: authError } = await supabase.auth.getUser() + if (authError || !user) throw new Error('User not authenticated') + const coreKeys = ['experiment_number', 'reps_required', 'weight_per_repetition_lbs', 'results_status', 'completion_status', 'phase_id'] as const + const coreUpdates: Partial = {} + for (const k of coreKeys) { + if (updates[k] !== undefined) coreUpdates[k] = updates[k] + } + if (Object.keys(coreUpdates).length > 0) { + const { data, error } = await supabase.from('experiments').update(coreUpdates).eq('id', id).select().single() + if (error) throw error + } + + if (updates.soaking_duration_hr !== undefined) { + const { data: existing } = await supabase.from('experiment_soaking').select('id').eq('experiment_id', id).maybeSingle() + if (existing) { + await supabase.from('experiment_soaking').update({ soaking_duration_hr: updates.soaking_duration_hr, updated_at: new Date().toISOString() }).eq('experiment_id', id) + } else { + await supabase.from('experiment_soaking').insert({ experiment_id: id, soaking_duration_hr: 
updates.soaking_duration_hr, created_by: user.id }) + } + } + if (updates.air_drying_time_min !== undefined) { + const { data: existing } = await supabase.from('experiment_airdrying').select('id').eq('experiment_id', id).maybeSingle() + if (existing) { + await supabase.from('experiment_airdrying').update({ duration_minutes: updates.air_drying_time_min, updated_at: new Date().toISOString() }).eq('experiment_id', id) + } else { + await supabase.from('experiment_airdrying').insert({ experiment_id: id, duration_minutes: updates.air_drying_time_min, created_by: user.id }) + } + } + const crackKeys = ['plate_contact_frequency_hz', 'throughput_rate_pecans_sec', 'crush_amount_in', 'entry_exit_height_diff_in', 'motor_speed_hz', 'jig_displacement_inches', 'spring_stiffness_nm'] as const + const hasCrackUpdates = crackKeys.some(k => updates[k] !== undefined) + if (hasCrackUpdates) { + const { data: existing } = await supabase.from('experiment_cracking').select('id').eq('experiment_id', id).maybeSingle() + const crackPayload: Record = {} + crackKeys.forEach(k => { if (updates[k] !== undefined) crackPayload[k] = updates[k] }) + if (Object.keys(crackPayload).length > 0) { + if (existing) { + await supabase.from('experiment_cracking').update({ ...crackPayload, updated_at: new Date().toISOString() }).eq('experiment_id', id) + } else { + const exp = await this.getExperimentById(id) + const book = exp?.phase_id ? 
await experimentPhaseManagement.getExperimentPhaseById(exp.phase_id) : null + if (book?.has_cracking && book.cracking_machine_type_id) { + await supabase.from('experiment_cracking').insert({ experiment_id: id, machine_type_id: book.cracking_machine_type_id, ...crackPayload, created_by: user.id }) + } + } + } + } + if (updates.ring_gap_inches !== undefined || updates.drum_rpm !== undefined) { + const { data: existing } = await supabase.from('experiment_shelling').select('id').eq('experiment_id', id).maybeSingle() + const shellPayload = { ring_gap_inches: updates.ring_gap_inches ?? null, drum_rpm: updates.drum_rpm ?? null } + if (existing) { + await supabase.from('experiment_shelling').update({ ...shellPayload, updated_at: new Date().toISOString() }).eq('experiment_id', id) + } else { + await supabase.from('experiment_shelling').insert({ experiment_id: id, ...shellPayload, created_by: user.id }) + } + } + + const { data, error } = await supabase.from('experiments').select('*').eq('id', id).single() if (error) throw error return data }, @@ -793,13 +1001,16 @@ export const experimentManagement = { - // Check if experiment number is unique - async isExperimentNumberUnique(experimentNumber: number, excludeId?: string): Promise { + // Check if experiment number is unique within the same phase (experiment_number + phase_id must be unique) + async isExperimentNumberUnique(experimentNumber: number, phaseId?: string, excludeId?: string): Promise { let query = supabase .from('experiments') .select('id') .eq('experiment_number', experimentNumber) + if (phaseId) { + query = query.eq('phase_id', phaseId) + } if (excludeId) { query = query.neq('id', excludeId) } diff --git a/scheduling-remote/package.json b/scheduling-remote/package.json index 4d550a8..ba2c731 100644 --- a/scheduling-remote/package.json +++ b/scheduling-remote/package.json @@ -9,7 +9,7 @@ "build:watch": "vite build --watch", "serve:dist": "serve -s dist -l 3003", "preview": "vite preview --port 3003", - 
"dev:watch": "npm run build && (npm run build:watch &) && sleep 1 && npx http-server dist -p 3003 --cors -c-1" + "dev:watch": "./wait-and-serve.sh" }, "dependencies": { "@supabase/supabase-js": "^2.52.0", diff --git a/scheduling-remote/src/components/HorizontalTimelineCalendar.tsx b/scheduling-remote/src/components/HorizontalTimelineCalendar.tsx index 6f2744f..4aecb30 100644 --- a/scheduling-remote/src/components/HorizontalTimelineCalendar.tsx +++ b/scheduling-remote/src/components/HorizontalTimelineCalendar.tsx @@ -18,7 +18,13 @@ interface PhaseMarker { phase: 'soaking' | 'airdrying' | 'cracking' startTime: Date assignedConductors: string[] // Array of conductor IDs - locked: boolean +} + +interface RepetitionMetadata { + phaseName: string + experimentNumber: number + repetitionNumber: number + experimentId: string } interface HorizontalTimelineCalendarProps { @@ -28,7 +34,9 @@ interface HorizontalTimelineCalendarProps { phaseMarkers: PhaseMarker[] onMarkerDrag: (markerId: string, newTime: Date) => void onMarkerAssignConductors: (markerId: string, conductorIds: string[]) => void - onMarkerLockToggle: (markerId: string) => void + repetitionMetadata?: Record // Map from repetitionId to metadata + onScrollToRepetition?: (repetitionId: string) => void // Callback to scroll to repetition in accordion + onScheduleRepetition?: (repId: string, experimentId: string) => void // Callback to schedule repetition timeStep?: number // Minutes per pixel or time unit minHour?: number maxHour?: number @@ -36,22 +44,36 @@ interface HorizontalTimelineCalendarProps { } // Repetition border component with hover state and drag support -function RepetitionBorder({ - left, - width, +function RepetitionBorder({ + left, + width, top = 0, - isLocked, - allPhases, - times, - assignedCount, + height, + isLocked, + allPhases, + times, + assignedCount, repId, onMouseDown, isDragging = false, - dragOffset = { x: 0 } + dragOffset = { x: 0 }, + extendLeft = false, + extendRight = false, + 
phaseName, + experimentNumber, + repetitionNumber, + experimentId, + onScrollToRepetition, + onScheduleRepetition, + visibleMarkers, + getTimePosition, + isScheduledInDb = false, + children }: { left: number width: number top?: number + height: number isLocked: boolean allPhases: string times: string @@ -60,9 +82,77 @@ function RepetitionBorder({ onMouseDown?: (e: React.MouseEvent) => void isDragging?: boolean dragOffset?: { x: number } + extendLeft?: boolean + extendRight?: boolean + phaseName?: string + experimentNumber?: number + repetitionNumber?: number + experimentId?: string + onScrollToRepetition?: (repetitionId: string) => void + onScheduleRepetition?: (repId: string, experimentId: string) => void + visibleMarkers: Array<{ id: string; startTime: Date; assignedConductors: string[] }> + getTimePosition: (time: Date) => number + isScheduledInDb?: boolean + children?: React.ReactNode }) { const [isHovered, setIsHovered] = useState(false) + const handleGoToRepetition = (e: React.MouseEvent) => { + e.stopPropagation() + if (onScrollToRepetition) { + onScrollToRepetition(repId) + } + } + + const handleSchedule = (e: React.MouseEvent) => { + e.stopPropagation() + if (onScheduleRepetition && experimentId) { + onScheduleRepetition(repId, experimentId) + } + } + + // Calculate positions for the text container + const firstMarker = visibleMarkers[0] + const secondMarker = visibleMarkers[1] + + let textContainerLeft = 0 + let textContainerWidth = 0 + + if (firstMarker) { + const firstMarkerX = getTimePosition(firstMarker.startTime) + const firstMarkerLeftRelative = firstMarkerX - left + const MARKER_ICON_SIZE = 32 + // Start after the first marker (marker center + half icon size + padding) + textContainerLeft = firstMarkerLeftRelative + (MARKER_ICON_SIZE / 2) + 8 + + if (secondMarker) { + // Span to the second marker + const secondMarkerX = getTimePosition(secondMarker.startTime) + const secondMarkerLeftRelative = secondMarkerX - left + textContainerWidth = 
secondMarkerLeftRelative - (MARKER_ICON_SIZE / 2) - textContainerLeft - 8 + } else { + // Span to the end of the repetition border + textContainerWidth = width - textContainerLeft - 8 + } + } + + // Check if all markers have at least one conductor assigned + const allMarkersHaveConductors = visibleMarkers.length > 0 && visibleMarkers.every(m => m.assignedConductors.length > 0) + + // Build border style based on extensions + + // Border radius: top-left top-right bottom-right bottom-left + const borderRadius = extendLeft && extendRight + ? '0px' // No radius if extending both sides + : extendLeft + ? '0 8px 8px 0' // No radius on left side (markers extend to past) + : extendRight + ? '8px 0 0 8px' // No radius on right side (markers extend to future) + : '8px' // Full radius (default) + + // Muted styling for repetitions that have been fully scheduled in DB (gray out, but don't collapse) + const isMuted = isScheduledInDb && !isHovered && !isDragging + return (
setIsHovered(true)} onMouseLeave={() => setIsHovered(false)} onMouseDown={onMouseDown} - title={`Repetition ${repId}: ${allPhases} at ${times}${assignedCount > 0 ? ` (${assignedCount} conductors assigned)` : ''}`} - /> + > + {children} + + {/* Text container div spanning from after first marker to next marker */} + {firstMarker && textContainerWidth > 0 && (phaseName || experimentNumber !== undefined || repetitionNumber !== undefined) && ( +
+ {/* Text content (non-clickable) */} +
+ {isScheduledInDb ? ( + <> + {experimentNumber !== undefined &&
{`Exp ${experimentNumber}`}
} + {repetitionNumber !== undefined &&
{`Rep ${repetitionNumber}`}
} + + ) : ( + <> + {phaseName &&
{phaseName}
} + {experimentNumber !== undefined &&
Exp {experimentNumber}
} + {repetitionNumber !== undefined &&
Rep {repetitionNumber}
} + + )} +
+ + {/* Go to repetition button */} + + + {/* Schedule button */} + {onScheduleRepetition && experimentId && ( + + )} +
+ )} +
) } @@ -97,29 +251,31 @@ export function HorizontalTimelineCalendar({ phaseMarkers, onMarkerDrag, onMarkerAssignConductors, - onMarkerLockToggle, - timeStep = 15, // 15 minutes per time slot - minHour = 6, - maxHour = 22, + repetitionMetadata = {}, + onScrollToRepetition, + onScheduleRepetition, + timeStep = 60, // 60 minutes (1 hour) per time slot for 24 divisions + minHour = 0, + maxHour = 24, dayWidth // Width per day in pixels (optional - if not provided, will be calculated) }: HorizontalTimelineCalendarProps) { const CONDUCTOR_NAME_COLUMN_WIDTH = 128 // Width of conductor name column (w-32 = 128px) const MIN_DAY_WIDTH = 150 // Minimum width per day column const DEFAULT_DAY_WIDTH = 200 // Default width per day column - + const [draggingMarker, setDraggingMarker] = useState(null) const [dragOffset, setDragOffset] = useState({ x: 0, y: 0 }) const [hoveredMarker, setHoveredMarker] = useState(null) - const [selectedMarker, setSelectedMarker] = useState(null) - const [assignmentPanelPosition, setAssignmentPanelPosition] = useState<{ x: number; y: number } | null>(null) const [hoveredAvailability, setHoveredAvailability] = useState(null) // Format: "conductorId-availIndex" const [hoveredVerticalLine, setHoveredVerticalLine] = useState(null) // Marker ID + const [verticalLineTooltip, setVerticalLineTooltip] = useState<{ markerId: string; x: number; y: number; time: string } | null>(null) // Tooltip position and data + const [hoveredIntersection, setHoveredIntersection] = useState(null) // Format: "markerId-conductorId" const [draggingRepetition, setDraggingRepetition] = useState(null) // Repetition ID being dragged + const [showIntersections, setShowIntersections] = useState(true) // Control visibility of intersection buttons const [repetitionDragOffset, setRepetitionDragOffset] = useState({ x: 0 }) const [dragPosition, setDragPosition] = useState<{ x: number } | null>(null) // Current drag position const [containerWidth, setContainerWidth] = useState(0) const 
timelineRef = useRef(null) - const assignmentPanelRef = useRef(null) const scrollableContainersRef = useRef([]) const containerRef = useRef(null) @@ -141,12 +297,10 @@ export function HorizontalTimelineCalendar({ return phaseMarkers.filter(marker => { const markerDate = new Date(marker.startTime) markerDate.setHours(0, 0, 0, 0) // Compare by date only - const start = new Date(startDate) start.setHours(0, 0, 0, 0) const end = new Date(endDate) end.setHours(0, 0, 0, 0) - // Check if marker's date falls within the visible date range return markerDate >= start && markerDate <= end }) @@ -158,17 +312,17 @@ export function HorizontalTimelineCalendar({ if (containerWidth === 0 || days.length === 0) { return DEFAULT_DAY_WIDTH } - + // Available width = container width - conductor name column width const availableWidth = containerWidth - CONDUCTOR_NAME_COLUMN_WIDTH - + // Ensure we have positive available width if (availableWidth <= 0) { return DEFAULT_DAY_WIDTH } - + const calculatedWidth = availableWidth / days.length - + // Use calculated width if it's above minimum, otherwise use minimum return Math.max(calculatedWidth, MIN_DAY_WIDTH) }, [containerWidth, days.length]) @@ -199,12 +353,12 @@ export function HorizontalTimelineCalendar({ setTimeout(tryMeasure, 10) } } - + // Start measurement after a brief delay const timeoutId = setTimeout(tryMeasure, 0) - + window.addEventListener('resize', updateWidth) - + // Use ResizeObserver for more accurate tracking const resizeObserver = new ResizeObserver((entries) => { for (const entry of entries) { @@ -217,7 +371,6 @@ export function HorizontalTimelineCalendar({ } } }) - // Observe after a brief delay to ensure ref is attached const observeTimeout = setTimeout(() => { if (containerRef.current) { @@ -260,71 +413,47 @@ export function HorizontalTimelineCalendar({ } }, [days.length]) // Re-run when days change - // Close assignment panel when clicking outside - useEffect(() => { - const handleClickOutside = (event: MouseEvent) => { 
- if ( - selectedMarker && - assignmentPanelRef.current && - !assignmentPanelRef.current.contains(event.target as Node) && - !(event.target as HTMLElement).closest('[data-marker-id]') && - !(event.target as HTMLElement).closest('button[title="Assign Conductors"]') - ) { - setSelectedMarker(null) - setAssignmentPanelPosition(null) - } - } - if (selectedMarker && assignmentPanelPosition) { - document.addEventListener('mousedown', handleClickOutside) - return () => { - document.removeEventListener('mousedown', handleClickOutside) - } - } - }, [selectedMarker, assignmentPanelPosition]) - - // Generate time slots for a day + // Generate time slots for a day - 24 hours, one slot per hour const timeSlots = useMemo(() => { const slots: string[] = [] - for (let hour = minHour; hour < maxHour; hour++) { - for (let minute = 0; minute < 60; minute += timeStep) { - slots.push(`${hour.toString().padStart(2, '0')}:${minute.toString().padStart(2, '0')}`) - } + for (let hour = 0; hour < 24; hour++) { + slots.push(`${hour.toString().padStart(2, '0')}:00`) } return slots - }, [minHour, maxHour, timeStep]) + }, []) // Calculate pixel position for a given date/time const getTimePosition = useCallback((date: Date): number => { - const dayIndex = days.findIndex(d => + const dayIndex = days.findIndex(d => d.toDateString() === date.toDateString() ) if (dayIndex === -1) return 0 const dayStart = new Date(date) - dayStart.setHours(minHour, 0, 0, 0) - + dayStart.setHours(0, 0, 0, 0) // Start at midnight + const minutesFromStart = (date.getTime() - dayStart.getTime()) / (1000 * 60) - const slotIndex = minutesFromStart / timeStep // Use fractional for smoother positioning - - const slotWidth = effectiveDayWidth / (timeSlots.length) - + const slotIndex = minutesFromStart / 60 // 60 minutes per hour slot + + const slotWidth = effectiveDayWidth / 24 // 24 hours per day + return dayIndex * effectiveDayWidth + slotIndex * slotWidth }, [days, timeSlots, minHour, timeStep, effectiveDayWidth]) // 
Convert pixel position to date/time const getTimeFromPosition = useCallback((x: number, dayIndex: number): Date => { const dayStart = new Date(days[dayIndex]) - dayStart.setHours(minHour, 0, 0, 0) - + dayStart.setHours(0, 0, 0, 0) // Start at midnight + const relativeX = x - (dayIndex * effectiveDayWidth) - const slotWidth = effectiveDayWidth / timeSlots.length - const slotIndex = Math.max(0, Math.min(timeSlots.length - 1, Math.floor(relativeX / slotWidth))) - - const minutes = slotIndex * timeStep + const slotWidth = effectiveDayWidth / 24 // 24 hours per day + const slotIndex = Math.max(0, Math.min(23, Math.floor(relativeX / slotWidth))) + + const minutes = slotIndex * 60 // 60 minutes per hour const result = new Date(dayStart) result.setMinutes(result.getMinutes() + minutes) - + return result }, [days, timeSlots, minHour, timeStep, effectiveDayWidth]) @@ -333,60 +462,71 @@ export function HorizontalTimelineCalendar({ e.preventDefault() e.stopPropagation() const marker = phaseMarkers.find(m => m.id === markerId) - if (!marker || marker.locked) return + if (!marker) return // Find all markers in this repetition const markers = phaseMarkers.filter(m => m.repetitionId === marker.repetitionId) - if (markers.some(m => m.locked)) return + + // Check if any marker in this repetition has assigned conductors - if so, disable drag + const hasAssignedConductors = markers.some(m => m.assignedConductors.length > 0) + if (hasAssignedConductors) return if (!timelineRef.current) return - + const scrollLeft = timelineRef.current.scrollLeft const timelineRect = timelineRef.current.getBoundingClientRect() - + // Get the leftmost marker position to use as reference - const leftmostMarker = markers.reduce((prev, curr) => + const leftmostMarker = markers.reduce((prev, curr) => new Date(prev.startTime).getTime() < new Date(curr.startTime).getTime() ? 
prev : curr ) const leftmostX = getTimePosition(leftmostMarker.startTime) const borderPadding = 20 const borderLeft = leftmostX - borderPadding - + // Calculate offset from mouse to border left edge in timeline coordinates const mouseXInTimeline = e.clientX - timelineRect.left - CONDUCTOR_NAME_COLUMN_WIDTH + scrollLeft const offsetX = mouseXInTimeline - borderLeft - + setDraggingRepetition(marker.repetitionId) setRepetitionDragOffset({ x: offsetX }) setDragPosition({ x: borderLeft }) + // Hide intersections when dragging starts + setShowIntersections(false) }, [phaseMarkers, getTimePosition]) // Handle repetition border drag start const handleRepetitionMouseDown = useCallback((e: React.MouseEvent, repetitionId: string) => { e.preventDefault() e.stopPropagation() - + const markers = phaseMarkers.filter(m => m.repetitionId === repetitionId) - if (markers.length === 0 || markers.some(m => m.locked)) return + if (markers.length === 0) return + + // Check if any marker in this repetition has assigned conductors - if so, disable drag + const hasAssignedConductors = markers.some(m => m.assignedConductors.length > 0) + if (hasAssignedConductors) return if (!timelineRef.current) return - + const borderElement = e.currentTarget as HTMLElement const borderLeft = parseFloat(borderElement.style.left) || 0 const scrollLeft = timelineRef.current.scrollLeft const timelineRect = timelineRef.current.getBoundingClientRect() - + // Calculate offset from mouse to left edge of border in timeline coordinates const mouseXInTimeline = e.clientX - timelineRect.left - CONDUCTOR_NAME_COLUMN_WIDTH + scrollLeft const offsetX = mouseXInTimeline - borderLeft - + setDraggingRepetition(repetitionId) setRepetitionDragOffset({ x: offsetX }) setDragPosition({ x: borderLeft }) + // Hide intersections when dragging starts + setShowIntersections(false) }, [phaseMarkers]) // Handle mouse move during drag - only update visual position, save on mouse up @@ -400,11 +540,11 @@ export function 
HorizontalTimelineCalendar({ const scrollContainer = timelineRef.current const rect = scrollContainer.getBoundingClientRect() const scrollLeft = scrollContainer.scrollLeft - + // Drag entire repetition horizontally only - only update visual position const mouseXInTimeline = e.clientX - rect.left - CONDUCTOR_NAME_COLUMN_WIDTH + scrollLeft const borderX = mouseXInTimeline - repetitionDragOffset.x - + // Update visual position during drag (don't call onMarkerDrag here to avoid conflicts) setDragPosition({ x: borderX }) } @@ -417,36 +557,40 @@ export function HorizontalTimelineCalendar({ const scrollLeft = scrollContainer.scrollLeft const mouseXInTimeline = e.clientX - rect.left - CONDUCTOR_NAME_COLUMN_WIDTH + scrollLeft const borderX = mouseXInTimeline - repetitionDragOffset.x - + const markers = visibleMarkers.filter(m => m.repetitionId === draggingRepetition) if (markers.length > 0) { - const leftmostMarker = markers.reduce((prev, curr) => + const leftmostMarker = markers.reduce((prev, curr) => new Date(prev.startTime).getTime() < new Date(curr.startTime).getTime() ? 
prev : curr ) - + const borderPadding = 20 const leftmostMarkerNewX = borderX + borderPadding const dayIndex = Math.max(0, Math.min(days.length - 1, Math.floor(leftmostMarkerNewX / effectiveDayWidth))) const relativeX = leftmostMarkerNewX - (dayIndex * effectiveDayWidth) - const slotWidth = effectiveDayWidth / timeSlots.length - const slotIndex = Math.max(0, Math.min(timeSlots.length - 1, Math.round(relativeX / slotWidth))) - + const slotWidth = effectiveDayWidth / 24 // 24 hours per day + const slotIndex = Math.max(0, Math.min(23, Math.round(relativeX / slotWidth))) + const dayStart = new Date(days[dayIndex]) - dayStart.setHours(minHour, 0, 0, 0) - const minutes = slotIndex * timeStep + dayStart.setHours(0, 0, 0, 0) // Start at midnight + const minutes = slotIndex * 60 // 60 minutes per hour const finalTime = new Date(dayStart) finalTime.setMinutes(finalTime.getMinutes() + minutes) - + // Update only once on mouse up console.log('Updating marker position:', leftmostMarker.id, finalTime) onMarkerDrag(leftmostMarker.id, finalTime) } } - // Clear drag state setDraggingRepetition(null) setRepetitionDragOffset({ x: 0 }) setDragPosition(null) + + // Show intersections again after animation completes (200ms transition duration) + setTimeout(() => { + setShowIntersections(true) + }, 200) } window.addEventListener('mousemove', handleMouseMove) @@ -491,125 +635,23 @@ export function HorizontalTimelineCalendar({ return (
- {/* Vertical lines layer - positioned outside overflow containers */} -
- {(() => { - // Group markers by repetition to calculate row positions - const markersByRepetition: Record = {} - phaseMarkers.forEach(marker => { - if (!markersByRepetition[marker.repetitionId]) { - markersByRepetition[marker.repetitionId] = [] - } - markersByRepetition[marker.repetitionId].push(marker) - }) - - const borderPadding = 20 - const repetitionData = Object.entries(markersByRepetition).map(([repId, markers]) => { - const positions = markers.map(m => { - const dayIndex = days.findIndex(d => - d.toDateString() === new Date(m.startTime).toDateString() - ) - if (dayIndex === -1) return null - return getTimePosition(m.startTime) - }).filter((p): p is number => p !== null) - - if (positions.length === 0) return null - - const leftmost = Math.min(...positions) - const rightmost = Math.max(...positions) - const borderLeft = leftmost - borderPadding - const borderRight = borderLeft + (rightmost - leftmost) + (borderPadding * 2) - - return { repId, markers, left: borderLeft, right: borderRight } - }).filter((d): d is NonNullable => d !== null) - - const ROW_HEIGHT = 40 - const sortedRepetitions = [...repetitionData].sort((a, b) => a.left - b.left) - const repetitionRows: Array> = [] - - sortedRepetitions.forEach(rep => { - let placed = false - for (let rowIndex = 0; rowIndex < repetitionRows.length; rowIndex++) { - const row = repetitionRows[rowIndex] - const hasOverlap = row.some(existingRep => { - const threshold = 1 - return !(rep.right + threshold <= existingRep.left || rep.left - threshold >= existingRep.right) - }) - - if (!hasOverlap) { - row.push(rep) - placed = true - break - } - } - - if (!placed) { - repetitionRows.push([rep]) - } - }) - - const repIdToRowIndex: Record = {} - repetitionRows.forEach((row, rowIndex) => { - row.forEach(rep => { - repIdToRowIndex[rep.repId] = rowIndex - }) - }) - - return visibleMarkers.map((marker) => { - const style = getPhaseStyle(marker.phase) - const dayIndex = days.findIndex(d => - d.toDateString() === new 
Date(marker.startTime).toDateString() - ) - if (dayIndex === -1) return null - - const absoluteX = getTimePosition(marker.startTime) - const isDragging = draggingRepetition === marker.repetitionId - const isVerticalLineHovered = hoveredVerticalLine === marker.id - const rowIndex = repIdToRowIndex[marker.repetitionId] || 0 - - const HEADER_ROW_HEIGHT = 60 - const CONDUCTOR_ROWS_HEIGHT = conductorAvailabilities.length * 36 - const MARKER_TOP_OFFSET = 10 - const MARKER_ICON_SIZE = 32 - const markerCenterY = MARKER_TOP_OFFSET + (MARKER_ICON_SIZE / 2) - const markerRowTop = HEADER_ROW_HEIGHT + CONDUCTOR_ROWS_HEIGHT + (rowIndex * ROW_HEIGHT) - const markerCenterAbsoluteY = markerRowTop + markerCenterY - const lineTop = HEADER_ROW_HEIGHT - const lineHeight = markerCenterAbsoluteY - HEADER_ROW_HEIGHT - - // Calculate line position - if dragging, adjust based on drag position - let lineX = absoluteX - if (isDragging && dragPosition) { - const repData = repetitionData.find(r => r.repId === marker.repetitionId) - if (repData) { - const offsetFromLeftmost = absoluteX - (repData.left + borderPadding) - lineX = dragPosition.x + borderPadding + offsetFromLeftmost - } - } - - return ( -
setHoveredVerticalLine(marker.id)} - onMouseLeave={() => setHoveredVerticalLine(null)} - title={moment(marker.startTime).format('h:mm A')} - /> - ) - }) - })()} -
+ {/* Tooltip for vertical lines */} + {verticalLineTooltip && ( +
+ {verticalLineTooltip.time} + {/* Tooltip arrow */} +
+
+ )} {/* Row 1: Day Headers */}
@@ -674,7 +716,6 @@ export function HorizontalTimelineCalendar({ }} /> ))} - {/* Render availability lines across all days */} {conductor.availability.map((avail, availIndex) => { // Get absolute positions from start of timeline @@ -704,7 +745,7 @@ export function HorizontalTimelineCalendar({ return (
) })} + + {/* Render intersection indicators for markers that intersect with this conductor's availability */} + {showIntersections && visibleMarkers.map((marker) => { + // Check if marker's time falls within any of this conductor's availability windows + const intersectingAvailability = conductor.availability.find(avail => { + return marker.startTime >= avail.start && marker.startTime <= avail.end + }) + + if (!intersectingAvailability) return null + + // Calculate marker's x position + const markerX = getTimePosition(marker.startTime) + + // Check if marker is within the availability line's bounds + const availStartPos = getTimePosition(intersectingAvailability.start) + const availEndPos = getTimePosition(intersectingAvailability.end) + + if (markerX < availStartPos || markerX > availEndPos) return null + + const intersectionKey = `${marker.id}-${conductor.conductorId}` + const isHovered = hoveredIntersection === intersectionKey + const isAssigned = marker.assignedConductors.includes(conductor.conductorId) + + return ( + + ) + })}
@@ -730,7 +838,7 @@ export function HorizontalTimelineCalendar({ {/* Row 3: Phase Markers - with multiple sub-rows for stacking */}
{/* Fixed spacer to align with conductor names column */} -
{/* Scrollable background time grid */} -
+
{/* Fixed width based only on visible days - never extends */}
{days.map((day, dayIndex) => ( @@ -749,128 +857,383 @@ export function HorizontalTimelineCalendar({ left: `${dayIndex * effectiveDayWidth}px`, width: `${effectiveDayWidth}px`, height: '100%', - backgroundImage: `repeating-linear-gradient(to right, transparent, transparent ${(effectiveDayWidth / timeSlots.length) - 1}px, #e5e7eb ${(effectiveDayWidth / timeSlots.length) - 1}px, #e5e7eb ${effectiveDayWidth / timeSlots.length}px)` + backgroundImage: `repeating-linear-gradient(to right, transparent, transparent ${(effectiveDayWidth / 24) - 1}px, #e5e7eb ${(effectiveDayWidth / 24) - 1}px, #e5e7eb ${effectiveDayWidth / 24}px)` }} /> ))} {/* Group markers by repetition ID and calculate vertical stacking */} {(() => { - // Group only visible markers by repetition ID - const markersByRepetition: Record = {} - visibleMarkers.forEach(marker => { - if (!markersByRepetition[marker.repetitionId]) { - markersByRepetition[marker.repetitionId] = [] + // Group ALL markers (not just visible) by repetition ID to check boundaries + const allMarkersByRepetition: Record = {} + phaseMarkers.forEach(marker => { + if (!allMarkersByRepetition[marker.repetitionId]) { + allMarkersByRepetition[marker.repetitionId] = [] } - markersByRepetition[marker.repetitionId].push(marker) + allMarkersByRepetition[marker.repetitionId].push(marker) + }) + + // Group visible markers by repetition ID for rendering + const visibleMarkersByRepetition: Record = {} + visibleMarkers.forEach(marker => { + if (!visibleMarkersByRepetition[marker.repetitionId]) { + visibleMarkersByRepetition[marker.repetitionId] = [] + } + visibleMarkersByRepetition[marker.repetitionId].push(marker) }) - // Calculate positions for each repetition const borderPadding = 20 - const repetitionData = Object.entries(markersByRepetition).map(([repId, markers]) => { - const positions = markers.map(m => { - const dayIndex = days.findIndex(d => + const MARKER_ICON_SIZE = 32 + const MARKER_TOP_OFFSET = 10 + const MARKER_HEIGHT = MARKER_ICON_SIZE + 
(MARKER_TOP_OFFSET * 2) // Total height for a marker row + const ROW_HEIGHT = 40 // Minimum row height + + // Calculate positions for each repetition using ALL markers + const repetitionData = Object.entries(visibleMarkersByRepetition).map(([repId, visibleMarkers]) => { + const allMarkers = allMarkersByRepetition[repId] || [] + + // Get positions of visible markers + const visiblePositions = visibleMarkers.map(m => { + const dayIndex = days.findIndex(d => d.toDateString() === new Date(m.startTime).toDateString() ) if (dayIndex === -1) return null return getTimePosition(m.startTime) }).filter((p): p is number => p !== null) - if (positions.length === 0) return null + if (visiblePositions.length === 0) return null - const leftmost = Math.min(...positions) - const rightmost = Math.max(...positions) - const borderLeft = leftmost - borderPadding - const borderWidth = (rightmost - leftmost) + (borderPadding * 2) - const borderRight = borderLeft + borderWidth + // Get positions of ALL markers (including those outside viewport) + const allPositions = allMarkers.map(m => { + // Check if marker is in viewport date range + const markerDate = new Date(m.startTime) + markerDate.setHours(0, 0, 0, 0) + const start = new Date(startDate) + start.setHours(0, 0, 0, 0) + const end = new Date(endDate) + end.setHours(0, 0, 0, 0) + + if (markerDate >= start && markerDate <= end) { + const dayIndex = days.findIndex(d => + d.toDateString() === markerDate.toDateString() + ) + if (dayIndex === -1) return null + return getTimePosition(m.startTime) + } + // Marker is outside viewport - calculate if it's before or after + if (markerDate < start) { + return -Infinity // Before viewport + } + return Infinity // After viewport + }) + + const visibleLeftmost = Math.min(...visiblePositions) + const visibleRightmost = Math.max(...visiblePositions) + + // Check if markers extend beyond viewport + const hasMarkersBefore = allPositions.some(p => p === -Infinity) + const hasMarkersAfter = 
allPositions.some(p => p === Infinity) + + // Calculate border width + let borderLeft = visibleLeftmost - borderPadding + let borderWidth = (visibleRightmost - visibleLeftmost) + (borderPadding * 2) + + // If markers extend beyond, extend border to viewport edge + const viewportLeft = 0 + const viewportRight = days.length * effectiveDayWidth + + if (hasMarkersBefore) { + borderLeft = viewportLeft + borderWidth = (visibleRightmost - viewportLeft) + borderPadding + } + if (hasMarkersAfter) { + borderWidth = (viewportRight - borderLeft) + borderPadding + } return { repId, - markers, + visibleMarkers, + allMarkers, left: borderLeft, - right: borderRight, width: borderWidth, - leftmostMarkerPos: leftmost, - rightmostMarkerPos: rightmost + right: borderLeft + borderWidth, + extendLeft: hasMarkersBefore, + extendRight: hasMarkersAfter } }).filter((d): d is NonNullable => d !== null) // Calculate vertical stacking positions - // Sort repetitions by left position to process them in order + // Sort by left position to process from left to right const sortedRepetitions = [...repetitionData].sort((a, b) => a.left - b.left) - const ROW_HEIGHT = 40 // Height allocated per row const repetitionRows: Array> = [] sortedRepetitions.forEach(rep => { - // Find the first row where this repetition doesn't overlap let placed = false + // Try to place in existing rows first for (let rowIndex = 0; rowIndex < repetitionRows.length; rowIndex++) { const row = repetitionRows[rowIndex] - // Check if this repetition overlaps with any in this row - // Two repetitions overlap if they share any horizontal space - // They don't overlap if one is completely to the left or right of the other + // Check if this repetition overlaps with ANY repetition in this row const hasOverlap = row.some(existingRep => { - // Add a small threshold to avoid edge cases - const threshold = 1 - return !(rep.right + threshold <= existingRep.left || rep.left - threshold >= existingRep.right) + // Two repetitions overlap if 
they share any horizontal space + // Overlap occurs when: rep.left < existingRep.right AND rep.right > existingRep.left + return rep.left < existingRep.right && rep.right > existingRep.left }) - + + // If no overlap with any repetition in this row, we can place it here if (!hasOverlap) { row.push(rep) placed = true break } } - - // If no row found, create a new row + + // If couldn't place in any existing row (due to overlaps), create a new row if (!placed) { repetitionRows.push([rep]) } }) - // Render all repetitions with their vertical positions - // Use flexbox to stack rows and share vertical space equally + // Render all repetitions - items stick to top, not distributed return ( -
{repetitionRows.map((row, rowIndex) => (
{row.map(rep => { - const firstMarker = rep.markers[0] - const allPhases = rep.markers.map(m => getPhaseStyle(m.phase).label).join(', ') - const times = rep.markers.map(m => moment(m.startTime).format('h:mm A')).join(', ') - const totalAssigned = new Set(rep.markers.flatMap(m => m.assignedConductors)).size + const firstMarker = rep.visibleMarkers[0] + const allPhases = rep.visibleMarkers.map(m => getPhaseStyle(m.phase).label).join(', ') + const times = rep.visibleMarkers.map(m => moment(m.startTime).format('h:mm A')).join(', ') + const totalAssigned = new Set(rep.visibleMarkers.flatMap(m => m.assignedConductors)).size + + // Check if any marker has assigned conductors - if so, disable drag + const hasAssignedConductors = rep.visibleMarkers.some(m => m.assignedConductors.length > 0) + const isDraggable = !hasAssignedConductors + + // Calculate height based on markers (just enough for markers) + const repHeight = MARKER_HEIGHT + + // Calculate if dragging + const isDragging = draggingRepetition === rep.repId + const currentLeft = isDragging && dragPosition ? 
dragPosition.x : rep.left + const borderPadding = 20 + + // Render markers inside the repetition border + const markerElements = rep.visibleMarkers.map((marker) => { + const style = getPhaseStyle(marker.phase) + const absoluteX = getTimePosition(marker.startTime) + const isVerticalLineHovered = hoveredVerticalLine === marker.id + + // Check if this repetition has any assigned conductors (affects all markers) + const markerIsDraggable = !hasAssignedConductors + + // Calculate marker position relative to repetition border's left edge + // The repetition border starts at currentLeft, and markers are positioned relative to that + let markerLeftRelative = absoluteX - currentLeft + + // If dragging, maintain relative position to the original border position + if (isDragging && dragPosition) { + // Calculate offset from the original leftmost position + const originalLeftmost = Math.min(...rep.visibleMarkers.map(m => getTimePosition(m.startTime))) + const offsetFromLeftmost = absoluteX - originalLeftmost + markerLeftRelative = offsetFromLeftmost + } + + // Calculate vertical line dimensions + const HEADER_ROW_HEIGHT = 60 + const CONDUCTOR_ROWS_HEIGHT = conductorAvailabilities.length * 36 + const ROW_HEIGHT = 40 + const MARKER_TOP_OFFSET = 10 + const MARKER_ICON_SIZE = 32 + const rowIndex = repetitionRows.findIndex(r => r.includes(rep)) + + const markerCenterY = MARKER_TOP_OFFSET + (MARKER_ICON_SIZE / 2) + const lineTop = -(HEADER_ROW_HEIGHT + CONDUCTOR_ROWS_HEIGHT + (rowIndex * ROW_HEIGHT) + MARKER_TOP_OFFSET) + const lineHeight = HEADER_ROW_HEIGHT + CONDUCTOR_ROWS_HEIGHT + (rowIndex * ROW_HEIGHT) + MARKER_TOP_OFFSET + markerCenterY + + const formattedTime = moment(marker.startTime).format('h:mm A') + const formattedDate = moment(marker.startTime).format('MMM D, YYYY') + const fullTimeString = `${formattedDate} at ${formattedTime}` + + return ( +
handleMarkerMouseDown(e, marker.id) : undefined} + onMouseEnter={() => { + setHoveredMarker(marker.id) + setHoveredVerticalLine(marker.id) + }} + onMouseLeave={() => { + setHoveredMarker(null) + setHoveredVerticalLine(null) + }} + title={`${style.label} - ${moment(marker.startTime).format('MMM D, h:mm A')}${marker.assignedConductors.length > 0 ? ` (${marker.assignedConductors.length} assigned)` : ''}`} + > + {/* Vertical line extending from header to marker */} +
{ + setHoveredVerticalLine(marker.id) + const rect = e.currentTarget.getBoundingClientRect() + setVerticalLineTooltip({ + markerId: marker.id, + x: rect.left + rect.width / 2, + y: rect.top, + time: fullTimeString + }) + }} + onMouseLeave={() => { + setHoveredVerticalLine(null) + setVerticalLineTooltip(null) + }} + onMouseMove={(e) => { + const rect = e.currentTarget.getBoundingClientRect() + setVerticalLineTooltip({ + markerId: marker.id, + x: rect.left + rect.width / 2, + y: rect.top, + time: fullTimeString + }) + }} + /> + + {/* Small icon marker */} +
+ + {style.icon} + + +
+ + {/* Connection indicators on assigned conductors */} + {/* Only show dots for conductors that don't have an intersection button visible */} + {marker.assignedConductors.map((conductorId, lineIndex) => { + const conductorIndex = conductorAvailabilities.findIndex(c => c.conductorId === conductorId) + if (conductorIndex === -1) return null + + // Check if this conductor has an intersection button (i.e., their availability intersects with this marker) + const conductor = conductorAvailabilities[conductorIndex] + const hasIntersectionButton = conductor.availability.some(avail => { + return marker.startTime >= avail.start && marker.startTime <= avail.end + }) + + // Hide the dot if there's an intersection button for this conductor + if (hasIntersectionButton) return null + + const CONDUCTOR_ROW_HEIGHT = 36 + const HEADER_ROW_HEIGHT = 60 + const CONDUCTOR_ROWS_HEIGHT = conductorAvailabilities.length * 36 + const ROW_HEIGHT = 40 + const MARKER_TOP_OFFSET = 10 + const MARKER_ICON_SIZE = 32 + + const conductorRowTop = HEADER_ROW_HEIGHT + (conductorIndex * CONDUCTOR_ROW_HEIGHT) + const conductorRowCenter = conductorRowTop + (CONDUCTOR_ROW_HEIGHT / 2) + const markerCenterFromTop = HEADER_ROW_HEIGHT + CONDUCTOR_ROWS_HEIGHT + (rowIndex * ROW_HEIGHT) + MARKER_TOP_OFFSET + (MARKER_ICON_SIZE / 2) + const dotY = -(markerCenterFromTop - conductorRowCenter) + + return ( +
+ ) + })} +
+ ) + }) + + const metadata = repetitionMetadata[rep.repId] + // Convert visible markers to format needed by RepetitionBorder + const borderMarkers = rep.visibleMarkers.map(m => ({ + id: m.id, + startTime: m.startTime, + assignedConductors: m.assignedConductors + })) return ( handleRepetitionMouseDown(e, rep.repId)} - isDragging={draggingRepetition === rep.repId} + onMouseDown={isDraggable ? (e) => handleRepetitionMouseDown(e, rep.repId) : undefined} + isDragging={isDragging} dragOffset={repetitionDragOffset} - /> + extendLeft={rep.extendLeft} + extendRight={rep.extendRight} + phaseName={metadata?.phaseName} + experimentNumber={metadata?.experimentNumber} + repetitionNumber={metadata?.repetitionNumber} + experimentId={metadata?.experimentId} + isScheduledInDb={metadata?.isScheduledInDb} + onScrollToRepetition={onScrollToRepetition} + onScheduleRepetition={onScheduleRepetition} + visibleMarkers={borderMarkers} + getTimePosition={getTimePosition} + > + {markerElements} + ) })}
@@ -878,289 +1241,9 @@ export function HorizontalTimelineCalendar({
) })()} - - {/* Phase markers - positioned relative to their repetition's row */} - {(() => { - // Group only visible markers by repetition to find their row positions - const markersByRepetition: Record = {} - visibleMarkers.forEach(marker => { - if (!markersByRepetition[marker.repetitionId]) { - markersByRepetition[marker.repetitionId] = [] - } - markersByRepetition[marker.repetitionId].push(marker) - }) - - // Calculate repetition positions and rows (same logic as borders) - const borderPadding = 20 - const repetitionData = Object.entries(markersByRepetition).map(([repId, markers]) => { - const positions = markers.map(m => { - const dayIndex = days.findIndex(d => - d.toDateString() === new Date(m.startTime).toDateString() - ) - if (dayIndex === -1) return null - return getTimePosition(m.startTime) - }).filter((p): p is number => p !== null) - - if (positions.length === 0) return null - - const leftmost = Math.min(...positions) - const rightmost = Math.max(...positions) - const borderLeft = leftmost - borderPadding - const borderWidth = (rightmost - leftmost) + (borderPadding * 2) - const borderRight = borderLeft + borderWidth - - return { - repId, - markers, - left: borderLeft, - right: borderRight, - width: borderWidth - } - }).filter((d): d is NonNullable => d !== null) - - // Calculate vertical stacking (same as borders) - const ROW_HEIGHT = 40 - const sortedRepetitions = [...repetitionData].sort((a, b) => a.left - b.left) - const repetitionRows: Array> = [] - - sortedRepetitions.forEach(rep => { - let placed = false - for (let rowIndex = 0; rowIndex < repetitionRows.length; rowIndex++) { - const row = repetitionRows[rowIndex] - const hasOverlap = row.some(existingRep => { - const threshold = 1 - return !(rep.right + threshold <= existingRep.left || rep.left - threshold >= existingRep.right) - }) - - if (!hasOverlap) { - row.push(rep) - placed = true - break - } - } - - if (!placed) { - repetitionRows.push([rep]) - } - }) - - // Create a map of repetition ID 
to row index - const repIdToRowIndex: Record = {} - repetitionRows.forEach((row, rowIndex) => { - row.forEach(rep => { - repIdToRowIndex[rep.repId] = rowIndex - }) - }) - - return visibleMarkers.map((marker) => { - const style = getPhaseStyle(marker.phase) - const dayIndex = days.findIndex(d => - d.toDateString() === new Date(marker.startTime).toDateString() - ) - if (dayIndex === -1) return null - - // Get absolute position from start of timeline (includes day offset) - const absoluteX = getTimePosition(marker.startTime) - const isDragging = draggingRepetition === marker.repetitionId - const isSelected = selectedMarker === marker.id - const rowIndex = repIdToRowIndex[marker.repetitionId] || 0 - const topOffset = rowIndex * ROW_HEIGHT + 10 // 10px padding from top of row - - const isVerticalLineHovered = hoveredVerticalLine === marker.id - - // Calculate marker position - if dragging, maintain relative position to border - let markerLeft = absoluteX - if (isDragging && dragPosition) { - const repData = repetitionData.find(r => r.repId === marker.repetitionId) - if (repData) { - // Calculate offset from leftmost marker - const leftmostMarker = repData.markers.reduce((prev, curr) => - getTimePosition(prev.startTime) < getTimePosition(curr.startTime) ? prev : curr - ) - const leftmostX = getTimePosition(leftmostMarker.startTime) - const offsetFromLeftmost = absoluteX - leftmostX - // Position relative to dragged border - markerLeft = dragPosition.x + borderPadding + offsetFromLeftmost - } - } - - return ( -
handleMarkerMouseDown(e, marker.id)} - onMouseEnter={() => { - setHoveredMarker(marker.id) - setHoveredVerticalLine(marker.id) - }} - onMouseLeave={() => { - setHoveredMarker(null) - setHoveredVerticalLine(null) - }} - title={`${style.label} - ${moment(marker.startTime).format('MMM D, h:mm A')}${marker.assignedConductors.length > 0 ? ` (${marker.assignedConductors.length} assigned)` : ''}`} - > - {/* Small icon marker */} -
- - {style.icon} - - - {/* Assign Conductors button - top right corner */} - -
- - {/* Connection indicators on assigned conductors (shown as dots on the vertical line) */} - {marker.assignedConductors.map((conductorId, lineIndex) => { - const conductorIndex = conductorAvailabilities.findIndex(c => c.conductorId === conductorId) - if (conductorIndex === -1) return null - - const CONDUCTOR_ROW_HEIGHT = 36 // Height of each conductor row - const HEADER_ROW_HEIGHT = 60 // Approximate height of header row - const conductorRowTop = HEADER_ROW_HEIGHT + (conductorIndex * CONDUCTOR_ROW_HEIGHT) - const conductorRowCenter = conductorRowTop + (CONDUCTOR_ROW_HEIGHT / 2) - - // Position dot at conductor row center (negative because it's above the marker) - // Distance from marker center to conductor row center - const dotY = -(totalHeightToMarker - conductorRowCenter) - - return ( -
- ) - })} -
- ) - }) - })()}
- {/* Conductor assignment panel (shown when marker is selected) */} - {selectedMarker && assignmentPanelPosition && ( -
-
-

- Assign Conductors -

- -
-
- {conductorAvailabilities.map((conductor) => { - const marker = phaseMarkers.find(m => m.id === selectedMarker) - const isAssigned = marker?.assignedConductors.includes(conductor.conductorId) || false - - return ( -
- )}
diff --git a/scheduling-remote/src/components/Scheduling.tsx b/scheduling-remote/src/components/Scheduling.tsx index 81c16ee..fcdd8cb 100644 --- a/scheduling-remote/src/components/Scheduling.tsx +++ b/scheduling-remote/src/components/Scheduling.tsx @@ -70,8 +70,6 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => // Track repetitions that have been dropped/moved and should show time points const [repetitionsWithTimes, setRepetitionsWithTimes] = useState>(new Set()) - // Track which repetitions are locked (prevent dragging) - const [lockedSchedules, setLockedSchedules] = useState>(new Set()) // Track which repetitions are currently being scheduled const [schedulingRepetitions, setSchedulingRepetitions] = useState>(new Set()) // Track conductor assignments for each phase marker (markerId -> conductorIds[]) @@ -253,44 +251,22 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => } const toggleRepetition = (repId: string) => { + // Checking/unchecking should only control visibility on the timeline. + // It must NOT clear scheduling info or conductor assignments. 
setSelectedRepetitionIds(prev => { const next = new Set(prev) if (next.has(repId)) { + // Hide this repetition from the timeline next.delete(repId) - // Remove from scheduled repetitions when unchecked - setScheduledRepetitions(prevScheduled => { - const newScheduled = { ...prevScheduled } - delete newScheduled[repId] - return newScheduled - }) - // Clear all related state when unchecked - setRepetitionsWithTimes(prev => { - const next = new Set(prev) - next.delete(repId) - return next - }) - setLockedSchedules(prev => { - const next = new Set(prev) - next.delete(repId) - return next - }) - setSchedulingRepetitions(prev => { - const next = new Set(prev) - next.delete(repId) - return next - }) - // Re-stagger remaining repetitions - const remainingIds = Array.from(next).filter(id => id !== repId) - if (remainingIds.length > 0) { - reStaggerRepetitions(remainingIds) - } + // Keep scheduledRepetitions and repetitionsWithTimes intact so that + // re-checking the box restores the repetition in the correct spot. 
} else { + // Show this repetition on the timeline next.add(repId) // Auto-spawn when checked - pass the updated set to ensure correct stagger calculation + // spawnSingleRepetition will position the new repetition relative to existing ones + // without resetting existing positions spawnSingleRepetition(repId, next) - // Re-stagger all existing repetitions to prevent overlap - // Note: reStaggerRepetitions will automatically skip locked repetitions - reStaggerRepetitions([...next, repId]) } return next }) @@ -305,20 +281,14 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => const allSelected = allRepetitions.every(rep => prev.has(rep.id)) if (allSelected) { - // Deselect all repetitions in this phase + // Deselect all repetitions in this phase (hide from timeline only) const next = new Set(prev) allRepetitions.forEach(rep => { next.delete(rep.id) - // Remove from scheduled repetitions - setScheduledRepetitions(prevScheduled => { - const newScheduled = { ...prevScheduled } - delete newScheduled[rep.id] - return newScheduled - }) }) return next } else { - // Select all repetitions in this phase + // Select all repetitions in this phase (show on timeline) const next = new Set(prev) allRepetitions.forEach(rep => { next.add(rep.id) @@ -356,7 +326,7 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => // Re-stagger all repetitions to prevent overlap // IMPORTANT: Skip locked repetitions to prevent them from moving - const reStaggerRepetitions = useCallback((repIds: string[]) => { + const reStaggerRepetitions = useCallback((repIds: string[], onlyResetWithoutCustomTimes: boolean = false) => { const tomorrow = new Date() tomorrow.setDate(tomorrow.getDate() + 1) tomorrow.setHours(9, 0, 0, 0) @@ -364,8 +334,11 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => setScheduledRepetitions(prev => { const newScheduled = { ...prev } - // Filter out locked repetitions - they should not be 
moved - const unlockedRepIds = repIds.filter(repId => !lockedSchedules.has(repId)) + // If onlyResetWithoutCustomTimes is true, filter out repetitions that have custom times set + let unlockedRepIds = repIds + if (onlyResetWithoutCustomTimes) { + unlockedRepIds = unlockedRepIds.filter(repId => !repetitionsWithTimes.has(repId)) + } // Calculate stagger index only for unlocked repetitions let staggerIndex = 0 @@ -407,7 +380,7 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => return newScheduled }) - }, [lockedSchedules, repetitionsByExperiment, experimentsByPhase, soakingByExperiment, airdryingByExperiment]) + }, [repetitionsByExperiment, experimentsByPhase, soakingByExperiment, airdryingByExperiment, repetitionsWithTimes]) // Spawn a single repetition in calendar const spawnSingleRepetition = (repId: string, updatedSelectedIds?: Set) => { @@ -477,10 +450,11 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => let newScheduled = { ...prev } const clampToReasonableHours = (d: Date) => { + // Allow full 24 hours (midnight to midnight) const min = new Date(d) - min.setHours(5, 0, 0, 0) + min.setHours(0, 0, 0, 0) const max = new Date(d) - max.setHours(23, 0, 0, 0) + max.setHours(23, 59, 59, 999) const t = d.getTime() return new Date(Math.min(Math.max(t, min.getTime()), max.getTime())) } @@ -536,13 +510,10 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => const repetition = repetitionsByExperiment[scheduled.experimentId]?.find(r => r.id === scheduled.repetitionId) if (experiment && repetition && scheduled.soakingStart) { - const isLocked = lockedSchedules.has(scheduled.repetitionId) - const lockIcon = isLocked ? 
'๐Ÿ”’' : '๐Ÿ”“' - // Soaking marker events.push({ id: `${scheduled.repetitionId}-soaking`, - title: `${lockIcon} ๐Ÿ’ง Soaking - Exp ${experiment.experiment_number} Rep ${repetition.repetition_number}`, + title: `๐Ÿ’ง Soaking - Exp ${experiment.experiment_number} Rep ${repetition.repetition_number}`, start: scheduled.soakingStart, end: new Date(scheduled.soakingStart.getTime() + 15 * 60000), // 15 minute duration for better visibility resource: 'soaking' @@ -552,7 +523,7 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => if (scheduled.airdryingStart) { events.push({ id: `${scheduled.repetitionId}-airdrying`, - title: `${lockIcon} ๐ŸŒฌ๏ธ Airdrying - Exp ${experiment.experiment_number} Rep ${repetition.repetition_number}`, + title: `๐ŸŒฌ๏ธ Airdrying - Exp ${experiment.experiment_number} Rep ${repetition.repetition_number}`, start: scheduled.airdryingStart, end: new Date(scheduled.airdryingStart.getTime() + 15 * 60000), // 15 minute duration for better visibility resource: 'airdrying' @@ -563,7 +534,7 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => if (scheduled.crackingStart) { events.push({ id: `${scheduled.repetitionId}-cracking`, - title: `${lockIcon} โšก Cracking - Exp ${experiment.experiment_number} Rep ${repetition.repetition_number}`, + title: `โšก Cracking - Exp ${experiment.experiment_number} Rep ${repetition.repetition_number}`, start: scheduled.crackingStart, end: new Date(scheduled.crackingStart.getTime() + 15 * 60000), // 15 minute duration for better visibility resource: 'cracking' @@ -573,7 +544,7 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => }) return events - }, [scheduledRepetitions, experimentsByPhase, repetitionsByExperiment, lockedSchedules]) + }, [scheduledRepetitions, experimentsByPhase, repetitionsByExperiment]) // Memoize the calendar events const calendarEvents = useMemo(() => { @@ -609,15 +580,16 @@ export function 
ScheduleExperiment({ user, onBack }: { user: User; onBack: () => return moment(date).format('MMM D, h:mm A') } - const toggleScheduleLock = (repId: string) => { - setLockedSchedules(prev => { - const next = new Set(prev) - if (next.has(repId)) { - next.delete(repId) - } else { - next.add(repId) - } - return next + // Remove all conductor assignments from a repetition + const removeRepetitionAssignments = (repId: string) => { + const markerIdPrefix = repId + setConductorAssignments(prev => { + const newAssignments = { ...prev } + // Remove assignments for all three phases + delete newAssignments[`${markerIdPrefix}-soaking`] + delete newAssignments[`${markerIdPrefix}-airdrying`] + delete newAssignments[`${markerIdPrefix}-cracking`] + return newAssignments }) } @@ -625,24 +597,16 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => // Only make repetition markers draggable, not availability events const resource = event.resource as string if (resource === 'soaking' || resource === 'airdrying' || resource === 'cracking') { - // Check if the repetition is locked - const eventId = event.id as string - const repId = eventId.split('-')[0] - const isLocked = lockedSchedules.has(repId) - return !isLocked + return true } return false - }, [lockedSchedules]) + }, []) const eventPropGetter = useCallback((event: any) => { const resource = event.resource as string // Styling for repetition markers (foreground events) if (resource === 'soaking' || resource === 'airdrying' || resource === 'cracking') { - const eventId = event.id as string - const repId = eventId.split('-')[0] - const isLocked = lockedSchedules.has(repId) - const colors = { soaking: '#3b82f6', // blue airdrying: '#10b981', // green @@ -652,8 +616,8 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => return { style: { - backgroundColor: isLocked ? '#9ca3af' : color, // gray if locked - borderColor: isLocked ? 
color : color, // border takes original color when locked + backgroundColor: color, + borderColor: color, color: 'white', borderRadius: '8px', border: '2px solid', @@ -674,17 +638,17 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => overflow: 'hidden', textOverflow: 'ellipsis', whiteSpace: 'nowrap', - cursor: isLocked ? 'not-allowed' : 'grab', - boxShadow: isLocked ? '0 1px 2px rgba(0,0,0,0.1)' : '0 2px 4px rgba(0,0,0,0.2)', + cursor: 'grab', + boxShadow: '0 2px 4px rgba(0,0,0,0.2)', transition: 'all 0.2s ease', - opacity: isLocked ? 0.7 : 1 + opacity: 1 } } } // Default styling for other events return {} - }, [lockedSchedules]) + }, []) const scheduleRepetition = async (repId: string, experimentId: string) => { setSchedulingRepetitions(prev => new Set(prev).add(repId)) @@ -756,6 +720,51 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => } } + // Unschedule a repetition: clear its scheduling info and unassign all conductors. + const unscheduleRepetition = async (repId: string, experimentId: string) => { + setSchedulingRepetitions(prev => new Set(prev).add(repId)) + + try { + // Remove all conductor assignments for this repetition + removeRepetitionAssignments(repId) + + // Clear scheduled_date on the repetition in local state + setRepetitionsByExperiment(prev => ({ + ...prev, + [experimentId]: prev[experimentId]?.map(r => + r.id === repId ? 
{ ...r, scheduled_date: null } : r + ) || [] + })) + + // Clear scheduled times for this repetition so it disappears from the timeline + setScheduledRepetitions(prev => { + const next = { ...prev } + delete next[repId] + return next + }) + + // This repetition no longer has active times + setRepetitionsWithTimes(prev => { + const next = new Set(prev) + next.delete(repId) + return next + }) + + // Also clear scheduled_date in the database for this repetition + await repetitionManagement.updateRepetition(repId, { + scheduled_date: null + }) + } catch (error: any) { + setError(error?.message || 'Failed to unschedule repetition') + } finally { + setSchedulingRepetitions(prev => { + const next = new Set(prev) + next.delete(repId) + return next + }) + } + } + // Restore scroll position after scheduledRepetitions changes useEffect(() => { if (scrollPositionRef.current) { @@ -806,11 +815,15 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => phase: 'soaking' | 'airdrying' | 'cracking' startTime: Date assignedConductors: string[] - locked: boolean }> = [] Object.values(scheduledRepetitions).forEach(scheduled => { const repId = scheduled.repetitionId + // Only include markers for repetitions that are checked (selected) + if (!selectedRepetitionIds.has(repId)) { + return + } + const markerIdPrefix = repId if (scheduled.soakingStart) { @@ -820,8 +833,7 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => experimentId: scheduled.experimentId, phase: 'soaking', startTime: scheduled.soakingStart, - assignedConductors: conductorAssignments[`${markerIdPrefix}-soaking`] || [], - locked: lockedSchedules.has(repId) + assignedConductors: conductorAssignments[`${markerIdPrefix}-soaking`] || [] }) } @@ -832,8 +844,7 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => experimentId: scheduled.experimentId, phase: 'airdrying', startTime: scheduled.airdryingStart, - assignedConductors: 
conductorAssignments[`${markerIdPrefix}-airdrying`] || [], - locked: lockedSchedules.has(repId) + assignedConductors: conductorAssignments[`${markerIdPrefix}-airdrying`] || [] }) } @@ -844,8 +855,7 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => experimentId: scheduled.experimentId, phase: 'cracking', startTime: scheduled.crackingStart, - assignedConductors: conductorAssignments[`${markerIdPrefix}-cracking`] || [], - locked: lockedSchedules.has(repId) + assignedConductors: conductorAssignments[`${markerIdPrefix}-cracking`] || [] }) } }) @@ -856,7 +866,66 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => conductorAvailabilities, phaseMarkers } - }, [selectedConductorIds, conductors, conductorColorMap, colorPalette, availabilityEvents, scheduledRepetitions, conductorAssignments, lockedSchedules, calendarStartDate, calendarZoom]) + }, [selectedConductorIds, conductors, conductorColorMap, colorPalette, availabilityEvents, scheduledRepetitions, conductorAssignments, calendarStartDate, calendarZoom, selectedRepetitionIds]) + + // Build repetition metadata mapping for timeline display + const repetitionMetadata = useMemo(() => { + const metadata: Record = {} + + Object.values(scheduledRepetitions).forEach(scheduled => { + const repId = scheduled.repetitionId + // Only include metadata for repetitions that are checked (selected) + if (!selectedRepetitionIds.has(repId)) { + return + } + + const experiment = Object.values(experimentsByPhase).flat().find(e => e.id === scheduled.experimentId) + const repetition = Object.values(repetitionsByExperiment).flat().find(r => r.id === repId) + const phase = phases.find(p => + Object.values(experimentsByPhase[p.id] || []).some(e => e.id === scheduled.experimentId) + ) + + if (experiment && repetition && phase) { + metadata[repId] = { + phaseName: phase.name, + experimentNumber: experiment.experiment_number, + repetitionNumber: repetition.repetition_number, + 
experimentId: scheduled.experimentId, + // Consider a repetition \"scheduled\" in DB if it has a non-null scheduled_date + isScheduledInDb: Boolean(repetition.scheduled_date) + } + } + }) + + return metadata + }, [scheduledRepetitions, experimentsByPhase, repetitionsByExperiment, phases, selectedRepetitionIds]) + + // Scroll to repetition in accordion + const handleScrollToRepetition = useCallback(async (repetitionId: string) => { + // First, expand the phase if it's collapsed + const repetition = Object.values(repetitionsByExperiment).flat().find(r => r.id === repetitionId) + if (repetition) { + const experiment = Object.values(experimentsByPhase).flat().find(e => + (repetitionsByExperiment[e.id] || []).some(r => r.id === repetitionId) + ) + if (experiment) { + const phase = phases.find(p => + (experimentsByPhase[p.id] || []).some(e => e.id === experiment.id) + ) + if (phase && !expandedPhaseIds.has(phase.id)) { + await togglePhaseExpand(phase.id) + // Wait a bit for the accordion to expand + await new Promise(resolve => setTimeout(resolve, 300)) + } + } + } + + // Then scroll to the element + const element = document.getElementById(`repetition-${repetitionId}`) + if (element) { + element.scrollIntoView({ behavior: 'smooth', block: 'center' }) + } + }, [repetitionsByExperiment, experimentsByPhase, phases, expandedPhaseIds, togglePhaseExpand]) // Handlers for horizontal calendar const handleHorizontalMarkerDrag = useCallback((markerId: string, newTime: Date) => { @@ -878,21 +947,6 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => })) }, []) - const handleHorizontalMarkerLockToggle = useCallback((markerId: string) => { - // Marker ID format: ${repId}-${phase} where repId is a UUID with hyphens - // Split by '-' and take all but the last segment as repId - const parts = markerId.split('-') - const repId = parts.slice(0, -1).join('-') - setLockedSchedules(prev => { - const next = new Set(prev) - if (next.has(repId)) { - 
next.delete(repId) - } else { - next.add(repId) - } - return next - }) - }, []) return ( @@ -1027,7 +1081,9 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => phaseMarkers={horizontalCalendarData.phaseMarkers} onMarkerDrag={handleHorizontalMarkerDrag} onMarkerAssignConductors={handleHorizontalMarkerAssignConductors} - onMarkerLockToggle={handleHorizontalMarkerLockToggle} + repetitionMetadata={repetitionMetadata} + onScrollToRepetition={handleScrollToRepetition} + onScheduleRepetition={scheduleRepetition} timeStep={15} minHour={6} maxHour={22} @@ -1196,11 +1252,21 @@ export function ScheduleExperiment({ user, onBack }: { user: User; onBack: () => const checked = selectedRepetitionIds.has(rep.id) const hasTimes = repetitionsWithTimes.has(rep.id) const scheduled = scheduledRepetitions[rep.id] - const isLocked = lockedSchedules.has(rep.id) const isScheduling = schedulingRepetitions.has(rep.id) + + // Check if there are any conductor assignments + const markerIdPrefix = rep.id + const soakingConductors = conductorAssignments[`${markerIdPrefix}-soaking`] || [] + const airdryingConductors = conductorAssignments[`${markerIdPrefix}-airdrying`] || [] + const crackingConductors = conductorAssignments[`${markerIdPrefix}-cracking`] || [] + const hasAssignments = soakingConductors.length > 0 || airdryingConductors.length > 0 || crackingConductors.length > 0 return ( -
+
{/* Checkbox row */} - {/* Time points (shown only if has been dropped/moved) */} - {hasTimes && scheduled && ( + {/* Time points (shown whenever the repetition has scheduled times) */} + {scheduled && (
-
- ๐Ÿ’ง - Soaking: {formatTime(scheduled.soakingStart)} -
-
- ๐ŸŒฌ๏ธ - Airdrying: {formatTime(scheduled.airdryingStart)} -
-
- โšก - Cracking: {formatTime(scheduled.crackingStart)} -
+ {(() => { + const repId = rep.id + const markerIdPrefix = repId + + // Get assigned conductors for each phase + const soakingConductors = conductorAssignments[`${markerIdPrefix}-soaking`] || [] + const airdryingConductors = conductorAssignments[`${markerIdPrefix}-airdrying`] || [] + const crackingConductors = conductorAssignments[`${markerIdPrefix}-cracking`] || [] + + // Helper to get conductor names + const getConductorNames = (conductorIds: string[]) => { + return conductorIds.map(id => { + const conductor = conductors.find(c => c.id === id) + if (!conductor) return null + return [conductor.first_name, conductor.last_name].filter(Boolean).join(' ') || conductor.email + }).filter(Boolean).join(', ') + } + + return ( + <> +
+ ๐Ÿ’ง + Soaking: {formatTime(scheduled.soakingStart)} + {soakingConductors.length > 0 && ( + + ({getConductorNames(soakingConductors)}) + + )} +
+
+ ๐ŸŒฌ๏ธ + Airdrying: {formatTime(scheduled.airdryingStart)} + {airdryingConductors.length > 0 && ( + + ({getConductorNames(airdryingConductors)}) + + )} +
+
+ โšก + Cracking: {formatTime(scheduled.crackingStart)} + {crackingConductors.length > 0 && ( + + ({getConductorNames(crackingConductors)}) + + )} +
+ + ) + })()} - {/* Lock checkbox and Schedule button */} + {/* Remove Assignments button and Schedule/Unschedule button */}
- - + {hasAssignments && ( + + )} + {rep.scheduled_date ? ( + + ) : ( + + )}
)} diff --git a/scheduling-remote/wait-and-serve.sh b/scheduling-remote/wait-and-serve.sh new file mode 100644 index 0000000..adfdce0 --- /dev/null +++ b/scheduling-remote/wait-and-serve.sh @@ -0,0 +1,57 @@ +#!/bin/sh + +# Build the project first +echo "Building scheduling-remote..." +npm run build + +# Verify the initial build created remoteEntry.js +REMOTE_ENTRY_PATH="dist/assets/remoteEntry.js" +if [ ! -f "$REMOTE_ENTRY_PATH" ]; then + echo "ERROR: Initial build did not create remoteEntry.js!" + exit 1 +fi + +echo "Initial build complete. remoteEntry.js exists." + +# Start build:watch in the background +echo "Starting build:watch in background..." +npm run build:watch & +BUILD_WATCH_PID=$! + +# Wait a moment for build:watch to start and potentially rebuild +echo "Waiting for build:watch to stabilize..." +sleep 3 + +# Verify remoteEntry.js still exists (build:watch might have rebuilt it) +MAX_WAIT=30 +WAIT_COUNT=0 +while [ ! -f "$REMOTE_ENTRY_PATH" ] && [ $WAIT_COUNT -lt $MAX_WAIT ]; do + sleep 1 + WAIT_COUNT=$((WAIT_COUNT + 1)) + if [ $((WAIT_COUNT % 5)) -eq 0 ]; then + echo "Waiting for remoteEntry.js after build:watch... (${WAIT_COUNT}s)" + fi +done + +if [ ! -f "$REMOTE_ENTRY_PATH" ]; then + echo "ERROR: remoteEntry.js was not available after ${MAX_WAIT} seconds!" + kill $BUILD_WATCH_PID 2>/dev/null || true + exit 1 +fi + +# Wait a bit more to ensure build:watch has finished any initial rebuild +echo "Ensuring build:watch has completed initial build..." +sleep 2 + +# Check file size to ensure it's not empty or being written +FILE_SIZE=$(stat -f%z "$REMOTE_ENTRY_PATH" 2>/dev/null || stat -c%s "$REMOTE_ENTRY_PATH" 2>/dev/null || echo "0") +if [ "$FILE_SIZE" -lt 100 ]; then + echo "WARNING: remoteEntry.js seems too small (${FILE_SIZE} bytes), waiting a bit more..." + sleep 2 +fi + +echo "remoteEntry.js is ready (${FILE_SIZE} bytes). Starting http-server..." 
+ +# Start http-server and give it time to fully initialize +# Use a simple approach: start server and wait a moment for it to be ready +exec npx http-server dist -p 3003 --cors -c-1 diff --git a/supabase/.gitignore b/supabase/.gitignore new file mode 100644 index 0000000..ad9264f --- /dev/null +++ b/supabase/.gitignore @@ -0,0 +1,8 @@ +# Supabase +.branches +.temp + +# dotenvx +.env.keys +.env.local +.env.*.local diff --git a/supabase/README.md b/supabase/README.md index 9f76dd2..71b3c02 100644 --- a/supabase/README.md +++ b/supabase/README.md @@ -64,9 +64,9 @@ supabase gen types typescript --local > management-dashboard-web-app/src/types/s ## Seed Data -Seed files are run automatically after migrations when using docker-compose. They populate the database with initial data: -- `seed_01_users.sql`: Creates admin user and initial user profiles -- `seed_02_phase2_experiments.sql`: Creates initial experiment data +Seed files are run automatically after migrations when using `supabase db reset` (see `config.toml` โ†’ `[db.seed]` โ†’ `sql_paths`). 
Currently only user seed is enabled: +- `seed_01_users.sql`: Creates admin user and initial user profiles (enabled) +- `seed_02_phase2_experiments.sql`: Initial experiment data (temporarily disabled; add back to `sql_paths` in `config.toml` to re-enable) ## Configuration diff --git a/supabase/archive/test_migration.sql b/supabase/archive/test_migration.sql new file mode 100644 index 0000000..edc5378 --- /dev/null +++ b/supabase/archive/test_migration.sql @@ -0,0 +1,14 @@ +-- Test migration to create experiment_phases table +CREATE TABLE IF NOT EXISTS public.experiment_phases ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + name TEXT NOT NULL UNIQUE, + description TEXT, + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + created_by UUID NOT NULL +); + +-- Insert test data +INSERT INTO public.experiment_phases (name, description, created_by) +VALUES ('Phase 2 of JC Experiments', 'Second phase of JC Cracker experiments', '00000000-0000-0000-0000-000000000000') +ON CONFLICT (name) DO NOTHING; diff --git a/supabase/config.toml b/supabase/config.toml index 0cfe5de..53c8cb9 100755 --- a/supabase/config.toml +++ b/supabase/config.toml @@ -57,7 +57,9 @@ schema_paths = [] enabled = true # Specifies an ordered list of seed files to load during db reset. # Supports glob patterns relative to supabase directory: "./seeds/*.sql" -sql_paths = ["./seed_01_users.sql", "./seed_02_phase2_experiments.sql"] +# Temporarily only user seed; other seeds suppressed. 
+sql_paths = ["./seed_01_users.sql"] +# sql_paths = ["./seed_01_users.sql", "./seed_02_phase2_experiments.sql"] # , "./seed_04_phase2_jc_experiments.sql", "./seed_05_meyer_experiments.sql"] [db.network_restrictions] diff --git a/supabase/migrations/00008_phase_data_tables.sql b/supabase/migrations/00008_phase_data_tables.sql index 96819dd..e438590 100644 --- a/supabase/migrations/00008_phase_data_tables.sql +++ b/supabase/migrations/00008_phase_data_tables.sql @@ -70,6 +70,10 @@ CREATE TABLE IF NOT EXISTS public.shelling ( scheduled_start_time TIMESTAMP WITH TIME ZONE NOT NULL, actual_start_time TIMESTAMP WITH TIME ZONE, actual_end_time TIMESTAMP WITH TIME ZONE, + -- The space (in inches) between the sheller's rings + ring_gap_inches NUMERIC(6,2) CHECK (ring_gap_inches > 0), + -- The revolutions per minute for the sheller drum + drum_rpm INTEGER CHECK (drum_rpm > 0), created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), created_by UUID NOT NULL REFERENCES public.user_profiles(id), diff --git a/supabase/migrations/00012_unified_phase_executions.sql b/supabase/migrations/00012_unified_phase_executions.sql index 978e21f..c427443 100644 --- a/supabase/migrations/00012_unified_phase_executions.sql +++ b/supabase/migrations/00012_unified_phase_executions.sql @@ -56,6 +56,80 @@ CREATE INDEX IF NOT EXISTS idx_phase_executions_machine_type_id CREATE INDEX IF NOT EXISTS idx_phase_executions_created_by ON public.experiment_phase_executions(created_by); +-- ============================================= +-- 2.5. 
CREATE CONDUCTOR ASSIGNMENTS TABLE +-- ============================================= + +-- Table to store conductor assignments to phase executions +-- This allows multiple conductors to be assigned to each phase execution +CREATE TABLE IF NOT EXISTS public.experiment_phase_assignments ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + phase_execution_id UUID NOT NULL REFERENCES public.experiment_phase_executions(id) ON DELETE CASCADE, + conductor_id UUID NOT NULL REFERENCES public.user_profiles(id) ON DELETE CASCADE, + + -- Scheduled times for this assignment (should match phase_execution times, but stored for clarity) + scheduled_start_time TIMESTAMP WITH TIME ZONE NOT NULL, + scheduled_end_time TIMESTAMP WITH TIME ZONE, + + -- Status tracking + status TEXT NOT NULL DEFAULT 'scheduled' + CHECK (status IN ('scheduled', 'in_progress', 'completed', 'cancelled')), + + -- Optional notes about the assignment + notes TEXT, + + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + created_by UUID NOT NULL REFERENCES public.user_profiles(id), + + -- Ensure scheduled_end_time is after scheduled_start_time + CONSTRAINT valid_scheduled_time_range CHECK (scheduled_end_time IS NULL OR scheduled_end_time > scheduled_start_time), + + -- Ensure unique assignment per conductor per phase execution + CONSTRAINT unique_conductor_phase_execution UNIQUE (phase_execution_id, conductor_id) +); + +-- Indexes for conductor assignments +CREATE INDEX IF NOT EXISTS idx_phase_assignments_phase_execution_id + ON public.experiment_phase_assignments(phase_execution_id); +CREATE INDEX IF NOT EXISTS idx_phase_assignments_conductor_id + ON public.experiment_phase_assignments(conductor_id); +CREATE INDEX IF NOT EXISTS idx_phase_assignments_status + ON public.experiment_phase_assignments(status); +CREATE INDEX IF NOT EXISTS idx_phase_assignments_scheduled_start_time + ON public.experiment_phase_assignments(scheduled_start_time); +CREATE INDEX IF 
NOT EXISTS idx_phase_assignments_created_by + ON public.experiment_phase_assignments(created_by); + +-- Trigger for updated_at on conductor assignments +CREATE TRIGGER set_updated_at_phase_assignments + BEFORE UPDATE ON public.experiment_phase_assignments + FOR EACH ROW + EXECUTE FUNCTION public.handle_updated_at(); + +-- Grant permissions +GRANT ALL ON public.experiment_phase_assignments TO authenticated; + +-- Enable Row Level Security +ALTER TABLE public.experiment_phase_assignments ENABLE ROW LEVEL SECURITY; + +-- RLS Policies for conductor assignments +CREATE POLICY "Phase assignments are viewable by authenticated users" + ON public.experiment_phase_assignments + FOR SELECT USING (auth.role() = 'authenticated'); + +CREATE POLICY "Phase assignments are insertable by authenticated users" + ON public.experiment_phase_assignments + FOR INSERT WITH CHECK (auth.role() = 'authenticated'); + +CREATE POLICY "Phase assignments are updatable by authenticated users" + ON public.experiment_phase_assignments + FOR UPDATE USING (auth.role() = 'authenticated'); + +CREATE POLICY "Phase assignments are deletable by authenticated users" + ON public.experiment_phase_assignments + FOR DELETE USING (auth.role() = 'authenticated'); + -- ============================================= -- 3. 
FUNCTION: Calculate Sequential Phase Start Times -- ============================================= diff --git a/supabase/migrations/00015_experiment_shelling_params.sql b/supabase/migrations/00015_experiment_shelling_params.sql new file mode 100644 index 0000000..6b4d26c --- /dev/null +++ b/supabase/migrations/00015_experiment_shelling_params.sql @@ -0,0 +1,9 @@ +-- Add experiment-level shelling parameters (defaults for repetitions) +-- These match the shelling table attributes: ring_gap_inches, drum_rpm + +ALTER TABLE public.experiments +ADD COLUMN IF NOT EXISTS ring_gap_inches NUMERIC(6,2) CHECK (ring_gap_inches IS NULL OR ring_gap_inches > 0), +ADD COLUMN IF NOT EXISTS drum_rpm INTEGER CHECK (drum_rpm IS NULL OR drum_rpm > 0); + +COMMENT ON COLUMN public.experiments.ring_gap_inches IS 'Default space (inches) between sheller rings for this experiment'; +COMMENT ON COLUMN public.experiments.drum_rpm IS 'Default sheller drum revolutions per minute for this experiment'; diff --git a/supabase/migrations/00016_rename_experiment_phases_to_experiment_books.sql b/supabase/migrations/00016_rename_experiment_phases_to_experiment_books.sql new file mode 100644 index 0000000..0f1a922 --- /dev/null +++ b/supabase/migrations/00016_rename_experiment_phases_to_experiment_books.sql @@ -0,0 +1,399 @@ +-- Rename table experiment_phases to experiment_books +-- This migration renames the table and updates all dependent objects (views, functions, triggers, indexes, RLS). + +-- ============================================= +-- 1. RENAME TABLE +-- ============================================= + +ALTER TABLE public.experiment_phases RENAME TO experiment_books; + +-- ============================================= +-- 2. 
RENAME TRIGGER +-- ============================================= + +DROP TRIGGER IF EXISTS set_updated_at_experiment_phases ON public.experiment_books; +CREATE TRIGGER set_updated_at_experiment_books + BEFORE UPDATE ON public.experiment_books + FOR EACH ROW + EXECUTE FUNCTION public.handle_updated_at(); + +-- ============================================= +-- 3. RENAME CONSTRAINT +-- ============================================= + +ALTER TABLE public.experiment_books + RENAME CONSTRAINT ck_experiment_phases_machine_required_when_cracking + TO ck_experiment_books_machine_required_when_cracking; + +-- ============================================= +-- 4. RENAME INDEXES +-- ============================================= + +ALTER INDEX IF EXISTS public.idx_experiment_phases_name RENAME TO idx_experiment_books_name; +ALTER INDEX IF EXISTS public.idx_experiment_phases_cracking_machine_type_id RENAME TO idx_experiment_books_cracking_machine_type_id; + +-- ============================================= +-- 5. 
RLS POLICIES (drop old, create new with updated names) +-- ============================================= + +DROP POLICY IF EXISTS "Experiment phases are viewable by authenticated users" ON public.experiment_books; +DROP POLICY IF EXISTS "Experiment phases are insertable by authenticated users" ON public.experiment_books; +DROP POLICY IF EXISTS "Experiment phases are updatable by authenticated users" ON public.experiment_books; +DROP POLICY IF EXISTS "Experiment phases are deletable by authenticated users" ON public.experiment_books; + +CREATE POLICY "Experiment books are viewable by authenticated users" ON public.experiment_books + FOR SELECT USING (auth.role() = 'authenticated'); + +CREATE POLICY "Experiment books are insertable by authenticated users" ON public.experiment_books + FOR INSERT WITH CHECK (auth.role() = 'authenticated'); + +CREATE POLICY "Experiment books are updatable by authenticated users" ON public.experiment_books + FOR UPDATE USING (auth.role() = 'authenticated'); + +CREATE POLICY "Experiment books are deletable by authenticated users" ON public.experiment_books + FOR DELETE USING (auth.role() = 'authenticated'); + +-- ============================================= +-- 6. 
UPDATE FUNCTION: create_phase_executions_for_repetition (references experiment_phases) +-- ============================================= + +CREATE OR REPLACE FUNCTION create_phase_executions_for_repetition() +RETURNS TRIGGER AS $$ +DECLARE + exp_phase_config RECORD; + phase_type_list TEXT[] := ARRAY[]::TEXT[]; + phase_name TEXT; +BEGIN + SELECT + ep.has_soaking, + ep.has_airdrying, + ep.has_cracking, + ep.has_shelling, + ep.cracking_machine_type_id + INTO exp_phase_config + FROM public.experiments e + JOIN public.experiment_books ep ON e.phase_id = ep.id + WHERE e.id = NEW.experiment_id; + + IF exp_phase_config.has_soaking THEN + phase_type_list := array_append(phase_type_list, 'soaking'); + END IF; + IF exp_phase_config.has_airdrying THEN + phase_type_list := array_append(phase_type_list, 'airdrying'); + END IF; + IF exp_phase_config.has_cracking THEN + phase_type_list := array_append(phase_type_list, 'cracking'); + END IF; + IF exp_phase_config.has_shelling THEN + phase_type_list := array_append(phase_type_list, 'shelling'); + END IF; + + FOREACH phase_name IN ARRAY phase_type_list + LOOP + INSERT INTO public.experiment_phase_executions ( + repetition_id, + phase_type, + scheduled_start_time, + status, + created_by, + soaking_duration_minutes, + duration_minutes, + machine_type_id + ) + VALUES ( + NEW.id, + phase_name, + NOW(), + 'pending', + NEW.created_by, + NULL, + NULL, + CASE WHEN phase_name = 'cracking' + THEN exp_phase_config.cracking_machine_type_id + ELSE NULL END + ); + END LOOP; + + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +-- ============================================= +-- 7. 
UPDATE FUNCTION: create_sample_experiment_phases (INSERT into experiment_books) +-- ============================================= + +CREATE OR REPLACE FUNCTION public.create_sample_experiment_phases() +RETURNS VOID AS $$ +DECLARE + jc_cracker_id UUID; + meyer_cracker_id UUID; +BEGIN + SELECT id INTO jc_cracker_id FROM public.machine_types WHERE name = 'JC Cracker'; + SELECT id INTO meyer_cracker_id FROM public.machine_types WHERE name = 'Meyer Cracker'; + + INSERT INTO public.experiment_books (name, description, has_soaking, has_airdrying, has_cracking, has_shelling, cracking_machine_type_id, created_by) VALUES + ('Full Process - JC Cracker', 'Complete pecan processing with JC Cracker', true, true, true, true, jc_cracker_id, (SELECT id FROM public.user_profiles LIMIT 1)), + ('Full Process - Meyer Cracker', 'Complete pecan processing with Meyer Cracker', true, true, true, true, meyer_cracker_id, (SELECT id FROM public.user_profiles LIMIT 1)), + ('Cracking Only - JC Cracker', 'JC Cracker cracking process only', false, false, true, false, jc_cracker_id, (SELECT id FROM public.user_profiles LIMIT 1)), + ('Cracking Only - Meyer Cracker', 'Meyer Cracker cracking process only', false, false, true, false, meyer_cracker_id, (SELECT id FROM public.user_profiles LIMIT 1)) + ON CONFLICT (name) DO NOTHING; +END; +$$ LANGUAGE plpgsql SECURITY DEFINER; + +-- ============================================= +-- 8. 
UPDATE VIEWS (from 00014 - experiments_with_phases, repetitions_with_phases, experiments_with_all_reps_and_phases, get_experiment_with_reps_and_phases) +-- ============================================= + +CREATE OR REPLACE VIEW public.experiments_with_phases AS +SELECT + e.id, + e.experiment_number, + e.reps_required, + e.weight_per_repetition_lbs, + e.results_status, + e.completion_status, + e.phase_id, + e.created_at, + e.updated_at, + e.created_by, + ep.name as phase_name, + ep.description as phase_description, + ep.has_soaking, + ep.has_airdrying, + ep.has_cracking, + ep.has_shelling, + er.id as first_repetition_id, + er.repetition_number as first_repetition_number, + soaking_e.id as soaking_id, + soaking_e.scheduled_start_time as soaking_scheduled_start, + soaking_e.actual_start_time as soaking_actual_start, + soaking_e.soaking_duration_minutes, + soaking_e.scheduled_end_time as soaking_scheduled_end, + soaking_e.actual_end_time as soaking_actual_end, + airdrying_e.id as airdrying_id, + airdrying_e.scheduled_start_time as airdrying_scheduled_start, + airdrying_e.actual_start_time as airdrying_actual_start, + airdrying_e.duration_minutes as airdrying_duration, + airdrying_e.scheduled_end_time as airdrying_scheduled_end, + airdrying_e.actual_end_time as airdrying_actual_end, + cracking_e.id as cracking_id, + cracking_e.scheduled_start_time as cracking_scheduled_start, + cracking_e.actual_start_time as cracking_actual_start, + cracking_e.actual_end_time as cracking_actual_end, + mt.name as machine_type_name, + shelling_e.id as shelling_id, + shelling_e.scheduled_start_time as shelling_scheduled_start, + shelling_e.actual_start_time as shelling_actual_start, + shelling_e.actual_end_time as shelling_actual_end +FROM public.experiments e +LEFT JOIN public.experiment_books ep ON e.phase_id = ep.id +LEFT JOIN LATERAL ( + SELECT id, repetition_number + FROM public.experiment_repetitions + WHERE experiment_id = e.id + ORDER BY repetition_number + LIMIT 1 +) er ON true 
+LEFT JOIN public.experiment_phase_executions soaking_e + ON soaking_e.repetition_id = er.id AND soaking_e.phase_type = 'soaking' +LEFT JOIN public.experiment_phase_executions airdrying_e + ON airdrying_e.repetition_id = er.id AND airdrying_e.phase_type = 'airdrying' +LEFT JOIN public.experiment_phase_executions cracking_e + ON cracking_e.repetition_id = er.id AND cracking_e.phase_type = 'cracking' +LEFT JOIN public.experiment_phase_executions shelling_e + ON shelling_e.repetition_id = er.id AND shelling_e.phase_type = 'shelling' +LEFT JOIN public.machine_types mt ON cracking_e.machine_type_id = mt.id; + +CREATE OR REPLACE VIEW public.repetitions_with_phases AS +SELECT + er.id, + er.experiment_id, + er.repetition_number, + er.status, + er.created_at, + er.updated_at, + er.created_by, + e.experiment_number, + e.phase_id, + e.weight_per_repetition_lbs, + ep.name as phase_name, + ep.has_soaking, + ep.has_airdrying, + ep.has_cracking, + ep.has_shelling, + soaking_e.scheduled_start_time as soaking_scheduled_start, + soaking_e.actual_start_time as soaking_actual_start, + soaking_e.soaking_duration_minutes, + soaking_e.scheduled_end_time as soaking_scheduled_end, + soaking_e.actual_end_time as soaking_actual_end, + airdrying_e.scheduled_start_time as airdrying_scheduled_start, + airdrying_e.actual_start_time as airdrying_actual_start, + airdrying_e.duration_minutes as airdrying_duration, + airdrying_e.scheduled_end_time as airdrying_scheduled_end, + airdrying_e.actual_end_time as airdrying_actual_end, + cracking_e.scheduled_start_time as cracking_scheduled_start, + cracking_e.actual_start_time as cracking_actual_start, + cracking_e.actual_end_time as cracking_actual_end, + mt.name as machine_type_name, + shelling_e.scheduled_start_time as shelling_scheduled_start, + shelling_e.actual_start_time as shelling_actual_start, + shelling_e.actual_end_time as shelling_actual_end +FROM public.experiment_repetitions er +JOIN public.experiments e ON er.experiment_id = e.id +LEFT 
JOIN public.experiment_books ep ON e.phase_id = ep.id +LEFT JOIN public.experiment_phase_executions soaking_e + ON er.id = soaking_e.repetition_id AND soaking_e.phase_type = 'soaking' +LEFT JOIN public.experiment_phase_executions airdrying_e + ON er.id = airdrying_e.repetition_id AND airdrying_e.phase_type = 'airdrying' +LEFT JOIN public.experiment_phase_executions cracking_e + ON er.id = cracking_e.repetition_id AND cracking_e.phase_type = 'cracking' +LEFT JOIN public.experiment_phase_executions shelling_e + ON er.id = shelling_e.repetition_id AND shelling_e.phase_type = 'shelling' +LEFT JOIN public.machine_types mt ON cracking_e.machine_type_id = mt.id; + +-- experiments_with_all_reps_and_phases +CREATE OR REPLACE VIEW public.experiments_with_all_reps_and_phases AS +SELECT + e.id as experiment_id, + e.experiment_number, + e.reps_required, + e.weight_per_repetition_lbs, + e.results_status, + e.completion_status, + e.phase_id, + e.created_at as experiment_created_at, + e.updated_at as experiment_updated_at, + e.created_by as experiment_created_by, + ep.name as phase_name, + ep.description as phase_description, + ep.has_soaking, + ep.has_airdrying, + ep.has_cracking, + ep.has_shelling, + ep.cracking_machine_type_id as phase_cracking_machine_type_id, + er.id as repetition_id, + er.repetition_number, + er.status as repetition_status, + er.scheduled_date, + er.created_at as repetition_created_at, + er.updated_at as repetition_updated_at, + er.created_by as repetition_created_by, + soaking_e.id as soaking_execution_id, + soaking_e.scheduled_start_time as soaking_scheduled_start, + soaking_e.actual_start_time as soaking_actual_start, + soaking_e.soaking_duration_minutes, + soaking_e.scheduled_end_time as soaking_scheduled_end, + soaking_e.actual_end_time as soaking_actual_end, + soaking_e.status as soaking_status, + airdrying_e.id as airdrying_execution_id, + airdrying_e.scheduled_start_time as airdrying_scheduled_start, + airdrying_e.actual_start_time as 
airdrying_actual_start, + airdrying_e.duration_minutes as airdrying_duration_minutes, + airdrying_e.scheduled_end_time as airdrying_scheduled_end, + airdrying_e.actual_end_time as airdrying_actual_end, + airdrying_e.status as airdrying_status, + cracking_e.id as cracking_execution_id, + cracking_e.scheduled_start_time as cracking_scheduled_start, + cracking_e.actual_start_time as cracking_actual_start, + cracking_e.scheduled_end_time as cracking_scheduled_end, + cracking_e.actual_end_time as cracking_actual_end, + cracking_e.machine_type_id as cracking_machine_type_id, + cracking_e.status as cracking_status, + mt.name as machine_type_name, + shelling_e.id as shelling_execution_id, + shelling_e.scheduled_start_time as shelling_scheduled_start, + shelling_e.actual_start_time as shelling_actual_start, + shelling_e.scheduled_end_time as shelling_scheduled_end, + shelling_e.actual_end_time as shelling_actual_end, + shelling_e.status as shelling_status +FROM public.experiments e +LEFT JOIN public.experiment_books ep ON e.phase_id = ep.id +LEFT JOIN public.experiment_repetitions er ON er.experiment_id = e.id +LEFT JOIN public.experiment_phase_executions soaking_e + ON soaking_e.repetition_id = er.id AND soaking_e.phase_type = 'soaking' +LEFT JOIN public.experiment_phase_executions airdrying_e + ON airdrying_e.repetition_id = er.id AND airdrying_e.phase_type = 'airdrying' +LEFT JOIN public.experiment_phase_executions cracking_e + ON cracking_e.repetition_id = er.id AND cracking_e.phase_type = 'cracking' +LEFT JOIN public.experiment_phase_executions shelling_e + ON shelling_e.repetition_id = er.id AND shelling_e.phase_type = 'shelling' +LEFT JOIN public.machine_types mt ON cracking_e.machine_type_id = mt.id +ORDER BY e.experiment_number, er.repetition_number; + +-- get_experiment_with_reps_and_phases function +CREATE OR REPLACE FUNCTION public.get_experiment_with_reps_and_phases(p_experiment_id UUID) +RETURNS TABLE ( + experiment_id UUID, + experiment_number INTEGER, + 
phase_name TEXT, + repetitions JSONB +) AS $$ +BEGIN + RETURN QUERY + SELECT + e.id, + e.experiment_number, + ep.name, + COALESCE( + jsonb_agg( + jsonb_build_object( + 'repetition_id', er.id, + 'repetition_number', er.repetition_number, + 'status', er.status, + 'scheduled_date', er.scheduled_date, + 'soaking', jsonb_build_object( + 'scheduled_start', soaking_e.scheduled_start_time, + 'actual_start', soaking_e.actual_start_time, + 'duration_minutes', soaking_e.soaking_duration_minutes, + 'scheduled_end', soaking_e.scheduled_end_time, + 'actual_end', soaking_e.actual_end_time, + 'status', soaking_e.status + ), + 'airdrying', jsonb_build_object( + 'scheduled_start', airdrying_e.scheduled_start_time, + 'actual_start', airdrying_e.actual_start_time, + 'duration_minutes', airdrying_e.duration_minutes, + 'scheduled_end', airdrying_e.scheduled_end_time, + 'actual_end', airdrying_e.actual_end_time, + 'status', airdrying_e.status + ), + 'cracking', jsonb_build_object( + 'scheduled_start', cracking_e.scheduled_start_time, + 'actual_start', cracking_e.actual_start_time, + 'scheduled_end', cracking_e.scheduled_end_time, + 'actual_end', cracking_e.actual_end_time, + 'machine_type_id', cracking_e.machine_type_id, + 'machine_type_name', mt.name, + 'status', cracking_e.status + ), + 'shelling', jsonb_build_object( + 'scheduled_start', shelling_e.scheduled_start_time, + 'actual_start', shelling_e.actual_start_time, + 'scheduled_end', shelling_e.scheduled_end_time, + 'actual_end', shelling_e.actual_end_time, + 'status', shelling_e.status + ) + ) + ORDER BY er.repetition_number + ), + '[]'::jsonb + ) as repetitions + FROM public.experiments e + LEFT JOIN public.experiment_books ep ON e.phase_id = ep.id + LEFT JOIN public.experiment_repetitions er ON er.experiment_id = e.id + LEFT JOIN public.experiment_phase_executions soaking_e + ON soaking_e.repetition_id = er.id AND soaking_e.phase_type = 'soaking' + LEFT JOIN public.experiment_phase_executions airdrying_e + ON 
airdrying_e.repetition_id = er.id AND airdrying_e.phase_type = 'airdrying' + LEFT JOIN public.experiment_phase_executions cracking_e + ON cracking_e.repetition_id = er.id AND cracking_e.phase_type = 'cracking' + LEFT JOIN public.experiment_phase_executions shelling_e + ON shelling_e.repetition_id = er.id AND shelling_e.phase_type = 'shelling' + LEFT JOIN public.machine_types mt ON cracking_e.machine_type_id = mt.id + WHERE e.id = p_experiment_id + GROUP BY e.id, e.experiment_number, ep.name; +END; +$$ LANGUAGE plpgsql SECURITY DEFINER; + +GRANT SELECT ON public.experiments_with_all_reps_and_phases TO authenticated; +GRANT EXECUTE ON FUNCTION public.get_experiment_with_reps_and_phases(UUID) TO authenticated; diff --git a/supabase/migrations/00017_experiment_phase_config_tables.sql b/supabase/migrations/00017_experiment_phase_config_tables.sql new file mode 100644 index 0000000..fc31c93 --- /dev/null +++ b/supabase/migrations/00017_experiment_phase_config_tables.sql @@ -0,0 +1,118 @@ +-- Experiment-level phase config tables +-- One row per experiment per phase; linked by experiment_id. Used when creating an experiment +-- so soaking, airdrying, cracking, and shelling parameters are stored and can be applied to repetitions. + +-- ============================================= +-- 1. 
EXPERIMENT_SOAKING (template for soaking phase) +-- ============================================= + +CREATE TABLE IF NOT EXISTS public.experiment_soaking ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + experiment_id UUID NOT NULL REFERENCES public.experiments(id) ON DELETE CASCADE, + soaking_duration_hr DOUBLE PRECISION NOT NULL CHECK (soaking_duration_hr >= 0), + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + created_by UUID NOT NULL REFERENCES public.user_profiles(id), + CONSTRAINT unique_experiment_soaking_per_experiment UNIQUE (experiment_id) +); + +CREATE INDEX IF NOT EXISTS idx_experiment_soaking_experiment_id ON public.experiment_soaking(experiment_id); +GRANT ALL ON public.experiment_soaking TO authenticated; +ALTER TABLE public.experiment_soaking ENABLE ROW LEVEL SECURITY; +CREATE POLICY "Experiment soaking config is viewable by authenticated" ON public.experiment_soaking FOR SELECT USING (auth.role() = 'authenticated'); +CREATE POLICY "Experiment soaking config is insertable by authenticated" ON public.experiment_soaking FOR INSERT WITH CHECK (auth.role() = 'authenticated'); +CREATE POLICY "Experiment soaking config is updatable by authenticated" ON public.experiment_soaking FOR UPDATE USING (auth.role() = 'authenticated'); +CREATE POLICY "Experiment soaking config is deletable by authenticated" ON public.experiment_soaking FOR DELETE USING (auth.role() = 'authenticated'); + +CREATE TRIGGER set_updated_at_experiment_soaking + BEFORE UPDATE ON public.experiment_soaking + FOR EACH ROW EXECUTE FUNCTION public.handle_updated_at(); + +-- ============================================= +-- 2. 
EXPERIMENT_AIRDRYING (template for airdrying phase) +-- ============================================= + +CREATE TABLE IF NOT EXISTS public.experiment_airdrying ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + experiment_id UUID NOT NULL REFERENCES public.experiments(id) ON DELETE CASCADE, + duration_minutes INTEGER NOT NULL CHECK (duration_minutes >= 0), + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + created_by UUID NOT NULL REFERENCES public.user_profiles(id), + CONSTRAINT unique_experiment_airdrying_per_experiment UNIQUE (experiment_id) +); + +CREATE INDEX IF NOT EXISTS idx_experiment_airdrying_experiment_id ON public.experiment_airdrying(experiment_id); +GRANT ALL ON public.experiment_airdrying TO authenticated; +ALTER TABLE public.experiment_airdrying ENABLE ROW LEVEL SECURITY; +CREATE POLICY "Experiment airdrying config is viewable by authenticated" ON public.experiment_airdrying FOR SELECT USING (auth.role() = 'authenticated'); +CREATE POLICY "Experiment airdrying config is insertable by authenticated" ON public.experiment_airdrying FOR INSERT WITH CHECK (auth.role() = 'authenticated'); +CREATE POLICY "Experiment airdrying config is updatable by authenticated" ON public.experiment_airdrying FOR UPDATE USING (auth.role() = 'authenticated'); +CREATE POLICY "Experiment airdrying config is deletable by authenticated" ON public.experiment_airdrying FOR DELETE USING (auth.role() = 'authenticated'); + +CREATE TRIGGER set_updated_at_experiment_airdrying + BEFORE UPDATE ON public.experiment_airdrying + FOR EACH ROW EXECUTE FUNCTION public.handle_updated_at(); + +-- ============================================= +-- 3. 
EXPERIMENT_CRACKING (template for cracking; supports JC and Meyer params) +-- ============================================= + +CREATE TABLE IF NOT EXISTS public.experiment_cracking ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + experiment_id UUID NOT NULL REFERENCES public.experiments(id) ON DELETE CASCADE, + machine_type_id UUID NOT NULL REFERENCES public.machine_types(id) ON DELETE RESTRICT, + -- JC Cracker parameters (nullable; used when machine is JC) + plate_contact_frequency_hz DOUBLE PRECISION CHECK (plate_contact_frequency_hz IS NULL OR plate_contact_frequency_hz > 0), + throughput_rate_pecans_sec DOUBLE PRECISION CHECK (throughput_rate_pecans_sec IS NULL OR throughput_rate_pecans_sec > 0), + crush_amount_in DOUBLE PRECISION CHECK (crush_amount_in IS NULL OR crush_amount_in >= 0), + entry_exit_height_diff_in DOUBLE PRECISION, + -- Meyer Cracker parameters (nullable; used when machine is Meyer) + motor_speed_hz DOUBLE PRECISION CHECK (motor_speed_hz IS NULL OR motor_speed_hz > 0), + jig_displacement_inches DOUBLE PRECISION CHECK (jig_displacement_inches IS NULL OR jig_displacement_inches >= 0), + spring_stiffness_nm DOUBLE PRECISION CHECK (spring_stiffness_nm IS NULL OR spring_stiffness_nm > 0), + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + created_by UUID NOT NULL REFERENCES public.user_profiles(id), + CONSTRAINT unique_experiment_cracking_per_experiment UNIQUE (experiment_id) +); + +CREATE INDEX IF NOT EXISTS idx_experiment_cracking_experiment_id ON public.experiment_cracking(experiment_id); +CREATE INDEX IF NOT EXISTS idx_experiment_cracking_machine_type_id ON public.experiment_cracking(machine_type_id); +GRANT ALL ON public.experiment_cracking TO authenticated; +ALTER TABLE public.experiment_cracking ENABLE ROW LEVEL SECURITY; +CREATE POLICY "Experiment cracking config is viewable by authenticated" ON public.experiment_cracking FOR SELECT USING (auth.role() = 'authenticated'); +CREATE 
POLICY "Experiment cracking config is insertable by authenticated" ON public.experiment_cracking FOR INSERT WITH CHECK (auth.role() = 'authenticated'); +CREATE POLICY "Experiment cracking config is updatable by authenticated" ON public.experiment_cracking FOR UPDATE USING (auth.role() = 'authenticated'); +CREATE POLICY "Experiment cracking config is deletable by authenticated" ON public.experiment_cracking FOR DELETE USING (auth.role() = 'authenticated'); + +CREATE TRIGGER set_updated_at_experiment_cracking + BEFORE UPDATE ON public.experiment_cracking + FOR EACH ROW EXECUTE FUNCTION public.handle_updated_at(); + +-- ============================================= +-- 4. EXPERIMENT_SHELLING (template for shelling phase) +-- ============================================= + +CREATE TABLE IF NOT EXISTS public.experiment_shelling ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + experiment_id UUID NOT NULL REFERENCES public.experiments(id) ON DELETE CASCADE, + ring_gap_inches NUMERIC(6,2) CHECK (ring_gap_inches IS NULL OR ring_gap_inches > 0), + drum_rpm INTEGER CHECK (drum_rpm IS NULL OR drum_rpm > 0), + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + created_by UUID NOT NULL REFERENCES public.user_profiles(id), + CONSTRAINT unique_experiment_shelling_per_experiment UNIQUE (experiment_id) +); + +CREATE INDEX IF NOT EXISTS idx_experiment_shelling_experiment_id ON public.experiment_shelling(experiment_id); +GRANT ALL ON public.experiment_shelling TO authenticated; +ALTER TABLE public.experiment_shelling ENABLE ROW LEVEL SECURITY; +CREATE POLICY "Experiment shelling config is viewable by authenticated" ON public.experiment_shelling FOR SELECT USING (auth.role() = 'authenticated'); +CREATE POLICY "Experiment shelling config is insertable by authenticated" ON public.experiment_shelling FOR INSERT WITH CHECK (auth.role() = 'authenticated'); +CREATE POLICY "Experiment shelling config is updatable by authenticated" ON 
public.experiment_shelling FOR UPDATE USING (auth.role() = 'authenticated'); +CREATE POLICY "Experiment shelling config is deletable by authenticated" ON public.experiment_shelling FOR DELETE USING (auth.role() = 'authenticated'); + +CREATE TRIGGER set_updated_at_experiment_shelling + BEFORE UPDATE ON public.experiment_shelling + FOR EACH ROW EXECUTE FUNCTION public.handle_updated_at(); diff --git a/supabase/seed_01_users.sql b/supabase/seed_01_users.sql index 2ae1120..50629c3 100755 --- a/supabase/seed_01_users.sql +++ b/supabase/seed_01_users.sql @@ -566,11 +566,11 @@ INSERT INTO public.machine_types (name, description, created_by) VALUES ON CONFLICT (name) DO NOTHING; -- ============================================= --- 5. CREATE EXPERIMENT PHASES +-- 5. CREATE EXPERIMENT BOOKS (table renamed from experiment_phases in migration 00016) -- ============================================= --- Create "Phase 2 of JC Experiments" phase -INSERT INTO public.experiment_phases (name, description, has_soaking, has_airdrying, has_cracking, has_shelling, cracking_machine_type_id, created_by) +-- Create "Phase 2 of JC Experiments" book +INSERT INTO public.experiment_books (name, description, has_soaking, has_airdrying, has_cracking, has_shelling, cracking_machine_type_id, created_by) SELECT 'Phase 2 of JC Experiments', 'Second phase of JC Cracker experiments for pecan processing optimization', @@ -584,8 +584,8 @@ FROM public.user_profiles up WHERE up.email = 's.alireza.v@gmail.com' ; --- Create "Post Workshop Meyer Experiments" phase -INSERT INTO public.experiment_phases (name, description, has_soaking, has_airdrying, has_cracking, has_shelling, cracking_machine_type_id, created_by) +-- Create "Post Workshop Meyer Experiments" book +INSERT INTO public.experiment_books (name, description, has_soaking, has_airdrying, has_cracking, has_shelling, cracking_machine_type_id, created_by) SELECT 'Post Workshop Meyer Experiments', 'Post workshop Meyer Cracker experiments for pecan 
processing optimization', diff --git a/supabase/seed_02_phase2_experiments.sql b/supabase/seed_02_phase2_experiments.sql index 6117e7f..c6cdced 100644 --- a/supabase/seed_02_phase2_experiments.sql +++ b/supabase/seed_02_phase2_experiments.sql @@ -1,7 +1,9 @@ -- ============================================== -- 6. CREATE EXPERIMENTS FOR PHASE 2 -- ============================================== - +-- TEMPORARILY DISABLED (see config.toml sql_paths). When re-enabling, replace +-- all "experiment_phases" with "experiment_books" (table renamed in migration 00016). +-- -- This seed file creates experiments from phase_2_JC_experimental_run_sheet.csv -- Each experiment has 3 repetitions with specific parameters -- Experiment numbers are incremented by 1 (CSV 0-19 becomes DB 1-20)