WIP: integrate-old-refactors-of-github #1
5
.gitignore
vendored
5
.gitignore
vendored
@@ -6,6 +6,7 @@ __pycache__/
|
||||
.uv/
|
||||
*.env
|
||||
.env.*.local
|
||||
.host-ip
|
||||
.pytest_cache/
|
||||
.mypy_cache/
|
||||
|
||||
@@ -35,6 +36,10 @@ management-dashboard-web-app/users.txt
|
||||
|
||||
# Jupyter Notebooks
|
||||
*.ipynb
|
||||
supabase/.temp/cli-latest
|
||||
|
||||
# Archive env backups (may contain secrets)
|
||||
archive/management-dashboard-web-app/env-backups/
|
||||
# Nix
|
||||
result
|
||||
result-*
|
||||
|
||||
7
archive/management-dashboard-web-app/README.md
Normal file
7
archive/management-dashboard-web-app/README.md
Normal file
@@ -0,0 +1,7 @@
|
||||
# Archive: management-dashboard-web-app legacy/backup files
|
||||
|
||||
Moved from `management-dashboard-web-app/` so the app directory only contains active code and config.
|
||||
|
||||
- **env-backups/** – Old `.env.backup` and timestamped backup (Supabase URL/key). Keep out of version control if they contain secrets.
|
||||
- **experiment-data/** – CSV run sheets: `phase_2_JC_experimental_run_sheet.csv`, `post_workshop_meyer_experiments.csv`. Source/reference data for experiments.
|
||||
- **test-api-fix.js** – One-off test script for camera config API; not part of the app build.
|
||||
@@ -12,9 +12,16 @@ PROJECT_ROOT="$SCRIPT_DIR"
|
||||
# Change to project root
|
||||
cd "$PROJECT_ROOT" || exit 1
|
||||
|
||||
# Detect host IP
|
||||
HOST_IP=$("$SCRIPT_DIR/scripts/get-host-ip.sh")
|
||||
if [ $? -ne 0 ] || [ -z "$HOST_IP" ] || [ "$HOST_IP" = "127.0.0.1" ]; then
|
||||
# Host IP: use HOST_IP env, then .host-ip file, then auto-detect
|
||||
if [ -z "$HOST_IP" ] || [ "$HOST_IP" = "127.0.0.1" ]; then
|
||||
if [ -f "$PROJECT_ROOT/.host-ip" ]; then
|
||||
HOST_IP=$(cat "$PROJECT_ROOT/.host-ip" | tr -d '\n\r' | awk '{print $1}')
|
||||
fi
|
||||
fi
|
||||
if [ -z "$HOST_IP" ] || [ "$HOST_IP" = "127.0.0.1" ]; then
|
||||
HOST_IP=$("$SCRIPT_DIR/scripts/get-host-ip.sh" 2>/dev/null) || true
|
||||
fi
|
||||
if [ -z "$HOST_IP" ] || [ "$HOST_IP" = "127.0.0.1" ]; then
|
||||
echo "Warning: Could not detect host IP, using localhost" >&2
|
||||
HOST_IP="localhost"
|
||||
fi
|
||||
|
||||
@@ -2,10 +2,10 @@ networks:
|
||||
usda-vision-network:
|
||||
driver: bridge
|
||||
|
||||
volumes:
|
||||
supabase-db:
|
||||
driver: local
|
||||
supabase-storage:
|
||||
# volumes:
|
||||
# supabase-db:
|
||||
# driver: local
|
||||
# supabase-storage:
|
||||
|
||||
services:
|
||||
# ============================================================================
|
||||
@@ -17,7 +17,7 @@ services:
|
||||
# - Filter by label: docker compose ps --filter "label=com.usda-vision.service=supabase"
|
||||
# - Or use service names: docker compose ps supabase-*
|
||||
#
|
||||
# NOTE: Currently commented out to test Supabase CLI setup from management-dashboard-web-app
|
||||
# NOTE: Supabase CLI and docker-compose use root supabase/
|
||||
|
||||
# # # Supabase Database
|
||||
# # supabase-db:
|
||||
@@ -400,6 +400,8 @@ services:
|
||||
video-remote:
|
||||
container_name: usda-vision-video-remote
|
||||
image: node:20-alpine
|
||||
tty: true
|
||||
stdin_open: true
|
||||
working_dir: /app
|
||||
environment:
|
||||
- CHOKIDAR_USEPOLLING=true
|
||||
@@ -424,6 +426,8 @@ services:
|
||||
vision-system-remote:
|
||||
container_name: usda-vision-vision-system-remote
|
||||
image: node:20-alpine
|
||||
tty: true
|
||||
stdin_open: true
|
||||
working_dir: /app
|
||||
environment:
|
||||
- CHOKIDAR_USEPOLLING=true
|
||||
@@ -447,6 +451,8 @@ services:
|
||||
scheduling-remote:
|
||||
container_name: usda-vision-scheduling-remote
|
||||
image: node:20-alpine
|
||||
tty: true
|
||||
stdin_open: true
|
||||
working_dir: /app
|
||||
env_file:
|
||||
- ./management-dashboard-web-app/.env
|
||||
@@ -466,6 +472,14 @@ services:
|
||||
- "3003:3003"
|
||||
networks:
|
||||
- usda-vision-network
|
||||
develop:
|
||||
watch:
|
||||
- path: ./scheduling-remote
|
||||
action: restart
|
||||
ignore:
|
||||
- node_modules/
|
||||
- dist/
|
||||
- .git/
|
||||
|
||||
media-api:
|
||||
container_name: usda-vision-media-api
|
||||
|
||||
@@ -68,7 +68,7 @@ I've created a migration file that implements a **unified `experiment_phase_exec
|
||||
## Files Created
|
||||
|
||||
1. **`docs/database_design_analysis.md`** - Detailed analysis with comparison matrix
|
||||
2. **`management-dashboard-web-app/supabase/migrations/00012_unified_phase_executions.sql`** - Complete migration implementation
|
||||
2. **`supabase/migrations/00012_unified_phase_executions.sql`** - Complete migration implementation
|
||||
|
||||
## Migration Path
|
||||
|
||||
|
||||
@@ -50,23 +50,18 @@ If you have scripts or documentation that reference the old path, update them:
|
||||
- ❌ `management-dashboard-web-app/supabase/config.toml`
|
||||
- ✅ `supabase/config.toml`
|
||||
|
||||
## Backward Compatibility
|
||||
## Current State
|
||||
|
||||
The old directory (`management-dashboard-web-app/supabase/`) can be kept for reference, but it's no longer used by docker-compose or the Supabase CLI. You can safely remove it after verifying everything works:
|
||||
|
||||
```bash
|
||||
# After verifying everything works with the new location
|
||||
rm -rf management-dashboard-web-app/supabase
|
||||
```
|
||||
The old directory (`management-dashboard-web-app/supabase/`) has been removed. All Supabase and DB configuration, migrations, and seeds now live only under the project root `supabase/` directory. Docker Compose and the Supabase CLI use root `supabase/` exclusively.
|
||||
|
||||
## Verification
|
||||
|
||||
To verify the migration worked:
|
||||
To verify the migration:
|
||||
|
||||
1. **Check docker-compose paths**:
|
||||
1. **Check docker-compose paths** (only root supabase should be referenced):
|
||||
```bash
|
||||
grep -r "supabase" docker-compose.yml
|
||||
# Should show: ./supabase/ (not ./management-dashboard-web-app/supabase/)
|
||||
grep "supabase" docker-compose.yml
|
||||
# Should show only ./supabase/ (no management-dashboard-web-app/supabase/)
|
||||
```
|
||||
|
||||
2. **Test Supabase CLI**:
|
||||
|
||||
303
docs/database_entities.md
Normal file
303
docs/database_entities.md
Normal file
@@ -0,0 +1,303 @@
|
||||
# Database Entities Documentation
|
||||
|
||||
This document describes the core entities in the USDA Vision database schema, focusing on entity-specific attributes (excluding generic fields like `id`, `created_at`, `updated_at`, `created_by`).
|
||||
|
||||
## Entity Relationships Overview
|
||||
|
||||
```
|
||||
Experiment Phase (Template)
|
||||
↓
|
||||
Experiment
|
||||
↓
|
||||
Experiment Repetition
|
||||
↓
|
||||
Experiment Phase Execution (Soaking, Airdrying, Cracking, Shelling)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 1. Experiment Phase
|
||||
|
||||
**Table:** `experiment_phases`
|
||||
|
||||
**Purpose:** Defines a template/blueprint for experiments that specifies which processing phases are included and their configuration.
|
||||
|
||||
### Attributes
|
||||
|
||||
- **name** (TEXT, UNIQUE, NOT NULL)
|
||||
- Unique name identifying the experiment phase template
|
||||
- Example: "Phase 2 - Standard Processing"
|
||||
|
||||
- **description** (TEXT, nullable)
|
||||
- Optional description providing details about the experiment phase
|
||||
|
||||
- **has_soaking** (BOOLEAN, NOT NULL, DEFAULT false)
|
||||
- Indicates whether soaking phase is included in experiments using this template
|
||||
|
||||
- **has_airdrying** (BOOLEAN, NOT NULL, DEFAULT false)
|
||||
- Indicates whether airdrying phase is included in experiments using this template
|
||||
|
||||
- **has_cracking** (BOOLEAN, NOT NULL, DEFAULT false)
|
||||
- Indicates whether cracking phase is included in experiments using this template
|
||||
|
||||
- **has_shelling** (BOOLEAN, NOT NULL, DEFAULT false)
|
||||
- Indicates whether shelling phase is included in experiments using this template
|
||||
|
||||
- **cracking_machine_type_id** (UUID, nullable)
|
||||
- References the machine type to be used for cracking (required if `has_cracking` is true)
|
||||
- Links to `machine_types` table
|
||||
|
||||
### Constraints
|
||||
|
||||
- At least one phase (soaking, airdrying, cracking, or shelling) must be enabled
|
||||
- If `has_cracking` is true, `cracking_machine_type_id` must be provided
|
||||
|
||||
---
|
||||
|
||||
## 2. Experiment
|
||||
|
||||
**Table:** `experiments`
|
||||
|
||||
**Purpose:** Defines an experiment blueprint that specifies the parameters and requirements for conducting pecan processing experiments.
|
||||
|
||||
### Attributes
|
||||
|
||||
- **experiment_number** (INTEGER, NOT NULL)
|
||||
- Unique number identifying the experiment
|
||||
- Combined with `phase_id` must be unique
|
||||
|
||||
- **reps_required** (INTEGER, NOT NULL, CHECK > 0)
|
||||
- Number of repetitions required for this experiment
|
||||
- Must be greater than zero
|
||||
|
||||
- **weight_per_repetition_lbs** (DOUBLE PRECISION, NOT NULL, DEFAULT 5.0, CHECK > 0)
|
||||
- Weight in pounds required for each repetition of the experiment
|
||||
|
||||
- **results_status** (TEXT, NOT NULL, DEFAULT 'valid', CHECK IN ('valid', 'invalid'))
|
||||
- Status indicating whether the experiment results are considered valid or invalid
|
||||
|
||||
- **completion_status** (BOOLEAN, NOT NULL, DEFAULT false)
|
||||
- Indicates whether the experiment has been completed
|
||||
|
||||
- **phase_id** (UUID, NOT NULL)
|
||||
- References the experiment phase template this experiment belongs to
|
||||
- Links to `experiment_phases` table
|
||||
|
||||
### Constraints
|
||||
|
||||
- Combination of `experiment_number` and `phase_id` must be unique
|
||||
|
||||
---
|
||||
|
||||
## 3. Experiment Repetition
|
||||
|
||||
**Table:** `experiment_repetitions`
|
||||
|
||||
**Purpose:** Represents a single execution instance of an experiment that can be scheduled and tracked.
|
||||
|
||||
### Attributes
|
||||
|
||||
- **experiment_id** (UUID, NOT NULL)
|
||||
- References the parent experiment blueprint
|
||||
- Links to `experiments` table
|
||||
|
||||
- **repetition_number** (INTEGER, NOT NULL, CHECK > 0)
|
||||
- Sequential number identifying this repetition within the experiment
|
||||
- Must be unique per experiment
|
||||
|
||||
- **scheduled_date** (TIMESTAMP WITH TIME ZONE, nullable)
|
||||
- Date and time when the repetition is scheduled to be executed
|
||||
|
||||
- **status** (TEXT, NOT NULL, DEFAULT 'pending', CHECK IN ('pending', 'in_progress', 'completed', 'cancelled'))
|
||||
- Current status of the repetition execution
|
||||
- Values: `pending`, `in_progress`, `completed`, `cancelled`
|
||||
|
||||
### Constraints
|
||||
|
||||
- Combination of `experiment_id` and `repetition_number` must be unique
|
||||
|
||||
---
|
||||
|
||||
## 4. Experiment Phase Executions
|
||||
|
||||
**Table:** `experiment_phase_executions`
|
||||
|
||||
**Purpose:** Unified table storing execution data for all phase types (soaking, airdrying, cracking, shelling) associated with experiment repetitions.
|
||||
|
||||
### Common Attributes (All Phase Types)
|
||||
|
||||
- **repetition_id** (UUID, NOT NULL)
|
||||
- References the experiment repetition this phase execution belongs to
|
||||
- Links to `experiment_repetitions` table
|
||||
|
||||
- **phase_type** (TEXT, NOT NULL, CHECK IN ('soaking', 'airdrying', 'cracking', 'shelling'))
|
||||
- Type of phase being executed
|
||||
- Must be one of: `soaking`, `airdrying`, `cracking`, `shelling`
|
||||
|
||||
- **scheduled_start_time** (TIMESTAMP WITH TIME ZONE, NOT NULL)
|
||||
- Planned start time for the phase execution
|
||||
|
||||
- **scheduled_end_time** (TIMESTAMP WITH TIME ZONE, nullable)
|
||||
- Planned end time for the phase execution
|
||||
- Automatically calculated for soaking and airdrying based on duration
|
||||
|
||||
- **actual_start_time** (TIMESTAMP WITH TIME ZONE, nullable)
|
||||
- Actual time when the phase execution started
|
||||
|
||||
- **actual_end_time** (TIMESTAMP WITH TIME ZONE, nullable)
|
||||
- Actual time when the phase execution ended
|
||||
|
||||
- **status** (TEXT, NOT NULL, DEFAULT 'pending', CHECK IN ('pending', 'scheduled', 'in_progress', 'completed', 'cancelled'))
|
||||
- Current status of the phase execution
|
||||
|
||||
### Phase-Specific Concepts: Independent & Dependent Variables
|
||||
|
||||
> **Note:** This section describes the conceptual variables for each phase (what we measure or control), not necessarily the current physical columns in the database. Some of these variables will be added to the schema in future migrations.
|
||||
|
||||
#### Soaking Phase
|
||||
|
||||
- **Independent variables (IV)**
|
||||
- **Pre-soaking shell moisture percentage**
|
||||
- Moisture percentage of the shell **before soaking**.
|
||||
- **Pre-soaking kernel moisture percentage**
|
||||
- Moisture percentage of the kernel **before soaking**.
|
||||
- **Average pecan diameter (inches)**
|
||||
- Average diameter of pecans in the batch, measured in inches.
|
||||
- **Batch weight**
|
||||
- Total weight of the batch being soaked.
|
||||
- **Soaking duration (minutes)**
|
||||
- Duration of the soaking process in minutes (currently represented as `soaking_duration_minutes`).
|
||||
|
||||
- **Dependent variables (DV)**
|
||||
- **Post-soaking shell moisture percentage**
|
||||
- Moisture percentage of the shell **after soaking**.
|
||||
- **Post-soaking kernel moisture percentage**
|
||||
- Moisture percentage of the kernel **after soaking**.
|
||||
|
||||
#### Airdrying Phase
|
||||
|
||||
- **Independent variables (IV)**
|
||||
- **Airdrying duration (minutes)**
|
||||
- Duration of the airdrying process in minutes (currently represented as `duration_minutes`).
|
||||
|
||||
- **Dependent variables (DV)**
|
||||
- *(TBD — moisture/other measurements after airdrying can be added here when finalized.)*
|
||||
|
||||
#### Cracking Phase
|
||||
|
||||
- **Independent variables (IV)**
|
||||
- **Cracking machine type**
|
||||
- The type of cracking machine used (linked via `machine_type_id` and `experiment_phases.cracking_machine_type_id`).
|
||||
|
||||
- **Dependent variables (DV)**
|
||||
- *None defined yet for cracking.*
|
||||
Business/analysis metrics for cracking can be added later (e.g., crack quality, breakage rates).
|
||||
|
||||
#### Shelling Phase
|
||||
|
||||
- **Independent variables (IV)**
|
||||
- **Shelling machine configuration parameters** (not yet present in the DB schema)
|
||||
- **Ring gap (inches)**
|
||||
- Radial gap setting of the shelling ring (e.g., `0.34` inches).
|
||||
- **Paddle RPM**
|
||||
- Rotational speed of the paddles (integer RPM value).
|
||||
- **Third machine parameter (TBD)**
|
||||
- The shelling machine expects a third configuration parameter that will be defined and added to the schema later.
|
||||
|
||||
- **Dependent variables (DV)**
|
||||
- **Half-yield ratio (percentage)**
|
||||
- Percentage of the shelled product that is composed of half kernels, relative to total yield.
|
||||
|
||||
### Constraints
|
||||
|
||||
- Combination of `repetition_id` and `phase_type` must be unique (one execution per phase type per repetition)
|
||||
|
||||
### Notes
|
||||
|
||||
- Phase executions are automatically created when an experiment repetition is created, based on the experiment phase template configuration
|
||||
- Sequential phases (soaking → airdrying → cracking → shelling) automatically calculate their `scheduled_start_time` based on the previous phase's `scheduled_end_time`
|
||||
- For soaking and airdrying phases, `scheduled_end_time` is automatically calculated from `scheduled_start_time` + duration
|
||||
|
||||
---
|
||||
|
||||
## 5. Machine Types
|
||||
|
||||
**Table:** `machine_types`
|
||||
|
||||
**Purpose:** Defines the types of machines available for cracking operations.
|
||||
|
||||
### Attributes
|
||||
|
||||
- **name** (TEXT, UNIQUE, NOT NULL)
|
||||
- Unique name identifying the machine type
|
||||
- Example: "JC Cracker", "Meyer Cracker"
|
||||
|
||||
- **description** (TEXT, nullable)
|
||||
- Optional description of the machine type
|
||||
|
||||
### Related Tables
|
||||
|
||||
Machine types are referenced by:
|
||||
- `experiment_phases.cracking_machine_type_id` - Defines which machine type to use for cracking in an experiment phase template
|
||||
- `experiment_phase_executions.machine_type_id` - Specifies which machine was used for a specific cracking execution
|
||||
|
||||
---
|
||||
|
||||
## 6. Cracker Parameters (Machine-Specific)
|
||||
|
||||
### JC Cracker Parameters
|
||||
|
||||
**Table:** `jc_cracker_parameters`
|
||||
|
||||
**Purpose:** Stores parameters specific to JC Cracker machines.
|
||||
|
||||
#### Attributes
|
||||
|
||||
- **plate_contact_frequency_hz** (DOUBLE PRECISION, NOT NULL, CHECK > 0)
|
||||
- Frequency of plate contact in Hertz
|
||||
|
||||
- **throughput_rate_pecans_sec** (DOUBLE PRECISION, NOT NULL, CHECK > 0)
|
||||
- Rate of pecan processing in pecans per second
|
||||
|
||||
- **crush_amount_in** (DOUBLE PRECISION, NOT NULL, CHECK >= 0)
|
||||
- Amount of crushing in inches
|
||||
|
||||
- **entry_exit_height_diff_in** (DOUBLE PRECISION, NOT NULL)
|
||||
- Difference in height between entry and exit points in inches
|
||||
|
||||
### Meyer Cracker Parameters
|
||||
|
||||
**Table:** `meyer_cracker_parameters`
|
||||
|
||||
**Purpose:** Stores parameters specific to Meyer Cracker machines.
|
||||
|
||||
#### Attributes
|
||||
|
||||
- **motor_speed_hz** (DOUBLE PRECISION, NOT NULL, CHECK > 0)
|
||||
- Motor speed in Hertz
|
||||
|
||||
- **jig_displacement_inches** (DOUBLE PRECISION, NOT NULL)
|
||||
- Jig displacement in inches
|
||||
|
||||
- **spring_stiffness_nm** (DOUBLE PRECISION, NOT NULL, CHECK > 0)
|
||||
- Spring stiffness in Newton-meters
|
||||
|
||||
---
|
||||
|
||||
## Summary of Entity Relationships
|
||||
|
||||
1. **Experiment Phase** → Defines which phases are included and machine type for cracking
|
||||
2. **Experiment** → Belongs to an Experiment Phase, defines repetition requirements and weight per repetition
|
||||
3. **Experiment Repetition** → Instance of an Experiment, can be scheduled and tracked
|
||||
4. **Experiment Phase Execution** → Execution record for each phase (soaking, airdrying, cracking, shelling) within a repetition
|
||||
5. **Machine Types** → Defines available cracking machines
|
||||
6. **Cracker Parameters** → Machine-specific operational parameters (JC or Meyer)
|
||||
|
||||
### Key Relationships
|
||||
|
||||
- One Experiment Phase can have many Experiments
|
||||
- One Experiment can have many Experiment Repetitions
|
||||
- One Experiment Repetition can have multiple Phase Executions (one per phase type)
|
||||
- Phase Executions are automatically created based on the Experiment Phase template configuration
|
||||
- Cracking Phase Executions reference a Machine Type
|
||||
- Machine Types can have associated Cracker Parameters (JC or Meyer specific)
|
||||
@@ -1,4 +0,0 @@
|
||||
# Local Supabase config for Vite dev server
|
||||
VITE_SUPABASE_URL=http://127.0.0.1:54321
|
||||
VITE_SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
# Local Supabase config for Vite dev server
|
||||
VITE_SUPABASE_URL=http://exp-dash:54321
|
||||
VITE_SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0
|
||||
|
||||
|
||||
# Vision API Configuration
|
||||
VITE_VISION_API_URL=http://exp-dash:8000
|
||||
VITE_ENABLE_VIDEO_MODULE=true
|
||||
VITE_VIDEO_REMOTE_URL=http://exp-dash:3001/assets/remoteEntry.js?v=1761849082
|
||||
VITE_MEDIA_API_URL=http://exp-dash:8090
|
||||
|
||||
# Vision System Module
|
||||
VITE_ENABLE_VISION_SYSTEM_MODULE=true
|
||||
VITE_VISION_SYSTEM_REMOTE_URL=http://exp-dash:3002/assets/remoteEntry.js
|
||||
|
||||
# Enable scheduling module
|
||||
VITE_ENABLE_SCHEDULING_MODULE=true
|
||||
@@ -3,7 +3,7 @@ import type { CreateExperimentRequest, UpdateExperimentRequest, ScheduleStatus,
|
||||
import { experimentPhaseManagement, machineTypeManagement } from '../lib/supabase'
|
||||
|
||||
interface ExperimentFormProps {
|
||||
initialData?: Partial<CreateExperimentRequest & { schedule_status: ScheduleStatus; results_status: ResultsStatus; completion_status: boolean }>
|
||||
initialData?: Partial<CreateExperimentRequest & { schedule_status: ScheduleStatus; results_status: ResultsStatus; completion_status: boolean }> & { phase_id?: string | null }
|
||||
onSubmit: (data: CreateExperimentRequest | UpdateExperimentRequest) => Promise<void>
|
||||
onCancel: () => void
|
||||
isEditing?: boolean
|
||||
@@ -12,31 +12,41 @@ interface ExperimentFormProps {
|
||||
}
|
||||
|
||||
export function ExperimentForm({ initialData, onSubmit, onCancel, isEditing = false, loading = false, phaseId }: ExperimentFormProps) {
|
||||
const [formData, setFormData] = useState<CreateExperimentRequest & { schedule_status: ScheduleStatus; results_status: ResultsStatus; completion_status: boolean }>({
|
||||
experiment_number: initialData?.experiment_number || 0,
|
||||
reps_required: initialData?.reps_required || 1,
|
||||
weight_per_repetition_lbs: (initialData as any)?.weight_per_repetition_lbs || 1,
|
||||
soaking_duration_hr: initialData?.soaking_duration_hr || 0,
|
||||
air_drying_time_min: initialData?.air_drying_time_min || 0,
|
||||
plate_contact_frequency_hz: initialData?.plate_contact_frequency_hz || 1,
|
||||
throughput_rate_pecans_sec: initialData?.throughput_rate_pecans_sec || 1,
|
||||
crush_amount_in: initialData?.crush_amount_in || 0,
|
||||
entry_exit_height_diff_in: initialData?.entry_exit_height_diff_in || 0,
|
||||
// Meyer-specific (UI only)
|
||||
motor_speed_hz: (initialData as any)?.motor_speed_hz || 1,
|
||||
jig_displacement_inches: (initialData as any)?.jig_displacement_inches || 0,
|
||||
spring_stiffness_nm: (initialData as any)?.spring_stiffness_nm || 1,
|
||||
schedule_status: initialData?.schedule_status || 'pending schedule',
|
||||
results_status: initialData?.results_status || 'valid',
|
||||
completion_status: initialData?.completion_status || false,
|
||||
phase_id: initialData?.phase_id || phaseId
|
||||
const getInitialFormState = (d: any) => ({
|
||||
experiment_number: d?.experiment_number ?? 0,
|
||||
reps_required: d?.reps_required ?? 1,
|
||||
weight_per_repetition_lbs: d?.weight_per_repetition_lbs ?? 1,
|
||||
soaking_duration_hr: d?.soaking?.soaking_duration_hr ?? d?.soaking_duration_hr ?? 0,
|
||||
air_drying_time_min: d?.airdrying?.duration_minutes ?? d?.air_drying_time_min ?? 0,
|
||||
plate_contact_frequency_hz: d?.cracking?.plate_contact_frequency_hz ?? d?.plate_contact_frequency_hz ?? 1,
|
||||
throughput_rate_pecans_sec: d?.cracking?.throughput_rate_pecans_sec ?? d?.throughput_rate_pecans_sec ?? 1,
|
||||
crush_amount_in: d?.cracking?.crush_amount_in ?? d?.crush_amount_in ?? 0,
|
||||
entry_exit_height_diff_in: d?.cracking?.entry_exit_height_diff_in ?? d?.entry_exit_height_diff_in ?? 0,
|
||||
motor_speed_hz: d?.cracking?.motor_speed_hz ?? d?.motor_speed_hz ?? 1,
|
||||
jig_displacement_inches: d?.cracking?.jig_displacement_inches ?? d?.jig_displacement_inches ?? 0,
|
||||
spring_stiffness_nm: d?.cracking?.spring_stiffness_nm ?? d?.spring_stiffness_nm ?? 1,
|
||||
schedule_status: d?.schedule_status ?? 'pending schedule',
|
||||
results_status: d?.results_status ?? 'valid',
|
||||
completion_status: d?.completion_status ?? false,
|
||||
phase_id: d?.phase_id ?? phaseId,
|
||||
ring_gap_inches: d?.shelling?.ring_gap_inches ?? d?.ring_gap_inches ?? null,
|
||||
drum_rpm: d?.shelling?.drum_rpm ?? d?.drum_rpm ?? null
|
||||
})
|
||||
|
||||
const [formData, setFormData] = useState<CreateExperimentRequest & { schedule_status: ScheduleStatus; results_status: ResultsStatus; completion_status: boolean }>(() => getInitialFormState(initialData))
|
||||
|
||||
const [errors, setErrors] = useState<Record<string, string>>({})
|
||||
const [phase, setPhase] = useState<ExperimentPhase | null>(null)
|
||||
const [crackingMachine, setCrackingMachine] = useState<MachineType | null>(null)
|
||||
const [metaLoading, setMetaLoading] = useState<boolean>(false)
|
||||
|
||||
// When initialData loads with phase config (edit mode), sync form state
|
||||
useEffect(() => {
|
||||
if ((initialData as any)?.id) {
|
||||
setFormData(prev => ({ ...prev, ...getInitialFormState(initialData) }))
|
||||
}
|
||||
}, [initialData])
|
||||
|
||||
useEffect(() => {
|
||||
const loadMeta = async () => {
|
||||
if (!phaseId) return
|
||||
@@ -76,11 +86,11 @@ export function ExperimentForm({ initialData, onSubmit, onCancel, isEditing = fa
|
||||
}
|
||||
|
||||
|
||||
if (formData.soaking_duration_hr < 0) {
|
||||
if ((formData.soaking_duration_hr ?? 0) < 0) {
|
||||
newErrors.soaking_duration_hr = 'Soaking duration cannot be negative'
|
||||
}
|
||||
|
||||
if (formData.air_drying_time_min < 0) {
|
||||
if ((formData.air_drying_time_min ?? 0) < 0) {
|
||||
newErrors.air_drying_time_min = 'Air drying time cannot be negative'
|
||||
}
|
||||
|
||||
@@ -93,7 +103,7 @@ export function ExperimentForm({ initialData, onSubmit, onCancel, isEditing = fa
|
||||
if (!formData.throughput_rate_pecans_sec || formData.throughput_rate_pecans_sec <= 0) {
|
||||
newErrors.throughput_rate_pecans_sec = 'Throughput rate must be positive'
|
||||
}
|
||||
if (formData.crush_amount_in < 0) {
|
||||
if ((formData.crush_amount_in ?? 0) < 0) {
|
||||
newErrors.crush_amount_in = 'Crush amount cannot be negative'
|
||||
}
|
||||
}
|
||||
@@ -110,6 +120,16 @@ export function ExperimentForm({ initialData, onSubmit, onCancel, isEditing = fa
|
||||
}
|
||||
}
|
||||
|
||||
// Shelling: if provided, must be positive
|
||||
if (phase?.has_shelling) {
|
||||
if (formData.ring_gap_inches != null && (typeof formData.ring_gap_inches !== 'number' || formData.ring_gap_inches <= 0)) {
|
||||
newErrors.ring_gap_inches = 'Ring gap must be positive'
|
||||
}
|
||||
if (formData.drum_rpm != null && (typeof formData.drum_rpm !== 'number' || formData.drum_rpm <= 0)) {
|
||||
newErrors.drum_rpm = 'Drum RPM must be positive'
|
||||
}
|
||||
}
|
||||
|
||||
setErrors(newErrors)
|
||||
return Object.keys(newErrors).length === 0
|
||||
}
|
||||
@@ -122,14 +142,25 @@ export function ExperimentForm({ initialData, onSubmit, onCancel, isEditing = fa
|
||||
}
|
||||
|
||||
try {
|
||||
// Prepare data for submission
|
||||
// Prepare data: include all phase params so they are stored in experiment_soaking, experiment_airdrying, experiment_cracking, experiment_shelling
|
||||
const submitData = isEditing ? formData : {
|
||||
experiment_number: formData.experiment_number,
|
||||
reps_required: formData.reps_required,
|
||||
weight_per_repetition_lbs: formData.weight_per_repetition_lbs,
|
||||
results_status: formData.results_status,
|
||||
completion_status: formData.completion_status,
|
||||
phase_id: formData.phase_id
|
||||
phase_id: formData.phase_id,
|
||||
soaking_duration_hr: formData.soaking_duration_hr,
|
||||
air_drying_time_min: formData.air_drying_time_min,
|
||||
plate_contact_frequency_hz: formData.plate_contact_frequency_hz,
|
||||
throughput_rate_pecans_sec: formData.throughput_rate_pecans_sec,
|
||||
crush_amount_in: formData.crush_amount_in,
|
||||
entry_exit_height_diff_in: formData.entry_exit_height_diff_in,
|
||||
motor_speed_hz: (formData as any).motor_speed_hz,
|
||||
jig_displacement_inches: (formData as any).jig_displacement_inches,
|
||||
spring_stiffness_nm: (formData as any).spring_stiffness_nm,
|
||||
ring_gap_inches: formData.ring_gap_inches ?? undefined,
|
||||
drum_rpm: formData.drum_rpm ?? undefined
|
||||
}
|
||||
|
||||
await onSubmit(submitData)
|
||||
@@ -138,7 +169,7 @@ export function ExperimentForm({ initialData, onSubmit, onCancel, isEditing = fa
|
||||
}
|
||||
}
|
||||
|
||||
const handleInputChange = (field: keyof typeof formData, value: string | number | boolean) => {
|
||||
const handleInputChange = (field: keyof typeof formData, value: string | number | boolean | null | undefined) => {
|
||||
setFormData(prev => ({
|
||||
...prev,
|
||||
[field]: value
|
||||
@@ -441,18 +472,40 @@ export function ExperimentForm({ initialData, onSubmit, onCancel, isEditing = fa
|
||||
<h3 className="text-lg font-medium text-gray-900 mb-4">Shelling</h3>
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 gap-6">
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-gray-700 mb-2">
|
||||
Shelling Start Offset (minutes)
|
||||
<label htmlFor="ring_gap_inches" className="block text-sm font-medium text-gray-700 mb-2">
|
||||
Ring gap (inches)
|
||||
</label>
|
||||
<input
|
||||
type="number"
|
||||
value={(formData as any).shelling_start_offset_min || 0}
|
||||
onChange={(e) => handleInputChange('shelling_start_offset_min' as any, parseInt(e.target.value) || 0)}
|
||||
className="max-w-xs px-3 py-2 border border-gray-300 rounded-lg focus:ring-2 focus:ring-blue-500 focus:border-blue-500 transition-colors text-sm"
|
||||
placeholder="0"
|
||||
id="ring_gap_inches"
|
||||
value={formData.ring_gap_inches ?? ''}
|
||||
onChange={(e) => handleInputChange('ring_gap_inches' as any, e.target.value === '' ? null : parseFloat(e.target.value))}
|
||||
className={`max-w-xs px-3 py-2 border rounded-lg focus:ring-2 focus:ring-blue-500 focus:border-blue-500 transition-colors text-sm ${errors.ring_gap_inches ? 'border-red-300' : 'border-gray-300'}`}
|
||||
placeholder="e.g. 0.25"
|
||||
min="0"
|
||||
step="0.01"
|
||||
/>
|
||||
{errors.ring_gap_inches && (
|
||||
<p className="mt-1 text-sm text-red-600">{errors.ring_gap_inches}</p>
|
||||
)}
|
||||
</div>
|
||||
<div>
|
||||
<label htmlFor="drum_rpm" className="block text-sm font-medium text-gray-700 mb-2">
|
||||
Drum RPM
|
||||
</label>
|
||||
<input
|
||||
type="number"
|
||||
id="drum_rpm"
|
||||
value={formData.drum_rpm ?? ''}
|
||||
onChange={(e) => handleInputChange('drum_rpm' as any, e.target.value === '' ? null : parseInt(e.target.value, 10))}
|
||||
className={`max-w-xs px-3 py-2 border rounded-lg focus:ring-2 focus:ring-blue-500 focus:border-blue-500 transition-colors text-sm ${errors.drum_rpm ? 'border-red-300' : 'border-gray-300'}`}
|
||||
placeholder="e.g. 300"
|
||||
min="1"
|
||||
step="1"
|
||||
/>
|
||||
{errors.drum_rpm && (
|
||||
<p className="mt-1 text-sm text-red-600">{errors.drum_rpm}</p>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { useState } from 'react'
|
||||
import { useState, useEffect } from 'react'
|
||||
import { ExperimentForm } from './ExperimentForm'
|
||||
import { experimentManagement } from '../lib/supabase'
|
||||
import type { Experiment, CreateExperimentRequest, UpdateExperimentRequest } from '../lib/supabase'
|
||||
@@ -13,9 +13,20 @@ interface ExperimentModalProps {
|
||||
export function ExperimentModal({ experiment, onClose, onExperimentSaved, phaseId }: ExperimentModalProps) {
|
||||
const [loading, setLoading] = useState(false)
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const [initialData, setInitialData] = useState<Experiment | (Experiment & { soaking?: any; airdrying?: any; cracking?: any; shelling?: any }) | undefined>(experiment ?? undefined)
|
||||
|
||||
const isEditing = !!experiment
|
||||
|
||||
useEffect(() => {
|
||||
if (experiment) {
|
||||
experimentManagement.getExperimentWithPhaseConfig(experiment.id)
|
||||
.then((data) => setInitialData(data ?? experiment))
|
||||
.catch(() => setInitialData(experiment))
|
||||
} else {
|
||||
setInitialData(undefined)
|
||||
}
|
||||
}, [experiment?.id])
|
||||
|
||||
const handleSubmit = async (data: CreateExperimentRequest | UpdateExperimentRequest) => {
|
||||
setError(null)
|
||||
setLoading(true)
|
||||
@@ -24,22 +35,24 @@ export function ExperimentModal({ experiment, onClose, onExperimentSaved, phaseI
|
||||
let savedExperiment: Experiment
|
||||
|
||||
if (isEditing && experiment) {
|
||||
// Check if experiment number is unique (excluding current experiment)
|
||||
// Check if experiment number is unique within this phase (excluding current experiment)
|
||||
if ('experiment_number' in data && data.experiment_number !== undefined && data.experiment_number !== experiment.experiment_number) {
|
||||
const isUnique = await experimentManagement.isExperimentNumberUnique(data.experiment_number, experiment.id)
|
||||
const phaseIdToCheck = data.phase_id ?? experiment.phase_id ?? phaseId
|
||||
const isUnique = await experimentManagement.isExperimentNumberUnique(data.experiment_number, phaseIdToCheck ?? undefined, experiment.id)
|
||||
if (!isUnique) {
|
||||
setError('Experiment number already exists. Please choose a different number.')
|
||||
setError('Experiment number already exists in this phase. Please choose a different number.')
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
savedExperiment = await experimentManagement.updateExperiment(experiment.id, data)
|
||||
} else {
|
||||
// Check if experiment number is unique for new experiments
|
||||
// Check if experiment number is unique within this phase for new experiments
|
||||
const createData = data as CreateExperimentRequest
|
||||
const isUnique = await experimentManagement.isExperimentNumberUnique(createData.experiment_number)
|
||||
const phaseIdToCheck = createData.phase_id ?? phaseId
|
||||
const isUnique = await experimentManagement.isExperimentNumberUnique(createData.experiment_number, phaseIdToCheck ?? undefined)
|
||||
if (!isUnique) {
|
||||
setError('Experiment number already exists. Please choose a different number.')
|
||||
setError('Experiment number already exists in this phase. Please choose a different number.')
|
||||
return
|
||||
}
|
||||
|
||||
@@ -115,7 +128,7 @@ export function ExperimentModal({ experiment, onClose, onExperimentSaved, phaseI
|
||||
|
||||
{/* Form */}
|
||||
<ExperimentForm
|
||||
initialData={experiment}
|
||||
initialData={initialData ? { ...initialData, phase_id: initialData.phase_id ?? undefined } : undefined}
|
||||
onSubmit={handleSubmit}
|
||||
onCancel={handleCancel}
|
||||
isEditing={isEditing}
|
||||
|
||||
@@ -31,8 +31,8 @@ export function ExperimentPhases({ onPhaseSelect }: ExperimentPhasesProps) {
|
||||
setPhases(phasesData)
|
||||
setCurrentUser(userData)
|
||||
} catch (err: any) {
|
||||
setError(err.message || 'Failed to load experiment phases')
|
||||
console.error('Load experiment phases error:', err)
|
||||
setError(err.message || 'Failed to load experiment books')
|
||||
console.error('Load experiment books error:', err)
|
||||
} finally {
|
||||
setLoading(false)
|
||||
}
|
||||
@@ -61,16 +61,16 @@ export function ExperimentPhases({ onPhaseSelect }: ExperimentPhasesProps) {
|
||||
<div className="mb-8">
|
||||
<div className="flex justify-between items-center">
|
||||
<div>
|
||||
<h1 className="text-3xl font-bold text-gray-900 dark:text-white">Experiment Phases</h1>
|
||||
<p className="mt-2 text-gray-600 dark:text-gray-400">Select an experiment phase to view and manage its experiments</p>
|
||||
<p className="mt-2 text-gray-600 dark:text-gray-400">Experiment phases help organize experiments into logical groups for easier navigation and management.</p>
|
||||
<h1 className="text-3xl font-bold text-gray-900 dark:text-white">Experiment Books</h1>
|
||||
<p className="mt-2 text-gray-600 dark:text-gray-400">Select an experiment book to view and manage its experiments</p>
|
||||
<p className="mt-2 text-gray-600 dark:text-gray-400">Experiment books help organize experiments into logical groups for easier navigation and management.</p>
|
||||
</div>
|
||||
{canManagePhases && (
|
||||
<button
|
||||
onClick={() => setShowCreateModal(true)}
|
||||
className="inline-flex items-center px-4 py-2 border border-transparent text-sm font-medium rounded-md text-white bg-blue-600 hover:bg-blue-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-blue-500"
|
||||
>
|
||||
➕ New Phase
|
||||
➕ New Book
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
@@ -162,9 +162,9 @@ export function ExperimentPhases({ onPhaseSelect }: ExperimentPhasesProps) {
|
||||
<svg className="mx-auto h-12 w-12 text-gray-400 dark:text-gray-500" fill="none" viewBox="0 0 24 24" stroke="currentColor">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19.428 15.428a2 2 0 00-1.022-.547l-2.387-.477a6 6 0 00-3.86.517l-.318.158a6 6 0 01-3.86.517L6.05 15.21a2 2 0 00-1.806.547M8 4h8l-1 1v5.172a2 2 0 00.586 1.414l5 5c1.26 1.26.367 3.414-1.415 3.414H4.828c-1.782 0-2.674-2.154-1.414-3.414l5-5A2 2 0 009 10.172V5L8 4z" />
|
||||
</svg>
|
||||
<h3 className="mt-2 text-sm font-medium text-gray-900 dark:text-white">No experiment phases found</h3>
|
||||
<h3 className="mt-2 text-sm font-medium text-gray-900 dark:text-white">No experiment books found</h3>
|
||||
<p className="mt-1 text-sm text-gray-500 dark:text-gray-400">
|
||||
Get started by creating your first experiment phase.
|
||||
Get started by creating your first experiment book.
|
||||
</p>
|
||||
{canManagePhases && (
|
||||
<div className="mt-6">
|
||||
@@ -172,7 +172,7 @@ export function ExperimentPhases({ onPhaseSelect }: ExperimentPhasesProps) {
|
||||
onClick={() => setShowCreateModal(true)}
|
||||
className="inline-flex items-center px-4 py-2 border border-transparent shadow-sm text-sm font-medium rounded-md text-white bg-blue-600 hover:bg-blue-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-blue-500"
|
||||
>
|
||||
➕ Create First Phase
|
||||
➕ Create First Book
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -193,7 +193,7 @@ export function PhaseExperiments({ phase, onBack }: PhaseExperimentsProps) {
|
||||
<svg className="w-5 h-5 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M15 19l-7-7 7-7" />
|
||||
</svg>
|
||||
Back to Phases
|
||||
Back to Books
|
||||
</button>
|
||||
</div>
|
||||
|
||||
@@ -203,7 +203,7 @@ export function PhaseExperiments({ phase, onBack }: PhaseExperimentsProps) {
|
||||
{phase.description && (
|
||||
<p className="mt-2 text-gray-600">{phase.description}</p>
|
||||
)}
|
||||
<p className="mt-2 text-gray-600">Manage experiments within this phase</p>
|
||||
<p className="mt-2 text-gray-600">Manage experiments within this book</p>
|
||||
</div>
|
||||
{canManageExperiments && (
|
||||
<button
|
||||
@@ -417,9 +417,9 @@ export function PhaseExperiments({ phase, onBack }: PhaseExperimentsProps) {
|
||||
<svg className="mx-auto h-12 w-12 text-gray-400" fill="none" viewBox="0 0 24 24" stroke="currentColor">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M9 5H7a2 2 0 00-2 2v10a2 2 0 002 2h8a2 2 0 002-2V7a2 2 0 00-2-2H5a2 2 0 00-2 2v12a2 2 0 002 2z" />
|
||||
</svg>
|
||||
<h3 className="mt-2 text-sm font-medium text-gray-900">No experiments found in this phase</h3>
|
||||
<h3 className="mt-2 text-sm font-medium text-gray-900">No experiments found in this book</h3>
|
||||
<p className="mt-1 text-sm text-gray-500">
|
||||
Get started by creating your first experiment in this phase.
|
||||
Get started by creating your first experiment in this book.
|
||||
</p>
|
||||
{canManageExperiments && (
|
||||
<div className="mt-6">
|
||||
|
||||
@@ -147,7 +147,7 @@ export function PhaseForm({ onSubmit, onCancel, loading = false }: PhaseFormProp
|
||||
onChange={(e) => handleInputChange('description', e.target.value)}
|
||||
rows={3}
|
||||
className="w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm focus:outline-none focus:ring-2 focus:ring-blue-500 focus:border-blue-500"
|
||||
placeholder="Optional description of this experiment phase"
|
||||
placeholder="Optional description of this experiment book"
|
||||
disabled={loading}
|
||||
/>
|
||||
</div>
|
||||
|
||||
@@ -21,7 +21,7 @@ export function PhaseModal({ onClose, onPhaseCreated }: PhaseModalProps) {
|
||||
onPhaseCreated(newPhase)
|
||||
onClose()
|
||||
} catch (err: any) {
|
||||
setError(err.message || 'Failed to create experiment phase')
|
||||
setError(err.message || 'Failed to create experiment book')
|
||||
console.error('Create phase error:', err)
|
||||
} finally {
|
||||
setLoading(false)
|
||||
@@ -35,7 +35,7 @@ export function PhaseModal({ onClose, onPhaseCreated }: PhaseModalProps) {
|
||||
{/* Header */}
|
||||
<div className="flex items-center justify-between mb-6">
|
||||
<h3 className="text-lg font-medium text-gray-900">
|
||||
Create New Experiment Phase
|
||||
Create New Experiment Book
|
||||
</h3>
|
||||
<button
|
||||
onClick={onClose}
|
||||
|
||||
@@ -60,6 +60,8 @@ export interface Experiment {
|
||||
airdrying_id?: string | null
|
||||
cracking_id?: string | null
|
||||
shelling_id?: string | null
|
||||
ring_gap_inches?: number | null
|
||||
drum_rpm?: number | null
|
||||
created_at: string
|
||||
updated_at: string
|
||||
created_by: string
|
||||
@@ -170,6 +172,51 @@ export interface UpdateExperimentPhaseRequest {
|
||||
has_shelling?: boolean
|
||||
}
|
||||
|
||||
// Experiment-level phase config (one row per experiment per phase; stored in experiment_soaking, experiment_airdrying, experiment_cracking, experiment_shelling)
|
||||
export interface ExperimentSoakingConfig {
|
||||
id: string
|
||||
experiment_id: string
|
||||
soaking_duration_hr: number
|
||||
created_at: string
|
||||
updated_at: string
|
||||
created_by: string
|
||||
}
|
||||
|
||||
export interface ExperimentAirdryingConfig {
|
||||
id: string
|
||||
experiment_id: string
|
||||
duration_minutes: number
|
||||
created_at: string
|
||||
updated_at: string
|
||||
created_by: string
|
||||
}
|
||||
|
||||
export interface ExperimentCrackingConfig {
|
||||
id: string
|
||||
experiment_id: string
|
||||
machine_type_id: string
|
||||
plate_contact_frequency_hz?: number | null
|
||||
throughput_rate_pecans_sec?: number | null
|
||||
crush_amount_in?: number | null
|
||||
entry_exit_height_diff_in?: number | null
|
||||
motor_speed_hz?: number | null
|
||||
jig_displacement_inches?: number | null
|
||||
spring_stiffness_nm?: number | null
|
||||
created_at: string
|
||||
updated_at: string
|
||||
created_by: string
|
||||
}
|
||||
|
||||
export interface ExperimentShellingConfig {
|
||||
id: string
|
||||
experiment_id: string
|
||||
ring_gap_inches?: number | null
|
||||
drum_rpm?: number | null
|
||||
created_at: string
|
||||
updated_at: string
|
||||
created_by: string
|
||||
}
|
||||
|
||||
export interface CreateExperimentRequest {
|
||||
experiment_number: number
|
||||
reps_required: number
|
||||
@@ -177,6 +224,19 @@ export interface CreateExperimentRequest {
|
||||
results_status?: ResultsStatus
|
||||
completion_status?: boolean
|
||||
phase_id?: string
|
||||
// Phase config (stored in experiment_soaking, experiment_airdrying, experiment_cracking, experiment_shelling)
|
||||
soaking_duration_hr?: number
|
||||
air_drying_time_min?: number
|
||||
// Cracking: machine_type comes from book; params below are JC or Meyer specific
|
||||
plate_contact_frequency_hz?: number
|
||||
throughput_rate_pecans_sec?: number
|
||||
crush_amount_in?: number
|
||||
entry_exit_height_diff_in?: number
|
||||
motor_speed_hz?: number
|
||||
jig_displacement_inches?: number
|
||||
spring_stiffness_nm?: number
|
||||
ring_gap_inches?: number | null
|
||||
drum_rpm?: number | null
|
||||
}
|
||||
|
||||
export interface UpdateExperimentRequest {
|
||||
@@ -186,6 +246,17 @@ export interface UpdateExperimentRequest {
|
||||
results_status?: ResultsStatus
|
||||
completion_status?: boolean
|
||||
phase_id?: string
|
||||
soaking_duration_hr?: number
|
||||
air_drying_time_min?: number
|
||||
plate_contact_frequency_hz?: number
|
||||
throughput_rate_pecans_sec?: number
|
||||
crush_amount_in?: number
|
||||
entry_exit_height_diff_in?: number
|
||||
motor_speed_hz?: number
|
||||
jig_displacement_inches?: number
|
||||
spring_stiffness_nm?: number
|
||||
ring_gap_inches?: number | null
|
||||
drum_rpm?: number | null
|
||||
}
|
||||
|
||||
export interface CreateRepetitionRequest {
|
||||
@@ -614,12 +685,12 @@ export const userManagement = {
|
||||
}
|
||||
}
|
||||
|
||||
// Experiment phase management utility functions
|
||||
// Experiment book management (table: experiment_books)
|
||||
export const experimentPhaseManagement = {
|
||||
// Get all experiment phases
|
||||
// Get all experiment books
|
||||
async getAllExperimentPhases(): Promise<ExperimentPhase[]> {
|
||||
const { data, error } = await supabase
|
||||
.from('experiment_phases')
|
||||
.from('experiment_books')
|
||||
.select('*')
|
||||
.order('created_at', { ascending: false })
|
||||
|
||||
@@ -627,10 +698,10 @@ export const experimentPhaseManagement = {
|
||||
return data
|
||||
},
|
||||
|
||||
// Get experiment phase by ID
|
||||
// Get experiment book by ID
|
||||
async getExperimentPhaseById(id: string): Promise<ExperimentPhase | null> {
|
||||
const { data, error } = await supabase
|
||||
.from('experiment_phases')
|
||||
.from('experiment_books')
|
||||
.select('*')
|
||||
.eq('id', id)
|
||||
.single()
|
||||
@@ -642,13 +713,13 @@ export const experimentPhaseManagement = {
|
||||
return data
|
||||
},
|
||||
|
||||
// Create a new experiment phase
|
||||
// Create a new experiment book
|
||||
async createExperimentPhase(phaseData: CreateExperimentPhaseRequest): Promise<ExperimentPhase> {
|
||||
const { data: { user }, error: authError } = await supabase.auth.getUser()
|
||||
if (authError || !user) throw new Error('User not authenticated')
|
||||
|
||||
const { data, error } = await supabase
|
||||
.from('experiment_phases')
|
||||
.from('experiment_books')
|
||||
.insert({
|
||||
...phaseData,
|
||||
created_by: user.id
|
||||
@@ -660,10 +731,10 @@ export const experimentPhaseManagement = {
|
||||
return data
|
||||
},
|
||||
|
||||
// Update an experiment phase
|
||||
// Update an experiment book
|
||||
async updateExperimentPhase(id: string, updates: UpdateExperimentPhaseRequest): Promise<ExperimentPhase> {
|
||||
const { data, error } = await supabase
|
||||
.from('experiment_phases')
|
||||
.from('experiment_books')
|
||||
.update(updates)
|
||||
.eq('id', id)
|
||||
.select()
|
||||
@@ -673,10 +744,10 @@ export const experimentPhaseManagement = {
|
||||
return data
|
||||
},
|
||||
|
||||
// Delete an experiment phase
|
||||
// Delete an experiment book
|
||||
async deleteExperimentPhase(id: string): Promise<void> {
|
||||
const { error } = await supabase
|
||||
.from('experiment_phases')
|
||||
.from('experiment_books')
|
||||
.delete()
|
||||
.eq('id', id)
|
||||
|
||||
@@ -724,33 +795,170 @@ export const experimentManagement = {
|
||||
return data
|
||||
},
|
||||
|
||||
// Create a new experiment
|
||||
// Get experiment with its phase config (soaking, airdrying, cracking, shelling) for edit form
|
||||
async getExperimentWithPhaseConfig(id: string): Promise<(Experiment & {
|
||||
soaking?: ExperimentSoakingConfig | null
|
||||
airdrying?: ExperimentAirdryingConfig | null
|
||||
cracking?: ExperimentCrackingConfig | null
|
||||
shelling?: ExperimentShellingConfig | null
|
||||
}) | null> {
|
||||
const experiment = await this.getExperimentById(id)
|
||||
if (!experiment) return null
|
||||
|
||||
const [soakingRes, airdryingRes, crackingRes, shellingRes] = await Promise.all([
|
||||
supabase.from('experiment_soaking').select('*').eq('experiment_id', id).maybeSingle(),
|
||||
supabase.from('experiment_airdrying').select('*').eq('experiment_id', id).maybeSingle(),
|
||||
supabase.from('experiment_cracking').select('*').eq('experiment_id', id).maybeSingle(),
|
||||
supabase.from('experiment_shelling').select('*').eq('experiment_id', id).maybeSingle()
|
||||
])
|
||||
if (soakingRes.error) throw soakingRes.error
|
||||
if (airdryingRes.error) throw airdryingRes.error
|
||||
if (crackingRes.error) throw crackingRes.error
|
||||
if (shellingRes.error) throw shellingRes.error
|
||||
|
||||
return {
|
||||
...experiment,
|
||||
soaking: soakingRes.data ?? null,
|
||||
airdrying: airdryingRes.data ?? null,
|
||||
cracking: crackingRes.data ?? null,
|
||||
shelling: shellingRes.data ?? null
|
||||
}
|
||||
},
|
||||
|
||||
// Create a new experiment and its phase config rows (experiment_soaking, experiment_airdrying, experiment_cracking, experiment_shelling)
|
||||
async createExperiment(experimentData: CreateExperimentRequest): Promise<Experiment> {
|
||||
const { data: { user }, error: authError } = await supabase.auth.getUser()
|
||||
if (authError || !user) throw new Error('User not authenticated')
|
||||
|
||||
const { data, error } = await supabase
|
||||
const phaseId = experimentData.phase_id
|
||||
const corePayload = {
|
||||
experiment_number: experimentData.experiment_number,
|
||||
reps_required: experimentData.reps_required,
|
||||
weight_per_repetition_lbs: experimentData.weight_per_repetition_lbs,
|
||||
results_status: experimentData.results_status ?? 'valid',
|
||||
completion_status: experimentData.completion_status ?? false,
|
||||
phase_id: phaseId,
|
||||
created_by: user.id
|
||||
}
|
||||
// phase_id required for phase configs
|
||||
if (!phaseId) {
|
||||
const { data, error } = await supabase.from('experiments').insert(corePayload).select().single()
|
||||
if (error) throw error
|
||||
return data
|
||||
}
|
||||
|
||||
const { data: experiment, error } = await supabase
|
||||
.from('experiments')
|
||||
.insert({
|
||||
...experimentData,
|
||||
created_by: user.id
|
||||
})
|
||||
.insert(corePayload)
|
||||
.select()
|
||||
.single()
|
||||
|
||||
if (error) throw error
|
||||
return data
|
||||
|
||||
const book = await experimentPhaseManagement.getExperimentPhaseById(phaseId)
|
||||
if (!book) return experiment
|
||||
|
||||
if (book.has_soaking && experimentData.soaking_duration_hr != null) {
|
||||
await supabase.from('experiment_soaking').insert({
|
||||
experiment_id: experiment.id,
|
||||
soaking_duration_hr: experimentData.soaking_duration_hr,
|
||||
created_by: user.id
|
||||
})
|
||||
}
|
||||
if (book.has_airdrying && experimentData.air_drying_time_min != null) {
|
||||
await supabase.from('experiment_airdrying').insert({
|
||||
experiment_id: experiment.id,
|
||||
duration_minutes: experimentData.air_drying_time_min,
|
||||
created_by: user.id
|
||||
})
|
||||
}
|
||||
if (book.has_cracking && book.cracking_machine_type_id) {
|
||||
const crackPayload: Record<string, unknown> = {
|
||||
experiment_id: experiment.id,
|
||||
machine_type_id: book.cracking_machine_type_id,
|
||||
created_by: user.id
|
||||
}
|
||||
if (experimentData.plate_contact_frequency_hz != null) crackPayload.plate_contact_frequency_hz = experimentData.plate_contact_frequency_hz
|
||||
if (experimentData.throughput_rate_pecans_sec != null) crackPayload.throughput_rate_pecans_sec = experimentData.throughput_rate_pecans_sec
|
||||
if (experimentData.crush_amount_in != null) crackPayload.crush_amount_in = experimentData.crush_amount_in
|
||||
if (experimentData.entry_exit_height_diff_in != null) crackPayload.entry_exit_height_diff_in = experimentData.entry_exit_height_diff_in
|
||||
if (experimentData.motor_speed_hz != null) crackPayload.motor_speed_hz = experimentData.motor_speed_hz
|
||||
if (experimentData.jig_displacement_inches != null) crackPayload.jig_displacement_inches = experimentData.jig_displacement_inches
|
||||
if (experimentData.spring_stiffness_nm != null) crackPayload.spring_stiffness_nm = experimentData.spring_stiffness_nm
|
||||
await supabase.from('experiment_cracking').insert(crackPayload)
|
||||
}
|
||||
if (book.has_shelling && (experimentData.ring_gap_inches != null || experimentData.drum_rpm != null)) {
|
||||
await supabase.from('experiment_shelling').insert({
|
||||
experiment_id: experiment.id,
|
||||
ring_gap_inches: experimentData.ring_gap_inches ?? null,
|
||||
drum_rpm: experimentData.drum_rpm ?? null,
|
||||
created_by: user.id
|
||||
})
|
||||
}
|
||||
|
||||
return experiment
|
||||
},
|
||||
|
||||
// Update an experiment
|
||||
// Update an experiment and upsert its phase config rows
|
||||
async updateExperiment(id: string, updates: UpdateExperimentRequest): Promise<Experiment> {
|
||||
const { data, error } = await supabase
|
||||
.from('experiments')
|
||||
.update(updates)
|
||||
.eq('id', id)
|
||||
.select()
|
||||
.single()
|
||||
const { data: { user }, error: authError } = await supabase.auth.getUser()
|
||||
if (authError || !user) throw new Error('User not authenticated')
|
||||
|
||||
const coreKeys = ['experiment_number', 'reps_required', 'weight_per_repetition_lbs', 'results_status', 'completion_status', 'phase_id'] as const
|
||||
const coreUpdates: Partial<UpdateExperimentRequest> = {}
|
||||
for (const k of coreKeys) {
|
||||
if (updates[k] !== undefined) coreUpdates[k] = updates[k]
|
||||
}
|
||||
if (Object.keys(coreUpdates).length > 0) {
|
||||
const { data, error } = await supabase.from('experiments').update(coreUpdates).eq('id', id).select().single()
|
||||
if (error) throw error
|
||||
}
|
||||
|
||||
if (updates.soaking_duration_hr !== undefined) {
|
||||
const { data: existing } = await supabase.from('experiment_soaking').select('id').eq('experiment_id', id).maybeSingle()
|
||||
if (existing) {
|
||||
await supabase.from('experiment_soaking').update({ soaking_duration_hr: updates.soaking_duration_hr, updated_at: new Date().toISOString() }).eq('experiment_id', id)
|
||||
} else {
|
||||
await supabase.from('experiment_soaking').insert({ experiment_id: id, soaking_duration_hr: updates.soaking_duration_hr, created_by: user.id })
|
||||
}
|
||||
}
|
||||
if (updates.air_drying_time_min !== undefined) {
|
||||
const { data: existing } = await supabase.from('experiment_airdrying').select('id').eq('experiment_id', id).maybeSingle()
|
||||
if (existing) {
|
||||
await supabase.from('experiment_airdrying').update({ duration_minutes: updates.air_drying_time_min, updated_at: new Date().toISOString() }).eq('experiment_id', id)
|
||||
} else {
|
||||
await supabase.from('experiment_airdrying').insert({ experiment_id: id, duration_minutes: updates.air_drying_time_min, created_by: user.id })
|
||||
}
|
||||
}
|
||||
const crackKeys = ['plate_contact_frequency_hz', 'throughput_rate_pecans_sec', 'crush_amount_in', 'entry_exit_height_diff_in', 'motor_speed_hz', 'jig_displacement_inches', 'spring_stiffness_nm'] as const
|
||||
const hasCrackUpdates = crackKeys.some(k => updates[k] !== undefined)
|
||||
if (hasCrackUpdates) {
|
||||
const { data: existing } = await supabase.from('experiment_cracking').select('id').eq('experiment_id', id).maybeSingle()
|
||||
const crackPayload: Record<string, unknown> = {}
|
||||
crackKeys.forEach(k => { if (updates[k] !== undefined) crackPayload[k] = updates[k] })
|
||||
if (Object.keys(crackPayload).length > 0) {
|
||||
if (existing) {
|
||||
await supabase.from('experiment_cracking').update({ ...crackPayload, updated_at: new Date().toISOString() }).eq('experiment_id', id)
|
||||
} else {
|
||||
const exp = await this.getExperimentById(id)
|
||||
const book = exp?.phase_id ? await experimentPhaseManagement.getExperimentPhaseById(exp.phase_id) : null
|
||||
if (book?.has_cracking && book.cracking_machine_type_id) {
|
||||
await supabase.from('experiment_cracking').insert({ experiment_id: id, machine_type_id: book.cracking_machine_type_id, ...crackPayload, created_by: user.id })
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (updates.ring_gap_inches !== undefined || updates.drum_rpm !== undefined) {
|
||||
const { data: existing } = await supabase.from('experiment_shelling').select('id').eq('experiment_id', id).maybeSingle()
|
||||
const shellPayload = { ring_gap_inches: updates.ring_gap_inches ?? null, drum_rpm: updates.drum_rpm ?? null }
|
||||
if (existing) {
|
||||
await supabase.from('experiment_shelling').update({ ...shellPayload, updated_at: new Date().toISOString() }).eq('experiment_id', id)
|
||||
} else {
|
||||
await supabase.from('experiment_shelling').insert({ experiment_id: id, ...shellPayload, created_by: user.id })
|
||||
}
|
||||
}
|
||||
|
||||
const { data, error } = await supabase.from('experiments').select('*').eq('id', id).single()
|
||||
if (error) throw error
|
||||
return data
|
||||
},
|
||||
@@ -793,13 +1001,16 @@ export const experimentManagement = {
|
||||
|
||||
|
||||
|
||||
// Check if experiment number is unique
|
||||
async isExperimentNumberUnique(experimentNumber: number, excludeId?: string): Promise<boolean> {
|
||||
// Check if experiment number is unique within the same phase (experiment_number + phase_id must be unique)
|
||||
async isExperimentNumberUnique(experimentNumber: number, phaseId?: string, excludeId?: string): Promise<boolean> {
|
||||
let query = supabase
|
||||
.from('experiments')
|
||||
.select('id')
|
||||
.eq('experiment_number', experimentNumber)
|
||||
|
||||
if (phaseId) {
|
||||
query = query.eq('phase_id', phaseId)
|
||||
}
|
||||
if (excludeId) {
|
||||
query = query.neq('id', excludeId)
|
||||
}
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
v2.65.5
|
||||
@@ -1,339 +0,0 @@
|
||||
# For detailed configuration reference documentation, visit:
|
||||
# https://supabase.com/docs/guides/local-development/cli/config
|
||||
# A string used to distinguish different Supabase projects on the same host. Defaults to the
|
||||
# working directory name when running `supabase init`.
|
||||
project_id = "pecan_experiments"
|
||||
|
||||
[api]
|
||||
enabled = true
|
||||
# Port to use for the API URL.
|
||||
port = 54321
|
||||
# Schemas to expose in your API. Tables, views and stored procedures in this schema will get API
|
||||
# endpoints. `public` and `graphql_public` schemas are included by default.
|
||||
schemas = ["public", "graphql_public"]
|
||||
# Extra schemas to add to the search_path of every request.
|
||||
extra_search_path = ["public", "extensions"]
|
||||
# The maximum number of rows returns from a view, table, or stored procedure. Limits payload size
|
||||
# for accidental or malicious requests.
|
||||
max_rows = 1000
|
||||
|
||||
[api.tls]
|
||||
# Enable HTTPS endpoints locally using a self-signed certificate.
|
||||
enabled = false
|
||||
|
||||
[db]
|
||||
# Port to use for the local database URL.
|
||||
port = 54322
|
||||
# Port used by db diff command to initialize the shadow database.
|
||||
shadow_port = 54320
|
||||
# The database major version to use. This has to be the same as your remote database's. Run `SHOW
|
||||
# server_version;` on the remote database to check.
|
||||
major_version = 17
|
||||
|
||||
[db.pooler]
|
||||
enabled = false
|
||||
# Port to use for the local connection pooler.
|
||||
port = 54329
|
||||
# Specifies when a server connection can be reused by other clients.
|
||||
# Configure one of the supported pooler modes: `transaction`, `session`.
|
||||
pool_mode = "transaction"
|
||||
# How many server connections to allow per user/database pair.
|
||||
default_pool_size = 20
|
||||
# Maximum number of client connections allowed.
|
||||
max_client_conn = 100
|
||||
|
||||
# [db.vault]
|
||||
# secret_key = "env(SECRET_VALUE)"
|
||||
|
||||
[db.migrations]
|
||||
# If disabled, migrations will be skipped during a db push or reset.
|
||||
enabled = true
|
||||
# Specifies an ordered list of schema files that describe your database.
|
||||
# Supports glob patterns relative to supabase directory: "./schemas/*.sql"
|
||||
schema_paths = []
|
||||
|
||||
[db.seed]
|
||||
# If enabled, seeds the database after migrations during a db reset.
|
||||
enabled = true
|
||||
# Specifies an ordered list of seed files to load during db reset.
|
||||
# Supports glob patterns relative to supabase directory: "./seeds/*.sql"
|
||||
sql_paths = ["./seed_01_users.sql", "./seed_02_phase2_experiments.sql"]
|
||||
# , "./seed_04_phase2_jc_experiments.sql", "./seed_05_meyer_experiments.sql"]
|
||||
|
||||
[db.network_restrictions]
|
||||
# Enable management of network restrictions.
|
||||
enabled = false
|
||||
# List of IPv4 CIDR blocks allowed to connect to the database.
|
||||
# Defaults to allow all IPv4 connections. Set empty array to block all IPs.
|
||||
allowed_cidrs = ["0.0.0.0/0"]
|
||||
# List of IPv6 CIDR blocks allowed to connect to the database.
|
||||
# Defaults to allow all IPv6 connections. Set empty array to block all IPs.
|
||||
allowed_cidrs_v6 = ["::/0"]
|
||||
|
||||
[realtime]
|
||||
enabled = true
|
||||
# Bind realtime via either IPv4 or IPv6. (default: IPv4)
|
||||
# ip_version = "IPv6"
|
||||
# The maximum length in bytes of HTTP request headers. (default: 4096)
|
||||
# max_header_length = 4096
|
||||
|
||||
[studio]
|
||||
enabled = true
|
||||
# Port to use for Supabase Studio.
|
||||
port = 54323
|
||||
# External URL of the API server that frontend connects to.
|
||||
# Uses SUPABASE_API_URL environment variable (set by docker-compose.sh)
|
||||
# Format: http://<host-ip>:54321
|
||||
api_url = "env(SUPABASE_API_URL)"
|
||||
# OpenAI API Key to use for Supabase AI in the Supabase Studio.
|
||||
openai_api_key = "env(OPENAI_API_KEY)"
|
||||
|
||||
# Email testing server. Emails sent with the local dev setup are not actually sent - rather, they
|
||||
# are monitored, and you can view the emails that would have been sent from the web interface.
|
||||
[inbucket]
|
||||
enabled = true
|
||||
# Port to use for the email testing server web interface.
|
||||
port = 54324
|
||||
# Uncomment to expose additional ports for testing user applications that send emails.
|
||||
# smtp_port = 54325
|
||||
# pop3_port = 54326
|
||||
# admin_email = "admin@email.com"
|
||||
# sender_name = "Admin"
|
||||
|
||||
[storage]
|
||||
enabled = true
|
||||
# The maximum file size allowed (e.g. "5MB", "500KB").
|
||||
file_size_limit = "50MiB"
|
||||
|
||||
# Image transformation API is available to Supabase Pro plan.
|
||||
# [storage.image_transformation]
|
||||
# enabled = true
|
||||
|
||||
# Uncomment to configure local storage buckets
|
||||
# [storage.buckets.images]
|
||||
# public = false
|
||||
# file_size_limit = "50MiB"
|
||||
# allowed_mime_types = ["image/png", "image/jpeg"]
|
||||
# objects_path = "./images"
|
||||
|
||||
[auth]
|
||||
enabled = true
|
||||
# The base URL of your website. Used as an allow-list for redirects and for constructing URLs used
|
||||
# in emails.
|
||||
# Uses HOST_SITE_URL environment variable, which should be set to the full URL (e.g., http://<host-ip>:3000)
|
||||
# Set this via: export HOST_SITE_URL="http://$(./scripts/get-host-ip.sh):3000"
|
||||
# Or manually: export HOST_SITE_URL="http://192.168.1.100:3000"
|
||||
site_url = "env(HOST_SITE_URL)"
|
||||
# A list of *exact* URLs that auth providers are permitted to redirect to post authentication.
|
||||
# Uses HOST_SITE_URL environment variable (same as site_url)
|
||||
additional_redirect_urls = ["env(HOST_SITE_URL)"]
|
||||
# How long tokens are valid for, in seconds. Defaults to 3600 (1 hour), maximum 604,800 (1 week).
|
||||
jwt_expiry = 3600
|
||||
# If disabled, the refresh token will never expire.
|
||||
enable_refresh_token_rotation = true
|
||||
# Allows refresh tokens to be reused after expiry, up to the specified interval in seconds.
|
||||
# Requires enable_refresh_token_rotation = true.
|
||||
refresh_token_reuse_interval = 10
|
||||
# Allow/disallow new user signups to your project.
|
||||
enable_signup = true
|
||||
# Allow/disallow anonymous sign-ins to your project.
|
||||
enable_anonymous_sign_ins = false
|
||||
# Allow/disallow testing manual linking of accounts
|
||||
enable_manual_linking = false
|
||||
# Passwords shorter than this value will be rejected as weak. Minimum 6, recommended 8 or more.
|
||||
minimum_password_length = 6
|
||||
# Passwords that do not meet the following requirements will be rejected as weak. Supported values
|
||||
# are: `letters_digits`, `lower_upper_letters_digits`, `lower_upper_letters_digits_symbols`
|
||||
password_requirements = ""
|
||||
|
||||
[auth.rate_limit]
|
||||
# Number of emails that can be sent per hour. Requires auth.email.smtp to be enabled.
|
||||
email_sent = 2
|
||||
# Number of SMS messages that can be sent per hour. Requires auth.sms to be enabled.
|
||||
sms_sent = 30
|
||||
# Number of anonymous sign-ins that can be made per hour per IP address. Requires enable_anonymous_sign_ins = true.
|
||||
anonymous_users = 30
|
||||
# Number of sessions that can be refreshed in a 5 minute interval per IP address.
|
||||
token_refresh = 150
|
||||
# Number of sign up and sign-in requests that can be made in a 5 minute interval per IP address (excludes anonymous users).
|
||||
sign_in_sign_ups = 30
|
||||
# Number of OTP / Magic link verifications that can be made in a 5 minute interval per IP address.
|
||||
token_verifications = 30
|
||||
# Number of Web3 logins that can be made in a 5 minute interval per IP address.
|
||||
web3 = 30
|
||||
|
||||
# Configure one of the supported captcha providers: `hcaptcha`, `turnstile`.
|
||||
# [auth.captcha]
|
||||
# enabled = true
|
||||
# provider = "hcaptcha"
|
||||
# secret = ""
|
||||
|
||||
[auth.email]
|
||||
# Allow/disallow new user signups via email to your project.
|
||||
enable_signup = true
|
||||
# If enabled, a user will be required to confirm any email change on both the old, and new email
|
||||
# addresses. If disabled, only the new email is required to confirm.
|
||||
double_confirm_changes = true
|
||||
# If enabled, users need to confirm their email address before signing in.
|
||||
enable_confirmations = false
|
||||
# If enabled, users will need to reauthenticate or have logged in recently to change their password.
|
||||
secure_password_change = false
|
||||
# Controls the minimum amount of time that must pass before sending another signup confirmation or password reset email.
|
||||
max_frequency = "1s"
|
||||
# Number of characters used in the email OTP.
|
||||
otp_length = 6
|
||||
# Number of seconds before the email OTP expires (defaults to 1 hour).
|
||||
otp_expiry = 3600
|
||||
|
||||
# Use a production-ready SMTP server
|
||||
# [auth.email.smtp]
|
||||
# enabled = true
|
||||
# host = "smtp.sendgrid.net"
|
||||
# port = 587
|
||||
# user = "apikey"
|
||||
# pass = "env(SENDGRID_API_KEY)"
|
||||
# admin_email = "admin@email.com"
|
||||
# sender_name = "Admin"
|
||||
|
||||
# Uncomment to customize email template
|
||||
# [auth.email.template.invite]
|
||||
# subject = "You have been invited"
|
||||
# content_path = "./supabase/templates/invite.html"
|
||||
|
||||
[auth.sms]
|
||||
# Allow/disallow new user signups via SMS to your project.
|
||||
enable_signup = false
|
||||
# If enabled, users need to confirm their phone number before signing in.
|
||||
enable_confirmations = false
|
||||
# Template for sending OTP to users
|
||||
template = "Your code is {{ .Code }}"
|
||||
# Controls the minimum amount of time that must pass before sending another sms otp.
|
||||
max_frequency = "5s"
|
||||
|
||||
# Use pre-defined map of phone number to OTP for testing.
|
||||
# [auth.sms.test_otp]
|
||||
# 4152127777 = "123456"
|
||||
|
||||
# Configure logged in session timeouts.
|
||||
# [auth.sessions]
|
||||
# Force log out after the specified duration.
|
||||
# timebox = "24h"
|
||||
# Force log out if the user has been inactive longer than the specified duration.
|
||||
# inactivity_timeout = "8h"
|
||||
|
||||
# This hook runs before a new user is created and allows developers to reject the request based on the incoming user object.
|
||||
# [auth.hook.before_user_created]
|
||||
# enabled = true
|
||||
# uri = "pg-functions://postgres/auth/before-user-created-hook"
|
||||
|
||||
# This hook runs before a token is issued and allows you to add additional claims based on the authentication method used.
|
||||
# [auth.hook.custom_access_token]
|
||||
# enabled = true
|
||||
# uri = "pg-functions://<database>/<schema>/<hook_name>"
|
||||
|
||||
# Configure one of the supported SMS providers: `twilio`, `twilio_verify`, `messagebird`, `textlocal`, `vonage`.
|
||||
[auth.sms.twilio]
|
||||
enabled = false
|
||||
account_sid = ""
|
||||
message_service_sid = ""
|
||||
# DO NOT commit your Twilio auth token to git. Use environment variable substitution instead:
|
||||
auth_token = "env(SUPABASE_AUTH_SMS_TWILIO_AUTH_TOKEN)"
|
||||
|
||||
# Multi-factor-authentication is available to Supabase Pro plan.
|
||||
[auth.mfa]
|
||||
# Control how many MFA factors can be enrolled at once per user.
|
||||
max_enrolled_factors = 10
|
||||
|
||||
# Control MFA via App Authenticator (TOTP)
|
||||
[auth.mfa.totp]
|
||||
enroll_enabled = false
|
||||
verify_enabled = false
|
||||
|
||||
# Configure MFA via Phone Messaging
|
||||
[auth.mfa.phone]
|
||||
enroll_enabled = false
|
||||
verify_enabled = false
|
||||
otp_length = 6
|
||||
template = "Your code is {{ .Code }}"
|
||||
max_frequency = "5s"
|
||||
|
||||
# Configure MFA via WebAuthn
|
||||
# [auth.mfa.web_authn]
|
||||
# enroll_enabled = true
|
||||
# verify_enabled = true
|
||||
|
||||
# Use an external OAuth provider. The full list of providers are: `apple`, `azure`, `bitbucket`,
|
||||
# `discord`, `facebook`, `github`, `gitlab`, `google`, `keycloak`, `linkedin_oidc`, `notion`, `twitch`,
|
||||
# `twitter`, `slack`, `spotify`, `workos`, `zoom`.
|
||||
[auth.external.apple]
|
||||
enabled = false
|
||||
client_id = ""
|
||||
# DO NOT commit your OAuth provider secret to git. Use environment variable substitution instead:
|
||||
secret = "env(SUPABASE_AUTH_EXTERNAL_APPLE_SECRET)"
|
||||
# Overrides the default auth redirectUrl.
|
||||
redirect_uri = ""
|
||||
# Overrides the default auth provider URL. Used to support self-hosted gitlab, single-tenant Azure,
|
||||
# or any other third-party OIDC providers.
|
||||
url = ""
|
||||
# If enabled, the nonce check will be skipped. Required for local sign in with Google auth.
|
||||
skip_nonce_check = false
|
||||
|
||||
# Allow Solana wallet holders to sign in to your project via the Sign in with Solana (SIWS, EIP-4361) standard.
|
||||
# You can configure "web3" rate limit in the [auth.rate_limit] section and set up [auth.captcha] if self-hosting.
|
||||
[auth.web3.solana]
|
||||
enabled = false
|
||||
|
||||
# Use Firebase Auth as a third-party provider alongside Supabase Auth.
|
||||
[auth.third_party.firebase]
|
||||
enabled = false
|
||||
# project_id = "my-firebase-project"
|
||||
|
||||
# Use Auth0 as a third-party provider alongside Supabase Auth.
|
||||
[auth.third_party.auth0]
|
||||
enabled = false
|
||||
# tenant = "my-auth0-tenant"
|
||||
# tenant_region = "us"
|
||||
|
||||
# Use AWS Cognito (Amplify) as a third-party provider alongside Supabase Auth.
|
||||
[auth.third_party.aws_cognito]
|
||||
enabled = false
|
||||
# user_pool_id = "my-user-pool-id"
|
||||
# user_pool_region = "us-east-1"
|
||||
|
||||
# Use Clerk as a third-party provider alongside Supabase Auth.
|
||||
[auth.third_party.clerk]
|
||||
enabled = false
|
||||
# Obtain from https://clerk.com/setup/supabase
|
||||
# domain = "example.clerk.accounts.dev"
|
||||
|
||||
[edge_runtime]
|
||||
enabled = true
|
||||
# Configure one of the supported request policies: `oneshot`, `per_worker`.
|
||||
# Use `oneshot` for hot reload, or `per_worker` for load testing.
|
||||
policy = "oneshot"
|
||||
# Port to attach the Chrome inspector for debugging edge functions.
|
||||
inspector_port = 8083
|
||||
# The Deno major version to use.
|
||||
deno_version = 1
|
||||
|
||||
# [edge_runtime.secrets]
|
||||
# secret_key = "env(SECRET_VALUE)"
|
||||
|
||||
[analytics]
|
||||
enabled = true
|
||||
port = 54327
|
||||
# Configure one of the supported backends: `postgres`, `bigquery`.
|
||||
backend = "postgres"
|
||||
|
||||
# Experimental features may be deprecated any time
|
||||
[experimental]
|
||||
# Configures Postgres storage engine to use OrioleDB (S3)
|
||||
orioledb_version = ""
|
||||
# Configures S3 bucket URL, eg. <bucket_name>.s3-<region>.amazonaws.com
|
||||
s3_host = "env(S3_HOST)"
|
||||
# Configures S3 bucket region, eg. us-east-1
|
||||
s3_region = "env(S3_REGION)"
|
||||
# Configures AWS_ACCESS_KEY_ID for S3 bucket
|
||||
s3_access_key = "env(S3_ACCESS_KEY)"
|
||||
# Configures AWS_SECRET_ACCESS_KEY for S3 bucket
|
||||
s3_secret_key = "env(S3_SECRET_KEY)"
|
||||
@@ -1,87 +0,0 @@
|
||||
-- Extensions and Utility Functions
|
||||
-- This migration creates required extensions and utility functions used across the database
|
||||
|
||||
-- =============================================
|
||||
-- 1. EXTENSIONS
|
||||
-- =============================================
|
||||
|
||||
-- Enable UUID generation
|
||||
-- Required extensions.
-- uuid-ossp: uuid_generate_v4() used as the default for primary keys.
-- pgcrypto:  crypt()/gen_salt() used for password hashing.
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
CREATE EXTENSION IF NOT EXISTS "pgcrypto";
|
||||
|
||||
-- =============================================
|
||||
-- 2. UTILITY FUNCTIONS
|
||||
-- =============================================
|
||||
|
||||
-- Function to handle updated_at timestamp
|
||||
-- Trigger function: stamps updated_at with the current time on every UPDATE.
-- Attach as a BEFORE UPDATE ... FOR EACH ROW trigger on any table that has
-- an updated_at column.
CREATE OR REPLACE FUNCTION public.handle_updated_at()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Helper function to get current user's roles
|
||||
-- Returns all role names assigned to the currently authenticated user
-- (auth.uid()), as a TEXT[]; empty array if the user has no roles.
--
-- SECURITY DEFINER so it can read user_roles regardless of RLS.
-- FIX: pin search_path — a SECURITY DEFINER function without a fixed
-- search_path can be hijacked by objects created in a caller-controlled
-- schema. All references here are schema-qualified, so '' is safe.
-- Also declared STABLE: the result does not change within a statement,
-- which lets the planner cache it when used inside RLS policies.
CREATE OR REPLACE FUNCTION public.get_user_roles()
RETURNS TEXT[] AS $$
BEGIN
    RETURN ARRAY(
        SELECT r.name
        FROM public.user_roles ur
        JOIN public.roles r ON ur.role_id = r.id
        WHERE ur.user_id = auth.uid()
    );
END;
$$ LANGUAGE plpgsql STABLE SECURITY DEFINER SET search_path = '';
|
||||
|
||||
-- Helper function to get current user's first role (for backward compatibility)
|
||||
-- Returns a single role name for the current user (backward-compatible
-- shim for callers that predate multi-role support); NULL if no roles.
-- NOTE(review): with no ORDER BY, which role is "first" is unspecified —
-- confirm callers do not depend on a particular role winning.
--
-- FIX: pin search_path on this SECURITY DEFINER function (see
-- get_user_roles) and declare it STABLE for use in RLS policies.
CREATE OR REPLACE FUNCTION public.get_user_role()
RETURNS TEXT AS $$
BEGIN
    -- Return the first role found (for backward compatibility)
    RETURN (
        SELECT r.name
        FROM public.user_roles ur
        JOIN public.roles r ON ur.role_id = r.id
        WHERE ur.user_id = auth.uid()
        LIMIT 1
    );
END;
$$ LANGUAGE plpgsql STABLE SECURITY DEFINER SET search_path = '';
|
||||
|
||||
-- Helper function to check if user is admin
|
||||
-- TRUE when the current user holds the 'admin' role.
-- FIX: pin search_path on this SECURITY DEFINER function and declare it
-- STABLE (it only delegates to public.get_user_roles()).
CREATE OR REPLACE FUNCTION public.is_admin()
RETURNS BOOLEAN AS $$
BEGIN
    RETURN 'admin' = ANY(public.get_user_roles());
END;
$$ LANGUAGE plpgsql STABLE SECURITY DEFINER SET search_path = '';
|
||||
|
||||
-- Helper function to check if user has specific role
|
||||
-- TRUE when the current user holds the given role name.
-- FIX: pin search_path on this SECURITY DEFINER function and declare it
-- STABLE (it only delegates to public.get_user_roles()).
CREATE OR REPLACE FUNCTION public.has_role(role_name TEXT)
RETURNS BOOLEAN AS $$
BEGIN
    RETURN role_name = ANY(public.get_user_roles());
END;
$$ LANGUAGE plpgsql STABLE SECURITY DEFINER SET search_path = '';
|
||||
|
||||
-- Helper function to check if user can manage experiments
|
||||
-- TRUE when the current user holds either the 'admin' or the 'conductor'
-- role, i.e. is allowed to create/modify experiments.
-- FIX: pin search_path on this SECURITY DEFINER function and declare it
-- STABLE (all references are schema-qualified, so '' is safe).
CREATE OR REPLACE FUNCTION public.can_manage_experiments()
RETURNS BOOLEAN AS $$
BEGIN
    RETURN EXISTS (
        SELECT 1
        FROM public.user_roles ur
        JOIN public.roles r ON ur.role_id = r.id
        WHERE ur.user_id = auth.uid()
          AND r.name IN ('admin', 'conductor')
    );
END;
$$ LANGUAGE plpgsql STABLE SECURITY DEFINER SET search_path = '';
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -1,237 +0,0 @@
|
||||
-- Users and Roles
|
||||
-- This migration creates user-related tables with clean separation
|
||||
|
||||
-- =============================================
|
||||
-- 1. ROLES TABLE
|
||||
-- =============================================
|
||||
|
||||
-- Role catalog: one row per assignable role name.
CREATE TABLE IF NOT EXISTS public.roles (
    id          UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    name        TEXT NOT NULL UNIQUE,
    description TEXT,
    created_at  TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at  TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

-- Application profile for each auth user; the id mirrors auth.users(id)
-- and rows are removed when the auth user is deleted.
CREATE TABLE IF NOT EXISTS public.user_profiles (
    id         UUID PRIMARY KEY REFERENCES auth.users(id) ON DELETE CASCADE,
    email      TEXT NOT NULL UNIQUE,
    first_name TEXT,
    last_name  TEXT,
    status     TEXT NOT NULL DEFAULT 'active' CHECK (status IN ('active', 'inactive', 'suspended')),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

-- Many-to-many join between users and roles; a user holds each role at
-- most once, and assignments record who granted them.
CREATE TABLE IF NOT EXISTS public.user_roles (
    id          UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    user_id     UUID NOT NULL REFERENCES public.user_profiles(id) ON DELETE CASCADE,
    role_id     UUID NOT NULL REFERENCES public.roles(id) ON DELETE CASCADE,
    assigned_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    assigned_by UUID REFERENCES public.user_profiles(id),
    UNIQUE(user_id, role_id)
);

-- Lookup indexes for the common access paths (email login, role checks).
CREATE INDEX IF NOT EXISTS idx_user_profiles_email ON public.user_profiles(email);
CREATE INDEX IF NOT EXISTS idx_user_roles_user_id ON public.user_roles(user_id);
CREATE INDEX IF NOT EXISTS idx_user_roles_role_id ON public.user_roles(role_id);
|
||||
|
||||
-- =============================================
|
||||
-- 5. TRIGGERS
|
||||
-- =============================================
|
||||
|
||||
-- Create trigger for updated_at on user_profiles
|
||||
-- Keep updated_at current on every row update.
-- FIX: drop-if-exists first so the migration is idempotent — the CREATE
-- TABLE statements above use IF NOT EXISTS, but a bare CREATE TRIGGER
-- errors on re-run.
DROP TRIGGER IF EXISTS set_updated_at_user_profiles ON public.user_profiles;
CREATE TRIGGER set_updated_at_user_profiles
    BEFORE UPDATE ON public.user_profiles
    FOR EACH ROW
    EXECUTE FUNCTION public.handle_updated_at();

DROP TRIGGER IF EXISTS set_updated_at_roles ON public.roles;
CREATE TRIGGER set_updated_at_roles
    BEFORE UPDATE ON public.roles
    FOR EACH ROW
    EXECUTE FUNCTION public.handle_updated_at();
|
||||
|
||||
-- =============================================
|
||||
-- 6. GRANT PERMISSIONS
|
||||
-- =============================================
|
||||
|
||||
-- Grants + row level security for the user/role tables.
-- NOTE(review): every policy below is fully permissive — any authenticated
-- user may read and (except roles) write all rows. Access control is
-- enforced in the application layer; tighten these policies if the API is
-- ever exposed directly.
GRANT ALL ON public.roles TO authenticated;
GRANT ALL ON public.user_profiles TO authenticated;
GRANT ALL ON public.user_roles TO authenticated;

ALTER TABLE public.roles ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.user_profiles ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.user_roles ENABLE ROW LEVEL SECURITY;

-- FIX: drop-if-exists before each CREATE POLICY so the migration is
-- idempotent (CREATE POLICY has no IF NOT EXISTS form).

-- roles: read-only for authenticated users.
DROP POLICY IF EXISTS "Roles are viewable by authenticated users" ON public.roles;
CREATE POLICY "Roles are viewable by authenticated users" ON public.roles
    FOR SELECT USING (auth.role() = 'authenticated');

-- user_profiles: full CRUD for authenticated users.
DROP POLICY IF EXISTS "User profiles are viewable by authenticated users" ON public.user_profiles;
CREATE POLICY "User profiles are viewable by authenticated users" ON public.user_profiles
    FOR SELECT USING (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "User profiles are insertable by authenticated users" ON public.user_profiles;
CREATE POLICY "User profiles are insertable by authenticated users" ON public.user_profiles
    FOR INSERT WITH CHECK (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "User profiles are updatable by authenticated users" ON public.user_profiles;
CREATE POLICY "User profiles are updatable by authenticated users" ON public.user_profiles
    FOR UPDATE USING (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "User profiles are deletable by authenticated users" ON public.user_profiles;
CREATE POLICY "User profiles are deletable by authenticated users" ON public.user_profiles
    FOR DELETE USING (auth.role() = 'authenticated');

-- user_roles: full CRUD for authenticated users.
DROP POLICY IF EXISTS "User roles are viewable by authenticated users" ON public.user_roles;
CREATE POLICY "User roles are viewable by authenticated users" ON public.user_roles
    FOR SELECT USING (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "User roles are insertable by authenticated users" ON public.user_roles;
CREATE POLICY "User roles are insertable by authenticated users" ON public.user_roles
    FOR INSERT WITH CHECK (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "User roles are updatable by authenticated users" ON public.user_roles;
CREATE POLICY "User roles are updatable by authenticated users" ON public.user_roles
    FOR UPDATE USING (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "User roles are deletable by authenticated users" ON public.user_roles;
CREATE POLICY "User roles are deletable by authenticated users" ON public.user_roles
    FOR DELETE USING (auth.role() = 'authenticated');
|
||||
|
||||
-- =============================================
|
||||
-- 9. USER MANAGEMENT FUNCTIONS
|
||||
-- =============================================
|
||||
|
||||
-- Function to create a new user with roles
|
||||
-- Creates a complete user account in one call:
--   1. an auth.users row (email pre-confirmed, bcrypt-hashed password),
--   2. a matching public.user_profiles row with status 'active',
--   3. one user_roles row per recognized name in role_names
--      (unknown role names are silently skipped).
--
-- Parameters:
--   user_email    - email for the new account; must not already exist
--   role_names    - role names to assign
--   temp_password - initial plaintext password, stored hashed via crypt()
--
-- Returns JSON: user_id, email, temp_password, roles actually assigned,
-- and status. NOTE(review): the plaintext temp_password is echoed back in
-- the result — callers must treat the response as sensitive.
--
-- Raises: 'User with email % already exists' on duplicate email; any other
-- error is re-raised as 'Error creating user: %'.
--
-- FIX: SECURITY DEFINER functions must pin search_path so callers cannot
-- hijack unqualified names (crypt, gen_salt, uuid_generate_v4 live in
-- public, where the extensions were installed).
CREATE OR REPLACE FUNCTION public.create_user_with_roles(
    user_email TEXT,
    role_names TEXT[],
    temp_password TEXT
)
RETURNS JSON AS $$
DECLARE
    new_user_id UUID;
    encrypted_pwd TEXT;
    role_name TEXT;
    role_id_val UUID;
    assigned_by_id UUID;
    result JSON;
    user_roles_array TEXT[];
BEGIN
    -- Generate new user ID
    new_user_id := uuid_generate_v4();

    -- Encrypt the password (bcrypt via pgcrypto)
    encrypted_pwd := crypt(temp_password, gen_salt('bf'));

    -- Get the current user ID for assigned_by, but only if they have a profile.
    -- Otherwise fall back to the new user ID (self-assigned) so the FK holds.
    SELECT id INTO assigned_by_id
    FROM public.user_profiles
    WHERE id = auth.uid();

    IF assigned_by_id IS NULL THEN
        assigned_by_id := new_user_id;
    END IF;

    -- Create user in auth.users (direct insert; mirrors GoTrue's row shape)
    INSERT INTO auth.users (
        instance_id,
        id,
        aud,
        role,
        email,
        encrypted_password,
        email_confirmed_at,
        created_at,
        updated_at,
        confirmation_token,
        email_change,
        email_change_token_new,
        recovery_token
    ) VALUES (
        '00000000-0000-0000-0000-000000000000',
        new_user_id,
        'authenticated',
        'authenticated',
        user_email,
        encrypted_pwd,
        NOW(),
        NOW(),
        NOW(),
        '',
        '',
        '',
        ''
    );

    -- Create the application profile
    INSERT INTO public.user_profiles (id, email, status)
    VALUES (new_user_id, user_email, 'active');

    -- Assign each recognized role; accumulate the names actually assigned
    user_roles_array := ARRAY[]::TEXT[];
    FOREACH role_name IN ARRAY role_names
    LOOP
        SELECT id INTO role_id_val
        FROM public.roles
        WHERE name = role_name;

        IF role_id_val IS NOT NULL THEN
            INSERT INTO public.user_roles (user_id, role_id, assigned_by)
            VALUES (new_user_id, role_id_val, assigned_by_id)
            ON CONFLICT (user_id, role_id) DO NOTHING;

            user_roles_array := array_append(user_roles_array, role_name);
        END IF;
    END LOOP;

    -- Return the result as JSON
    result := json_build_object(
        'user_id', new_user_id::TEXT,
        'email', user_email,
        'temp_password', temp_password,
        'roles', user_roles_array,
        'status', 'active'
    );

    RETURN result;

EXCEPTION
    WHEN unique_violation THEN
        RAISE EXCEPTION 'User with email % already exists', user_email;
    WHEN OTHERS THEN
        RAISE EXCEPTION 'Error creating user: %', SQLERRM;
END;
$$ LANGUAGE plpgsql SECURITY DEFINER SET search_path = public;

-- Grant execute permission on the function
GRANT EXECUTE ON FUNCTION public.create_user_with_roles(TEXT, TEXT[], TEXT) TO authenticated;

-- Comment for documentation
COMMENT ON FUNCTION public.create_user_with_roles(TEXT, TEXT[], TEXT) IS
'Creates a new user in auth.users, creates a profile in user_profiles, and assigns the specified roles. Returns user information including user_id, email, temp_password, roles, and status.';
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -1,63 +0,0 @@
|
||||
-- Machine Types
|
||||
-- This migration creates the machine types table
|
||||
|
||||
-- =============================================
|
||||
-- 1. MACHINE TYPES TABLE
|
||||
-- =============================================
|
||||
|
||||
-- Catalog of cracking-machine types referenced by experiment phases.
CREATE TABLE IF NOT EXISTS public.machine_types (
    id          UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    name        TEXT NOT NULL UNIQUE,
    description TEXT,
    created_at  TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at  TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    created_by  UUID NOT NULL REFERENCES public.user_profiles(id)
);

CREATE INDEX IF NOT EXISTS idx_machine_types_name ON public.machine_types(name);

-- FIX: drop-if-exists so re-running the migration does not error — the
-- table uses IF NOT EXISTS but bare CREATE TRIGGER is not idempotent.
DROP TRIGGER IF EXISTS set_updated_at_machine_types ON public.machine_types;
CREATE TRIGGER set_updated_at_machine_types
    BEFORE UPDATE ON public.machine_types
    FOR EACH ROW
    EXECUTE FUNCTION public.handle_updated_at();

GRANT ALL ON public.machine_types TO authenticated;

ALTER TABLE public.machine_types ENABLE ROW LEVEL SECURITY;

-- Fully permissive CRUD policies for authenticated users (matches the
-- rest of the schema). DROP IF EXISTS first for idempotency.
DROP POLICY IF EXISTS "Machine types are viewable by authenticated users" ON public.machine_types;
CREATE POLICY "Machine types are viewable by authenticated users" ON public.machine_types
    FOR SELECT USING (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "Machine types are insertable by authenticated users" ON public.machine_types;
CREATE POLICY "Machine types are insertable by authenticated users" ON public.machine_types
    FOR INSERT WITH CHECK (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "Machine types are updatable by authenticated users" ON public.machine_types;
CREATE POLICY "Machine types are updatable by authenticated users" ON public.machine_types
    FOR UPDATE USING (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "Machine types are deletable by authenticated users" ON public.machine_types;
CREATE POLICY "Machine types are deletable by authenticated users" ON public.machine_types
    FOR DELETE USING (auth.role() = 'authenticated');
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -1,77 +0,0 @@
|
||||
-- Experiment Phases
|
||||
-- This migration creates the experiment phases table
|
||||
|
||||
-- =============================================
|
||||
-- 1. EXPERIMENT PHASES TABLE
|
||||
-- =============================================
|
||||
|
||||
-- Experiment phase definitions. Each phase enables some subset of the four
-- processing steps; a cracking machine type is mandatory iff cracking is on.
CREATE TABLE IF NOT EXISTS public.experiment_phases (
    id                       UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    name                     TEXT NOT NULL UNIQUE,
    description              TEXT,
    has_soaking              BOOLEAN NOT NULL DEFAULT false,
    has_airdrying            BOOLEAN NOT NULL DEFAULT false,
    has_cracking             BOOLEAN NOT NULL DEFAULT false,
    has_shelling             BOOLEAN NOT NULL DEFAULT false,
    cracking_machine_type_id UUID REFERENCES public.machine_types(id) ON DELETE SET NULL,
    created_at               TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at               TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    created_by               UUID NOT NULL REFERENCES public.user_profiles(id),

    -- A phase with every step disabled would be meaningless.
    CONSTRAINT check_at_least_one_phase
        CHECK (has_soaking = true OR has_airdrying = true OR has_cracking = true OR has_shelling = true),

    -- Cracking requires knowing which machine does the cracking.
    CONSTRAINT ck_experiment_phases_machine_required_when_cracking
        CHECK ((has_cracking = false) OR (cracking_machine_type_id IS NOT NULL))
);

CREATE INDEX IF NOT EXISTS idx_experiment_phases_name ON public.experiment_phases(name);
CREATE INDEX IF NOT EXISTS idx_experiment_phases_cracking_machine_type_id ON public.experiment_phases(cracking_machine_type_id);

-- FIX: drop-if-exists so re-running the migration does not error.
DROP TRIGGER IF EXISTS set_updated_at_experiment_phases ON public.experiment_phases;
CREATE TRIGGER set_updated_at_experiment_phases
    BEFORE UPDATE ON public.experiment_phases
    FOR EACH ROW
    EXECUTE FUNCTION public.handle_updated_at();

GRANT ALL ON public.experiment_phases TO authenticated;

ALTER TABLE public.experiment_phases ENABLE ROW LEVEL SECURITY;

-- Fully permissive CRUD policies for authenticated users (matches the
-- rest of the schema). DROP IF EXISTS first for idempotency.
DROP POLICY IF EXISTS "Experiment phases are viewable by authenticated users" ON public.experiment_phases;
CREATE POLICY "Experiment phases are viewable by authenticated users" ON public.experiment_phases
    FOR SELECT USING (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "Experiment phases are insertable by authenticated users" ON public.experiment_phases;
CREATE POLICY "Experiment phases are insertable by authenticated users" ON public.experiment_phases
    FOR INSERT WITH CHECK (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "Experiment phases are updatable by authenticated users" ON public.experiment_phases;
CREATE POLICY "Experiment phases are updatable by authenticated users" ON public.experiment_phases
    FOR UPDATE USING (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "Experiment phases are deletable by authenticated users" ON public.experiment_phases;
CREATE POLICY "Experiment phases are deletable by authenticated users" ON public.experiment_phases
    FOR DELETE USING (auth.role() = 'authenticated');
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -1,70 +0,0 @@
|
||||
-- Experiments
|
||||
-- This migration creates the experiments table
|
||||
|
||||
-- =============================================
|
||||
-- 1. EXPERIMENTS TABLE
|
||||
-- =============================================
|
||||
|
||||
-- Experiments: numbered within a phase, with repetition count and weight.
CREATE TABLE IF NOT EXISTS public.experiments (
    id                        UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    experiment_number         INTEGER NOT NULL,
    reps_required             INTEGER NOT NULL CHECK (reps_required > 0),
    weight_per_repetition_lbs DOUBLE PRECISION NOT NULL DEFAULT 5.0 CHECK (weight_per_repetition_lbs > 0),
    results_status            TEXT NOT NULL DEFAULT 'valid' CHECK (results_status IN ('valid', 'invalid')),
    completion_status         BOOLEAN NOT NULL DEFAULT false,
    -- BUG FIX: this column is NOT NULL, so the previous ON DELETE SET NULL
    -- action could never succeed — deleting a referenced phase would always
    -- fail with a not-null violation at runtime. RESTRICT states the actual
    -- behavior explicitly: a phase cannot be deleted while experiments use it.
    phase_id                  UUID NOT NULL REFERENCES public.experiment_phases(id) ON DELETE RESTRICT,
    created_at                TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at                TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    created_by                UUID NOT NULL REFERENCES public.user_profiles(id),

    -- Experiment numbers are unique per phase, not globally.
    CONSTRAINT unique_experiment_number_phase UNIQUE (experiment_number, phase_id)
);

CREATE INDEX IF NOT EXISTS idx_experiments_phase_id ON public.experiments(phase_id);
CREATE INDEX IF NOT EXISTS idx_experiments_experiment_number ON public.experiments(experiment_number);
CREATE INDEX IF NOT EXISTS idx_experiments_created_by ON public.experiments(created_by);
-- FIX: removed idx_experiments_id — id is the primary key, which already
-- has a unique index; a second index on the same column is pure overhead.

-- FIX: drop-if-exists so re-running the migration does not error.
DROP TRIGGER IF EXISTS set_updated_at_experiments ON public.experiments;
CREATE TRIGGER set_updated_at_experiments
    BEFORE UPDATE ON public.experiments
    FOR EACH ROW
    EXECUTE FUNCTION public.handle_updated_at();

GRANT ALL ON public.experiments TO authenticated;

ALTER TABLE public.experiments ENABLE ROW LEVEL SECURITY;

-- Fully permissive CRUD policies for authenticated users (matches the
-- rest of the schema). DROP IF EXISTS first for idempotency.
DROP POLICY IF EXISTS "Experiments are viewable by authenticated users" ON public.experiments;
CREATE POLICY "Experiments are viewable by authenticated users" ON public.experiments
    FOR SELECT USING (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "Experiments are insertable by authenticated users" ON public.experiments;
CREATE POLICY "Experiments are insertable by authenticated users" ON public.experiments
    FOR INSERT WITH CHECK (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "Experiments are updatable by authenticated users" ON public.experiments;
CREATE POLICY "Experiments are updatable by authenticated users" ON public.experiments
    FOR UPDATE USING (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "Experiments are deletable by authenticated users" ON public.experiments;
CREATE POLICY "Experiments are deletable by authenticated users" ON public.experiments
    FOR DELETE USING (auth.role() = 'authenticated');
|
||||
|
||||
@@ -1,69 +0,0 @@
|
||||
-- Experiment Repetitions
|
||||
-- This migration creates the experiment repetitions table
|
||||
|
||||
-- =============================================
|
||||
-- 1. EXPERIMENT REPETITIONS TABLE
|
||||
-- =============================================
|
||||
|
||||
-- One row per scheduled repetition of an experiment; repetition numbers
-- are unique within an experiment and rows go away with the experiment.
CREATE TABLE IF NOT EXISTS public.experiment_repetitions (
    id                UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    experiment_id     UUID NOT NULL REFERENCES public.experiments(id) ON DELETE CASCADE,
    repetition_number INTEGER NOT NULL CHECK (repetition_number > 0),
    scheduled_date    TIMESTAMP WITH TIME ZONE,
    status            TEXT NOT NULL DEFAULT 'pending' CHECK (status IN ('pending', 'in_progress', 'completed', 'cancelled')),
    created_at        TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at        TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    created_by        UUID NOT NULL REFERENCES public.user_profiles(id),

    UNIQUE(experiment_id, repetition_number)
);

CREATE INDEX IF NOT EXISTS idx_experiment_repetitions_experiment_id ON public.experiment_repetitions(experiment_id);
CREATE INDEX IF NOT EXISTS idx_experiment_repetitions_created_by ON public.experiment_repetitions(created_by);

-- FIX: drop-if-exists so re-running the migration does not error.
DROP TRIGGER IF EXISTS set_updated_at_experiment_repetitions ON public.experiment_repetitions;
CREATE TRIGGER set_updated_at_experiment_repetitions
    BEFORE UPDATE ON public.experiment_repetitions
    FOR EACH ROW
    EXECUTE FUNCTION public.handle_updated_at();

GRANT ALL ON public.experiment_repetitions TO authenticated;

ALTER TABLE public.experiment_repetitions ENABLE ROW LEVEL SECURITY;

-- Fully permissive CRUD policies for authenticated users (matches the
-- rest of the schema). DROP IF EXISTS first for idempotency.
DROP POLICY IF EXISTS "Experiment repetitions are viewable by authenticated users" ON public.experiment_repetitions;
CREATE POLICY "Experiment repetitions are viewable by authenticated users" ON public.experiment_repetitions
    FOR SELECT USING (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "Experiment repetitions are insertable by authenticated users" ON public.experiment_repetitions;
CREATE POLICY "Experiment repetitions are insertable by authenticated users" ON public.experiment_repetitions
    FOR INSERT WITH CHECK (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "Experiment repetitions are updatable by authenticated users" ON public.experiment_repetitions;
CREATE POLICY "Experiment repetitions are updatable by authenticated users" ON public.experiment_repetitions
    FOR UPDATE USING (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "Experiment repetitions are deletable by authenticated users" ON public.experiment_repetitions;
CREATE POLICY "Experiment repetitions are deletable by authenticated users" ON public.experiment_repetitions
    FOR DELETE USING (auth.role() = 'authenticated');
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -1,89 +0,0 @@
|
||||
-- Cracker Parameters
|
||||
-- This migration creates machine-specific parameter tables (must be created before cracking table)
|
||||
|
||||
-- =============================================
|
||||
-- 1. JC CRACKER PARAMETERS TABLE
|
||||
-- =============================================
|
||||
|
||||
-- Machine-specific cracking parameters (must exist before the cracking
-- phase-data table that references them).

-- Parameters for the JC cracker. Units are encoded in the column names
-- (hz, pecans/sec, inches).
CREATE TABLE IF NOT EXISTS public.jc_cracker_parameters (
    id                         UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    plate_contact_frequency_hz DOUBLE PRECISION NOT NULL CHECK (plate_contact_frequency_hz > 0),
    throughput_rate_pecans_sec DOUBLE PRECISION NOT NULL CHECK (throughput_rate_pecans_sec > 0),
    crush_amount_in            DOUBLE PRECISION NOT NULL CHECK (crush_amount_in >= 0),
    entry_exit_height_diff_in  DOUBLE PRECISION NOT NULL,
    created_at                 TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at                 TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

-- Parameters for the Meyer cracker.
CREATE TABLE IF NOT EXISTS public.meyer_cracker_parameters (
    id                      UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    motor_speed_hz          DOUBLE PRECISION NOT NULL CHECK (motor_speed_hz > 0),
    jig_displacement_inches DOUBLE PRECISION NOT NULL,
    spring_stiffness_nm     DOUBLE PRECISION NOT NULL CHECK (spring_stiffness_nm > 0),
    created_at              TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at              TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

-- FIX: drop-if-exists so re-running the migration does not error.
DROP TRIGGER IF EXISTS set_updated_at_jc_cracker_parameters ON public.jc_cracker_parameters;
CREATE TRIGGER set_updated_at_jc_cracker_parameters
    BEFORE UPDATE ON public.jc_cracker_parameters
    FOR EACH ROW
    EXECUTE FUNCTION public.handle_updated_at();

DROP TRIGGER IF EXISTS set_updated_at_meyer_cracker_parameters ON public.meyer_cracker_parameters;
CREATE TRIGGER set_updated_at_meyer_cracker_parameters
    BEFORE UPDATE ON public.meyer_cracker_parameters
    FOR EACH ROW
    EXECUTE FUNCTION public.handle_updated_at();

GRANT ALL ON public.jc_cracker_parameters TO authenticated;
GRANT ALL ON public.meyer_cracker_parameters TO authenticated;

ALTER TABLE public.jc_cracker_parameters ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.meyer_cracker_parameters ENABLE ROW LEVEL SECURITY;

-- Fully permissive CRUD policies for authenticated users (matches the
-- rest of the schema). DROP IF EXISTS first for idempotency.
DROP POLICY IF EXISTS "JC Cracker parameters are viewable by authenticated users" ON public.jc_cracker_parameters;
CREATE POLICY "JC Cracker parameters are viewable by authenticated users" ON public.jc_cracker_parameters
    FOR SELECT USING (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "JC Cracker parameters are insertable by authenticated users" ON public.jc_cracker_parameters;
CREATE POLICY "JC Cracker parameters are insertable by authenticated users" ON public.jc_cracker_parameters
    FOR INSERT WITH CHECK (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "JC Cracker parameters are updatable by authenticated users" ON public.jc_cracker_parameters;
CREATE POLICY "JC Cracker parameters are updatable by authenticated users" ON public.jc_cracker_parameters
    FOR UPDATE USING (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "JC Cracker parameters are deletable by authenticated users" ON public.jc_cracker_parameters;
CREATE POLICY "JC Cracker parameters are deletable by authenticated users" ON public.jc_cracker_parameters
    FOR DELETE USING (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "Meyer Cracker parameters are viewable by authenticated users" ON public.meyer_cracker_parameters;
CREATE POLICY "Meyer Cracker parameters are viewable by authenticated users" ON public.meyer_cracker_parameters
    FOR SELECT USING (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "Meyer Cracker parameters are insertable by authenticated users" ON public.meyer_cracker_parameters;
CREATE POLICY "Meyer Cracker parameters are insertable by authenticated users" ON public.meyer_cracker_parameters
    FOR INSERT WITH CHECK (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "Meyer Cracker parameters are updatable by authenticated users" ON public.meyer_cracker_parameters;
CREATE POLICY "Meyer Cracker parameters are updatable by authenticated users" ON public.meyer_cracker_parameters
    FOR UPDATE USING (auth.role() = 'authenticated');

DROP POLICY IF EXISTS "Meyer Cracker parameters are deletable by authenticated users" ON public.meyer_cracker_parameters;
CREATE POLICY "Meyer Cracker parameters are deletable by authenticated users" ON public.meyer_cracker_parameters
    FOR DELETE USING (auth.role() = 'authenticated');
|
||||
|
||||
|
||||
|
||||
|
||||
-- ============================================================
-- (next migration file: phase data tables)
-- ============================================================
-- Phase Data Tables
-- This migration creates phase-specific data entry tables (soaking, airdrying, cracking, shelling).
-- Each table holds at most one row per repetition (enforced by UNIQUE constraints below).

-- =============================================
-- 1. SOAKING TABLE
-- =============================================

CREATE TABLE IF NOT EXISTS public.soaking (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    repetition_id UUID NOT NULL REFERENCES public.experiment_repetitions(id) ON DELETE CASCADE,
    scheduled_start_time TIMESTAMP WITH TIME ZONE NOT NULL,
    actual_start_time TIMESTAMP WITH TIME ZONE,
    soaking_duration_minutes INTEGER NOT NULL CHECK (soaking_duration_minutes > 0),
    -- scheduled_end_time is derived by trigger from start + duration (see section 6)
    scheduled_end_time TIMESTAMP WITH TIME ZONE NOT NULL,
    actual_end_time TIMESTAMP WITH TIME ZONE,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    created_by UUID NOT NULL REFERENCES public.user_profiles(id),

    -- Ensure only one soaking per repetition
    CONSTRAINT unique_soaking_per_repetition UNIQUE (repetition_id)
);

-- =============================================
-- 2. AIRDRYING TABLE
-- =============================================

CREATE TABLE IF NOT EXISTS public.airdrying (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    repetition_id UUID NOT NULL REFERENCES public.experiment_repetitions(id) ON DELETE CASCADE,
    scheduled_start_time TIMESTAMP WITH TIME ZONE NOT NULL,
    actual_start_time TIMESTAMP WITH TIME ZONE,
    duration_minutes INTEGER NOT NULL CHECK (duration_minutes > 0),
    scheduled_end_time TIMESTAMP WITH TIME ZONE NOT NULL,
    actual_end_time TIMESTAMP WITH TIME ZONE,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    created_by UUID NOT NULL REFERENCES public.user_profiles(id),

    -- Ensure only one airdrying per repetition
    CONSTRAINT unique_airdrying_per_repetition UNIQUE (repetition_id)
);

-- =============================================
-- 3. CRACKING TABLE
-- =============================================

CREATE TABLE IF NOT EXISTS public.cracking (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    repetition_id UUID NOT NULL REFERENCES public.experiment_repetitions(id) ON DELETE CASCADE,
    -- Cracking is the only phase tied to a specific machine
    machine_type_id UUID NOT NULL REFERENCES public.machine_types(id),
    scheduled_start_time TIMESTAMP WITH TIME ZONE NOT NULL,
    actual_start_time TIMESTAMP WITH TIME ZONE,
    actual_end_time TIMESTAMP WITH TIME ZONE,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    created_by UUID NOT NULL REFERENCES public.user_profiles(id),

    -- Ensure only one cracking per repetition
    CONSTRAINT unique_cracking_per_repetition UNIQUE (repetition_id)
);

-- =============================================
-- 4. SHELLING TABLE
-- =============================================

CREATE TABLE IF NOT EXISTS public.shelling (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    repetition_id UUID NOT NULL REFERENCES public.experiment_repetitions(id) ON DELETE CASCADE,
    scheduled_start_time TIMESTAMP WITH TIME ZONE NOT NULL,
    actual_start_time TIMESTAMP WITH TIME ZONE,
    actual_end_time TIMESTAMP WITH TIME ZONE,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    created_by UUID NOT NULL REFERENCES public.user_profiles(id),

    -- Ensure only one shelling per repetition
    CONSTRAINT unique_shelling_per_repetition UNIQUE (repetition_id)
);
-- =============================================
-- 5. INDEXES FOR PERFORMANCE
-- =============================================

-- Indexes on repetition_id support the per-repetition lookups used by the
-- views and the cross-phase scheduling triggers below.
CREATE INDEX IF NOT EXISTS idx_soaking_repetition_id ON public.soaking(repetition_id);
CREATE INDEX IF NOT EXISTS idx_airdrying_repetition_id ON public.airdrying(repetition_id);
CREATE INDEX IF NOT EXISTS idx_cracking_repetition_id ON public.cracking(repetition_id);
CREATE INDEX IF NOT EXISTS idx_shelling_repetition_id ON public.shelling(repetition_id);

-- Index for machine type references
CREATE INDEX IF NOT EXISTS idx_cracking_machine_type_id ON public.cracking(machine_type_id);

-- Indexes for created_by references (audit queries / FK checks)
CREATE INDEX IF NOT EXISTS idx_soaking_created_by ON public.soaking(created_by);
CREATE INDEX IF NOT EXISTS idx_airdrying_created_by ON public.airdrying(created_by);
CREATE INDEX IF NOT EXISTS idx_cracking_created_by ON public.cracking(created_by);
CREATE INDEX IF NOT EXISTS idx_shelling_created_by ON public.shelling(created_by);
-- =============================================
-- 6. TRIGGER FUNCTIONS FOR AUTOMATIC TIMESTAMP CALCULATIONS
-- =============================================

-- Trigger function: derive soaking.scheduled_end_time from
-- scheduled_start_time + soaking_duration_minutes on every INSERT/UPDATE,
-- so the stored end time can never drift out of sync with the duration.
CREATE OR REPLACE FUNCTION calculate_soaking_scheduled_end_time()
RETURNS TRIGGER AS $$
BEGIN
    -- Text-concat into an INTERVAL is the portable way to scale by a column value.
    NEW.scheduled_end_time = NEW.scheduled_start_time + (NEW.soaking_duration_minutes || ' minutes')::INTERVAL;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Trigger function: derive airdrying.scheduled_end_time from
-- scheduled_start_time + duration_minutes (mirrors the soaking variant above).
CREATE OR REPLACE FUNCTION calculate_airdrying_scheduled_end_time()
RETURNS TRIGGER AS $$
BEGIN
    NEW.scheduled_end_time = NEW.scheduled_start_time + (NEW.duration_minutes || ' minutes')::INTERVAL;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Trigger function (BEFORE INSERT on airdrying): if no scheduled_start_time
-- was supplied, default it to the soaking phase's scheduled_end_time for the
-- same repetition, chaining the phases back-to-back.
-- If the repetition has no soaking row, scheduled_start_time stays NULL and
-- the column's NOT NULL constraint will reject the insert.
CREATE OR REPLACE FUNCTION set_airdrying_scheduled_start_time()
RETURNS TRIGGER AS $$
BEGIN
    IF NEW.scheduled_start_time IS NULL THEN
        -- At most one soaking exists per repetition (unique constraint),
        -- so LIMIT 1 is defensive rather than selective.
        SELECT s.scheduled_end_time INTO NEW.scheduled_start_time
        FROM public.soaking s
        WHERE s.repetition_id = NEW.repetition_id
        LIMIT 1;
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Trigger function (BEFORE INSERT on cracking): if no scheduled_start_time
-- was supplied, default it to the airdrying phase's scheduled_end_time for
-- the same repetition (mirrors set_airdrying_scheduled_start_time above).
CREATE OR REPLACE FUNCTION set_cracking_scheduled_start_time()
RETURNS TRIGGER AS $$
BEGIN
    IF NEW.scheduled_start_time IS NULL THEN
        SELECT a.scheduled_end_time INTO NEW.scheduled_start_time
        FROM public.airdrying a
        WHERE a.repetition_id = NEW.repetition_id
        LIMIT 1;
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- =============================================
-- 7. TRIGGERS
-- =============================================

-- Each trigger is dropped first so this migration stays idempotent when
-- re-applied (CREATE TRIGGER has no IF NOT EXISTS form).

-- Triggers for automatic timestamp calculations
DROP TRIGGER IF EXISTS trigger_calculate_soaking_scheduled_end_time ON public.soaking;
CREATE TRIGGER trigger_calculate_soaking_scheduled_end_time
    BEFORE INSERT OR UPDATE ON public.soaking
    FOR EACH ROW
    EXECUTE FUNCTION calculate_soaking_scheduled_end_time();

DROP TRIGGER IF EXISTS trigger_calculate_airdrying_scheduled_end_time ON public.airdrying;
CREATE TRIGGER trigger_calculate_airdrying_scheduled_end_time
    BEFORE INSERT OR UPDATE ON public.airdrying
    FOR EACH ROW
    EXECUTE FUNCTION calculate_airdrying_scheduled_end_time();

DROP TRIGGER IF EXISTS trigger_set_airdrying_scheduled_start_time ON public.airdrying;
CREATE TRIGGER trigger_set_airdrying_scheduled_start_time
    BEFORE INSERT ON public.airdrying
    FOR EACH ROW
    EXECUTE FUNCTION set_airdrying_scheduled_start_time();

DROP TRIGGER IF EXISTS trigger_set_cracking_scheduled_start_time ON public.cracking;
CREATE TRIGGER trigger_set_cracking_scheduled_start_time
    BEFORE INSERT ON public.cracking
    FOR EACH ROW
    EXECUTE FUNCTION set_cracking_scheduled_start_time();

-- Triggers for updated_at on all phase tables
-- (DROP IF EXISTS added for consistency with the triggers above; the
-- original would fail on re-run with "trigger already exists".)
DROP TRIGGER IF EXISTS set_updated_at_soaking ON public.soaking;
CREATE TRIGGER set_updated_at_soaking
    BEFORE UPDATE ON public.soaking
    FOR EACH ROW
    EXECUTE FUNCTION public.handle_updated_at();

DROP TRIGGER IF EXISTS set_updated_at_airdrying ON public.airdrying;
CREATE TRIGGER set_updated_at_airdrying
    BEFORE UPDATE ON public.airdrying
    FOR EACH ROW
    EXECUTE FUNCTION public.handle_updated_at();

DROP TRIGGER IF EXISTS set_updated_at_cracking ON public.cracking;
CREATE TRIGGER set_updated_at_cracking
    BEFORE UPDATE ON public.cracking
    FOR EACH ROW
    EXECUTE FUNCTION public.handle_updated_at();

DROP TRIGGER IF EXISTS set_updated_at_shelling ON public.shelling;
CREATE TRIGGER set_updated_at_shelling
    BEFORE UPDATE ON public.shelling
    FOR EACH ROW
    EXECUTE FUNCTION public.handle_updated_at();
-- =============================================
-- 8. GRANT PERMISSIONS
-- =============================================

GRANT ALL ON public.soaking TO authenticated;
GRANT ALL ON public.airdrying TO authenticated;
GRANT ALL ON public.cracking TO authenticated;
GRANT ALL ON public.shelling TO authenticated;

-- =============================================
-- 9. ENABLE ROW LEVEL SECURITY
-- =============================================

ALTER TABLE public.soaking ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.airdrying ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.cracking ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.shelling ENABLE ROW LEVEL SECURITY;

-- =============================================
-- 10. CREATE RLS POLICIES
-- =============================================

-- Any authenticated user gets full CRUD access to all four phase tables;
-- there is no per-row ownership restriction on phase data.
CREATE POLICY "Soaking data is viewable by authenticated users" ON public.soaking
    FOR SELECT USING (auth.role() = 'authenticated');

CREATE POLICY "Soaking data is insertable by authenticated users" ON public.soaking
    FOR INSERT WITH CHECK (auth.role() = 'authenticated');

CREATE POLICY "Soaking data is updatable by authenticated users" ON public.soaking
    FOR UPDATE USING (auth.role() = 'authenticated');

CREATE POLICY "Soaking data is deletable by authenticated users" ON public.soaking
    FOR DELETE USING (auth.role() = 'authenticated');

CREATE POLICY "Airdrying data is viewable by authenticated users" ON public.airdrying
    FOR SELECT USING (auth.role() = 'authenticated');

CREATE POLICY "Airdrying data is insertable by authenticated users" ON public.airdrying
    FOR INSERT WITH CHECK (auth.role() = 'authenticated');

CREATE POLICY "Airdrying data is updatable by authenticated users" ON public.airdrying
    FOR UPDATE USING (auth.role() = 'authenticated');

CREATE POLICY "Airdrying data is deletable by authenticated users" ON public.airdrying
    FOR DELETE USING (auth.role() = 'authenticated');

CREATE POLICY "Cracking data is viewable by authenticated users" ON public.cracking
    FOR SELECT USING (auth.role() = 'authenticated');

CREATE POLICY "Cracking data is insertable by authenticated users" ON public.cracking
    FOR INSERT WITH CHECK (auth.role() = 'authenticated');

CREATE POLICY "Cracking data is updatable by authenticated users" ON public.cracking
    FOR UPDATE USING (auth.role() = 'authenticated');

CREATE POLICY "Cracking data is deletable by authenticated users" ON public.cracking
    FOR DELETE USING (auth.role() = 'authenticated');

CREATE POLICY "Shelling data is viewable by authenticated users" ON public.shelling
    FOR SELECT USING (auth.role() = 'authenticated');

CREATE POLICY "Shelling data is insertable by authenticated users" ON public.shelling
    FOR INSERT WITH CHECK (auth.role() = 'authenticated');

CREATE POLICY "Shelling data is updatable by authenticated users" ON public.shelling
    FOR UPDATE USING (auth.role() = 'authenticated');

CREATE POLICY "Shelling data is deletable by authenticated users" ON public.shelling
    FOR DELETE USING (auth.role() = 'authenticated');
-- ============================================================
-- (next migration file: conductor availability)
-- ============================================================
-- Conductor Availability
-- This migration creates the conductor availability table.

-- =============================================
-- 1. CONDUCTOR AVAILABILITY TABLE
-- =============================================

CREATE TABLE IF NOT EXISTS public.conductor_availability (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    user_id UUID NOT NULL REFERENCES public.user_profiles(id) ON DELETE CASCADE,
    available_from TIMESTAMP WITH TIME ZONE NOT NULL,
    available_to TIMESTAMP WITH TIME ZONE NOT NULL,
    notes TEXT, -- Optional notes about the availability
    status TEXT NOT NULL DEFAULT 'active' CHECK (status IN ('active', 'cancelled')),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    created_by UUID NOT NULL REFERENCES public.user_profiles(id),

    -- Ensure available_to is after available_from
    CONSTRAINT valid_time_range CHECK (available_to > available_from),

    -- Ensure availability is in the future (can be modified if needed for past records)
    -- NOTE(review): NOW() inside a CHECK constraint is re-evaluated on every
    -- write and on dump/restore, so rows that were valid when inserted can
    -- later fail re-validation; consider enforcing this rule in application
    -- code or a trigger instead — TODO confirm before relying on it.
    CONSTRAINT future_availability CHECK (available_from >= NOW() - INTERVAL '1 day')
);
-- =============================================
-- 2. INDEXES FOR PERFORMANCE
-- =============================================

CREATE INDEX IF NOT EXISTS idx_conductor_availability_user_id ON public.conductor_availability(user_id);
CREATE INDEX IF NOT EXISTS idx_conductor_availability_available_from ON public.conductor_availability(available_from);
CREATE INDEX IF NOT EXISTS idx_conductor_availability_available_to ON public.conductor_availability(available_to);
CREATE INDEX IF NOT EXISTS idx_conductor_availability_status ON public.conductor_availability(status);
CREATE INDEX IF NOT EXISTS idx_conductor_availability_created_by ON public.conductor_availability(created_by);
-- Composite index supports the range-containment queries in the
-- availability lookup functions below.
CREATE INDEX IF NOT EXISTS idx_conductor_availability_time_range ON public.conductor_availability(available_from, available_to);
-- =============================================
-- 3. FUNCTIONS FOR OVERLAP PREVENTION
-- =============================================

-- Trigger function: reject an INSERT/UPDATE whose time range overlaps any
-- other *active* availability for the same user.
--
-- Two ranges overlap iff each one starts before the other ends; that single
-- predicate is equivalent to the four enumerated partial/containment cases
-- (start-inside, end-inside, contains, contained) and shares their boundary
-- behavior: ranges that merely touch end-to-start do NOT count as overlapping.
--
-- NOTE(review): a trigger-based check is not race-safe under concurrent
-- inserts; an EXCLUDE USING gist (user_id WITH =, tstzrange(available_from,
-- available_to) WITH &&) constraint would enforce this transactionally —
-- TODO confirm before relying on this under concurrency.
CREATE OR REPLACE FUNCTION public.check_availability_overlap()
RETURNS TRIGGER AS $$
BEGIN
    -- EXISTS stops at the first conflicting row instead of counting all of them.
    IF EXISTS (
        SELECT 1
        FROM public.conductor_availability
        WHERE user_id = NEW.user_id
          -- Exclude the row being updated; the all-zero UUID stands in for
          -- NEW.id on INSERT (no existing id can equal it).
          AND id != COALESCE(NEW.id, '00000000-0000-0000-0000-000000000000'::UUID)
          AND status = 'active'
          AND NEW.available_from < available_to
          AND NEW.available_to > available_from
    ) THEN
        RAISE EXCEPTION 'Availability overlaps with existing availability for user %. Please adjust the time range or cancel the conflicting availability.', NEW.user_id;
    END IF;

    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Returns every user with the 'conductor' role whose single active
-- availability window fully covers [start_time, end_time].
-- SECURITY DEFINER so callers can resolve conductors without needing
-- direct read access to the underlying role tables.
CREATE OR REPLACE FUNCTION public.get_available_conductors(
    start_time TIMESTAMP WITH TIME ZONE,
    end_time TIMESTAMP WITH TIME ZONE
)
RETURNS TABLE (
    user_id UUID,
    email TEXT,
    available_from TIMESTAMP WITH TIME ZONE,
    available_to TIMESTAMP WITH TIME ZONE
) AS $$
BEGIN
    RETURN QUERY
    SELECT
        ca.user_id,
        up.email,
        ca.available_from,
        ca.available_to
    FROM public.conductor_availability ca
    JOIN public.user_profiles up ON ca.user_id = up.id
    JOIN public.user_roles ur ON up.id = ur.user_id
    JOIN public.roles r ON ur.role_id = r.id
    WHERE ca.status = 'active'
      AND r.name = 'conductor'
      -- Containment, not mere overlap: the window must cover the whole request.
      AND ca.available_from <= start_time
      AND ca.available_to >= end_time
    ORDER BY up.email;
END;
$$ LANGUAGE plpgsql SECURITY DEFINER;
-- Returns TRUE when the given conductor has at least one active availability
-- window fully covering [start_time, end_time].
-- SECURITY DEFINER so it can be called without direct table read access.
CREATE OR REPLACE FUNCTION public.is_conductor_available(
    conductor_user_id UUID,
    start_time TIMESTAMP WITH TIME ZONE,
    end_time TIMESTAMP WITH TIME ZONE
)
RETURNS BOOLEAN AS $$
BEGIN
    -- EXISTS short-circuits on the first matching window; the original
    -- COUNT(*) scanned every matching row to answer a yes/no question.
    RETURN EXISTS (
        SELECT 1
        FROM public.conductor_availability
        WHERE user_id = conductor_user_id
          AND status = 'active'
          AND available_from <= start_time
          AND available_to >= end_time
    );
END;
$$ LANGUAGE plpgsql SECURITY DEFINER;
-- =============================================
-- 4. TRIGGERS
-- =============================================

-- DROP IF EXISTS keeps these re-runnable (CREATE TRIGGER has no IF NOT
-- EXISTS form), matching the trigger style used elsewhere in this schema.

-- Keep updated_at current on every UPDATE
DROP TRIGGER IF EXISTS set_updated_at_conductor_availability ON public.conductor_availability;
CREATE TRIGGER set_updated_at_conductor_availability
    BEFORE UPDATE ON public.conductor_availability
    FOR EACH ROW
    EXECUTE FUNCTION public.handle_updated_at();

-- Prevent overlapping availabilities per user (see check_availability_overlap)
DROP TRIGGER IF EXISTS trigger_check_availability_overlap ON public.conductor_availability;
CREATE TRIGGER trigger_check_availability_overlap
    BEFORE INSERT OR UPDATE ON public.conductor_availability
    FOR EACH ROW
    EXECUTE FUNCTION public.check_availability_overlap();
-- =============================================
-- 5. GRANT PERMISSIONS
-- =============================================

GRANT ALL ON public.conductor_availability TO authenticated;

-- =============================================
-- 6. ENABLE ROW LEVEL SECURITY
-- =============================================

ALTER TABLE public.conductor_availability ENABLE ROW LEVEL SECURITY;

-- =============================================
-- 7. CREATE RLS POLICIES
-- =============================================

-- Ownership model: each user manages only their own availability rows;
-- admins (public.is_admin()) bypass the ownership restriction everywhere.

CREATE POLICY "conductor_availability_select_policy" ON public.conductor_availability
    FOR SELECT
    TO authenticated
    USING (
        -- Users can view their own availability, admins can view all
        user_id = auth.uid() OR public.is_admin()
    );

CREATE POLICY "conductor_availability_insert_policy" ON public.conductor_availability
    FOR INSERT
    TO authenticated
    WITH CHECK (
        -- Users can create their own availability, admins can create for anyone
        (user_id = auth.uid() AND created_by = auth.uid()) OR public.is_admin()
    );

CREATE POLICY "conductor_availability_update_policy" ON public.conductor_availability
    FOR UPDATE
    TO authenticated
    USING (
        -- Users can update their own availability, admins can update any
        user_id = auth.uid() OR public.is_admin()
    )
    WITH CHECK (
        -- The updated row must still belong to the same owner (or be admin-edited)
        user_id = auth.uid() OR public.is_admin()
    );

CREATE POLICY "conductor_availability_delete_policy" ON public.conductor_availability
    FOR DELETE
    TO authenticated
    USING (
        -- Users can delete their own availability, admins can delete any
        user_id = auth.uid() OR public.is_admin()
    );
-- ============================================================
-- (next migration file: views and sample-data helpers)
-- ============================================================
-- Views
-- This migration creates views for easier querying (must run last after all tables are created).

-- =============================================
-- 1. CREATE VIEWS FOR EASIER QUERYING
-- =============================================

-- View for experiments with all phase information.
-- Note: Since phases are now per-repetition, this view shows phase data from
-- the FIRST repetition only (lowest repetition_number, via LEFT JOIN LATERAL).
-- All phase columns are NULL for experiments with no repetitions or no phase rows.
CREATE OR REPLACE VIEW public.experiments_with_phases AS
SELECT
    e.id,
    e.experiment_number,
    e.reps_required,
    e.weight_per_repetition_lbs,
    e.results_status,
    e.completion_status,
    e.phase_id,
    e.created_at,
    e.updated_at,
    e.created_by,
    ep.name as phase_name,
    ep.description as phase_description,
    ep.has_soaking,
    ep.has_airdrying,
    ep.has_cracking,
    ep.has_shelling,
    er.id as first_repetition_id,
    er.repetition_number as first_repetition_number,
    s.id as soaking_id,
    s.scheduled_start_time as soaking_scheduled_start,
    s.actual_start_time as soaking_actual_start,
    s.soaking_duration_minutes,
    s.scheduled_end_time as soaking_scheduled_end,
    s.actual_end_time as soaking_actual_end,
    ad.id as airdrying_id,
    ad.scheduled_start_time as airdrying_scheduled_start,
    ad.actual_start_time as airdrying_actual_start,
    ad.duration_minutes as airdrying_duration,
    ad.scheduled_end_time as airdrying_scheduled_end,
    ad.actual_end_time as airdrying_actual_end,
    c.id as cracking_id,
    c.scheduled_start_time as cracking_scheduled_start,
    c.actual_start_time as cracking_actual_start,
    c.actual_end_time as cracking_actual_end,
    mt.name as machine_type_name,
    sh.id as shelling_id,
    sh.scheduled_start_time as shelling_scheduled_start,
    sh.actual_start_time as shelling_actual_start,
    sh.actual_end_time as shelling_actual_end
FROM public.experiments e
LEFT JOIN public.experiment_phases ep ON e.phase_id = ep.id
-- Pick the first repetition per experiment (lowest repetition_number)
LEFT JOIN LATERAL (
    SELECT id, repetition_number
    FROM public.experiment_repetitions
    WHERE experiment_id = e.id
    ORDER BY repetition_number
    LIMIT 1
) er ON true
LEFT JOIN public.soaking s ON s.repetition_id = er.id
LEFT JOIN public.airdrying ad ON ad.repetition_id = er.id
LEFT JOIN public.cracking c ON c.repetition_id = er.id
LEFT JOIN public.machine_types mt ON c.machine_type_id = mt.id
LEFT JOIN public.shelling sh ON sh.repetition_id = er.id;
-- View for repetitions with phase information.
-- One row per repetition; phase columns are NULL where a phase row does not
-- exist yet (each phase table holds at most one row per repetition).
CREATE OR REPLACE VIEW public.repetitions_with_phases AS
SELECT
    er.id,
    er.experiment_id,
    er.repetition_number,
    er.status,
    er.created_at,
    er.updated_at,
    er.created_by,
    e.experiment_number,
    e.phase_id,
    e.weight_per_repetition_lbs,
    ep.name as phase_name,
    ep.has_soaking,
    ep.has_airdrying,
    ep.has_cracking,
    ep.has_shelling,
    s.scheduled_start_time as soaking_scheduled_start,
    s.actual_start_time as soaking_actual_start,
    s.soaking_duration_minutes,
    s.scheduled_end_time as soaking_scheduled_end,
    s.actual_end_time as soaking_actual_end,
    ad.scheduled_start_time as airdrying_scheduled_start,
    ad.actual_start_time as airdrying_actual_start,
    ad.duration_minutes as airdrying_duration,
    ad.scheduled_end_time as airdrying_scheduled_end,
    ad.actual_end_time as airdrying_actual_end,
    c.scheduled_start_time as cracking_scheduled_start,
    c.actual_start_time as cracking_actual_start,
    c.actual_end_time as cracking_actual_end,
    mt.name as machine_type_name,
    sh.scheduled_start_time as shelling_scheduled_start,
    sh.actual_start_time as shelling_actual_start,
    sh.actual_end_time as shelling_actual_end
FROM public.experiment_repetitions er
JOIN public.experiments e ON er.experiment_id = e.id
LEFT JOIN public.experiment_phases ep ON e.phase_id = ep.id
LEFT JOIN public.soaking s ON er.id = s.repetition_id
LEFT JOIN public.airdrying ad ON er.id = ad.repetition_id
LEFT JOIN public.cracking c ON er.id = c.repetition_id
LEFT JOIN public.machine_types mt ON c.machine_type_id = mt.id
LEFT JOIN public.shelling sh ON er.id = sh.repetition_id;
-- View for active conductor availabilities joined with profile and role data.
-- NOTE(review): `ca.*` in a view definition is frozen at creation time and
-- silently omits columns added to conductor_availability later; an explicit
-- column list would be safer — kept as-is to preserve the view's shape.
CREATE OR REPLACE VIEW public.available_conductors AS
SELECT
    ca.*,
    up.email,
    up.first_name,
    up.last_name,
    r.name as role_name
FROM public.conductor_availability ca
JOIN public.user_profiles up ON ca.user_id = up.id
JOIN public.user_roles ur ON up.id = ur.user_id
JOIN public.roles r ON ur.role_id = r.id
WHERE ca.status = 'active'
  AND r.name = 'conductor';
-- =============================================
-- 2. GRANT PERMISSIONS FOR VIEWS
-- =============================================

-- Views are read-only to application users
GRANT SELECT ON public.experiments_with_phases TO authenticated;
GRANT SELECT ON public.repetitions_with_phases TO authenticated;
GRANT SELECT ON public.available_conductors TO authenticated;

-- =============================================
-- 3. COMMENTS FOR DOCUMENTATION
-- =============================================

COMMENT ON VIEW public.experiments_with_phases IS 'Comprehensive view of experiments with all phase information and timing details';
COMMENT ON VIEW public.repetitions_with_phases IS 'View of experiment repetitions with associated phase data';
COMMENT ON VIEW public.available_conductors IS 'View of currently available conductors with their profile information';
-- =============================================
-- 4. SAMPLE DATA FUNCTIONS (OPTIONAL)
-- =============================================

-- Seed the three baseline roles; idempotent via ON CONFLICT DO NOTHING.
CREATE OR REPLACE FUNCTION public.create_sample_roles()
RETURNS VOID AS $$
BEGIN
    INSERT INTO public.roles (name, description) VALUES
        ('admin', 'System administrator with full access'),
        ('conductor', 'Experiment conductor with limited access'),
        ('researcher', 'Research staff with read-only access')
    ON CONFLICT (name) DO NOTHING;
END;
$$ LANGUAGE plpgsql SECURITY DEFINER;
-- Seed the two cracker machine types; idempotent via ON CONFLICT DO NOTHING.
-- created_by is set to an arbitrary existing profile (first row of
-- user_profiles), so at least one profile must exist before calling this.
CREATE OR REPLACE FUNCTION public.create_sample_machine_types()
RETURNS VOID AS $$
BEGIN
    INSERT INTO public.machine_types (name, description, created_by) VALUES
        ('JC Cracker', 'Johnson Cracker machine for pecan shelling', (SELECT id FROM public.user_profiles LIMIT 1)),
        ('Meyer Cracker', 'Meyer Cracker machine for pecan shelling', (SELECT id FROM public.user_profiles LIMIT 1))
    ON CONFLICT (name) DO NOTHING;
END;
$$ LANGUAGE plpgsql SECURITY DEFINER;
-- Seed sample experiment phase configurations for both machine types.
-- Depends on create_sample_machine_types() having run first (looks up the
-- machine type ids by name); idempotent via ON CONFLICT DO NOTHING.
CREATE OR REPLACE FUNCTION public.create_sample_experiment_phases()
RETURNS VOID AS $$
DECLARE
    jc_cracker_id UUID;
    meyer_cracker_id UUID;
BEGIN
    -- Get machine type IDs (NULL if the machine types have not been seeded)
    SELECT id INTO jc_cracker_id FROM public.machine_types WHERE name = 'JC Cracker';
    SELECT id INTO meyer_cracker_id FROM public.machine_types WHERE name = 'Meyer Cracker';

    INSERT INTO public.experiment_phases (name, description, has_soaking, has_airdrying, has_cracking, has_shelling, cracking_machine_type_id, created_by) VALUES
        ('Full Process - JC Cracker', 'Complete pecan processing with JC Cracker', true, true, true, true, jc_cracker_id, (SELECT id FROM public.user_profiles LIMIT 1)),
        ('Full Process - Meyer Cracker', 'Complete pecan processing with Meyer Cracker', true, true, true, true, meyer_cracker_id, (SELECT id FROM public.user_profiles LIMIT 1)),
        ('Cracking Only - JC Cracker', 'JC Cracker cracking process only', false, false, true, false, jc_cracker_id, (SELECT id FROM public.user_profiles LIMIT 1)),
        ('Cracking Only - Meyer Cracker', 'Meyer Cracker cracking process only', false, false, true, false, meyer_cracker_id, (SELECT id FROM public.user_profiles LIMIT 1))
    ON CONFLICT (name) DO NOTHING;
END;
$$ LANGUAGE plpgsql SECURITY DEFINER;
-- =============================================
-- 5. GRANT PERMISSIONS FOR SAMPLE DATA FUNCTIONS
-- =============================================

GRANT EXECUTE ON FUNCTION public.create_sample_roles() TO authenticated;
GRANT EXECUTE ON FUNCTION public.create_sample_machine_types() TO authenticated;
GRANT EXECUTE ON FUNCTION public.create_sample_experiment_phases() TO authenticated;
@@ -1,300 +0,0 @@
|
||||
-- Unified Phase Executions Table
|
||||
-- This migration replaces the separate phase tables (soaking, airdrying, cracking, shelling)
|
||||
-- with a unified table that properly supports repetitions
|
||||
|
||||
-- =============================================
|
||||
-- 1. CREATE UNIFIED PHASE EXECUTIONS TABLE
|
||||
-- =============================================
|
||||
|
||||
CREATE TABLE IF NOT EXISTS public.experiment_phase_executions (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
repetition_id UUID NOT NULL REFERENCES public.experiment_repetitions(id) ON DELETE CASCADE,
|
||||
phase_type TEXT NOT NULL CHECK (phase_type IN ('soaking', 'airdrying', 'cracking', 'shelling')),
|
||||
|
||||
-- Scheduling fields (common to all phases)
|
||||
scheduled_start_time TIMESTAMP WITH TIME ZONE NOT NULL,
|
||||
scheduled_end_time TIMESTAMP WITH TIME ZONE,
|
||||
actual_start_time TIMESTAMP WITH TIME ZONE,
|
||||
actual_end_time TIMESTAMP WITH TIME ZONE,
|
||||
|
||||
-- Phase-specific parameters (nullable, only relevant for specific phases)
|
||||
-- Soaking
|
||||
soaking_duration_minutes INTEGER CHECK (soaking_duration_minutes > 0),
|
||||
|
||||
-- Airdrying
|
||||
duration_minutes INTEGER CHECK (duration_minutes > 0),
|
||||
|
||||
-- Cracking
|
||||
machine_type_id UUID REFERENCES public.machine_types(id),
|
||||
|
||||
-- Status tracking
|
||||
status TEXT NOT NULL DEFAULT 'pending'
|
||||
CHECK (status IN ('pending', 'scheduled', 'in_progress', 'completed', 'cancelled')),
|
||||
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
||||
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
||||
created_by UUID NOT NULL REFERENCES public.user_profiles(id),
|
||||
|
||||
-- Ensure one execution per phase type per repetition
|
||||
CONSTRAINT unique_phase_per_repetition UNIQUE (repetition_id, phase_type)
|
||||
);
|
||||
|
||||
-- =============================================
|
||||
-- 2. INDEXES FOR PERFORMANCE
|
||||
-- =============================================
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_phase_executions_repetition_id
|
||||
ON public.experiment_phase_executions(repetition_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_phase_executions_phase_type
|
||||
ON public.experiment_phase_executions(phase_type);
|
||||
CREATE INDEX IF NOT EXISTS idx_phase_executions_status
|
||||
ON public.experiment_phase_executions(status);
|
||||
CREATE INDEX IF NOT EXISTS idx_phase_executions_scheduled_start_time
|
||||
ON public.experiment_phase_executions(scheduled_start_time);
|
||||
CREATE INDEX IF NOT EXISTS idx_phase_executions_machine_type_id
|
||||
ON public.experiment_phase_executions(machine_type_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_phase_executions_created_by
|
||||
ON public.experiment_phase_executions(created_by);
|
||||
|
||||
-- =============================================
|
||||
-- 3. FUNCTION: Calculate Sequential Phase Start Times
|
||||
-- =============================================
|
||||
|
||||
CREATE OR REPLACE FUNCTION calculate_sequential_phase_start_time()
|
||||
RETURNS TRIGGER AS $$
|
||||
DECLARE
|
||||
prev_phase_end_time TIMESTAMP WITH TIME ZONE;
|
||||
phase_order TEXT[] := ARRAY['soaking', 'airdrying', 'cracking', 'shelling'];
|
||||
current_phase_index INT;
|
||||
prev_phase_name TEXT;
|
||||
BEGIN
|
||||
-- Find current phase index in the sequence
|
||||
SELECT array_position(phase_order, NEW.phase_type) INTO current_phase_index;
|
||||
|
||||
-- If not the first phase, get previous phase's end time from the same repetition
|
||||
IF current_phase_index > 1 THEN
|
||||
prev_phase_name := phase_order[current_phase_index - 1];
|
||||
|
||||
SELECT scheduled_end_time INTO prev_phase_end_time
|
||||
FROM public.experiment_phase_executions
|
||||
WHERE repetition_id = NEW.repetition_id
|
||||
AND phase_type = prev_phase_name
|
||||
ORDER BY created_at DESC
|
||||
LIMIT 1;
|
||||
|
||||
-- If previous phase exists and has an end time, use it as start time
|
||||
IF prev_phase_end_time IS NOT NULL THEN
|
||||
NEW.scheduled_start_time := prev_phase_end_time;
|
||||
END IF;
|
||||
END IF;
|
||||
|
||||
-- Calculate end time based on duration (for phases with duration)
|
||||
IF NEW.phase_type = 'soaking' AND NEW.soaking_duration_minutes IS NOT NULL THEN
|
||||
NEW.scheduled_end_time := NEW.scheduled_start_time +
|
||||
(NEW.soaking_duration_minutes || ' minutes')::INTERVAL;
|
||||
ELSIF NEW.phase_type = 'airdrying' AND NEW.duration_minutes IS NOT NULL THEN
|
||||
NEW.scheduled_end_time := NEW.scheduled_start_time +
|
||||
(NEW.duration_minutes || ' minutes')::INTERVAL;
|
||||
END IF;
|
||||
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- =============================================
|
||||
-- 4. FUNCTION: Auto-create Phase Executions for New Repetition
|
||||
-- =============================================
|
||||
|
||||
CREATE OR REPLACE FUNCTION create_phase_executions_for_repetition()
|
||||
RETURNS TRIGGER AS $$
|
||||
DECLARE
|
||||
exp_phase_config RECORD;
|
||||
phase_type_list TEXT[] := ARRAY[]::TEXT[];
|
||||
phase_name TEXT;
|
||||
BEGIN
|
||||
-- Get experiment phase configuration
|
||||
-- Note: Phase durations may need to be set later when scheduling
|
||||
SELECT
|
||||
ep.has_soaking,
|
||||
ep.has_airdrying,
|
||||
ep.has_cracking,
|
||||
ep.has_shelling,
|
||||
ep.cracking_machine_type_id
|
||||
INTO exp_phase_config
|
||||
FROM public.experiments e
|
||||
JOIN public.experiment_phases ep ON e.phase_id = ep.id
|
||||
WHERE e.id = NEW.experiment_id;
|
||||
|
||||
-- Build list of phases to create based on experiment configuration
|
||||
IF exp_phase_config.has_soaking THEN
|
||||
phase_type_list := array_append(phase_type_list, 'soaking');
|
||||
END IF;
|
||||
IF exp_phase_config.has_airdrying THEN
|
||||
phase_type_list := array_append(phase_type_list, 'airdrying');
|
||||
END IF;
|
||||
IF exp_phase_config.has_cracking THEN
|
||||
phase_type_list := array_append(phase_type_list, 'cracking');
|
||||
END IF;
|
||||
IF exp_phase_config.has_shelling THEN
|
||||
phase_type_list := array_append(phase_type_list, 'shelling');
|
||||
END IF;
|
||||
|
||||
-- Create phase executions for each required phase
|
||||
FOREACH phase_name IN ARRAY phase_type_list
|
||||
LOOP
|
||||
INSERT INTO public.experiment_phase_executions (
|
||||
repetition_id,
|
||||
phase_type,
|
||||
scheduled_start_time,
|
||||
status,
|
||||
created_by,
|
||||
-- Phase-specific parameters
|
||||
soaking_duration_minutes,
|
||||
duration_minutes,
|
||||
machine_type_id
|
||||
)
|
||||
VALUES (
|
||||
NEW.id,
|
||||
phase_name,
|
||||
NOW(), -- Default start time, will be updated when scheduled or by sequential calculation
|
||||
'pending',
|
||||
NEW.created_by,
|
||||
-- Set phase-specific parameters
|
||||
-- Note: Durations will be set when the repetition is scheduled
|
||||
-- These can be NULL initially and updated later
|
||||
NULL, -- soaking_duration_minutes (set when scheduled)
|
||||
NULL, -- duration_minutes (set when scheduled)
|
||||
CASE WHEN phase_name = 'cracking'
|
||||
THEN exp_phase_config.cracking_machine_type_id
|
||||
ELSE NULL END
|
||||
);
|
||||
END LOOP;
|
||||
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- =============================================
|
||||
-- 5. TRIGGERS
|
||||
-- =============================================
|
||||
|
||||
-- Trigger to automatically calculate sequential times and durations
|
||||
CREATE TRIGGER trigger_calculate_sequential_phase_times
|
||||
BEFORE INSERT OR UPDATE ON public.experiment_phase_executions
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION calculate_sequential_phase_start_time();
|
||||
|
||||
-- Trigger to auto-create phases when repetition is created
|
||||
CREATE TRIGGER trigger_create_phase_executions
|
||||
AFTER INSERT ON public.experiment_repetitions
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION create_phase_executions_for_repetition();
|
||||
|
||||
-- Trigger for updated_at
|
||||
CREATE TRIGGER set_updated_at_phase_executions
|
||||
BEFORE UPDATE ON public.experiment_phase_executions
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION public.handle_updated_at();
|
||||
|
||||
-- =============================================
|
||||
-- 6. CREATE VIEWS FOR PHASE-SPECIFIC ACCESS (Backward Compatibility)
|
||||
-- =============================================
|
||||
|
||||
-- These views allow existing code to work with phase-specific "tables"
|
||||
CREATE OR REPLACE VIEW public.soaking_view AS
|
||||
SELECT
|
||||
id,
|
||||
(SELECT experiment_id FROM experiment_repetitions WHERE id = repetition_id) as experiment_id,
|
||||
repetition_id,
|
||||
scheduled_start_time,
|
||||
actual_start_time,
|
||||
soaking_duration_minutes,
|
||||
scheduled_end_time,
|
||||
actual_end_time,
|
||||
created_at,
|
||||
updated_at,
|
||||
created_by
|
||||
FROM public.experiment_phase_executions
|
||||
WHERE phase_type = 'soaking';
|
||||
|
||||
CREATE OR REPLACE VIEW public.airdrying_view AS
|
||||
SELECT
|
||||
id,
|
||||
(SELECT experiment_id FROM experiment_repetitions WHERE id = repetition_id) as experiment_id,
|
||||
repetition_id,
|
||||
scheduled_start_time,
|
||||
actual_start_time,
|
||||
duration_minutes,
|
||||
scheduled_end_time,
|
||||
actual_end_time,
|
||||
created_at,
|
||||
updated_at,
|
||||
created_by
|
||||
FROM public.experiment_phase_executions
|
||||
WHERE phase_type = 'airdrying';
|
||||
|
||||
CREATE OR REPLACE VIEW public.cracking_view AS
|
||||
SELECT
|
||||
id,
|
||||
(SELECT experiment_id FROM experiment_repetitions WHERE id = repetition_id) as experiment_id,
|
||||
repetition_id,
|
||||
machine_type_id,
|
||||
scheduled_start_time,
|
||||
actual_start_time,
|
||||
actual_end_time,
|
||||
created_at,
|
||||
updated_at,
|
||||
created_by
|
||||
FROM public.experiment_phase_executions
|
||||
WHERE phase_type = 'cracking';
|
||||
|
||||
CREATE OR REPLACE VIEW public.shelling_view AS
|
||||
SELECT
|
||||
id,
|
||||
(SELECT experiment_id FROM experiment_repetitions WHERE id = repetition_id) as experiment_id,
|
||||
repetition_id,
|
||||
scheduled_start_time,
|
||||
actual_start_time,
|
||||
actual_end_time,
|
||||
created_at,
|
||||
updated_at,
|
||||
created_by
|
||||
FROM public.experiment_phase_executions
|
||||
WHERE phase_type = 'shelling';
|
||||
|
||||
-- =============================================
|
||||
-- 7. GRANT PERMISSIONS
|
||||
-- =============================================
|
||||
|
||||
GRANT ALL ON public.experiment_phase_executions TO authenticated;
|
||||
GRANT SELECT ON public.soaking_view TO authenticated;
|
||||
GRANT SELECT ON public.airdrying_view TO authenticated;
|
||||
GRANT SELECT ON public.cracking_view TO authenticated;
|
||||
GRANT SELECT ON public.shelling_view TO authenticated;
|
||||
|
||||
-- =============================================
|
||||
-- 8. ENABLE ROW LEVEL SECURITY
|
||||
-- =============================================
|
||||
|
||||
ALTER TABLE public.experiment_phase_executions ENABLE ROW LEVEL SECURITY;
|
||||
|
||||
-- =============================================
|
||||
-- 9. CREATE RLS POLICIES
|
||||
-- =============================================
|
||||
|
||||
CREATE POLICY "Phase executions are viewable by authenticated users"
|
||||
ON public.experiment_phase_executions
|
||||
FOR SELECT USING (auth.role() = 'authenticated');
|
||||
|
||||
CREATE POLICY "Phase executions are insertable by authenticated users"
|
||||
ON public.experiment_phase_executions
|
||||
FOR INSERT WITH CHECK (auth.role() = 'authenticated');
|
||||
|
||||
CREATE POLICY "Phase executions are updatable by authenticated users"
|
||||
ON public.experiment_phase_executions
|
||||
FOR UPDATE USING (auth.role() = 'authenticated');
|
||||
|
||||
CREATE POLICY "Phase executions are deletable by authenticated users"
|
||||
ON public.experiment_phase_executions
|
||||
FOR DELETE USING (auth.role() = 'authenticated');
|
||||
|
||||
@@ -1,46 +0,0 @@
|
||||
-- Add repetition_id foreign key to cracker parameters tables
|
||||
-- This migration adds a foreign key to link cracker parameters to their repetitions
|
||||
|
||||
-- =============================================
|
||||
-- 1. ADD REPETITION_ID TO JC CRACKER PARAMETERS
|
||||
-- =============================================
|
||||
|
||||
ALTER TABLE public.jc_cracker_parameters
|
||||
ADD COLUMN IF NOT EXISTS repetition_id UUID REFERENCES public.experiment_repetitions(id) ON DELETE CASCADE;
|
||||
|
||||
-- Add index for performance
|
||||
CREATE INDEX IF NOT EXISTS idx_jc_cracker_parameters_repetition_id
|
||||
ON public.jc_cracker_parameters(repetition_id);
|
||||
|
||||
-- Add unique constraint to ensure one parameter set per repetition
|
||||
ALTER TABLE public.jc_cracker_parameters
|
||||
ADD CONSTRAINT unique_jc_cracker_parameters_per_repetition
|
||||
UNIQUE (repetition_id);
|
||||
|
||||
-- =============================================
|
||||
-- 2. ADD REPETITION_ID TO MEYER CRACKER PARAMETERS
|
||||
-- =============================================
|
||||
|
||||
ALTER TABLE public.meyer_cracker_parameters
|
||||
ADD COLUMN IF NOT EXISTS repetition_id UUID REFERENCES public.experiment_repetitions(id) ON DELETE CASCADE;
|
||||
|
||||
-- Add index for performance
|
||||
CREATE INDEX IF NOT EXISTS idx_meyer_cracker_parameters_repetition_id
|
||||
ON public.meyer_cracker_parameters(repetition_id);
|
||||
|
||||
-- Add unique constraint to ensure one parameter set per repetition
|
||||
ALTER TABLE public.meyer_cracker_parameters
|
||||
ADD CONSTRAINT unique_meyer_cracker_parameters_per_repetition
|
||||
UNIQUE (repetition_id);
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -1,600 +0,0 @@
|
||||
-- Seed Data for USDA Vision Pecan Experiments System
|
||||
-- This file populates the database with initial data
|
||||
|
||||
-- =============================================
|
||||
-- 1. INSERT ROLES
|
||||
-- =============================================
|
||||
|
||||
INSERT INTO public.roles (name, description) VALUES
|
||||
('admin', 'System administrator with full access to all features'),
|
||||
('conductor', 'Experiment conductor who can manage experiments and view all data'),
|
||||
('analyst', 'Data analyst who can view and analyze experiment results'),
|
||||
('data recorder', 'Data entry specialist who can record experiment measurements');
|
||||
|
||||
-- =============================================
|
||||
-- 2. CREATE ADMIN USER
|
||||
-- =============================================
|
||||
|
||||
-- Create admin user in auth.users
|
||||
INSERT INTO auth.users (
|
||||
instance_id,
|
||||
id,
|
||||
aud,
|
||||
role,
|
||||
email,
|
||||
encrypted_password,
|
||||
email_confirmed_at,
|
||||
created_at,
|
||||
updated_at,
|
||||
confirmation_token,
|
||||
email_change,
|
||||
email_change_token_new,
|
||||
recovery_token
|
||||
) VALUES (
|
||||
'00000000-0000-0000-0000-000000000000',
|
||||
uuid_generate_v4(),
|
||||
'authenticated',
|
||||
'authenticated',
|
||||
's.alireza.v@gmail.com',
|
||||
crypt('admin123', gen_salt('bf')),
|
||||
NOW(),
|
||||
NOW(),
|
||||
NOW(),
|
||||
'',
|
||||
'',
|
||||
'',
|
||||
''
|
||||
);
|
||||
|
||||
-- Create user profile
|
||||
INSERT INTO public.user_profiles (id, email, first_name, last_name, status)
|
||||
SELECT id, email, 'Alireza', 'Vaezi', 'active'
|
||||
FROM auth.users
|
||||
WHERE email = 's.alireza.v@gmail.com'
|
||||
;
|
||||
|
||||
-- Assign admin role
|
||||
INSERT INTO public.user_roles (user_id, role_id, assigned_by)
|
||||
SELECT
|
||||
up.id,
|
||||
r.id,
|
||||
up.id
|
||||
FROM public.user_profiles up
|
||||
CROSS JOIN public.roles r
|
||||
WHERE up.email = 's.alireza.v@gmail.com'
|
||||
AND r.name = 'admin'
|
||||
;
|
||||
|
||||
-- =============================================
|
||||
-- 3. CREATE ADDITIONAL USERS
|
||||
-- =============================================
|
||||
|
||||
-- Create Claire Floyd (Conductor & Data Recorder)
|
||||
INSERT INTO auth.users (
|
||||
instance_id,
|
||||
id,
|
||||
aud,
|
||||
role,
|
||||
email,
|
||||
encrypted_password,
|
||||
email_confirmed_at,
|
||||
created_at,
|
||||
updated_at,
|
||||
confirmation_token,
|
||||
email_change,
|
||||
email_change_token_new,
|
||||
recovery_token
|
||||
) VALUES (
|
||||
'00000000-0000-0000-0000-000000000000',
|
||||
uuid_generate_v4(),
|
||||
'authenticated',
|
||||
'authenticated',
|
||||
'Ashlyn.Floyd@uga.edu',
|
||||
crypt('password123', gen_salt('bf')),
|
||||
NOW(),
|
||||
NOW(),
|
||||
NOW(),
|
||||
'',
|
||||
'',
|
||||
'',
|
||||
''
|
||||
);
|
||||
|
||||
INSERT INTO public.user_profiles (id, email, first_name, last_name, status)
|
||||
SELECT id, email, 'Claire', 'Floyd', 'active'
|
||||
FROM auth.users
|
||||
WHERE email = 'Ashlyn.Floyd@uga.edu'
|
||||
;
|
||||
|
||||
INSERT INTO public.user_roles (user_id, role_id, assigned_by)
|
||||
SELECT
|
||||
up.id,
|
||||
r.id,
|
||||
(SELECT id FROM public.user_profiles WHERE email = 's.alireza.v@gmail.com')
|
||||
FROM public.user_profiles up
|
||||
CROSS JOIN public.roles r
|
||||
WHERE up.email = 'Ashlyn.Floyd@uga.edu'
|
||||
AND r.name IN ('conductor', 'data recorder')
|
||||
;
|
||||
|
||||
-- Create Bruna Dos-Santos (Conductor & Data Recorder)
|
||||
INSERT INTO auth.users (
|
||||
instance_id,
|
||||
id,
|
||||
aud,
|
||||
role,
|
||||
email,
|
||||
encrypted_password,
|
||||
email_confirmed_at,
|
||||
created_at,
|
||||
updated_at,
|
||||
confirmation_token,
|
||||
email_change,
|
||||
email_change_token_new,
|
||||
recovery_token
|
||||
) VALUES (
|
||||
'00000000-0000-0000-0000-000000000000',
|
||||
uuid_generate_v4(),
|
||||
'authenticated',
|
||||
'authenticated',
|
||||
'bkvsantos@uga.edu',
|
||||
crypt('password123', gen_salt('bf')),
|
||||
NOW(),
|
||||
NOW(),
|
||||
NOW(),
|
||||
'',
|
||||
'',
|
||||
'',
|
||||
''
|
||||
);
|
||||
|
||||
INSERT INTO public.user_profiles (id, email, first_name, last_name, status)
|
||||
SELECT id, email, 'Bruna', 'Dos-Santos', 'active'
|
||||
FROM auth.users
|
||||
WHERE email = 'bkvsantos@uga.edu'
|
||||
;
|
||||
|
||||
INSERT INTO public.user_roles (user_id, role_id, assigned_by)
|
||||
SELECT
|
||||
up.id,
|
||||
r.id,
|
||||
(SELECT id FROM public.user_profiles WHERE email = 's.alireza.v@gmail.com')
|
||||
FROM public.user_profiles up
|
||||
CROSS JOIN public.roles r
|
||||
WHERE up.email = 'bkvsantos@uga.edu'
|
||||
AND r.name IN ('conductor', 'data recorder')
|
||||
;
|
||||
|
||||
-- Create Beni Rodriguez (Conductor & Data Recorder)
|
||||
INSERT INTO auth.users (
|
||||
instance_id,
|
||||
id,
|
||||
aud,
|
||||
role,
|
||||
email,
|
||||
encrypted_password,
|
||||
email_confirmed_at,
|
||||
created_at,
|
||||
updated_at,
|
||||
confirmation_token,
|
||||
email_change,
|
||||
email_change_token_new,
|
||||
recovery_token
|
||||
) VALUES (
|
||||
'00000000-0000-0000-0000-000000000000',
|
||||
uuid_generate_v4(),
|
||||
'authenticated',
|
||||
'authenticated',
|
||||
'Beni.Rodriguez@uga.edu',
|
||||
crypt('password123', gen_salt('bf')),
|
||||
NOW(),
|
||||
NOW(),
|
||||
NOW(),
|
||||
'',
|
||||
'',
|
||||
'',
|
||||
''
|
||||
);
|
||||
|
||||
INSERT INTO public.user_profiles (id, email, first_name, last_name, status)
|
||||
SELECT id, email, 'Beni', 'Rodriguez', 'active'
|
||||
FROM auth.users
|
||||
WHERE email = 'Beni.Rodriguez@uga.edu'
|
||||
;
|
||||
|
||||
INSERT INTO public.user_roles (user_id, role_id, assigned_by)
|
||||
SELECT
|
||||
up.id,
|
||||
r.id,
|
||||
(SELECT id FROM public.user_profiles WHERE email = 's.alireza.v@gmail.com')
|
||||
FROM public.user_profiles up
|
||||
CROSS JOIN public.roles r
|
||||
WHERE up.email = 'Beni.Rodriguez@uga.edu'
|
||||
AND r.name IN ('conductor', 'data recorder')
|
||||
;
|
||||
|
||||
-- Create Brendan Surio (Data Recorder)
|
||||
INSERT INTO auth.users (
|
||||
instance_id,
|
||||
id,
|
||||
aud,
|
||||
role,
|
||||
email,
|
||||
encrypted_password,
|
||||
email_confirmed_at,
|
||||
created_at,
|
||||
updated_at,
|
||||
confirmation_token,
|
||||
email_change,
|
||||
email_change_token_new,
|
||||
recovery_token
|
||||
) VALUES (
|
||||
'00000000-0000-0000-0000-000000000000',
|
||||
uuid_generate_v4(),
|
||||
'authenticated',
|
||||
'authenticated',
|
||||
'Brendan.Surio@uga.edu',
|
||||
crypt('password123', gen_salt('bf')),
|
||||
NOW(),
|
||||
NOW(),
|
||||
NOW(),
|
||||
'',
|
||||
'',
|
||||
'',
|
||||
''
|
||||
);
|
||||
|
||||
INSERT INTO public.user_profiles (id, email, first_name, last_name, status)
|
||||
SELECT id, email, 'Brendan', 'Surio', 'active'
|
||||
FROM auth.users
|
||||
WHERE email = 'Brendan.Surio@uga.edu'
|
||||
;
|
||||
|
||||
INSERT INTO public.user_roles (user_id, role_id, assigned_by)
|
||||
SELECT
|
||||
up.id,
|
||||
r.id,
|
||||
(SELECT id FROM public.user_profiles WHERE email = 's.alireza.v@gmail.com')
|
||||
FROM public.user_profiles up
|
||||
CROSS JOIN public.roles r
|
||||
WHERE up.email = 'Brendan.Surio@uga.edu'
|
||||
AND r.name IN ('conductor', 'data recorder')
|
||||
|
||||
;
|
||||
|
||||
-- Create William Mcconnell (Data Recorder)
|
||||
INSERT INTO auth.users (
|
||||
instance_id,
|
||||
id,
|
||||
aud,
|
||||
role,
|
||||
email,
|
||||
encrypted_password,
|
||||
email_confirmed_at,
|
||||
created_at,
|
||||
updated_at,
|
||||
confirmation_token,
|
||||
email_change,
|
||||
email_change_token_new,
|
||||
recovery_token
|
||||
) VALUES (
|
||||
'00000000-0000-0000-0000-000000000000',
|
||||
uuid_generate_v4(),
|
||||
'authenticated',
|
||||
'authenticated',
|
||||
'William.McConnell@uga.edu',
|
||||
crypt('password123', gen_salt('bf')),
|
||||
NOW(),
|
||||
NOW(),
|
||||
NOW(),
|
||||
'',
|
||||
'',
|
||||
'',
|
||||
''
|
||||
);
|
||||
|
||||
INSERT INTO public.user_profiles (id, email, first_name, last_name, status)
|
||||
SELECT id, email, 'William', 'Mcconnell', 'active'
|
||||
FROM auth.users
|
||||
WHERE email = 'William.McConnell@uga.edu'
|
||||
;
|
||||
|
||||
INSERT INTO public.user_roles (user_id, role_id, assigned_by)
|
||||
SELECT
|
||||
up.id,
|
||||
r.id,
|
||||
(SELECT id FROM public.user_profiles WHERE email = 's.alireza.v@gmail.com')
|
||||
FROM public.user_profiles up
|
||||
CROSS JOIN public.roles r
|
||||
WHERE up.email = 'William.McConnell@uga.edu'
|
||||
AND r.name IN ('conductor', 'data recorder')
|
||||
|
||||
;
|
||||
|
||||
-- Create Camille Deguzman (Data Recorder)
|
||||
INSERT INTO auth.users (
|
||||
instance_id,
|
||||
id,
|
||||
aud,
|
||||
role,
|
||||
email,
|
||||
encrypted_password,
|
||||
email_confirmed_at,
|
||||
created_at,
|
||||
updated_at,
|
||||
confirmation_token,
|
||||
email_change,
|
||||
email_change_token_new,
|
||||
recovery_token
|
||||
) VALUES (
|
||||
'00000000-0000-0000-0000-000000000000',
|
||||
uuid_generate_v4(),
|
||||
'authenticated',
|
||||
'authenticated',
|
||||
'cpd08598@uga.edu',
|
||||
crypt('password123', gen_salt('bf')),
|
||||
NOW(),
|
||||
NOW(),
|
||||
NOW(),
|
||||
'',
|
||||
'',
|
||||
'',
|
||||
''
|
||||
);
|
||||
|
||||
INSERT INTO public.user_profiles (id, email, first_name, last_name, status)
|
||||
SELECT id, email, 'Camille', 'Deguzman', 'active'
|
||||
FROM auth.users
|
||||
WHERE email = 'cpd08598@uga.edu'
|
||||
;
|
||||
|
||||
INSERT INTO public.user_roles (user_id, role_id, assigned_by)
|
||||
SELECT
|
||||
up.id,
|
||||
r.id,
|
||||
(SELECT id FROM public.user_profiles WHERE email = 's.alireza.v@gmail.com')
|
||||
FROM public.user_profiles up
|
||||
CROSS JOIN public.roles r
|
||||
WHERE up.email = 'cpd08598@uga.edu'
|
||||
AND r.name IN ('conductor', 'data recorder')
|
||||
|
||||
;
|
||||
|
||||
-- Create Justin Hetzler (Data Recorder)
|
||||
INSERT INTO auth.users (
|
||||
instance_id,
|
||||
id,
|
||||
aud,
|
||||
role,
|
||||
email,
|
||||
encrypted_password,
|
||||
email_confirmed_at,
|
||||
created_at,
|
||||
updated_at,
|
||||
confirmation_token,
|
||||
email_change,
|
||||
email_change_token_new,
|
||||
recovery_token
|
||||
) VALUES (
|
||||
'00000000-0000-0000-0000-000000000000',
|
||||
uuid_generate_v4(),
|
||||
'authenticated',
|
||||
'authenticated',
|
||||
'Justin.Hetzler@uga.edu',
|
||||
crypt('password123', gen_salt('bf')),
|
||||
NOW(),
|
||||
NOW(),
|
||||
NOW(),
|
||||
'',
|
||||
'',
|
||||
'',
|
||||
''
|
||||
);
|
||||
|
||||
INSERT INTO public.user_profiles (id, email, first_name, last_name, status)
|
||||
SELECT id, email, 'Justin', 'Hetzler', 'active'
|
||||
FROM auth.users
|
||||
WHERE email = 'Justin.Hetzler@uga.edu'
|
||||
;
|
||||
|
||||
INSERT INTO public.user_roles (user_id, role_id, assigned_by)
|
||||
SELECT
|
||||
up.id,
|
||||
r.id,
|
||||
(SELECT id FROM public.user_profiles WHERE email = 's.alireza.v@gmail.com')
|
||||
FROM public.user_profiles up
|
||||
CROSS JOIN public.roles r
|
||||
WHERE up.email = 'Justin.Hetzler@uga.edu'
|
||||
AND r.name IN ('conductor', 'data recorder')
|
||||
|
||||
;
|
||||
|
||||
-- Create Joshua Wilson (Data Recorder)
|
||||
INSERT INTO auth.users (
|
||||
instance_id,
|
||||
id,
|
||||
aud,
|
||||
role,
|
||||
email,
|
||||
encrypted_password,
|
||||
email_confirmed_at,
|
||||
created_at,
|
||||
updated_at,
|
||||
confirmation_token,
|
||||
email_change,
|
||||
email_change_token_new,
|
||||
recovery_token
|
||||
) VALUES (
|
||||
'00000000-0000-0000-0000-000000000000',
|
||||
uuid_generate_v4(),
|
||||
'authenticated',
|
||||
'authenticated',
|
||||
'jdw58940@uga.edu',
|
||||
crypt('password123', gen_salt('bf')),
|
||||
NOW(),
|
||||
NOW(),
|
||||
NOW(),
|
||||
'',
|
||||
'',
|
||||
'',
|
||||
''
|
||||
);
|
||||
|
||||
INSERT INTO public.user_profiles (id, email, first_name, last_name, status)
|
||||
SELECT id, email, 'Joshua', 'Wilson', 'active'
|
||||
FROM auth.users
|
||||
WHERE email = 'jdw58940@uga.edu'
|
||||
;
|
||||
|
||||
INSERT INTO public.user_roles (user_id, role_id, assigned_by)
|
||||
SELECT
|
||||
up.id,
|
||||
r.id,
|
||||
(SELECT id FROM public.user_profiles WHERE email = 's.alireza.v@gmail.com')
|
||||
FROM public.user_profiles up
|
||||
CROSS JOIN public.roles r
|
||||
WHERE up.email = 'jdw58940@uga.edu'
|
||||
AND r.name IN ('conductor', 'data recorder')
|
||||
|
||||
;
|
||||
|
||||
-- Create Sydney Orlofsky (Data Recorder)
|
||||
INSERT INTO auth.users (
|
||||
instance_id,
|
||||
id,
|
||||
aud,
|
||||
role,
|
||||
email,
|
||||
encrypted_password,
|
||||
email_confirmed_at,
|
||||
created_at,
|
||||
updated_at,
|
||||
confirmation_token,
|
||||
email_change,
|
||||
email_change_token_new,
|
||||
recovery_token
|
||||
) VALUES (
|
||||
'00000000-0000-0000-0000-000000000000',
|
||||
uuid_generate_v4(),
|
||||
'authenticated',
|
||||
'authenticated',
|
||||
'Sydney.Orlofsky@uga.edu',
|
||||
crypt('password123', gen_salt('bf')),
|
||||
NOW(),
|
||||
NOW(),
|
||||
NOW(),
|
||||
'',
|
||||
'',
|
||||
'',
|
||||
''
|
||||
);
|
||||
|
||||
INSERT INTO public.user_profiles (id, email, first_name, last_name, status)
|
||||
SELECT id, email, 'Sydney', 'Orlofsky', 'active'
|
||||
FROM auth.users
|
||||
WHERE email = 'Sydney.Orlofsky@uga.edu'
|
||||
;
|
||||
|
||||
INSERT INTO public.user_roles (user_id, role_id, assigned_by)
|
||||
SELECT
|
||||
up.id,
|
||||
r.id,
|
||||
(SELECT id FROM public.user_profiles WHERE email = 's.alireza.v@gmail.com')
|
||||
FROM public.user_profiles up
|
||||
CROSS JOIN public.roles r
|
||||
WHERE up.email = 'Sydney.Orlofsky@uga.edu'
|
||||
AND r.name IN ('conductor', 'data recorder')
|
||||
|
||||
;
|
||||
|
||||
-- Create engr-ugaif user (Conductor, Analyst & Data Recorder)
|
||||
INSERT INTO auth.users (
|
||||
instance_id,
|
||||
id,
|
||||
aud,
|
||||
role,
|
||||
email,
|
||||
encrypted_password,
|
||||
email_confirmed_at,
|
||||
created_at,
|
||||
updated_at,
|
||||
confirmation_token,
|
||||
email_change,
|
||||
email_change_token_new,
|
||||
recovery_token
|
||||
) VALUES (
|
||||
'00000000-0000-0000-0000-000000000000',
|
||||
uuid_generate_v4(),
|
||||
'authenticated',
|
||||
'authenticated',
|
||||
'engr-ugaif@uga.edu',
|
||||
crypt('1048lab&2021', gen_salt('bf')),
|
||||
NOW(),
|
||||
NOW(),
|
||||
NOW(),
|
||||
'',
|
||||
'',
|
||||
'',
|
||||
''
|
||||
);
|
||||
|
||||
INSERT INTO public.user_profiles (id, email, status)
|
||||
SELECT id, email, 'active'
|
||||
FROM auth.users
|
||||
WHERE email = 'engr-ugaif@uga.edu'
|
||||
;
|
||||
|
||||
INSERT INTO public.user_roles (user_id, role_id, assigned_by)
|
||||
SELECT
|
||||
up.id,
|
||||
r.id,
|
||||
(SELECT id FROM public.user_profiles WHERE email = 's.alireza.v@gmail.com')
|
||||
FROM public.user_profiles up
|
||||
CROSS JOIN public.roles r
|
||||
WHERE up.email = 'engr-ugaif@uga.edu'
|
||||
AND r.name IN ('conductor', 'analyst', 'data recorder')
|
||||
;
|
||||
|
||||
-- =============================================
|
||||
-- 4. CREATE MACHINE TYPES
|
||||
-- =============================================
|
||||
|
||||
-- Insert default machine types
|
||||
INSERT INTO public.machine_types (name, description, created_by) VALUES
|
||||
('JC Cracker', 'JC Cracker machine with plate contact frequency and throughput parameters', (SELECT id FROM public.user_profiles WHERE email = 's.alireza.v@gmail.com')),
|
||||
('Meyer Cracker', 'Meyer Cracker machine with motor speed and jig displacement parameters', (SELECT id FROM public.user_profiles WHERE email = 's.alireza.v@gmail.com'))
|
||||
ON CONFLICT (name) DO NOTHING;
|
||||
|
||||
-- =============================================
|
||||
-- 5. CREATE EXPERIMENT PHASES
|
||||
-- =============================================
|
||||
|
||||
-- Create "Phase 2 of JC Experiments" phase
|
||||
INSERT INTO public.experiment_phases (name, description, has_soaking, has_airdrying, has_cracking, has_shelling, cracking_machine_type_id, created_by)
|
||||
SELECT
|
||||
'Phase 2 of JC Experiments',
|
||||
'Second phase of JC Cracker experiments for pecan processing optimization',
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
(SELECT id FROM public.machine_types WHERE name = 'JC Cracker'),
|
||||
up.id
|
||||
FROM public.user_profiles up
|
||||
WHERE up.email = 's.alireza.v@gmail.com'
|
||||
;
|
||||
|
||||
-- Create "Post Workshop Meyer Experiments" phase
|
||||
INSERT INTO public.experiment_phases (name, description, has_soaking, has_airdrying, has_cracking, has_shelling, cracking_machine_type_id, created_by)
|
||||
SELECT
|
||||
'Post Workshop Meyer Experiments',
|
||||
'Post workshop Meyer Cracker experiments for pecan processing optimization',
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
(SELECT id FROM public.machine_types WHERE name = 'Meyer Cracker'),
|
||||
up.id
|
||||
FROM public.user_profiles up
|
||||
WHERE up.email = 's.alireza.v@gmail.com'
|
||||
;
|
||||
File diff suppressed because it is too large
Load Diff
@@ -42,10 +42,32 @@ get_ip_from_hostname() {
|
||||
fi
|
||||
}
|
||||
|
||||
# Method 5: Prefer 100.x.x.x (e.g. Tailscale) when present on the host
|
||||
get_ip_prefer_100() {
|
||||
local ip=""
|
||||
if command -v ip >/dev/null 2>&1; then
|
||||
# Third column is ADDR/CIDR (e.g. 100.93.40.84/32); extract IPv4 and prefer 100.x
|
||||
ip=$(ip -br addr show 2>/dev/null | awk '{print $3}' | grep -oE '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+' | grep -E '^100\.' | head -1)
|
||||
fi
|
||||
if [ -z "$ip" ] && command -v hostname >/dev/null 2>&1; then
|
||||
ip=$(hostname -I 2>/dev/null | tr ' ' '\n' | grep -E '^100\.' | head -1)
|
||||
fi
|
||||
echo "$ip"
|
||||
}
|
||||
|
||||
# Main logic: Try methods in order of reliability
|
||||
detect_host_ip() {
|
||||
local ip=""
|
||||
|
||||
|
||||
# When on the host (not in Docker), prefer 100.x.x.x (Tailscale) so the dashboard is reachable via that IP
|
||||
if [ ! -f /.dockerenv ] && [ -z "${DOCKER_CONTAINER:-}" ]; then
|
||||
ip=$(get_ip_prefer_100)
|
||||
if [ -n "$ip" ] && [ "$ip" != "127.0.0.1" ]; then
|
||||
echo "$ip"
|
||||
return 0
|
||||
fi
|
||||
fi
|
||||
|
||||
# If running inside Docker, try gateway method first
|
||||
if [ -f /.dockerenv ] || [ -n "${DOCKER_CONTAINER:-}" ]; then
|
||||
ip=$(get_ip_from_gateway)
|
||||
@@ -53,7 +75,7 @@ detect_host_ip() {
|
||||
echo "$ip"
|
||||
return 0
|
||||
fi
|
||||
|
||||
|
||||
# Try host.docker.internal
|
||||
ip=$(get_ip_from_host_docker_internal)
|
||||
if [ -n "$ip" ] && [ "$ip" != "127.0.0.1" ]; then
|
||||
@@ -61,21 +83,21 @@ detect_host_ip() {
|
||||
return 0
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
# Try interface method (works on host machine)
|
||||
ip=$(get_ip_from_interface)
|
||||
if [ -n "$ip" ] && [ "$ip" != "127.0.0.1" ]; then
|
||||
echo "$ip"
|
||||
return 0
|
||||
fi
|
||||
|
||||
|
||||
# Try hostname method
|
||||
ip=$(get_ip_from_hostname)
|
||||
if [ -n "$ip" ] && [ "$ip" != "127.0.0.1" ]; then
|
||||
echo "$ip"
|
||||
return 0
|
||||
fi
|
||||
|
||||
|
||||
# Fallback: return localhost (not ideal but better than nothing)
|
||||
echo "127.0.0.1"
|
||||
return 1
|
||||
|
||||
0
management-dashboard-web-app/supabase/.gitignore → supabase/.gitignore
vendored
Executable file → Normal file
0
management-dashboard-web-app/supabase/.gitignore → supabase/.gitignore
vendored
Executable file → Normal file
@@ -1 +1 @@
|
||||
v2.67.1
|
||||
v2.75.0
|
||||
@@ -64,9 +64,9 @@ supabase gen types typescript --local > management-dashboard-web-app/src/types/s
|
||||
|
||||
## Seed Data
|
||||
|
||||
Seed files are run automatically after migrations when using docker-compose. They populate the database with initial data:
|
||||
- `seed_01_users.sql`: Creates admin user and initial user profiles
|
||||
- `seed_02_phase2_experiments.sql`: Creates initial experiment data
|
||||
Seed files are run automatically after migrations when using `supabase db reset` (see `config.toml` → `[db.seed]` → `sql_paths`). Currently only user seed is enabled:
|
||||
- `seed_01_users.sql`: Creates admin user and initial user profiles (enabled)
|
||||
- `seed_02_phase2_experiments.sql`: Initial experiment data (temporarily disabled; add back to `sql_paths` in `config.toml` to re-enable)
|
||||
|
||||
## Configuration
|
||||
|
||||
|
||||
@@ -57,7 +57,9 @@ schema_paths = []
|
||||
enabled = true
|
||||
# Specifies an ordered list of seed files to load during db reset.
|
||||
# Supports glob patterns relative to supabase directory: "./seeds/*.sql"
|
||||
sql_paths = ["./seed_01_users.sql", "./seed_02_phase2_experiments.sql"]
|
||||
# Temporarily only user seed; other seeds suppressed.
|
||||
sql_paths = ["./seed_01_users.sql"]
|
||||
# sql_paths = ["./seed_01_users.sql", "./seed_02_phase2_experiments.sql"]
|
||||
# , "./seed_04_phase2_jc_experiments.sql", "./seed_05_meyer_experiments.sql"]
|
||||
|
||||
[db.network_restrictions]
|
||||
|
||||
@@ -70,6 +70,10 @@ CREATE TABLE IF NOT EXISTS public.shelling (
|
||||
scheduled_start_time TIMESTAMP WITH TIME ZONE NOT NULL,
|
||||
actual_start_time TIMESTAMP WITH TIME ZONE,
|
||||
actual_end_time TIMESTAMP WITH TIME ZONE,
|
||||
-- The space (in inches) between the sheller's rings
|
||||
ring_gap_inches NUMERIC(6,2) CHECK (ring_gap_inches > 0),
|
||||
-- The revolutions per minute for the sheller drum
|
||||
drum_rpm INTEGER CHECK (drum_rpm > 0),
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
||||
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
||||
created_by UUID NOT NULL REFERENCES public.user_profiles(id),
|
||||
|
||||
@@ -56,6 +56,80 @@ CREATE INDEX IF NOT EXISTS idx_phase_executions_machine_type_id
|
||||
CREATE INDEX IF NOT EXISTS idx_phase_executions_created_by
|
||||
ON public.experiment_phase_executions(created_by);
|
||||
|
||||
-- =============================================
|
||||
-- 2.5. CREATE CONDUCTOR ASSIGNMENTS TABLE
|
||||
-- =============================================
|
||||
|
||||
-- Table to store conductor assignments to phase executions
|
||||
-- This allows multiple conductors to be assigned to each phase execution
|
||||
CREATE TABLE IF NOT EXISTS public.experiment_phase_assignments (
|
||||
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
phase_execution_id UUID NOT NULL REFERENCES public.experiment_phase_executions(id) ON DELETE CASCADE,
|
||||
conductor_id UUID NOT NULL REFERENCES public.user_profiles(id) ON DELETE CASCADE,
|
||||
|
||||
-- Scheduled times for this assignment (should match phase_execution times, but stored for clarity)
|
||||
scheduled_start_time TIMESTAMP WITH TIME ZONE NOT NULL,
|
||||
scheduled_end_time TIMESTAMP WITH TIME ZONE,
|
||||
|
||||
-- Status tracking
|
||||
status TEXT NOT NULL DEFAULT 'scheduled'
|
||||
CHECK (status IN ('scheduled', 'in_progress', 'completed', 'cancelled')),
|
||||
|
||||
-- Optional notes about the assignment
|
||||
notes TEXT,
|
||||
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
||||
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
||||
created_by UUID NOT NULL REFERENCES public.user_profiles(id),
|
||||
|
||||
-- Ensure scheduled_end_time is after scheduled_start_time
|
||||
CONSTRAINT valid_scheduled_time_range CHECK (scheduled_end_time IS NULL OR scheduled_end_time > scheduled_start_time),
|
||||
|
||||
-- Ensure unique assignment per conductor per phase execution
|
||||
CONSTRAINT unique_conductor_phase_execution UNIQUE (phase_execution_id, conductor_id)
|
||||
);
|
||||
|
||||
-- Indexes for conductor assignments
|
||||
CREATE INDEX IF NOT EXISTS idx_phase_assignments_phase_execution_id
|
||||
ON public.experiment_phase_assignments(phase_execution_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_phase_assignments_conductor_id
|
||||
ON public.experiment_phase_assignments(conductor_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_phase_assignments_status
|
||||
ON public.experiment_phase_assignments(status);
|
||||
CREATE INDEX IF NOT EXISTS idx_phase_assignments_scheduled_start_time
|
||||
ON public.experiment_phase_assignments(scheduled_start_time);
|
||||
CREATE INDEX IF NOT EXISTS idx_phase_assignments_created_by
|
||||
ON public.experiment_phase_assignments(created_by);
|
||||
|
||||
-- Trigger for updated_at on conductor assignments
|
||||
CREATE TRIGGER set_updated_at_phase_assignments
|
||||
BEFORE UPDATE ON public.experiment_phase_assignments
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION public.handle_updated_at();
|
||||
|
||||
-- Grant permissions
|
||||
GRANT ALL ON public.experiment_phase_assignments TO authenticated;
|
||||
|
||||
-- Enable Row Level Security
|
||||
ALTER TABLE public.experiment_phase_assignments ENABLE ROW LEVEL SECURITY;
|
||||
|
||||
-- RLS Policies for conductor assignments
|
||||
CREATE POLICY "Phase assignments are viewable by authenticated users"
|
||||
ON public.experiment_phase_assignments
|
||||
FOR SELECT USING (auth.role() = 'authenticated');
|
||||
|
||||
CREATE POLICY "Phase assignments are insertable by authenticated users"
|
||||
ON public.experiment_phase_assignments
|
||||
FOR INSERT WITH CHECK (auth.role() = 'authenticated');
|
||||
|
||||
CREATE POLICY "Phase assignments are updatable by authenticated users"
|
||||
ON public.experiment_phase_assignments
|
||||
FOR UPDATE USING (auth.role() = 'authenticated');
|
||||
|
||||
CREATE POLICY "Phase assignments are deletable by authenticated users"
|
||||
ON public.experiment_phase_assignments
|
||||
FOR DELETE USING (auth.role() = 'authenticated');
|
||||
|
||||
-- =============================================
|
||||
-- 3. FUNCTION: Calculate Sequential Phase Start Times
|
||||
-- =============================================
|
||||
|
||||
9
supabase/migrations/00015_experiment_shelling_params.sql
Normal file
9
supabase/migrations/00015_experiment_shelling_params.sql
Normal file
@@ -0,0 +1,9 @@
|
||||
-- Add experiment-level shelling parameters (defaults for repetitions)
|
||||
-- These match the shelling table attributes: ring_gap_inches, drum_rpm
|
||||
|
||||
ALTER TABLE public.experiments
|
||||
ADD COLUMN IF NOT EXISTS ring_gap_inches NUMERIC(6,2) CHECK (ring_gap_inches IS NULL OR ring_gap_inches > 0),
|
||||
ADD COLUMN IF NOT EXISTS drum_rpm INTEGER CHECK (drum_rpm IS NULL OR drum_rpm > 0);
|
||||
|
||||
COMMENT ON COLUMN public.experiments.ring_gap_inches IS 'Default space (inches) between sheller rings for this experiment';
|
||||
COMMENT ON COLUMN public.experiments.drum_rpm IS 'Default sheller drum revolutions per minute for this experiment';
|
||||
@@ -1,10 +1,259 @@
|
||||
-- View: Experiments with All Repetitions and Phase Parameters
|
||||
-- This view provides a comprehensive view of experiments with all their repetitions
|
||||
-- and all phase execution parameters (soaking, airdrying, cracking, shelling)
|
||||
-- Rename table experiment_phases to experiment_books
|
||||
-- This migration renames the table and updates all dependent objects (views, functions, triggers, indexes, RLS).
|
||||
|
||||
-- =============================================
|
||||
-- 1. RENAME TABLE
|
||||
-- =============================================
|
||||
|
||||
ALTER TABLE public.experiment_phases RENAME TO experiment_books;
|
||||
|
||||
-- =============================================
|
||||
-- 2. RENAME TRIGGER
|
||||
-- =============================================
|
||||
|
||||
DROP TRIGGER IF EXISTS set_updated_at_experiment_phases ON public.experiment_books;
|
||||
CREATE TRIGGER set_updated_at_experiment_books
|
||||
BEFORE UPDATE ON public.experiment_books
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION public.handle_updated_at();
|
||||
|
||||
-- =============================================
|
||||
-- 3. RENAME CONSTRAINT
|
||||
-- =============================================
|
||||
|
||||
ALTER TABLE public.experiment_books
|
||||
RENAME CONSTRAINT ck_experiment_phases_machine_required_when_cracking
|
||||
TO ck_experiment_books_machine_required_when_cracking;
|
||||
|
||||
-- =============================================
|
||||
-- 4. RENAME INDEXES
|
||||
-- =============================================
|
||||
|
||||
ALTER INDEX IF EXISTS public.idx_experiment_phases_name RENAME TO idx_experiment_books_name;
|
||||
ALTER INDEX IF EXISTS public.idx_experiment_phases_cracking_machine_type_id RENAME TO idx_experiment_books_cracking_machine_type_id;
|
||||
|
||||
-- =============================================
|
||||
-- 5. RLS POLICIES (drop old, create new with updated names)
|
||||
-- =============================================
|
||||
|
||||
DROP POLICY IF EXISTS "Experiment phases are viewable by authenticated users" ON public.experiment_books;
|
||||
DROP POLICY IF EXISTS "Experiment phases are insertable by authenticated users" ON public.experiment_books;
|
||||
DROP POLICY IF EXISTS "Experiment phases are updatable by authenticated users" ON public.experiment_books;
|
||||
DROP POLICY IF EXISTS "Experiment phases are deletable by authenticated users" ON public.experiment_books;
|
||||
|
||||
CREATE POLICY "Experiment books are viewable by authenticated users" ON public.experiment_books
|
||||
FOR SELECT USING (auth.role() = 'authenticated');
|
||||
|
||||
CREATE POLICY "Experiment books are insertable by authenticated users" ON public.experiment_books
|
||||
FOR INSERT WITH CHECK (auth.role() = 'authenticated');
|
||||
|
||||
CREATE POLICY "Experiment books are updatable by authenticated users" ON public.experiment_books
|
||||
FOR UPDATE USING (auth.role() = 'authenticated');
|
||||
|
||||
CREATE POLICY "Experiment books are deletable by authenticated users" ON public.experiment_books
|
||||
FOR DELETE USING (auth.role() = 'authenticated');
|
||||
|
||||
-- =============================================
|
||||
-- 6. UPDATE FUNCTION: create_phase_executions_for_repetition (references experiment_phases)
|
||||
-- =============================================
|
||||
|
||||
CREATE OR REPLACE FUNCTION create_phase_executions_for_repetition()
|
||||
RETURNS TRIGGER AS $$
|
||||
DECLARE
|
||||
exp_phase_config RECORD;
|
||||
phase_type_list TEXT[] := ARRAY[]::TEXT[];
|
||||
phase_name TEXT;
|
||||
BEGIN
|
||||
SELECT
|
||||
ep.has_soaking,
|
||||
ep.has_airdrying,
|
||||
ep.has_cracking,
|
||||
ep.has_shelling,
|
||||
ep.cracking_machine_type_id
|
||||
INTO exp_phase_config
|
||||
FROM public.experiments e
|
||||
JOIN public.experiment_books ep ON e.phase_id = ep.id
|
||||
WHERE e.id = NEW.experiment_id;
|
||||
|
||||
IF exp_phase_config.has_soaking THEN
|
||||
phase_type_list := array_append(phase_type_list, 'soaking');
|
||||
END IF;
|
||||
IF exp_phase_config.has_airdrying THEN
|
||||
phase_type_list := array_append(phase_type_list, 'airdrying');
|
||||
END IF;
|
||||
IF exp_phase_config.has_cracking THEN
|
||||
phase_type_list := array_append(phase_type_list, 'cracking');
|
||||
END IF;
|
||||
IF exp_phase_config.has_shelling THEN
|
||||
phase_type_list := array_append(phase_type_list, 'shelling');
|
||||
END IF;
|
||||
|
||||
FOREACH phase_name IN ARRAY phase_type_list
|
||||
LOOP
|
||||
INSERT INTO public.experiment_phase_executions (
|
||||
repetition_id,
|
||||
phase_type,
|
||||
scheduled_start_time,
|
||||
status,
|
||||
created_by,
|
||||
soaking_duration_minutes,
|
||||
duration_minutes,
|
||||
machine_type_id
|
||||
)
|
||||
VALUES (
|
||||
NEW.id,
|
||||
phase_name,
|
||||
NOW(),
|
||||
'pending',
|
||||
NEW.created_by,
|
||||
NULL,
|
||||
NULL,
|
||||
CASE WHEN phase_name = 'cracking'
|
||||
THEN exp_phase_config.cracking_machine_type_id
|
||||
ELSE NULL END
|
||||
);
|
||||
END LOOP;
|
||||
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- =============================================
|
||||
-- 7. UPDATE FUNCTION: create_sample_experiment_phases (INSERT into experiment_books)
|
||||
-- =============================================
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.create_sample_experiment_phases()
|
||||
RETURNS VOID AS $$
|
||||
DECLARE
|
||||
jc_cracker_id UUID;
|
||||
meyer_cracker_id UUID;
|
||||
BEGIN
|
||||
SELECT id INTO jc_cracker_id FROM public.machine_types WHERE name = 'JC Cracker';
|
||||
SELECT id INTO meyer_cracker_id FROM public.machine_types WHERE name = 'Meyer Cracker';
|
||||
|
||||
INSERT INTO public.experiment_books (name, description, has_soaking, has_airdrying, has_cracking, has_shelling, cracking_machine_type_id, created_by) VALUES
|
||||
('Full Process - JC Cracker', 'Complete pecan processing with JC Cracker', true, true, true, true, jc_cracker_id, (SELECT id FROM public.user_profiles LIMIT 1)),
|
||||
('Full Process - Meyer Cracker', 'Complete pecan processing with Meyer Cracker', true, true, true, true, meyer_cracker_id, (SELECT id FROM public.user_profiles LIMIT 1)),
|
||||
('Cracking Only - JC Cracker', 'JC Cracker cracking process only', false, false, true, false, jc_cracker_id, (SELECT id FROM public.user_profiles LIMIT 1)),
|
||||
('Cracking Only - Meyer Cracker', 'Meyer Cracker cracking process only', false, false, true, false, meyer_cracker_id, (SELECT id FROM public.user_profiles LIMIT 1))
|
||||
ON CONFLICT (name) DO NOTHING;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql SECURITY DEFINER;
|
||||
|
||||
-- =============================================
|
||||
-- 8. UPDATE VIEWS (from 00014 - experiments_with_phases, repetitions_with_phases, experiments_with_all_reps_and_phases, get_experiment_with_reps_and_phases)
|
||||
-- =============================================
|
||||
|
||||
CREATE OR REPLACE VIEW public.experiments_with_phases AS
|
||||
SELECT
|
||||
e.id,
|
||||
e.experiment_number,
|
||||
e.reps_required,
|
||||
e.weight_per_repetition_lbs,
|
||||
e.results_status,
|
||||
e.completion_status,
|
||||
e.phase_id,
|
||||
e.created_at,
|
||||
e.updated_at,
|
||||
e.created_by,
|
||||
ep.name as phase_name,
|
||||
ep.description as phase_description,
|
||||
ep.has_soaking,
|
||||
ep.has_airdrying,
|
||||
ep.has_cracking,
|
||||
ep.has_shelling,
|
||||
er.id as first_repetition_id,
|
||||
er.repetition_number as first_repetition_number,
|
||||
soaking_e.id as soaking_id,
|
||||
soaking_e.scheduled_start_time as soaking_scheduled_start,
|
||||
soaking_e.actual_start_time as soaking_actual_start,
|
||||
soaking_e.soaking_duration_minutes,
|
||||
soaking_e.scheduled_end_time as soaking_scheduled_end,
|
||||
soaking_e.actual_end_time as soaking_actual_end,
|
||||
airdrying_e.id as airdrying_id,
|
||||
airdrying_e.scheduled_start_time as airdrying_scheduled_start,
|
||||
airdrying_e.actual_start_time as airdrying_actual_start,
|
||||
airdrying_e.duration_minutes as airdrying_duration,
|
||||
airdrying_e.scheduled_end_time as airdrying_scheduled_end,
|
||||
airdrying_e.actual_end_time as airdrying_actual_end,
|
||||
cracking_e.id as cracking_id,
|
||||
cracking_e.scheduled_start_time as cracking_scheduled_start,
|
||||
cracking_e.actual_start_time as cracking_actual_start,
|
||||
cracking_e.actual_end_time as cracking_actual_end,
|
||||
mt.name as machine_type_name,
|
||||
shelling_e.id as shelling_id,
|
||||
shelling_e.scheduled_start_time as shelling_scheduled_start,
|
||||
shelling_e.actual_start_time as shelling_actual_start,
|
||||
shelling_e.actual_end_time as shelling_actual_end
|
||||
FROM public.experiments e
|
||||
LEFT JOIN public.experiment_books ep ON e.phase_id = ep.id
|
||||
LEFT JOIN LATERAL (
|
||||
SELECT id, repetition_number
|
||||
FROM public.experiment_repetitions
|
||||
WHERE experiment_id = e.id
|
||||
ORDER BY repetition_number
|
||||
LIMIT 1
|
||||
) er ON true
|
||||
LEFT JOIN public.experiment_phase_executions soaking_e
|
||||
ON soaking_e.repetition_id = er.id AND soaking_e.phase_type = 'soaking'
|
||||
LEFT JOIN public.experiment_phase_executions airdrying_e
|
||||
ON airdrying_e.repetition_id = er.id AND airdrying_e.phase_type = 'airdrying'
|
||||
LEFT JOIN public.experiment_phase_executions cracking_e
|
||||
ON cracking_e.repetition_id = er.id AND cracking_e.phase_type = 'cracking'
|
||||
LEFT JOIN public.experiment_phase_executions shelling_e
|
||||
ON shelling_e.repetition_id = er.id AND shelling_e.phase_type = 'shelling'
|
||||
LEFT JOIN public.machine_types mt ON cracking_e.machine_type_id = mt.id;
|
||||
|
||||
CREATE OR REPLACE VIEW public.repetitions_with_phases AS
|
||||
SELECT
|
||||
er.id,
|
||||
er.experiment_id,
|
||||
er.repetition_number,
|
||||
er.status,
|
||||
er.created_at,
|
||||
er.updated_at,
|
||||
er.created_by,
|
||||
e.experiment_number,
|
||||
e.phase_id,
|
||||
e.weight_per_repetition_lbs,
|
||||
ep.name as phase_name,
|
||||
ep.has_soaking,
|
||||
ep.has_airdrying,
|
||||
ep.has_cracking,
|
||||
ep.has_shelling,
|
||||
soaking_e.scheduled_start_time as soaking_scheduled_start,
|
||||
soaking_e.actual_start_time as soaking_actual_start,
|
||||
soaking_e.soaking_duration_minutes,
|
||||
soaking_e.scheduled_end_time as soaking_scheduled_end,
|
||||
soaking_e.actual_end_time as soaking_actual_end,
|
||||
airdrying_e.scheduled_start_time as airdrying_scheduled_start,
|
||||
airdrying_e.actual_start_time as airdrying_actual_start,
|
||||
airdrying_e.duration_minutes as airdrying_duration,
|
||||
airdrying_e.scheduled_end_time as airdrying_scheduled_end,
|
||||
airdrying_e.actual_end_time as airdrying_actual_end,
|
||||
cracking_e.scheduled_start_time as cracking_scheduled_start,
|
||||
cracking_e.actual_start_time as cracking_actual_start,
|
||||
cracking_e.actual_end_time as cracking_actual_end,
|
||||
mt.name as machine_type_name,
|
||||
shelling_e.scheduled_start_time as shelling_scheduled_start,
|
||||
shelling_e.actual_start_time as shelling_actual_start,
|
||||
shelling_e.actual_end_time as shelling_actual_end
|
||||
FROM public.experiment_repetitions er
|
||||
JOIN public.experiments e ON er.experiment_id = e.id
|
||||
LEFT JOIN public.experiment_books ep ON e.phase_id = ep.id
|
||||
LEFT JOIN public.experiment_phase_executions soaking_e
|
||||
ON er.id = soaking_e.repetition_id AND soaking_e.phase_type = 'soaking'
|
||||
LEFT JOIN public.experiment_phase_executions airdrying_e
|
||||
ON er.id = airdrying_e.repetition_id AND airdrying_e.phase_type = 'airdrying'
|
||||
LEFT JOIN public.experiment_phase_executions cracking_e
|
||||
ON er.id = cracking_e.repetition_id AND cracking_e.phase_type = 'cracking'
|
||||
LEFT JOIN public.experiment_phase_executions shelling_e
|
||||
ON er.id = shelling_e.repetition_id AND shelling_e.phase_type = 'shelling'
|
||||
LEFT JOIN public.machine_types mt ON cracking_e.machine_type_id = mt.id;
|
||||
|
||||
-- experiments_with_all_reps_and_phases
|
||||
CREATE OR REPLACE VIEW public.experiments_with_all_reps_and_phases AS
|
||||
SELECT
|
||||
-- Experiment fields
|
||||
e.id as experiment_id,
|
||||
e.experiment_number,
|
||||
e.reps_required,
|
||||
@@ -15,8 +264,6 @@ SELECT
|
||||
e.created_at as experiment_created_at,
|
||||
e.updated_at as experiment_updated_at,
|
||||
e.created_by as experiment_created_by,
|
||||
|
||||
-- Phase information
|
||||
ep.name as phase_name,
|
||||
ep.description as phase_description,
|
||||
ep.has_soaking,
|
||||
@@ -24,8 +271,6 @@ SELECT
|
||||
ep.has_cracking,
|
||||
ep.has_shelling,
|
||||
ep.cracking_machine_type_id as phase_cracking_machine_type_id,
|
||||
|
||||
-- Repetition fields
|
||||
er.id as repetition_id,
|
||||
er.repetition_number,
|
||||
er.status as repetition_status,
|
||||
@@ -33,8 +278,6 @@ SELECT
|
||||
er.created_at as repetition_created_at,
|
||||
er.updated_at as repetition_updated_at,
|
||||
er.created_by as repetition_created_by,
|
||||
|
||||
-- Soaking phase execution
|
||||
soaking_e.id as soaking_execution_id,
|
||||
soaking_e.scheduled_start_time as soaking_scheduled_start,
|
||||
soaking_e.actual_start_time as soaking_actual_start,
|
||||
@@ -42,8 +285,6 @@ SELECT
|
||||
soaking_e.scheduled_end_time as soaking_scheduled_end,
|
||||
soaking_e.actual_end_time as soaking_actual_end,
|
||||
soaking_e.status as soaking_status,
|
||||
|
||||
-- Airdrying phase execution
|
||||
airdrying_e.id as airdrying_execution_id,
|
||||
airdrying_e.scheduled_start_time as airdrying_scheduled_start,
|
||||
airdrying_e.actual_start_time as airdrying_actual_start,
|
||||
@@ -51,8 +292,6 @@ SELECT
|
||||
airdrying_e.scheduled_end_time as airdrying_scheduled_end,
|
||||
airdrying_e.actual_end_time as airdrying_actual_end,
|
||||
airdrying_e.status as airdrying_status,
|
||||
|
||||
-- Cracking phase execution
|
||||
cracking_e.id as cracking_execution_id,
|
||||
cracking_e.scheduled_start_time as cracking_scheduled_start,
|
||||
cracking_e.actual_start_time as cracking_actual_start,
|
||||
@@ -61,17 +300,14 @@ SELECT
|
||||
cracking_e.machine_type_id as cracking_machine_type_id,
|
||||
cracking_e.status as cracking_status,
|
||||
mt.name as machine_type_name,
|
||||
|
||||
-- Shelling phase execution
|
||||
shelling_e.id as shelling_execution_id,
|
||||
shelling_e.scheduled_start_time as shelling_scheduled_start,
|
||||
shelling_e.actual_start_time as shelling_actual_start,
|
||||
shelling_e.scheduled_end_time as shelling_scheduled_end,
|
||||
shelling_e.actual_end_time as shelling_actual_end,
|
||||
shelling_e.status as shelling_status
|
||||
|
||||
FROM public.experiments e
|
||||
LEFT JOIN public.experiment_phases ep ON e.phase_id = ep.id
|
||||
LEFT JOIN public.experiment_books ep ON e.phase_id = ep.id
|
||||
LEFT JOIN public.experiment_repetitions er ON er.experiment_id = e.id
|
||||
LEFT JOIN public.experiment_phase_executions soaking_e
|
||||
ON soaking_e.repetition_id = er.id AND soaking_e.phase_type = 'soaking'
|
||||
@@ -84,11 +320,7 @@ LEFT JOIN public.experiment_phase_executions shelling_e
|
||||
LEFT JOIN public.machine_types mt ON cracking_e.machine_type_id = mt.id
|
||||
ORDER BY e.experiment_number, er.repetition_number;
|
||||
|
||||
-- Grant permissions
|
||||
GRANT SELECT ON public.experiments_with_all_reps_and_phases TO authenticated;
|
||||
|
||||
-- Function: Get experiment with all repetitions and phase parameters
|
||||
-- This function returns a JSON structure with experiment and all its repetitions
|
||||
-- get_experiment_with_reps_and_phases function
|
||||
CREATE OR REPLACE FUNCTION public.get_experiment_with_reps_and_phases(p_experiment_id UUID)
|
||||
RETURNS TABLE (
|
||||
experiment_id UUID,
|
||||
@@ -147,7 +379,7 @@ BEGIN
|
||||
'[]'::jsonb
|
||||
) as repetitions
|
||||
FROM public.experiments e
|
||||
LEFT JOIN public.experiment_phases ep ON e.phase_id = ep.id
|
||||
LEFT JOIN public.experiment_books ep ON e.phase_id = ep.id
|
||||
LEFT JOIN public.experiment_repetitions er ON er.experiment_id = e.id
|
||||
LEFT JOIN public.experiment_phase_executions soaking_e
|
||||
ON soaking_e.repetition_id = er.id AND soaking_e.phase_type = 'soaking'
|
||||
@@ -163,115 +395,5 @@ BEGIN
|
||||
END;
|
||||
$$ LANGUAGE plpgsql SECURITY DEFINER;
|
||||
|
||||
-- Grant execute permission
|
||||
GRANT SELECT ON public.experiments_with_all_reps_and_phases TO authenticated;
|
||||
GRANT EXECUTE ON FUNCTION public.get_experiment_with_reps_and_phases(UUID) TO authenticated;
|
||||
|
||||
-- Update the existing experiments_with_phases view to use unified table
|
||||
CREATE OR REPLACE VIEW public.experiments_with_phases AS
|
||||
SELECT
|
||||
e.id,
|
||||
e.experiment_number,
|
||||
e.reps_required,
|
||||
e.weight_per_repetition_lbs,
|
||||
e.results_status,
|
||||
e.completion_status,
|
||||
e.phase_id,
|
||||
e.created_at,
|
||||
e.updated_at,
|
||||
e.created_by,
|
||||
ep.name as phase_name,
|
||||
ep.description as phase_description,
|
||||
ep.has_soaking,
|
||||
ep.has_airdrying,
|
||||
ep.has_cracking,
|
||||
ep.has_shelling,
|
||||
er.id as first_repetition_id,
|
||||
er.repetition_number as first_repetition_number,
|
||||
soaking_e.id as soaking_id,
|
||||
soaking_e.scheduled_start_time as soaking_scheduled_start,
|
||||
soaking_e.actual_start_time as soaking_actual_start,
|
||||
soaking_e.soaking_duration_minutes,
|
||||
soaking_e.scheduled_end_time as soaking_scheduled_end,
|
||||
soaking_e.actual_end_time as soaking_actual_end,
|
||||
airdrying_e.id as airdrying_id,
|
||||
airdrying_e.scheduled_start_time as airdrying_scheduled_start,
|
||||
airdrying_e.actual_start_time as airdrying_actual_start,
|
||||
airdrying_e.duration_minutes as airdrying_duration,
|
||||
airdrying_e.scheduled_end_time as airdrying_scheduled_end,
|
||||
airdrying_e.actual_end_time as airdrying_actual_end,
|
||||
cracking_e.id as cracking_id,
|
||||
cracking_e.scheduled_start_time as cracking_scheduled_start,
|
||||
cracking_e.actual_start_time as cracking_actual_start,
|
||||
cracking_e.actual_end_time as cracking_actual_end,
|
||||
mt.name as machine_type_name,
|
||||
shelling_e.id as shelling_id,
|
||||
shelling_e.scheduled_start_time as shelling_scheduled_start,
|
||||
shelling_e.actual_start_time as shelling_actual_start,
|
||||
shelling_e.actual_end_time as shelling_actual_end
|
||||
FROM public.experiments e
|
||||
LEFT JOIN public.experiment_phases ep ON e.phase_id = ep.id
|
||||
LEFT JOIN LATERAL (
|
||||
SELECT id, repetition_number
|
||||
FROM public.experiment_repetitions
|
||||
WHERE experiment_id = e.id
|
||||
ORDER BY repetition_number
|
||||
LIMIT 1
|
||||
) er ON true
|
||||
LEFT JOIN public.experiment_phase_executions soaking_e
|
||||
ON soaking_e.repetition_id = er.id AND soaking_e.phase_type = 'soaking'
|
||||
LEFT JOIN public.experiment_phase_executions airdrying_e
|
||||
ON airdrying_e.repetition_id = er.id AND airdrying_e.phase_type = 'airdrying'
|
||||
LEFT JOIN public.experiment_phase_executions cracking_e
|
||||
ON cracking_e.repetition_id = er.id AND cracking_e.phase_type = 'cracking'
|
||||
LEFT JOIN public.experiment_phase_executions shelling_e
|
||||
ON shelling_e.repetition_id = er.id AND shelling_e.phase_type = 'shelling'
|
||||
LEFT JOIN public.machine_types mt ON cracking_e.machine_type_id = mt.id;
|
||||
|
||||
-- Update repetitions_with_phases view to use unified table
|
||||
CREATE OR REPLACE VIEW public.repetitions_with_phases AS
|
||||
SELECT
|
||||
er.id,
|
||||
er.experiment_id,
|
||||
er.repetition_number,
|
||||
er.status,
|
||||
er.created_at,
|
||||
er.updated_at,
|
||||
er.created_by,
|
||||
e.experiment_number,
|
||||
e.phase_id,
|
||||
e.weight_per_repetition_lbs,
|
||||
ep.name as phase_name,
|
||||
ep.has_soaking,
|
||||
ep.has_airdrying,
|
||||
ep.has_cracking,
|
||||
ep.has_shelling,
|
||||
soaking_e.scheduled_start_time as soaking_scheduled_start,
|
||||
soaking_e.actual_start_time as soaking_actual_start,
|
||||
soaking_e.soaking_duration_minutes,
|
||||
soaking_e.scheduled_end_time as soaking_scheduled_end,
|
||||
soaking_e.actual_end_time as soaking_actual_end,
|
||||
airdrying_e.scheduled_start_time as airdrying_scheduled_start,
|
||||
airdrying_e.actual_start_time as airdrying_actual_start,
|
||||
airdrying_e.duration_minutes as airdrying_duration,
|
||||
airdrying_e.scheduled_end_time as airdrying_scheduled_end,
|
||||
airdrying_e.actual_end_time as airdrying_actual_end,
|
||||
cracking_e.scheduled_start_time as cracking_scheduled_start,
|
||||
cracking_e.actual_start_time as cracking_actual_start,
|
||||
cracking_e.actual_end_time as cracking_actual_end,
|
||||
mt.name as machine_type_name,
|
||||
shelling_e.scheduled_start_time as shelling_scheduled_start,
|
||||
shelling_e.actual_start_time as shelling_actual_start,
|
||||
shelling_e.actual_end_time as shelling_actual_end
|
||||
FROM public.experiment_repetitions er
|
||||
JOIN public.experiments e ON er.experiment_id = e.id
|
||||
LEFT JOIN public.experiment_phases ep ON e.phase_id = ep.id
|
||||
LEFT JOIN public.experiment_phase_executions soaking_e
|
||||
ON er.id = soaking_e.repetition_id AND soaking_e.phase_type = 'soaking'
|
||||
LEFT JOIN public.experiment_phase_executions airdrying_e
|
||||
ON er.id = airdrying_e.repetition_id AND airdrying_e.phase_type = 'airdrying'
|
||||
LEFT JOIN public.experiment_phase_executions cracking_e
|
||||
ON er.id = cracking_e.repetition_id AND cracking_e.phase_type = 'cracking'
|
||||
LEFT JOIN public.experiment_phase_executions shelling_e
|
||||
ON er.id = shelling_e.repetition_id AND shelling_e.phase_type = 'shelling'
|
||||
LEFT JOIN public.machine_types mt ON cracking_e.machine_type_id = mt.id;
|
||||
|
||||
118
supabase/migrations/00017_experiment_phase_config_tables.sql
Normal file
118
supabase/migrations/00017_experiment_phase_config_tables.sql
Normal file
@@ -0,0 +1,118 @@
|
||||
-- Experiment-level phase config tables
|
||||
-- One row per experiment per phase; linked by experiment_id. Used when creating an experiment
|
||||
-- so soaking, airdrying, cracking, and shelling parameters are stored and can be applied to repetitions.
|
||||
|
||||
-- =============================================
-- 1. EXPERIMENT_SOAKING (template for soaking phase)
-- =============================================
-- One config row per experiment (enforced by the UNIQUE constraint below);
-- deleted automatically when the parent experiment is deleted (ON DELETE CASCADE).

CREATE TABLE IF NOT EXISTS public.experiment_soaking (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    experiment_id UUID NOT NULL REFERENCES public.experiments(id) ON DELETE CASCADE,
    -- Duration in hours; zero allowed (phase configured but skipped).
    soaking_duration_hr DOUBLE PRECISION NOT NULL CHECK (soaking_duration_hr >= 0),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),  -- maintained by trigger below
    created_by UUID NOT NULL REFERENCES public.user_profiles(id),
    CONSTRAINT unique_experiment_soaking_per_experiment UNIQUE (experiment_id)
);

-- FK-lookup index (UNIQUE above already indexes experiment_id; kept for
-- consistency with the sibling config tables).
CREATE INDEX IF NOT EXISTS idx_experiment_soaking_experiment_id ON public.experiment_soaking(experiment_id);

-- NOTE(review): GRANT ALL also includes TRUNCATE/REFERENCES/TRIGGER; consider
-- narrowing to SELECT, INSERT, UPDATE, DELETE if those are not needed.
GRANT ALL ON public.experiment_soaking TO authenticated;
ALTER TABLE public.experiment_soaking ENABLE ROW LEVEL SECURITY;

-- Scope policies to the "authenticated" role via the TO clause; filtering with
-- auth.role() = 'authenticated' inside USING is the deprecated pattern per
-- Supabase RLS documentation. Policy names unchanged.
CREATE POLICY "Experiment soaking config is viewable by authenticated" ON public.experiment_soaking FOR SELECT TO authenticated USING (true);
CREATE POLICY "Experiment soaking config is insertable by authenticated" ON public.experiment_soaking FOR INSERT TO authenticated WITH CHECK (true);
CREATE POLICY "Experiment soaking config is updatable by authenticated" ON public.experiment_soaking FOR UPDATE TO authenticated USING (true);
CREATE POLICY "Experiment soaking config is deletable by authenticated" ON public.experiment_soaking FOR DELETE TO authenticated USING (true);

-- Keep updated_at current on every row update.
CREATE TRIGGER set_updated_at_experiment_soaking
BEFORE UPDATE ON public.experiment_soaking
FOR EACH ROW EXECUTE FUNCTION public.handle_updated_at();
|
||||
|
||||
-- =============================================
-- 2. EXPERIMENT_AIRDRYING (template for airdrying phase)
-- =============================================
-- One config row per experiment (UNIQUE below); cascades on experiment delete.

CREATE TABLE IF NOT EXISTS public.experiment_airdrying (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    experiment_id UUID NOT NULL REFERENCES public.experiments(id) ON DELETE CASCADE,
    -- Duration in whole minutes; zero allowed (phase configured but skipped).
    duration_minutes INTEGER NOT NULL CHECK (duration_minutes >= 0),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),  -- maintained by trigger below
    created_by UUID NOT NULL REFERENCES public.user_profiles(id),
    CONSTRAINT unique_experiment_airdrying_per_experiment UNIQUE (experiment_id)
);

-- FK-lookup index (redundant with the UNIQUE constraint's index; kept for
-- consistency with the sibling config tables).
CREATE INDEX IF NOT EXISTS idx_experiment_airdrying_experiment_id ON public.experiment_airdrying(experiment_id);

-- NOTE(review): GRANT ALL also includes TRUNCATE/REFERENCES/TRIGGER; consider
-- narrowing to SELECT, INSERT, UPDATE, DELETE if those are not needed.
GRANT ALL ON public.experiment_airdrying TO authenticated;
ALTER TABLE public.experiment_airdrying ENABLE ROW LEVEL SECURITY;

-- Scope policies to the "authenticated" role via the TO clause; filtering with
-- auth.role() = 'authenticated' inside USING is the deprecated pattern per
-- Supabase RLS documentation. Policy names unchanged.
CREATE POLICY "Experiment airdrying config is viewable by authenticated" ON public.experiment_airdrying FOR SELECT TO authenticated USING (true);
CREATE POLICY "Experiment airdrying config is insertable by authenticated" ON public.experiment_airdrying FOR INSERT TO authenticated WITH CHECK (true);
CREATE POLICY "Experiment airdrying config is updatable by authenticated" ON public.experiment_airdrying FOR UPDATE TO authenticated USING (true);
CREATE POLICY "Experiment airdrying config is deletable by authenticated" ON public.experiment_airdrying FOR DELETE TO authenticated USING (true);

-- Keep updated_at current on every row update.
CREATE TRIGGER set_updated_at_experiment_airdrying
BEFORE UPDATE ON public.experiment_airdrying
FOR EACH ROW EXECUTE FUNCTION public.handle_updated_at();
|
||||
|
||||
-- =============================================
-- 3. EXPERIMENT_CRACKING (template for cracking; supports JC and Meyer params)
-- =============================================
-- One config row per experiment (UNIQUE below); cascades on experiment delete.
-- machine_type_id is RESTRICTed so a machine type referenced by a config
-- cannot be deleted.
-- NOTE(review): nothing enforces that only the parameter group matching the
-- machine type is populated (JC vs Meyer columns are all independently
-- nullable) — consider a CHECK tied to machine type if the ids are stable,
-- or enforce exclusivity at the application layer.

CREATE TABLE IF NOT EXISTS public.experiment_cracking (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    experiment_id UUID NOT NULL REFERENCES public.experiments(id) ON DELETE CASCADE,
    machine_type_id UUID NOT NULL REFERENCES public.machine_types(id) ON DELETE RESTRICT,
    -- JC Cracker parameters (nullable; used when machine is JC)
    plate_contact_frequency_hz DOUBLE PRECISION CHECK (plate_contact_frequency_hz IS NULL OR plate_contact_frequency_hz > 0),
    throughput_rate_pecans_sec DOUBLE PRECISION CHECK (throughput_rate_pecans_sec IS NULL OR throughput_rate_pecans_sec > 0),
    crush_amount_in DOUBLE PRECISION CHECK (crush_amount_in IS NULL OR crush_amount_in >= 0),
    -- Signed height difference in inches; may legitimately be negative.
    entry_exit_height_diff_in DOUBLE PRECISION,
    -- Meyer Cracker parameters (nullable; used when machine is Meyer)
    motor_speed_hz DOUBLE PRECISION CHECK (motor_speed_hz IS NULL OR motor_speed_hz > 0),
    jig_displacement_inches DOUBLE PRECISION CHECK (jig_displacement_inches IS NULL OR jig_displacement_inches >= 0),
    spring_stiffness_nm DOUBLE PRECISION CHECK (spring_stiffness_nm IS NULL OR spring_stiffness_nm > 0),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),  -- maintained by trigger below
    created_by UUID NOT NULL REFERENCES public.user_profiles(id),
    CONSTRAINT unique_experiment_cracking_per_experiment UNIQUE (experiment_id)
);

-- FK-lookup indexes (experiment_id index is redundant with the UNIQUE
-- constraint's index; kept for consistency with the sibling config tables).
CREATE INDEX IF NOT EXISTS idx_experiment_cracking_experiment_id ON public.experiment_cracking(experiment_id);
CREATE INDEX IF NOT EXISTS idx_experiment_cracking_machine_type_id ON public.experiment_cracking(machine_type_id);

-- NOTE(review): GRANT ALL also includes TRUNCATE/REFERENCES/TRIGGER; consider
-- narrowing to SELECT, INSERT, UPDATE, DELETE if those are not needed.
GRANT ALL ON public.experiment_cracking TO authenticated;
ALTER TABLE public.experiment_cracking ENABLE ROW LEVEL SECURITY;

-- Scope policies to the "authenticated" role via the TO clause; filtering with
-- auth.role() = 'authenticated' inside USING is the deprecated pattern per
-- Supabase RLS documentation. Policy names unchanged.
CREATE POLICY "Experiment cracking config is viewable by authenticated" ON public.experiment_cracking FOR SELECT TO authenticated USING (true);
CREATE POLICY "Experiment cracking config is insertable by authenticated" ON public.experiment_cracking FOR INSERT TO authenticated WITH CHECK (true);
CREATE POLICY "Experiment cracking config is updatable by authenticated" ON public.experiment_cracking FOR UPDATE TO authenticated USING (true);
CREATE POLICY "Experiment cracking config is deletable by authenticated" ON public.experiment_cracking FOR DELETE TO authenticated USING (true);

-- Keep updated_at current on every row update.
CREATE TRIGGER set_updated_at_experiment_cracking
BEFORE UPDATE ON public.experiment_cracking
FOR EACH ROW EXECUTE FUNCTION public.handle_updated_at();
|
||||
|
||||
-- =============================================
-- 4. EXPERIMENT_SHELLING (template for shelling phase)
-- =============================================
-- One config row per experiment (UNIQUE below); cascades on experiment delete.
-- Both parameters are optional: a row may record only the parameters that
-- were actually configured for the run.

CREATE TABLE IF NOT EXISTS public.experiment_shelling (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    experiment_id UUID NOT NULL REFERENCES public.experiments(id) ON DELETE CASCADE,
    -- Ring gap in inches, 2 decimal places; must be positive when present.
    ring_gap_inches NUMERIC(6,2) CHECK (ring_gap_inches IS NULL OR ring_gap_inches > 0),
    drum_rpm INTEGER CHECK (drum_rpm IS NULL OR drum_rpm > 0),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),  -- maintained by trigger below
    created_by UUID NOT NULL REFERENCES public.user_profiles(id),
    CONSTRAINT unique_experiment_shelling_per_experiment UNIQUE (experiment_id)
);

-- FK-lookup index (redundant with the UNIQUE constraint's index; kept for
-- consistency with the sibling config tables).
CREATE INDEX IF NOT EXISTS idx_experiment_shelling_experiment_id ON public.experiment_shelling(experiment_id);

-- NOTE(review): GRANT ALL also includes TRUNCATE/REFERENCES/TRIGGER; consider
-- narrowing to SELECT, INSERT, UPDATE, DELETE if those are not needed.
GRANT ALL ON public.experiment_shelling TO authenticated;
ALTER TABLE public.experiment_shelling ENABLE ROW LEVEL SECURITY;

-- Scope policies to the "authenticated" role via the TO clause; filtering with
-- auth.role() = 'authenticated' inside USING is the deprecated pattern per
-- Supabase RLS documentation. Policy names unchanged.
CREATE POLICY "Experiment shelling config is viewable by authenticated" ON public.experiment_shelling FOR SELECT TO authenticated USING (true);
CREATE POLICY "Experiment shelling config is insertable by authenticated" ON public.experiment_shelling FOR INSERT TO authenticated WITH CHECK (true);
CREATE POLICY "Experiment shelling config is updatable by authenticated" ON public.experiment_shelling FOR UPDATE TO authenticated USING (true);
CREATE POLICY "Experiment shelling config is deletable by authenticated" ON public.experiment_shelling FOR DELETE TO authenticated USING (true);

-- Keep updated_at current on every row update.
CREATE TRIGGER set_updated_at_experiment_shelling
BEFORE UPDATE ON public.experiment_shelling
FOR EACH ROW EXECUTE FUNCTION public.handle_updated_at();
|
||||
@@ -566,11 +566,11 @@ INSERT INTO public.machine_types (name, description, created_by) VALUES
|
||||
ON CONFLICT (name) DO NOTHING;
|
||||
|
||||
-- =============================================
|
||||
-- 5. CREATE EXPERIMENT PHASES
|
||||
-- 5. CREATE EXPERIMENT BOOKS (table renamed from experiment_phases in migration 00016)
|
||||
-- =============================================
|
||||
|
||||
-- Create "Phase 2 of JC Experiments" phase
|
||||
INSERT INTO public.experiment_phases (name, description, has_soaking, has_airdrying, has_cracking, has_shelling, cracking_machine_type_id, created_by)
|
||||
-- Create "Phase 2 of JC Experiments" book
|
||||
INSERT INTO public.experiment_books (name, description, has_soaking, has_airdrying, has_cracking, has_shelling, cracking_machine_type_id, created_by)
|
||||
SELECT
|
||||
'Phase 2 of JC Experiments',
|
||||
'Second phase of JC Cracker experiments for pecan processing optimization',
|
||||
@@ -584,8 +584,8 @@ FROM public.user_profiles up
|
||||
WHERE up.email = 's.alireza.v@gmail.com'
|
||||
;
|
||||
|
||||
-- Create "Post Workshop Meyer Experiments" phase
|
||||
INSERT INTO public.experiment_phases (name, description, has_soaking, has_airdrying, has_cracking, has_shelling, cracking_machine_type_id, created_by)
|
||||
-- Create "Post Workshop Meyer Experiments" book
|
||||
INSERT INTO public.experiment_books (name, description, has_soaking, has_airdrying, has_cracking, has_shelling, cracking_machine_type_id, created_by)
|
||||
SELECT
|
||||
'Post Workshop Meyer Experiments',
|
||||
'Post workshop Meyer Cracker experiments for pecan processing optimization',
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
-- ==============================================
|
||||
-- 6. CREATE EXPERIMENTS FOR PHASE 2
|
||||
-- ==============================================
|
||||
|
||||
-- TEMPORARILY DISABLED (see config.toml sql_paths). When re-enabling, replace
|
||||
-- all "experiment_phases" with "experiment_books" (table renamed in migration 00016).
|
||||
--
|
||||
-- This seed file creates experiments from phase_2_JC_experimental_run_sheet.csv
|
||||
-- Each experiment has 3 repetitions with specific parameters
|
||||
-- Experiment numbers are incremented by 1 (CSV 0-19 becomes DB 1-20)
|
||||
|
||||
Reference in New Issue
Block a user