{ usda-vision-packages ? null  # packages from the usda-vision flake; null => local callPackage fallback below
, envFile ? null               # path to the ragenix-decrypted .env secret (optional)
, azureEnvFile ? null          # path to the ragenix-decrypted .env.azure secret (optional)
, ...
}:
# ============================================================================
# USDA Dashboard External System Module
# ============================================================================
# External system configuration for usda-dash
# This module can be referenced from nixos-systems/inventory.nix using:
#
#   nix-lxc = {
#     devices = {
#       "usda-dash" = builtins.fetchGit {
#         url = "https://git.factory.uga.edu/MODEL/usda-dash-config.git";
#         rev = "commit-hash";
#         submodules = true; # REQUIRED for the usda-vision submodule
#       };
#     };
#   };
#
# IMPORTANT: For LXC containers running Docker, the Proxmox LXC must be
# configured with:
#   - Features: nesting=1, keyctl=1
#   - Unprivileged: no (or privileged: yes)
# Edit the container config in Proxmox: /etc/pve/lxc/<VMID>.conf
# Add: features: nesting=1,keyctl=1
#
# USAGE FROM ATHENIX:
#
# 1. Add usda-vision as a flake input in athenix/flake.nix:
#
#      inputs.usda-vision = {
#        url = "path:/path/to/usda-dash-config/usda-vision";
#        inputs.nixpkgs.follows = "nixpkgs";
#      };
#
# 2. In inventory.nix, pass the usda-vision packages and the
#    ragenix-managed secrets:
#
#      imports = [
#        (import /path/to/usda-dash-config/default.nix {
#          usda-vision-packages = inputs.usda-vision.packages.${system};
#          envFile = config.age.secrets.usda-vision-env.path;
#          azureEnvFile = config.age.secrets.usda-vision-azure-env.path;
#        })
#      ];
# The NixOS module proper: standard module arguments supplied by the module
# system when this file is imported.
{
  config,
  lib,
  pkgs,
  ...
}:
let
  # Get packages from the parameter passed by athenix.
  # Fall back to a local callPackage if not provided (for standalone testing).

  # Camera vendor SDK package; its lib/ directory is exported system-wide via
  # LD_LIBRARY_PATH later in this module.
  camera-sdk =
    if usda-vision-packages != null
    then usda-vision-packages.camera-sdk
    else pkgs.callPackage ./usda-vision/camera-sdk.nix {};

  # The USDA Vision application bundle. Per the service definitions below it
  # ships /opt/usda-vision (docker-compose.yml, supabase/, .env.example).
  usda-vision-app =
    if usda-vision-packages != null
    then usda-vision-packages.usda-vision
    else pkgs.callPackage ./usda-vision/package.nix {};
in
{
  # ========== Module Configuration ==========
  config = {
    # Nix configuration for an LXC container without sandbox support.
    nix.settings = {
      sandbox = false; # LXC containers don't support kernel namespaces for sandboxing
      experimental-features = [ "nix-command" "flakes" ];
    };

    # System packages specific to usda-dash.
    environment.systemPackages = with pkgs; [
      # Core tools
      git
      vim
      htop
      curl
      wget
      nfs-utils

      # Docker and Docker Compose for running usda-vision
      docker
      docker-compose

      # Supabase
      supabase-cli

      # Camera SDK
      camera-sdk

      # USDA Vision application package with convenience scripts
      usda-vision-app
    ];

    # Make camera SDK libraries available system-wide.
    # NOTE(review): a global LD_LIBRARY_PATH affects every process on the
    # system; confirm it does not shadow libraries for unrelated programs.
    environment.variables = {
      LD_LIBRARY_PATH = "${camera-sdk}/lib";
    };

    # Enable the Docker service with LXC-compatible settings.
    virtualisation.docker = {
      enable = true;
      autoPrune.enable = true;
      # Enable experimental daemon features for better LXC compatibility.
      daemon.settings = {
        experimental = true;
      };
    };

    # LXC-specific kernel settings for nested containers.
    boot.kernel.sysctl = {
      # Required for Docker networking in LXC.
      "net.ipv4.ip_forward" = 1;
      "net.ipv4.conf.all.forwarding" = 1;
    };

    # Configure users (athenix-managed user module).
    athenix.users.sv22900.enable = true;

    # Add users to the docker group so they can use the daemon socket.
    users.users.sv22900.extraGroups = [ "docker" ];
    users.users.engr-ugaif.extraGroups = [ "docker" ];

    # Create persistent directories and the .env file location.
    systemd.tmpfiles.rules = [
      "d /var/lib/usda-vision 0755 root root -"
      "f /var/lib/usda-vision/.env 0644 root root -"
      "d /var/lib/supabase 0755 root root -"
      "d /mnt/nfs_share 0755 root root -"
    ];

    # Enable NFS client support.
    services.rpcbind.enable = true;

    # NFS mount for shared storage.
    fileSystems."/mnt/nfs_share" = {
      device = "192.168.1.249:/mnt/nfs_share";
      fsType = "nfs";
      # soft: fail I/O instead of hanging if the server disappears;
      # _netdev: wait for the network before attempting the mount.
      options = [ "nfsvers=4" "rw" "soft" "_netdev" ];
    };
# Supabase CLI configuration - runs in writable directory
|
|
systemd.services.supabase-cli = {
|
|
enable = true;
|
|
description = "Supabase CLI Service";
|
|
|
|
preStart = ''
|
|
# Clean slate - remove old content but keep the directory
|
|
rm -rf /var/lib/supabase/*
|
|
rm -rf /var/lib/supabase/.* 2>/dev/null || true
|
|
|
|
# Copy supabase directory structure from the app
|
|
if [ -d ${usda-vision-app}/opt/usda-vision/supabase ]; then
|
|
${pkgs.rsync}/bin/rsync -av ${usda-vision-app}/opt/usda-vision/supabase/ /var/lib/supabase/supabase/
|
|
fi
|
|
|
|
# Create necessary directories for supabase
|
|
mkdir -p /var/lib/supabase/supabase/.branches
|
|
chmod -R 755 /var/lib/supabase
|
|
'';
|
|
|
|
serviceConfig = {
|
|
WorkingDirectory = "/var/lib/supabase";
|
|
EnvironmentFile="/var/lib/usda-vision/.env";
|
|
ExecStart = "${pkgs.supabase-cli}/bin/supabase start";
|
|
ExecStop = "${pkgs.supabase-cli}/bin/supabase stop";
|
|
Type = "oneshot";
|
|
RemainAfterExit = true;
|
|
User = "root";
|
|
Group = "root";
|
|
};
|
|
};
|
|
|
|
    # Systemd service to manage the usda-vision docker compose stack.
    systemd.services.usda-vision = {
      description = "USDA Vision Docker Compose Stack";
      after = [ "docker.service" "network-online.target" "systemd-tmpfiles-setup.service" ];
      wants = [ "network-online.target" ];
      wantedBy = [ "multi-user.target" ];

      # Only start if the .env file exists (managed by ragenix). When no
      # envFile was passed in, mkIf omits the condition entirely.
      unitConfig = lib.mkIf (envFile != null) {
        ConditionPathExists = envFile;
      };

      # preStart syncs the immutable store copy of the app into a writable
      # tree, then layers the decrypted secrets on top. The ${...} splices
      # are evaluated by Nix at build time; only the resulting shell text
      # runs at service start.
      preStart = ''
        # Copy application code to writable directory if not already present or if source is newer
        echo "Syncing application code to /var/lib/usda-vision..."
        ${pkgs.rsync}/bin/rsync -av --delete \
          --checksum \
          --exclude='node_modules' \
          --exclude='.env' \
          --exclude='.env.azure' \
          --exclude='__pycache__' \
          --exclude='.venv' \
          ${usda-vision-app}/opt/usda-vision/ /var/lib/usda-vision/

        # Copy ragenix-managed secrets to working directory
        ${lib.optionalString (envFile != null) ''
          echo "Copying environment file from ragenix-managed secret..."
          cp ${envFile} /var/lib/usda-vision/.env
          chmod 644 /var/lib/usda-vision/.env
        ''}

        ${lib.optionalString (azureEnvFile != null) ''
          echo "Copying Azure environment file from ragenix-managed secret..."
          cp ${azureEnvFile} /var/lib/usda-vision/.env.azure
          chmod 644 /var/lib/usda-vision/.env.azure
        ''}

        # Fallback: use example file if no secrets provided
        ${lib.optionalString (envFile == null) ''
          if [ ! -s /var/lib/usda-vision/.env ]; then
            if [ -f ${usda-vision-app}/opt/usda-vision/.env.example ]; then
              echo "WARNING: No ragenix-managed secrets provided, using .env.example"
              echo "Please configure secrets in athenix using ragenix"
              cp ${usda-vision-app}/opt/usda-vision/.env.example /var/lib/usda-vision/.env
            fi
          fi
        ''}
      '';

      serviceConfig = {
        # oneshot + RemainAfterExit: compose detaches (-d), so the unit is
        # considered active after `up` succeeds until ExecStop runs `down`.
        Type = "oneshot";
        RemainAfterExit = true;
        WorkingDirectory = "/var/lib/usda-vision";
        User = "root";
        Group = "root";

        # Start: pull latest images and start containers from writable directory
        ExecStart = "${pkgs.docker-compose}/bin/docker-compose -f /var/lib/usda-vision/docker-compose.yml up -d --build";

        # Stop: gracefully stop containers
        ExecStop = "${pkgs.docker-compose}/bin/docker-compose -f /var/lib/usda-vision/docker-compose.yml down";

        # Reload: restart containers (full down/up cycle with rebuild)
        ExecReload = "${pkgs.bash}/bin/bash -c '${pkgs.docker-compose}/bin/docker-compose -f /var/lib/usda-vision/docker-compose.yml down && ${pkgs.docker-compose}/bin/docker-compose -f /var/lib/usda-vision/docker-compose.yml up -d --build'";

        # Generous timeouts: first start builds images, which can be slow.
        TimeoutStartSec = 300;
        TimeoutStopSec = 120;
      };
    };
    # Firewall configuration — port inventory for USDA Vision services.
    # NOTE(review): enable = false turns the firewall entirely OFF, so the
    # allowedTCPPorts/allowedUDPPorts lists below are currently inert and
    # serve only as documentation. Confirm whether this should instead be
    # enable = true with exactly these ports opened.
    networking.firewall = {
      enable = false;
      allowedTCPPorts = [
        # Web services
        80 # HTTP
        443 # HTTPS
        3000 # Main web app (if exposed directly)
        3001 # Vision video remote web app
        3002 # Vision system remote web app
        3003 # Scheduling remote web app
        4000 # Analytics service web app

        # Supabase services
        54321 # Supabase Kong (API Gateway)
        54322 # Supabase PostgreSQL
        54323 # Supabase Studio
        54324 # Supabase Inbucket (email testing)
        54327 # Supabase Analytics

        # USDA Vision services
        8000 # Camera Management API
        8025 # Mailpit (email testing)
        8090 # Media API
        8189 # MediaMTX API
        8554 # RTSP (MediaMTX)
        8889 # MediaMTX WebRTC
      ];

      allowedUDPPorts = [
        3956 # GigE Vision Control Protocol (GVCP)
      ];

      allowPing = true;
    };
    # Any other usda-dash specific configuration goes above this line.
  };
}