feat: Export NixOS Module for usda-vision service config
@@ -95,6 +95,9 @@
          DOCKER_BUILDKIT = "1";
          COMPOSE_DOCKER_CLI_BUILD = "1";
        };

        # NixOS module for deployment
        nixosModules.default = import ./module.nix;
      }
    );
}
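
The hunk above exports the deployment module as a flake output, so a consuming machine configuration can import it directly instead of vendoring module.nix. A minimal sketch of such a consumer flake, assuming the module really is exposed as nixosModules.default at the top level of the outputs (not nested under a per-system attribute); the input URL and host name are hypothetical:

{
  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
    usda-vision.url = "github:example/usda-vision";  # hypothetical location of this repository
  };

  outputs = { self, nixpkgs, usda-vision }: {
    nixosConfigurations.exp-dash = nixpkgs.lib.nixosSystem {
      system = "x86_64-linux";
      modules = [
        # Module exported by this commit
        usda-vision.nixosModules.default
        # Minimal configuration; see module.nix below for the full option set
        { services.usda-vision.enable = true; }
      ];
    };
  };
}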
module.nix  (new file, 160 lines)
@@ -0,0 +1,160 @@
{ config, lib, pkgs, ... }:

let
  cfg = config.services.usda-vision;

  # Get packages from the flake (self reference)
  camera-sdk = cfg.package.camera-sdk or (pkgs.callPackage ./camera-sdk.nix {});
  usda-vision-app = cfg.package.usda-vision or (pkgs.callPackage ./package.nix {});
in

{
  options.services.usda-vision = {
    enable = lib.mkEnableOption "USDA Vision system";

    package = lib.mkOption {
      type = lib.types.attrs;
      default = {};
      description = "Package set containing camera-sdk and usda-vision packages";
    };

    hostname = lib.mkOption {
      type = lib.types.str;
      default = "exp-dash";
      description = "Hostname or IP address that replaces exp-dash and localhost in the deployed docker-compose.yml";
    };

    replaceHostnames = lib.mkOption {
      type = lib.types.bool;
      default = false;
      description = "Whether to replace exp-dash and localhost hostnames with the configured hostname";
    };

    envFile = lib.mkOption {
      type = lib.types.nullOr lib.types.path;
      default = null;
      description = "Path to environment file (managed by ragenix in deployment)";
    };
  };

  config = lib.mkIf cfg.enable {
    # System packages
    environment.systemPackages = with pkgs; [
      docker
      docker-compose
      supabase-cli
      camera-sdk
      usda-vision-app
    ];

    # Make camera SDK libraries available system-wide
    environment.variables = {
      LD_LIBRARY_PATH = "${camera-sdk}/lib";
    };

    # Enable Docker service
    virtualisation.docker = {
      enable = true;
      autoPrune.enable = true;
      daemon.settings = {
        experimental = true;
      };
    };

    # Create persistent directories
    systemd.tmpfiles.rules = [
      "d /var/lib/usda-vision 0755 root root -"
      "f /var/lib/usda-vision/.env 0644 root root -"
      "d /var/lib/supabase 0755 root root -"
    ];

    # Supabase CLI service
    systemd.services.supabase-cli = {
      enable = true;
      description = "Supabase CLI Service";

      preStart = ''
        rm -rf /var/lib/supabase/*
        rm -rf /var/lib/supabase/.* 2>/dev/null || true

        if [ -d ${usda-vision-app}/opt/usda-vision/supabase ]; then
          ${pkgs.rsync}/bin/rsync -av ${usda-vision-app}/opt/usda-vision/supabase/ /var/lib/supabase/supabase/
        fi

        mkdir -p /var/lib/supabase/supabase/.branches
        chmod -R 755 /var/lib/supabase
      '';

      serviceConfig = {
        WorkingDirectory = "/var/lib/supabase";
        EnvironmentFile = lib.mkIf (cfg.envFile != null) cfg.envFile;
        ExecStart = "${pkgs.supabase-cli}/bin/supabase start";
        ExecStop = "${pkgs.supabase-cli}/bin/supabase stop";
        Type = "oneshot";
        RemainAfterExit = true;
        User = "root";
        Group = "root";
      };
    };

    # USDA Vision docker compose service
    systemd.services.usda-vision = {
      description = "USDA Vision Docker Compose Stack";
      after = [ "docker.service" "network-online.target" "systemd-tmpfiles-setup.service" ];
      wants = [ "network-online.target" ];
      wantedBy = [ "multi-user.target" ];

      unitConfig = lib.mkIf (cfg.envFile != null) {
        ConditionPathExists = cfg.envFile;
      };

      preStart = ''
        echo "Syncing application code to /var/lib/usda-vision..."
        ${pkgs.rsync}/bin/rsync -av --delete \
          --checksum \
          --exclude='node_modules' \
          --exclude='.env' \
          --exclude='.env.azure' \
          --exclude='__pycache__' \
          --exclude='.venv' \
          ${usda-vision-app}/opt/usda-vision/ /var/lib/usda-vision/

        ${lib.optionalString cfg.replaceHostnames ''
          echo "Replacing hostnames (exp-dash, localhost) with ${cfg.hostname} in docker-compose.yml..."
          ${pkgs.gnused}/bin/sed -i \
            -e 's|exp-dash|${cfg.hostname}|g' \
            -e 's|localhost|${cfg.hostname}|g' \
            /var/lib/usda-vision/docker-compose.yml
        ''}

        ${lib.optionalString (cfg.envFile != null) ''
          echo "Copying environment file from managed secret..."
          cp ${cfg.envFile} /var/lib/usda-vision/.env
          chmod 644 /var/lib/usda-vision/.env
        ''}

        ${lib.optionalString (cfg.envFile == null) ''
          if [ ! -s /var/lib/usda-vision/.env ]; then
            if [ -f ${usda-vision-app}/opt/usda-vision/.env.example ]; then
              echo "WARNING: No environment file provided, using .env.example"
              cp ${usda-vision-app}/opt/usda-vision/.env.example /var/lib/usda-vision/.env
            fi
          fi
        ''}
      '';

      serviceConfig = {
        Type = "oneshot";
        RemainAfterExit = true;
        WorkingDirectory = "/var/lib/usda-vision";
        User = "root";
        Group = "root";
        ExecStart = "${pkgs.docker-compose}/bin/docker-compose -f /var/lib/usda-vision/docker-compose.yml up -d --build";
        ExecStop = "${pkgs.docker-compose}/bin/docker-compose -f /var/lib/usda-vision/docker-compose.yml down";
        ExecReload = "${pkgs.bash}/bin/bash -c '${pkgs.docker-compose}/bin/docker-compose -f /var/lib/usda-vision/docker-compose.yml down && ${pkgs.docker-compose}/bin/docker-compose -f /var/lib/usda-vision/docker-compose.yml up -d --build'";
        TimeoutStartSec = 300;
        TimeoutStopSec = 120;
      };
    };
  };
}
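
For reference, a sketch of how the options above might be set on a deployment host. The address and the secret name are hypothetical; the config.age.secrets.<name>.path accessor assumes the host already imports the ragenix/agenix module that the envFile description refers to.

{ config, ... }:
{
  services.usda-vision = {
    enable = true;

    # Rewrite exp-dash and localhost in docker-compose.yml to this machine's address
    replaceHostnames = true;
    hostname = "192.168.1.50";  # hypothetical address of the deployment host

    # Environment file decrypted by ragenix at activation time (hypothetical secret name)
    envFile = config.age.secrets."usda-vision-env".path;
  };
}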