Reorganization and better Terraform

Signed-off-by: main <magic_rb@redalder.org>
This commit is contained in:
main 2022-07-30 23:27:40 +02:00
parent b0de53e57f
commit 2e61e7ef3c
No known key found for this signature in database
GPG key ID: 08D5287CC5DDCA0E
55 changed files with 1572 additions and 1477 deletions

View file

@ -1,3 +0,0 @@
# Vault policy: read-only access to the camptules secret (KV v2 data path).
path "kv/data/camptules" {
capabilities = ["read"]
}

View file

@ -1,29 +0,0 @@
# Camptules module: Vault policy plus the Nomad job, pinned to one flake rev.
# Flake reference (e.g. "<host>?rev=<rev>") consumed by the jobspec.
variable "flake_ref" {
type = string
}
# Checksum pinning the flake reference above (presumably a sha256 — confirm
# with the containerd driver docs).
variable "flake_sha" {
type = string
}
# data "nomad_plugin" "nomad-driver-containerd" {
# plugin_id = "nomad-driver-containerd"
# wait_for_healthy = true
# }
# NOTE(review): the resource label "camputules-policy" is misspelled (both the
# policy name and the policy file say "camptules").  Renaming the label would
# change the Terraform address and force a destroy/recreate, so it is only
# flagged here.
resource "vault_policy" "camputules-policy" {
name = "camptules-policy"
policy = file("${path.module}/camptules-policy.hcl")
}
# Deploys the camptules Nomad job; the flake pin is forwarded as HCL2 job vars.
resource "nomad_job" "camptules" {
jobspec = file("${path.module}/nomad.hcl")
hcl2 {
enabled = true
vars = {
flake_ref = var.flake_ref
flake_sha = var.flake_sha
}
}
}

View file

@ -1,3 +0,0 @@
# Vault policy: read-only access to the dovecot secret (KV v2 data path).
path "kv/data/dovecot" {
capabilities = ["read"]
}

View file

@ -1,3 +0,0 @@
# Vault policy: read-only access to the getmail secret (KV v2 data path).
path "kv/data/getmail" {
capabilities = ["read"]
}

View file

@ -1,3 +0,0 @@
# Vault policy: read-only access to the gitea secret (KV v2 data path).
path "kv/data/gitea" {
capabilities = ["read"]
}

View file

@ -1,3 +0,0 @@
# Vault policy: read-only access to the home-assistant secret (KV v2 data path).
path "kv/data/home-assistant" {
capabilities = ["read"]
}

View file

@ -1,3 +0,0 @@
# Vault policy: read-only access to the mqtt secret (KV v2 data path).
path "kv/data/mqtt" {
capabilities = ["read"]
}

View file

@ -1,7 +0,0 @@
# Vault policy: read-only access to both the mqtt and zigbee2mqtt secrets
# (KV v2 data paths) — used by the home-assistant stack.
path "kv/data/mqtt" {
capabilities = ["read"]
}
path "kv/data/zigbee2mqtt" {
capabilities = ["read"]
}

View file

@ -1,3 +0,0 @@
# Vault policy: read-only access to the hydra secret (KV v2 data path).
path "kv/data/hydra" {
capabilities = ["read"]
}

View file

@ -1,583 +0,0 @@
variable "flake_ref" {
type = string
}
variable "flake_sha" {
type = string
}
job "ingress" {
datacenters = [ "do-1", "homelab-1" ]
type = "service"
group "ingress-toothpick" {
count = 1
constraint {
attribute = "${attr.unique.hostname}"
value = "toothpick"
}
volume "ingress-letsencrypt" {
type = "csi"
source = "ingress-letsencrypt"
read_only = false
attachment_mode = "file-system"
access_mode = "single-node-writer"
}
network {
mode = "bridge"
port "http" {
static = 80
to = 80
host_network = "public"
}
port "https" {
static = 443
to = 443
host_network = "public"
}
port "minecraft" {
static = 25565
to = 25565
host_network = "public"
}
}
service {
name = "ingress-toothpick"
port = "http"
connect {
sidecar_service {
proxy {
upstreams {
destination_name = "gitea"
local_bind_port = 3000
datacenter = "homelab-1"
mesh_gateway {
mode = "local"
}
}
upstreams {
destination_name = "hydra"
local_bind_port = 8666
datacenter = "homelab-1"
mesh_gateway {
mode = "local"
}
}
upstreams {
destination_name = "nextcloud"
local_bind_port = 8777
datacenter = "homelab-1"
mesh_gateway {
mode = "local"
}
}
upstreams {
destination_name = "website"
local_bind_port = 8080
datacenter = "homelab-1"
mesh_gateway {
mode = "local"
}
}
upstreams {
destination_name = "minecraft"
local_bind_port = 2666
datacenter = "homelab-1"
mesh_gateway {
mode = "local"
}
}
upstreams {
destination_name = "reicio"
local_bind_port = 8000
datacenter = "homelab-1"
mesh_gateway {
mode = "local"
}
}
}
}
}
}
task "nginx" {
driver = "docker"
volume_mount {
volume = "ingress-letsencrypt"
destination = "/etc/letsencrypt"
read_only = false
}
# artifact {
# source = "http://hydra/build/99/download/1/image.tar.gz"
# }
config {
# load = "nixng-ingress.tar.gz"
image = "nixng-ingress:local"
ports = ["http", "https", "minecraft"]
memory_hard_limit = 128
}
resources {
cpu = 200
memory = 32
}
template {
data = <<EOF
ssl_certificate_key /etc/letsencrypt/live/redalder.org/privkey.pem;
ssl_certificate /etc/letsencrypt/live/redalder.org/fullchain.pem;
ssl_session_cache shared:le_nginx_SSL:10m;
ssl_session_timeout 1440m;
ssl_session_tickets off;
ssl_protocols TLSv1.2 TLSv1.3;
ssl_prefer_server_ciphers off;
ssl_ciphers "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384";
EOF
destination = "local/ssl.conf"
change_mode = "signal"
change_signal = "SIGHUP"
}
template {
data = <<EOF
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
EOF
destination = "local/headers.conf"
change_mode = "signal"
change_signal = "SIGHUP"
}
template {
data = <<EOF
add_header X-Frame-Options "SAMEORIGIN";
add_header Content-Security-Policy "default-src 'self' http: https: data: blob: 'unsafe-inline'" always;
EOF
destination = "local/security.conf"
change_mode = "signal"
change_signal = "SIGHUP"
}
template {
data = <<EOF
upstream minecraft {
server {{ env "NOMAD_UPSTREAM_ADDR_minecraft" }};
}
server {
listen 25565;
proxy_pass minecraft;
}
EOF
destination = "local/streams.conf"
change_mode = "signal"
change_signal = "SIGHUP"
}
template {
data = <<EOF
upstream gitea {
server {{ env "NOMAD_UPSTREAM_ADDR_gitea" }};
}
upstream hydra {
server {{ env "NOMAD_UPSTREAM_ADDR_hydra" }};
}
upstream nextcloud {
server {{ env "NOMAD_UPSTREAM_ADDR_nextcloud" }};
}
upstream website {
server {{ env "NOMAD_UPSTREAM_ADDR_website" }};
}
upstream reicio {
server {{ env "NOMAD_UPSTREAM_ADDR_reicio" }};
}
server {
listen 443 ssl;
server_name _;
include /local/ssl.conf;
return 404;
}
server {
listen 443 ssl;
server_name gitea.redalder.org;
include /local/security.conf;
include /local/ssl.conf;
client_max_body_size 100M;
location / {
include /local/headers.conf;
proxy_pass http://gitea;
}
}
server {
listen 443 ssl;
server_name hydra.redalder.org;
include /local/security.conf;
include /local/ssl.conf;
location / {
include /local/headers.conf;
proxy_pass http://hydra;
}
}
server {
listen 443 ssl;
server_name redalder.org nixng.org;
include /local/security.conf;
include /local/ssl.conf;
location /nextcloud/ {
include /local/headers.conf;
proxy_pass http://nextcloud/;
}
location /reicio/ {
include /local/headers.conf;
proxy_pass http://reicio/;
}
location / {
include /local/headers.conf;
proxy_pass http://website;
}
}
EOF
destination = "local/upstreams.conf"
change_mode = "signal"
change_signal = "SIGHUP"
}
}
}
group "ingress-blowhole" {
count = 1
constraint {
attribute = "${attr.unique.hostname}"
value = "blowhole"
}
network {
mode = "bridge"
port "http" {
static = 8080
to = 80
}
port "https" {
static = 443
to = 443
}
port "jellyfin" {
static = 8096
to = 8096
}
port "imap" {
static = 143
to = 143
}
}
service {
name = "ingress-blowhole"
port = "80"
connect {
sidecar_service {
proxy {
upstreams {
destination_name = "jellyfin"
local_bind_port = 8001
}
upstreams {
destination_name = "zigbee2mqtt"
local_bind_port = 8002
}
upstreams {
destination_name = "home-assistant"
local_bind_port = 8003
}
upstreams {
destination_name = "syncthing"
local_bind_port = 8004
}
upstreams {
destination_name = "dovecot-imap"
local_bind_port = 8005
}
}
}
sidecar_task {
resources {
cpu = 75
memory = 48
}
config {
memory_hard_limit = 96
image = "envoyproxy/envoy:v1.20.2"
}
}
}
}
task "nginx" {
driver = "docker"
config {
image = "ra-systems-ingress-blowhole:local"
ports = ["http", "https", "jellyfin"]
memory_hard_limit = 128
}
resources {
cpu = 200
memory = 32
}
template {
data = <<EOF
ssl_certificate_key /etc/letsencrypt/live/redalder.org/privkey.pem;
ssl_certificate /etc/letsencrypt/live/redalder.org/fullchain.pem;
ssl_session_cache shared:le_nginx_SSL:10m;
ssl_session_timeout 1440m;
ssl_session_tickets off;
ssl_protocols TLSv1.2 TLSv1.3;
ssl_prefer_server_ciphers off;
ssl_ciphers "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384";
EOF
destination = "local/ssl.conf"
change_mode = "signal"
change_signal = "SIGHUP"
}
template {
data = <<EOF
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
EOF
destination = "local/headers.conf"
change_mode = "signal"
change_signal = "SIGHUP"
}
template {
data = <<EOF
add_header X-Frame-Options "SAMEORIGIN";
add_header Content-Security-Policy "default-src 'self' http: https: data: blob: 'unsafe-inline'" always;
EOF
destination = "local/security.conf"
change_mode = "signal"
change_signal = "SIGHUP"
}
template {
data = <<EOF
upstream jellyfin {
server {{ env "NOMAD_UPSTREAM_ADDR_jellyfin" }};
}
upstream zigbee2mqtt {
server {{ env "NOMAD_UPSTREAM_ADDR_zigbee2mqtt" }};
}
upstream home-assistant {
server {{ env "NOMAD_UPSTREAM_ADDR_home-assistant" }};
}
upstream syncthing {
server {{ env "NOMAD_UPSTREAM_ADDR_syncthing" }};
}
server {
listen 8096;
server_name _;
include /local/security.conf;
client_max_body_size 100M;
location /jellyfin/ {
# Proxy main Jellyfin traffic
# The / at the end is significant.
# https://www.acunetix.com/blog/articles/a-fresh-look-on-reverse-proxy-related-attacks/
proxy_pass http://jellyfin/jellyfin/;
proxy_pass_request_headers on;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $http_host;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $http_connection;
# Disable buffering when the nginx proxy gets very resource heavy upon streaming
proxy_buffering off;
}
location /syncthing/ {
include /local/headers.conf;
proxy_pass http://syncthing/;
}
location ~ "^/(static/icons/|static/fonts/|static/translations/|static/images/|static/panels/|static/polyfills|api/|auth/|frontend_latest/|frontend_es5/|local/|lovelace|map|config|developer-tools|history|logbook|profile|states|hassio|onboarding.html|service_worker.js|authorize.html|manifest.json)" {
include /local/headers.conf;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_pass http://home-assistant;
}
location /home-assistant {
include /local/headers.conf;
rewrite /home-assistant/(.*) /$1 break;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_pass http://home-assistant;
}
location /zigbee2mqtt/ {
include /local/headers.conf;
# rewrite /zigbee2mqtt/(.*) /$1 break;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_pass http://zigbee2mqtt/;
}
}
server {
listen 80;
server_name _;
include /local/security.conf;
client_max_body_size 100M;
location /jellyfin/ {
# Proxy main Jellyfin traffic
# The / at the end is significant.
# https://www.acunetix.com/blog/articles/a-fresh-look-on-reverse-proxy-related-attacks/
proxy_pass http://jellyfin/jellyfin/;
proxy_pass_request_headers on;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $http_host;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $http_connection;
# Disable buffering when the nginx proxy gets very resource heavy upon streaming
proxy_buffering off;
}
}
EOF
destination = "local/upstreams.conf"
change_mode = "signal"
change_signal = "SIGHUP"
}
template {
data = <<EOF
upstream dovecot-imap {
server {{ env "NOMAD_UPSTREAM_ADDR_dovecot-imap" }};
}
server {
listen 143;
proxy_pass dovecot-imap;
}
EOF
destination = "local/streams.conf"
change_mode = "signal"
change_signal = "SIGHUP"
}
}
}
}

View file

@ -1,101 +0,0 @@
# Root wiring for the service modules.  Every module receives a fully pinned
# flake reference ("<host>?rev=<rev>") together with its checksum.
variable "flake_rev" {
type = string
}
variable "flake_host" {
type = string
}
# Alternate flake host, used by modules that must not fetch from the primary
# host (see module "gitea" below).
variable "flake_host_alt" {
type = string
}
variable "flake_sha" {
type = string
}
module "camptules" {
source = "./camptules"
flake_ref = "${var.flake_host}?rev=${var.flake_rev}"
flake_sha = var.flake_sha
}
module "email" {
source = "./email"
flake_ref = "${var.flake_host}?rev=${var.flake_rev}"
flake_sha = var.flake_sha
}
# gitea pulls from the alternate host — presumably because gitea itself serves
# the primary flake host and cannot fetch from itself; TODO confirm.
module "gitea" {
source = "./gitea"
flake_ref = "${var.flake_host_alt}?rev=${var.flake_rev}"
flake_sha = var.flake_sha
}
module "home-assistant" {
source = "./home-assistant"
flake_ref = "${var.flake_host}?rev=${var.flake_rev}"
flake_sha = var.flake_sha
}
module "hydra" {
source = "./hydra"
flake_ref = "${var.flake_host}?rev=${var.flake_rev}"
flake_sha = var.flake_sha
}
module "ingress" {
source = "./ingress"
flake_ref = "${var.flake_host}?rev=${var.flake_rev}"
flake_sha = var.flake_sha
}
module "jellyfin" {
source = "./jellyfin"
flake_ref = "${var.flake_host}?rev=${var.flake_rev}"
flake_sha = var.flake_sha
}
# NOTE(review): module "mesh" points at ./syncthing — the same source as
# module "syncthing" below.  This looks like a copy-paste slip (expected
# something like ./mesh); verify before reusing this file.
module "mesh" {
source = "./syncthing"
flake_ref = "${var.flake_host}?rev=${var.flake_rev}"
flake_sha = var.flake_sha
}
// minecraft
module "plugin-nfs" {
source = "./plugin-nfs"
flake_ref = "${var.flake_host}?rev=${var.flake_rev}"
flake_sha = var.flake_sha
}
module "reicio" {
source = "./reicio"
flake_ref = "${var.flake_host}?rev=${var.flake_rev}"
flake_sha = var.flake_sha
}
module "syncthing" {
source = "./syncthing"
flake_ref = "${var.flake_host}?rev=${var.flake_rev}"
flake_sha = var.flake_sha
}
module "website" {
source = "./website"
flake_ref = "${var.flake_host}?rev=${var.flake_rev}"
flake_sha = var.flake_sha
}

View file

@ -1,19 +0,0 @@
# Mesh module: deploys the mesh jobspec, forwarding the flake pin as HCL2
# job variables.
variable "flake_ref" {
type = string
}
variable "flake_sha" {
type = string
}
resource "nomad_job" "mesh" {
jobspec = file("${path.module}/nomad.hcl")
hcl2 {
enabled = true
vars = {
flake_ref = var.flake_ref
flake_sha = var.flake_sha
}
}
}

View file

@ -1,19 +0,0 @@
# Nomad CSI volume spec (for "nomad volume register"): the minecraft world
# data, served over NFS from 10.64.1.201.
type = "csi"
id = "minecraft"
name = "minecraft"
plugin_id = "nfs"
capability {
access_mode = "single-node-writer"
attachment_mode = "file-system"
}
context {
server = "10.64.1.201"
share = "/var/nfs/minecraft/atm6"
}
mount_options {
fs_type = "nfs"
# nolock disables NFS file locking — presumably the export does not run
# lockd; confirm before removing.
mount_flags = [ "nolock" ]
}

View file

@ -1,58 +0,0 @@
# Minecraft server: single instance on homelab-1, world data on the
# "minecraft" CSI (NFS) volume, exposed through Consul Connect.
job "minecraft" {
datacenters = [ "homelab-1" ]
type = "service"
group "minecraft" {
count = 1
volume "minecraft" {
type = "csi"
source = "minecraft"
read_only = false
attachment_mode = "file-system"
access_mode = "single-node-writer"
}
network {
mode = "bridge"
}
service {
name = "minecraft"
# Numeric string port: the in-namespace container port, not a named port.
port = "25565"
connect {
sidecar_service {}
}
}
task "minecraft" {
driver = "docker"
volume_mount {
volume = "minecraft"
destination = "/run/cfg/minecraft"
read_only = false
}
config {
image = "nixng-minecraft:local"
memory_hard_limit = 8192
# sys_admin + /dev/fuse: the container mounts a FUSE filesystem —
# broad capability, TODO confirm what is mounted.
cap_add = ["sys_admin"]
devices = [
{
host_path = "/dev/fuse"
container_path = "/dev/fuse"
}
]
}
resources {
cpu = 4096
memory = 4096
}
}
}
}

View file

@ -1,31 +0,0 @@
# NFS CSI plugin module: registers the controller job and the per-node
# (system) plugin job.
variable "flake_ref" {
type = string
}
variable "flake_sha" {
type = string
}
resource "nomad_job" "nfs-controller" {
jobspec = file("${path.module}/nfs-controller.hcl")
hcl2 {
enabled = true
vars = {
flake_ref = var.flake_ref
flake_sha = var.flake_sha
}
}
}
resource "nomad_job" "nfs-nodes" {
jobspec = file("${path.module}/nfs-nodes.hcl")
hcl2 {
enabled = true
vars = {
flake_ref = var.flake_ref
flake_sha = var.flake_sha
}
}
}

View file

@ -1,43 +0,0 @@
# System job: runs the NFS CSI node plugin on every client in both
# datacenters.
variable "flake_ref" {
type = string
}
variable "flake_sha" {
type = string
}
job "plugin-nfs-nodes" {
datacenters = [ "homelab-1", "do-1" ]
type = "system"
group "nodes" {
task "plugin" {
driver = "docker"
config {
image = "mcr.microsoft.com/k8s/csi/nfs-csi:latest" # "csi-driver-nfs:local"
args = [
"--endpoint=unix://csi/csi.sock",
# NOTE(review): hard-coded node id on a system job makes every
# client register as "blowhole-0"; it should be derived per node
# (e.g. ${attr.unique.hostname}).
"--nodeid=blowhole-0",
"--logtostderr",
"--v=5",
]
# Privileged so the plugin can perform mounts on the host.
privileged = true
}
csi_plugin {
id = "nfs"
type = "node"
mount_dir = "/csi"
}
resources {
cpu = 250
memory = 128
}
}
}
}

View file

@ -1,19 +0,0 @@
# Reicio module: deploys the reicio jobspec, forwarding the flake pin as
# HCL2 job variables.
variable "flake_ref" {
type = string
}
variable "flake_sha" {
type = string
}
resource "nomad_job" "reicio" {
jobspec = file("${path.module}/reicio.hcl")
hcl2 {
enabled = true
vars = {
flake_ref = var.flake_ref
flake_sha = var.flake_sha
}
}
}

View file

@ -1,111 +0,0 @@
# Syncthing: single instance on homelab-1, with data/config/storage on three
# CSI volumes, run as a NixNG system via the containerd driver, behind
# Consul Connect.
variable "flake_ref" {
type = string
}
variable "flake_sha" {
type = string
}
job "syncthing" {
datacenters = [ "homelab-1" ]
type = "service"
group "syncthing" {
count = 1
volume "syncthing-data" {
type = "csi"
source = "syncthing-data"
read_only = false
attachment_mode = "file-system"
access_mode = "single-node-writer"
}
volume "syncthing-config" {
type = "csi"
source = "syncthing-config"
read_only = false
attachment_mode = "file-system"
access_mode = "single-node-writer"
}
volume "syncthing-storage" {
type = "csi"
source = "syncthing-storage"
read_only = false
attachment_mode = "file-system"
access_mode = "single-node-writer"
}
network {
mode = "bridge"
}
service {
name = "syncthing"
# Numeric string port: the in-namespace GUI port.
port = "8384"
## Health check intentionally disabled: with auth enabled Syncthing
## answers the probe with an auth error (noted upstream as 402; typically
## 401/403) and Nomad interprets that as service failure.
# check {
# type = "http"
# address_mode = "alloc"
# path = "/"
# port = "8384"
# interval = "10s"
# timeout = "10s"
# }
connect {
sidecar_service {}
sidecar_task {
resources {
cpu = 75
memory = 48
}
config {
memory_hard_limit = 96
}
}
}
}
task "syncthing" {
driver = "containerd-driver"
config {
# Builds/pulls the NixNG syncthing system toplevel from the pinned flake.
flake_ref = "${var.flake_ref}#nixngSystems.syncthing.config.system.build.toplevel"
flake_sha = var.flake_sha
entrypoint = [ "init" ]
}
resources {
cpu = 128
memory = 128
}
volume_mount {
volume = "syncthing-data"
destination = "/var/syncthing/data"
read_only = false
}
volume_mount {
volume = "syncthing-config"
destination = "/var/syncthing/config"
read_only = false
}
volume_mount {
volume = "syncthing-storage"
destination = "/var/syncthing/storage"
read_only = false
}
}
}
}

View file

@ -1,24 +0,0 @@
# Website module: deploys the website jobspec, forwarding the flake pin as
# HCL2 job variables.
variable "flake_ref" {
type = string
}
variable "flake_sha" {
type = string
}
# data "nomad_plugin" "nomad-driver-containerd" {
# plugin_id = "nomad-driver-containerd"
# wait_for_healthy = true
# }
resource "nomad_job" "website" {
jobspec = file("${path.module}/nomad.hcl")
hcl2 {
enabled = true
vars = {
flake_ref = var.flake_ref
flake_sha = var.flake_sha
}
}
}

17
main.tf
View file

@ -8,6 +8,14 @@ terraform {
provider "nomad" { provider "nomad" {
address = "http://10.64.1.201:4646" address = "http://10.64.1.201:4646"
region = "homelab-1"
alias = "homelab"
}
provider "nomad" {
address = "http://10.64.1.201:4646"
region = "do-1"
alias = "do"
} }
provider "vault" { provider "vault" {
@ -38,3 +46,12 @@ module "infrastructure" {
flake_host_alt = var.flake_host_alt flake_host_alt = var.flake_host_alt
flake_sha = var.flake_sha flake_sha = var.flake_sha
} }
module "nomad" {
source = "./nomad"
flake_rev = var.flake_rev
flake_host = var.flake_host
flake_host_alt = var.flake_host_alt
flake_sha = var.flake_sha
}

24
nomad/main.tf Normal file
View file

@ -0,0 +1,24 @@
# Entry point of the nomad tree: forwards the flake pin into the per-region
# module.
variable "flake_rev" {
type = string
}
variable "flake_host" {
type = string
}
variable "flake_host_alt" {
type = string
}
variable "flake_sha" {
type = string
}
module "regions" {
source = "./regions"
flake_rev = var.flake_rev
flake_host = var.flake_host
flake_host_alt = var.flake_host_alt
flake_sha = var.flake_sha
}

View file

@ -1,21 +1,16 @@
variable "flake_ref" { variable "datacenters" {
type = string type = list(string)
}
variable "flake_sha" {
type = string
} }
job "gateway-mesh" { job "gateway-mesh" {
datacenters = [ "homelab-1", "do-1" ] datacenters = var.datacenters
type = "system"
group "envoy" { group "envoy" {
network { network {
mode = "bridge" mode = "bridge"
port "mesh_wan" { port "mesh_wan" {
host_network = "mesh"
} }
} }

View file

@ -0,0 +1,20 @@
# Gateway-mesh module: deploys the mesh gateway jobspec into the given
# datacenters.  The nomad provider instance is supplied by the caller
# (aliased per region).
variable "datacenters" {
type = list(string)
}
terraform {
required_providers {
nomad = {}
}
}
resource "nomad_job" "mesh" {
jobspec = file("${path.module}/main.hcl")
hcl2 {
enabled = true
vars = {
# HCL2 job variables are passed as strings, so the list is JSON-encoded
# here and decoded by the jobspec's list(string) variable.
"datacenters" = jsonencode(var.datacenters)
}
}
}

View file

@ -1,25 +1,24 @@
variable "flake_ref" { variable "region" {
type = string type = string
} }
variable "flake_sha" { variable "datacenters" {
type = string type = list(string)
} }
job "plugin-nfs-controller" { job "nfs-controller" {
datacenters = [ "homelab-1" ] datacenters = var.datacenters
region = var.region
group "controller" { group "controller" {
task "plugin" { task "plugin" {
driver = "docker" driver = "docker"
config { config {
image = "mcr.microsoft.com/k8s/csi/nfs-csi:latest" # "csi-driver-nfs:local" image = "mcr.microsoft.com/k8s/csi/nfs-csi:latest"
args = [ args = [
"--endpoint=unix://csi/csi.sock", "--endpoint=unix://csi/csi.sock",
"--nodeid=contoller",
"--logtostderr",
"-v=5", "-v=5",
] ]
} }

42
nomad/modules/nfs/main.tf Normal file
View file

@ -0,0 +1,42 @@
# NFS CSI module for one region: registers a controller job plus a per-node
# (system) plugin job.
variable "region" {
type = string
}
# Datacenters that run the node plugin.
variable "node_dcs" {
type = list(string)
}
# Datacenters for the controller; when null, falls back to node_dcs.
variable "controller_dcs" {
type = list(string)
default = null
}
terraform {
required_providers {
nomad = {}
}
}
resource "nomad_job" "nfs-controller" {
jobspec = file("${path.module}/controller.hcl")
hcl2 {
enabled = true
vars = {
"region" = var.region
# Default the controller placement to the node datacenters.
"datacenters" = jsonencode(var.controller_dcs == null ? var.node_dcs : var.controller_dcs)
}
}
}
resource "nomad_job" "nfs-node" {
jobspec = file("${path.module}/node.hcl")
hcl2 {
enabled = true
vars = {
"region" = var.region
"datacenters" = jsonencode(var.node_dcs)
}
}
}

View file

@ -0,0 +1,44 @@
# System job: NFS CSI node plugin, one instance per client node in the
# given region/datacenters.
variable "region" {
type = string
}
variable "datacenters" {
type = list(string)
}
job "nfs-node" {
datacenters = var.datacenters
region = var.region
type = "system"
group "nodes" {
task "plugin" {
driver = "docker"
config {
image = "mcr.microsoft.com/k8s/csi/nfs-csi:latest"
args = [
"--v=5",
# Node id derived from the hostname attribute, unique per client.
"--nodeid=${attr.unique.hostname}",
"--endpoint=unix:///csi/csi.sock",
"--drivername=nfs.csi.k8s.io"
]
# Privileged so the plugin can perform mounts on the host.
privileged = true
}
csi_plugin {
id = "nfs"
type = "node"
mount_dir = "/csi"
}
resources {
cpu = 250
memory = 128
}
}
}
}

View file

@ -0,0 +1,83 @@
# nginx HTTPS vhosts for the public ingress.  Rendered through a Nomad
# template: each {{ env ... }} resolves a Consul Connect upstream address
# at render time.
upstream gitea {
server {{ env "NOMAD_UPSTREAM_ADDR_gitea" }};
}
upstream hydra {
server {{ env "NOMAD_UPSTREAM_ADDR_hydra" }};
}
upstream nextcloud {
server {{ env "NOMAD_UPSTREAM_ADDR_nextcloud" }};
}
upstream website {
server {{ env "NOMAD_UPSTREAM_ADDR_website" }};
}
upstream reicio {
server {{ env "NOMAD_UPSTREAM_ADDR_reicio" }};
}
# Default vhost: requests for unknown server names get a 404 instead of
# falling through to the first configured site.
server {
listen 443 ssl;
server_name _;
include /local/ssl.conf;
return 404;
}
server {
listen 443 ssl;
server_name gitea.redalder.org;
include /local/security.conf;
include /local/ssl.conf;
# Allow large pushes/uploads to gitea.
client_max_body_size 100M;
location / {
include /local/headers.conf;
proxy_pass http://gitea;
}
}
server {
listen 443 ssl;
server_name hydra.redalder.org;
include /local/security.conf;
include /local/ssl.conf;
location / {
include /local/headers.conf;
proxy_pass http://hydra;
}
}
server {
listen 443 ssl;
server_name redalder.org nixng.org;
include /local/security.conf;
include /local/ssl.conf;
# Trailing slashes on proxy_pass strip the location prefix before
# forwarding.
location /nextcloud/ {
include /local/headers.conf;
proxy_pass http://nextcloud/;
}
location /reicio/ {
include /local/headers.conf;
proxy_pass http://reicio/;
}
location / {
include /local/headers.conf;
proxy_pass http://website;
}
}

View file

@ -0,0 +1,219 @@
variable "flake_ref" {
type = string
}
variable "flake_sha" {
type = string
}
variable "upstreams" {
type = string
}
job "ingress" {
datacenters = [ "do-1" ]
region = "do-1"
type = "service"
group "ingress" {
count = 1
constraint {
attribute = "${attr.unique.hostname}"
value = "toothpick"
}
volume "ingress-letsencrypt" {
type = "csi"
source = "ingress-letsencrypt"
read_only = false
attachment_mode = "file-system"
access_mode = "single-node-writer"
}
network {
mode = "bridge"
port "http" {
static = 80
to = 80
host_network = "public"
}
port "https" {
static = 443
to = 443
host_network = "public"
}
port "minecraft" {
static = 25565
to = 25565
host_network = "public"
}
}
service {
name = "ingress-toothpick"
port = "http"
connect {
sidecar_service {
proxy {
upstreams {
destination_name = "gitea"
local_bind_port = 3000
datacenter = "homelab-1"
mesh_gateway {
mode = "local"
}
}
upstreams {
destination_name = "hydra"
local_bind_port = 8666
datacenter = "homelab-1"
mesh_gateway {
mode = "local"
}
}
upstreams {
destination_name = "nextcloud"
local_bind_port = 8777
datacenter = "homelab-1"
mesh_gateway {
mode = "local"
}
}
upstreams {
destination_name = "website"
local_bind_port = 8080
datacenter = "homelab-1"
mesh_gateway {
mode = "local"
}
}
upstreams {
destination_name = "minecraft"
local_bind_port = 2666
datacenter = "homelab-1"
mesh_gateway {
mode = "local"
}
}
upstreams {
destination_name = "reicio"
local_bind_port = 8000
datacenter = "homelab-1"
mesh_gateway {
mode = "local"
}
}
}
}
}
}
task "nginx" {
driver = "docker"
volume_mount {
volume = "ingress-letsencrypt"
destination = "/etc/letsencrypt"
read_only = false
}
# artifact {
# source = "http://hydra/build/99/download/1/image.tar.gz"
# }
config {
# load = "nixng-ingress.tar.gz"
image = "nixng-ingress:local"
ports = ["http", "https", "minecraft"]
memory_hard_limit = 128
}
resources {
cpu = 200
memory = 32
}
template {
data = <<EOF
ssl_certificate_key /etc/letsencrypt/live/redalder.org/privkey.pem;
ssl_certificate /etc/letsencrypt/live/redalder.org/fullchain.pem;
ssl_session_cache shared:le_nginx_SSL:10m;
ssl_session_timeout 1440m;
ssl_session_tickets off;
ssl_protocols TLSv1.2 TLSv1.3;
ssl_prefer_server_ciphers off;
ssl_ciphers "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384";
EOF
destination = "local/ssl.conf"
change_mode = "signal"
change_signal = "SIGHUP"
}
template {
data = <<EOF
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
EOF
destination = "local/headers.conf"
change_mode = "signal"
change_signal = "SIGHUP"
}
template {
data = <<EOF
add_header X-Frame-Options "SAMEORIGIN";
add_header Content-Security-Policy "default-src 'self' http: https: data: blob: 'unsafe-inline'" always;
EOF
destination = "local/security.conf"
change_mode = "signal"
change_signal = "SIGHUP"
}
template {
data = <<EOF
upstream minecraft {
server {{ env "NOMAD_UPSTREAM_ADDR_minecraft" }};
}
server {
listen 25565;
proxy_pass minecraft;
}
EOF
destination = "local/streams.conf"
change_mode = "signal"
change_signal = "SIGHUP"
}
template {
data = var.upstreams
destination = "local/upstreams.conf"
change_mode = "signal"
change_signal = "SIGHUP"
}
}
}
}

View file

@ -1,13 +1,8 @@
variable "flake_ref" { data "local_file" "ingress-upstreams" {
type = string filename = "${path.module}/ingress-upstreams.conf"
}
variable "flake_sha" {
type = string
} }
resource "nomad_volume" "ingress-letsencrypt" { resource "nomad_volume" "ingress-letsencrypt" {
# depends_on = [data.nomad_plugin.nomad-driver-containerd]
type = "csi" type = "csi"
plugin_id = "nfs" plugin_id = "nfs"
volume_id = "ingress-letsencrypt" volume_id = "ingress-letsencrypt"
@ -31,13 +26,15 @@ resource "nomad_volume" "ingress-letsencrypt" {
} }
resource "nomad_job" "ingress" { resource "nomad_job" "ingress" {
jobspec = file("${path.module}/nomad.hcl") jobspec = file("${path.module}/ingress.hcl")
hcl2 { hcl2 {
enabled = true enabled = true
vars = { vars = {
flake_ref = var.flake_ref flake_ref = "${var.flake_host_alt}?rev=${var.flake_rev}"
flake_sha = var.flake_sha flake_sha = var.flake_sha
upstreams = data.local_file.ingress-upstreams.content
} }
} }
} }

View file

@ -0,0 +1,43 @@
variable "flake_rev" {
type = string
}
variable "flake_sha" {
type = string
}
variable "flake_host" {
type = string
}
variable "flake_host_alt" {
type = string
}
provider "nomad" {
address = "http://10.64.1.201:4646"
region = "do-1"
alias = "do-1"
}
module "nfs" {
source = "../../modules/nfs"
node_dcs = [ "do-1" ]
region = "do-1"
providers = {
nomad = nomad.do-1
}
}
module "gateway-mesh" {
source = "../../modules/gateway-mesh"
datacenters = [ "do-1" ]
providers = {
nomad = nomad.do-1
}
}

View file

@ -0,0 +1,57 @@
resource "nomad_volume" "baikal-specific" {
type = "csi"
plugin_id = "nfs"
volume_id = "baikal-specific"
name = "baikal-specific"
external_id = "baikal-specific"
capability {
access_mode = "single-node-writer"
attachment_mode = "file-system"
}
context = {
server = "blowhole.hosts.in.redalder.org"
share = "/var/nfs/baikal/specific"
}
mount_options {
fs_type = "nfs"
mount_flags = [ "nolock", "hard" ]
}
}
resource "nomad_volume" "baikal-config" {
type = "csi"
plugin_id = "nfs"
volume_id = "baikal-config"
name = "baikal-config"
external_id = "baikal-config"
capability {
access_mode = "single-node-writer"
attachment_mode = "file-system"
}
context = {
server = "blowhole.hosts.in.redalder.org"
share = "/var/nfs/baikal/config"
}
mount_options {
fs_type = "nfs"
mount_flags = [ "nolock", "hard" ]
}
}
resource "nomad_job" "baikal" {
jobspec = file("${path.module}/job/baikal.hcl")
hcl2 {
enabled = true
vars = {
flake_ref = "${var.flake_host}?rev=${var.flake_rev}"
flake_sha = var.flake_sha
}
}
}

View file

@ -0,0 +1,20 @@
# Camptules: Vault read policy (inline) plus the Nomad job, pinned to one
# flake rev.
# NOTE(review): the resource label "camputules-policy" is misspelled (the
# policy name says "camptules").  Renaming the label would change the
# Terraform address and force a destroy/recreate, so it is only flagged here.
resource "vault_policy" "camputules-policy" {
name = "camptules-policy"
policy = <<EOF
path "kv/data/camptules" {
capabilities = ["read"]
}
EOF
}
resource "nomad_job" "camptules" {
jobspec = file("${path.module}/job/camptules.hcl")
hcl2 {
enabled = true
vars = {
flake_ref = "${var.flake_host}?rev=${var.flake_rev}"
flake_sha = var.flake_sha
}
}
}

View file

@ -1,23 +1,22 @@
variable "flake_ref" {
type = string
}
variable "flake_sha" {
type = string
}
resource "vault_policy" "dovecot-policy" { resource "vault_policy" "dovecot-policy" {
name = "dovecot-policy" name = "dovecot-policy"
policy = file("${path.module}/dovecot-policy.hcl") policy = <<EOF
path "kv/data/getmail" {
capabilities = ["read"]
}
EOF
} }
resource "vault_policy" "getmail-policy" { resource "vault_policy" "getmail-policy" {
name = "getmail-policy" name = "getmail-policy"
policy = file("${path.module}/getmail-policy.hcl") policy = <<EOF
path "kv/data/dovecot" {
capabilities = ["read"]
}
EOF
} }
resource "nomad_volume" "dovecot_maildir" { resource "nomad_volume" "dovecot_maildir" {
# depends_on = [data.nomad_plugin.nomad-driver-containerd]
type = "csi" type = "csi"
plugin_id = "nfs" plugin_id = "nfs"
volume_id = "dovecot_maildir" volume_id = "dovecot_maildir"
@ -30,7 +29,7 @@ resource "nomad_volume" "dovecot_maildir" {
} }
context = { context = {
server = "blowhole.in.redalder.org" server = "blowhole.hosts.in.redalder.org"
share = "/var/nfs/dovecot/maildir" share = "/var/nfs/dovecot/maildir"
} }
@ -41,7 +40,6 @@ resource "nomad_volume" "dovecot_maildir" {
} }
resource "nomad_volume" "getmail_getmail-d" { resource "nomad_volume" "getmail_getmail-d" {
# depends_on = [data.nomad_plugin.nomad-driver-containerd]
type = "csi" type = "csi"
plugin_id = "nfs" plugin_id = "nfs"
volume_id = "getmail_getmail-d" volume_id = "getmail_getmail-d"
@ -54,7 +52,7 @@ resource "nomad_volume" "getmail_getmail-d" {
} }
context = { context = {
server = "blowhole.in.redalder.org" server = "blowhole.hosts.in.redalder.org"
share = "/var/nfs/getmail/getmail.d" share = "/var/nfs/getmail/getmail.d"
} }
@ -65,12 +63,12 @@ resource "nomad_volume" "getmail_getmail-d" {
} }
resource "nomad_job" "email" { resource "nomad_job" "email" {
jobspec = file("${path.module}/nomad.hcl") jobspec = file("${path.module}/job/email.hcl")
hcl2 { hcl2 {
enabled = true enabled = true
vars = { vars = {
flake_ref = var.flake_ref flake_ref = "${var.flake_host}?rev=${var.flake_rev}"
flake_sha = var.flake_sha flake_sha = var.flake_sha
} }
} }

View file

@ -1,18 +1,4 @@
variable "flake_ref" {
type = string
}
variable "flake_sha" {
type = string
}
# data "nomad_plugin" "nomad-driver-containerd" {
# plugin_id = "nomad-driver-containerd"
# wait_for_healthy = true
# }
resource "nomad_volume" "gitea-db" { resource "nomad_volume" "gitea-db" {
# depends_on = [data.nomad_plugin.nomad-driver-containerd]
type = "csi" type = "csi"
plugin_id = "nfs" plugin_id = "nfs"
volume_id = "gitea-db" volume_id = "gitea-db"
@ -25,7 +11,7 @@ resource "nomad_volume" "gitea-db" {
} }
context = { context = {
server = "blowhole.in.redalder.org" server = "blowhole.hosts.in.redalder.org"
share = "/var/nfs/gitea-db" share = "/var/nfs/gitea-db"
} }
@ -36,7 +22,6 @@ resource "nomad_volume" "gitea-db" {
} }
resource "nomad_volume" "gitea-data" { resource "nomad_volume" "gitea-data" {
# depends_on = [data.nomad_plugin.nomad-driver-containerd]
type = "csi" type = "csi"
plugin_id = "nfs" plugin_id = "nfs"
volume_id = "gitea-data" volume_id = "gitea-data"
@ -49,7 +34,7 @@ resource "nomad_volume" "gitea-data" {
} }
context = { context = {
server = "blowhole.in.redalder.org" server = "blowhole.hosts.in.redalder.org"
share = "/var/nfs/gitea-data" share = "/var/nfs/gitea-data"
} }
@ -61,16 +46,20 @@ resource "nomad_volume" "gitea-data" {
resource "vault_policy" "gitea-policy" { resource "vault_policy" "gitea-policy" {
name = "gitea-policy" name = "gitea-policy"
policy = file("${path.module}/gitea-policy.hcl") policy = <<EOF
path "kv/data/gitea" {
capabilities = ["read"]
}
EOF
} }
resource "nomad_job" "gitea" { resource "nomad_job" "gitea" {
jobspec = file("${path.module}/nomad.hcl") jobspec = file("${path.module}/job/gitea.hcl")
hcl2 { hcl2 {
enabled = true enabled = true
vars = { vars = {
flake_ref = var.flake_ref flake_ref = "${var.flake_host_alt}?rev=${var.flake_rev}"
flake_sha = var.flake_sha flake_sha = var.flake_sha
} }
} }

View file

@ -1,13 +1,4 @@
ariable "flake_ref" {
type = string
}
variable "flake_sha" {
type = string
}
resource "nomad_volume" "home-assistant_hass" { resource "nomad_volume" "home-assistant_hass" {
# depends_on = [data.nomad_plugin.nomad-driver-containerd]
type = "csi" type = "csi"
plugin_id = "nfs" plugin_id = "nfs"
volume_id = "home-assistant_hass" volume_id = "home-assistant_hass"
@ -20,7 +11,7 @@ resource "nomad_volume" "home-assistant_hass" {
} }
context = { context = {
server = "blowhole.in.redalder.org" server = "blowhole.hosts.in.redalder.org"
share = "/var/nfs/home-assistant_hass" share = "/var/nfs/home-assistant_hass"
} }
@ -31,7 +22,6 @@ resource "nomad_volume" "home-assistant_hass" {
} }
resource "nomad_volume" "home-assistant_zigbee2mqtt" { resource "nomad_volume" "home-assistant_zigbee2mqtt" {
# depends_on = [data.nomad_plugin.nomad-driver-containerd]
type = "csi" type = "csi"
plugin_id = "nfs" plugin_id = "nfs"
volume_id = "home-assistant_zigbee2mqtt" volume_id = "home-assistant_zigbee2mqtt"
@ -44,7 +34,7 @@ resource "nomad_volume" "home-assistant_zigbee2mqtt" {
} }
context = { context = {
server = "blowhole.in.redalder.org" server = "blowhole.hosts.in.redalder.org"
share = "/var/nfs/home-assistant_zigbee2mqtt" share = "/var/nfs/home-assistant_zigbee2mqtt"
} }
@ -55,7 +45,6 @@ resource "nomad_volume" "home-assistant_zigbee2mqtt" {
} }
resource "nomad_volume" "home-assistant_mosquitto" { resource "nomad_volume" "home-assistant_mosquitto" {
# depends_on = [data.nomad_plugin.nomad-driver-containerd]
type = "csi" type = "csi"
plugin_id = "nfs" plugin_id = "nfs"
volume_id = "home-assistant_mosquitto" volume_id = "home-assistant_mosquitto"
@ -68,7 +57,7 @@ resource "nomad_volume" "home-assistant_mosquitto" {
} }
context = { context = {
server = "blowhole.in.redalder.org" server = "blowhole.hosts.in.redalder.org"
share = "/var/nfs/home-assistant_mosquitto" share = "/var/nfs/home-assistant_mosquitto"
} }
@ -80,26 +69,42 @@ resource "nomad_volume" "home-assistant_mosquitto" {
resource "vault_policy" "home-assistant-policy" { resource "vault_policy" "home-assistant-policy" {
name = "home-assistant-policy" name = "home-assistant-policy"
policy = file("${path.module}/home-assistant-policy.hcl") policy = <<EOF
path "kv/data/home-assistant" {
capabilities = ["read"]
}
EOF
} }
resource "vault_policy" "zigbee2mqtt-policy" { resource "vault_policy" "zigbee2mqtt-policy" {
name = "zigbee2mqtt-policy" name = "zigbee2mqtt-policy"
policy = file("${path.module}/zigbee2mqtt-policy.hcl") policy = <<EOF
path "kv/data/mqtt" {
capabilities = ["read"]
}
path "kv/data/zigbee2mqtt" {
capabilities = ["read"]
}
EOF
} }
resource "vault_policy" "mosquitto-policy" { resource "vault_policy" "mosquitto-policy" {
name = "mosquitto-policy" name = "mosquitto-policy"
policy = file("${path.module}/mosquitto-policy.hcl") policy = <<EOF
path "kv/data/mqtt" {
capabilities = ["read"]
}
EOF
} }
resource "nomad_job" "home-assistant" { resource "nomad_job" "home-assistant" {
jobspec = file("${path.module}/nomad.hcl") jobspec = file("${path.module}/job/home-assistant.hcl")
hcl2 { hcl2 {
enabled = true enabled = true
vars = { vars = {
flake_ref = var.flake_ref flake_ref = "${var.flake_host}?rev=${var.flake_rev}"
flake_sha = var.flake_sha flake_sha = var.flake_sha
} }
} }

View file

@ -1,16 +1,3 @@
variable "flake_ref" {
type = string
}
variable "flake_sha" {
type = string
}
# data "nomad_plugin" "nomad-driver-containerd" {
# plugin_id = "nomad-driver-containerd"
# wait_for_healthy = true
# }
resource "nomad_volume" "hydra-db" { resource "nomad_volume" "hydra-db" {
# depends_on = [data.nomad_plugin.nomad-driver-containerd] # depends_on = [data.nomad_plugin.nomad-driver-containerd]
type = "csi" type = "csi"
@ -25,7 +12,7 @@ resource "nomad_volume" "hydra-db" {
} }
context = { context = {
server = "blowhole.in.redalder.org" server = "blowhole.hosts.in.redalder.org"
share = "/var/nfs/hydra-db" share = "/var/nfs/hydra-db"
} }
@ -49,7 +36,7 @@ resource "nomad_volume" "hydra-data" {
} }
context = { context = {
server = "blowhole.in.redalder.org" server = "blowhole.hosts.in.redalder.org"
share = "/var/nfs/hydra-data" share = "/var/nfs/hydra-data"
} }
@ -73,7 +60,7 @@ resource "nomad_volume" "hydra-nix" {
} }
context = { context = {
server = "blowhole.in.redalder.org" server = "blowhole.hosts.in.redalder.org"
share = "/var/nfs/hydra-nix" share = "/var/nfs/hydra-nix"
} }
@ -85,16 +72,20 @@ resource "nomad_volume" "hydra-nix" {
resource "vault_policy" "hydra-policy" { resource "vault_policy" "hydra-policy" {
name = "hydra-policy" name = "hydra-policy"
policy = file("${path.module}/hydra-policy.hcl") policy = <<EOF
path "kv/data/hydra" {
capabilities = ["read"]
}
EOF
} }
resource "nomad_job" "hydra" { resource "nomad_job" "hydra" {
jobspec = file("${path.module}/nomad.hcl") jobspec = file("${path.module}/job/hydra.hcl")
hcl2 { hcl2 {
enabled = true enabled = true
vars = { vars = {
flake_ref = var.flake_ref flake_ref = "${var.flake_host}?rev=${var.flake_rev}"
flake_sha = var.flake_sha flake_sha = var.flake_sha
} }
} }

View file

@ -0,0 +1,139 @@
upstream jellyfin {
server {{ env "NOMAD_UPSTREAM_ADDR_jellyfin" }};
}
upstream zigbee2mqtt {
server {{ env "NOMAD_UPSTREAM_ADDR_zigbee2mqtt" }};
}
upstream home-assistant {
server {{ env "NOMAD_UPSTREAM_ADDR_home-assistant" }};
}
upstream syncthing {
server {{ env "NOMAD_UPSTREAM_ADDR_syncthing" }};
}
upstream baikal {
server {{ env "NOMAD_UPSTREAM_ADDR_baikal" }};
}
server {
listen 8096;
server_name _;
include /local/security.conf;
client_max_body_size 100M;
location /jellyfin/ {
# Proxy main Jellyfin traffic
# The / at the end is significant.
# https://www.acunetix.com/blog/articles/a-fresh-look-on-reverse-proxy-related-attacks/
proxy_pass http://jellyfin/jellyfin/;
proxy_pass_request_headers on;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $http_host;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $http_connection;
# Disable buffering when the nginx proxy gets very resource heavy upon streaming
proxy_buffering off;
}
location /syncthing/ {
include /local/headers.conf;
proxy_pass http://syncthing/;
}
location ~ "^/(static/icons/|static/fonts/|static/translations/|static/images/|static/panels/|static/polyfills|api/|auth/|frontend_latest/|frontend_es5/|local/|lovelace|map|config|developer-tools|history|logbook|profile|states|hassio|onboarding.html|service_worker.js|authorize.html|manifest.json)" {
include /local/headers.conf;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_pass http://home-assistant;
}
location /home-assistant {
include /local/headers.conf;
rewrite /home-assistant/(.*) /$1 break;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_pass http://home-assistant;
}
location /zigbee2mqtt/ {
include /local/headers.conf;
# rewrite /zigbee2mqtt/(.*) /$1 break;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_pass http://zigbee2mqtt/;
}
location /baikal/ {
include /local/headers.conf;
# rewrite /baikal/(.*) /$1 break;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_pass http://baikal/;
}
}
server {
listen 80;
server_name _;
include /local/security.conf;
client_max_body_size 100M;
location /jellyfin/ {
# Proxy main Jellyfin traffic
# The / at the end is significant.
# https://www.acunetix.com/blog/articles/a-fresh-look-on-reverse-proxy-related-attacks/
proxy_pass http://jellyfin/jellyfin/;
proxy_pass_request_headers on;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $http_host;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $http_connection;
# Disable buffering when the nginx proxy gets very resource heavy upon streaming
proxy_buffering off;
}
}

View file

@ -0,0 +1,17 @@
data "local_file" "ingress-upstreams" {
filename = "${path.module}/ingress-upstreams.conf"
}
resource "nomad_job" "ingress" {
jobspec = file("${path.module}/job/ingress.hcl")
hcl2 {
enabled = true
vars = {
flake_ref = "${var.flake_host}?rev=${var.flake_rev}"
flake_sha = var.flake_sha
upstreams = data.local_file.ingress-upstreams.content
}
}
}

View file

@ -1,18 +1,4 @@
variable "flake_ref" {
type = string
}
variable "flake_sha" {
type = string
}
# data "nomad_plugin" "nomad-driver-containerd" {
# plugin_id = "nomad-driver-containerd"
# wait_for_healthy = true
# }
resource "nomad_volume" "jellyfin-cache" { resource "nomad_volume" "jellyfin-cache" {
# depends_on = [data.nomad_plugin.nomad-driver-containerd]
type = "csi" type = "csi"
plugin_id = "nfs" plugin_id = "nfs"
volume_id = "jellyfin-cache" volume_id = "jellyfin-cache"
@ -25,7 +11,7 @@ resource "nomad_volume" "jellyfin-cache" {
} }
context = { context = {
server = "blowhole.in.redalder.org" server = "blowhole.hosts.in.redalder.org"
share = "/var/nfs/jellyfin/cache" share = "/var/nfs/jellyfin/cache"
} }
@ -36,7 +22,6 @@ resource "nomad_volume" "jellyfin-cache" {
} }
resource "nomad_volume" "jellyfin-config" { resource "nomad_volume" "jellyfin-config" {
# depends_on = [data.nomad_plugin.nomad-driver-containerd]
type = "csi" type = "csi"
plugin_id = "nfs" plugin_id = "nfs"
volume_id = "jellyfin-config" volume_id = "jellyfin-config"
@ -49,7 +34,7 @@ resource "nomad_volume" "jellyfin-config" {
} }
context = { context = {
server = "blowhole.in.redalder.org" server = "blowhole.hosts.in.redalder.org"
share = "/var/nfs/jellyfin/config" share = "/var/nfs/jellyfin/config"
} }
@ -60,7 +45,6 @@ resource "nomad_volume" "jellyfin-config" {
} }
resource "nomad_volume" "jellyfin-media" { resource "nomad_volume" "jellyfin-media" {
# depends_on = [data.nomad_plugin.nomad-driver-containerd]
type = "csi" type = "csi"
plugin_id = "nfs" plugin_id = "nfs"
volume_id = "jellyfin-media" volume_id = "jellyfin-media"
@ -73,7 +57,7 @@ resource "nomad_volume" "jellyfin-media" {
} }
context = { context = {
server = "blowhole.in.redalder.org" server = "blowhole.hosts.in.redalder.org"
share = "/var/nfs/jellyfin/media" share = "/var/nfs/jellyfin/media"
} }
@ -84,7 +68,7 @@ resource "nomad_volume" "jellyfin-media" {
} }
resource "nomad_job" "jellyfin" { resource "nomad_job" "jellyfin" {
jobspec = file("${path.module}/nomad.hcl") jobspec = file("${path.module}/job/jellyfin.hcl")
hcl2 { hcl2 {
enabled = true enabled = true

View file

@ -0,0 +1,96 @@
variable "flake_ref" {
type = string
}
variable "flake_sha" {
type = string
}
job "baikal" {
datacenters = [ "homelab-1" ]
type = "service"
group "baikal" {
count = 1
volume "baikal-specific" {
type = "csi"
source = "baikal-specific"
read_only = false
attachment_mode = "file-system"
access_mode = "single-node-writer"
}
volume "baikal-config" {
type = "csi"
source = "baikal-config"
read_only = false
attachment_mode = "file-system"
access_mode = "single-node-writer"
}
network {
mode = "bridge"
}
service {
name = "baikal"
port = "80"
## Syncthing with auth returns 402: Unauthorized and Nomad interprets it as
## service failure.
# check {
# type = "http"
# address_mode = "alloc"
# path = "/"
# port = "8384"
# interval = "10s"
# timeout = "10s"
# }
connect {
sidecar_service {}
sidecar_task {
resources {
cpu = 75
memory = 48
}
config {
memory_hard_limit = 96
}
}
}
}
task "baikal" {
driver = "containerd-driver"
config {
flake_ref = "${var.flake_ref}#nixngSystems.baikal.config.system.build.toplevel"
flake_sha = var.flake_sha
entrypoint = [ "init" ]
}
resources {
cpu = 128
memory = 128
}
volume_mount {
volume = "baikal-specific"
destination = "/var/baikal/specific"
read_only = false
}
volume_mount {
volume = "baikal-config"
destination = "/var/baikal/config"
read_only = false
}
}
}
}

View file

@ -0,0 +1,184 @@
variable "flake_ref" {
type = string
}
variable "flake_sha" {
type = string
}
variable "upstreams" {
type = string
}
job "ingress" {
datacenters = [ "homelab-1" ]
type = "service"
group "ingress" {
count = 1
constraint {
attribute = "${attr.unique.hostname}"
value = "blowhole"
}
network {
mode = "bridge"
port "http" {
static = 8080
to = 80
}
port "https" {
static = 443
to = 443
}
port "jellyfin" {
static = 8096
to = 8096
}
port "imap" {
static = 143
to = 143
}
}
service {
name = "ingress-blowhole"
port = "80"
connect {
sidecar_service {
proxy {
upstreams {
destination_name = "jellyfin"
local_bind_port = 8001
}
upstreams {
destination_name = "zigbee2mqtt"
local_bind_port = 8002
}
upstreams {
destination_name = "home-assistant"
local_bind_port = 8003
}
upstreams {
destination_name = "syncthing"
local_bind_port = 8004
}
upstreams {
destination_name = "dovecot-imap"
local_bind_port = 8005
}
upstreams {
destination_name = "baikal"
local_bind_port = 8006
}
}
}
sidecar_task {
resources {
cpu = 75
memory = 48
}
config {
memory_hard_limit = 96
image = "envoyproxy/envoy:v1.20.2"
}
}
}
}
task "nginx" {
driver = "containerd-driver"
# driver = "docker"
config {
flake_ref = "${var.flake_ref}#nixngSystems.ingressBlowhole.config.system.build.toplevel"
flake_sha = var.flake_sha
entrypoint = [ "init" ]
}
resources {
cpu = 200
memory = 32
memory_max = 128
}
template {
data = <<EOF
ssl_certificate_key /etc/letsencrypt/live/redalder.org/privkey.pem;
ssl_certificate /etc/letsencrypt/live/redalder.org/fullchain.pem;
ssl_session_cache shared:le_nginx_SSL:10m;
ssl_session_timeout 1440m;
ssl_session_tickets off;
ssl_protocols TLSv1.2 TLSv1.3;
ssl_prefer_server_ciphers off;
ssl_ciphers "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384";
EOF
destination = "local/ssl.conf"
change_mode = "signal"
change_signal = "SIGHUP"
}
template {
data = <<EOF
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
EOF
destination = "local/headers.conf"
change_mode = "signal"
change_signal = "SIGHUP"
}
template {
data = <<EOF
add_header X-Frame-Options "SAMEORIGIN";
add_header Content-Security-Policy "default-src 'self' http: https: data: blob: 'unsafe-inline'" always;
EOF
destination = "local/security.conf"
change_mode = "signal"
change_signal = "SIGHUP"
}
template {
data = var.upstreams
destination = "local/upstreams.conf"
change_mode = "signal"
change_signal = "SIGHUP"
}
template {
data = <<EOF
upstream dovecot-imap {
server {{ env "NOMAD_UPSTREAM_ADDR_dovecot-imap" }};
}
server {
listen 143;
proxy_pass dovecot-imap;
}
EOF
destination = "local/streams.conf"
change_mode = "signal"
change_signal = "SIGHUP"
}
}
}
}

View file

@ -0,0 +1,111 @@
variable "flake_ref" {
type = string
}
variable "flake_sha" {
type = string
}
job "syncthing" {
datacenters = [ "homelab-1" ]
type = "service"
group "syncthing" {
count = 1
volume "syncthing-data" {
type = "csi"
source = "syncthing-data"
read_only = false
attachment_mode = "file-system"
access_mode = "single-node-writer"
}
volume "syncthing-config" {
type = "csi"
source = "syncthing-config"
read_only = false
attachment_mode = "file-system"
access_mode = "single-node-writer"
}
volume "syncthing-storage" {
type = "csi"
source = "syncthing-storage"
read_only = false
attachment_mode = "file-system"
access_mode = "single-node-writer"
}
network {
mode = "bridge"
}
service {
name = "syncthing"
port = "8384"
## Syncthing with auth returns 402: Unauthorized and Nomad interprets it as
## service failure.
# check {
# type = "http"
# address_mode = "alloc"
# path = "/"
# port = "8384"
# interval = "10s"
# timeout = "10s"
# }
connect {
sidecar_service {}
sidecar_task {
resources {
cpu = 75
memory = 48
}
config {
memory_hard_limit = 96
}
}
}
}
task "syncthing" {
driver = "containerd-driver"
config {
flake_ref = "${var.flake_ref}#nixngSystems.syncthing.config.system.build.toplevel"
flake_sha = var.flake_sha
entrypoint = [ "init" ]
}
resources {
cpu = 128
memory = 128
}
volume_mount {
volume = "syncthing-data"
destination = "/var/syncthing/data"
read_only = false
}
volume_mount {
volume = "syncthing-config"
destination = "/var/syncthing/config"
read_only = false
}
volume_mount {
volume = "syncthing-storage"
destination = "/var/syncthing/storage"
read_only = false
}
}
}
}

View file

@ -0,0 +1,42 @@
variable "flake_rev" {
type = string
}
variable "flake_sha" {
type = string
}
variable "flake_host" {
type = string
}
variable "flake_host_alt" {
type = string
}
provider "nomad" {
address = "http://10.64.1.201:4646"
region = "homelab-1"
alias = "homelab-1"
}
module "nfs" {
source = "../../modules/nfs"
node_dcs = [ "homelab-1" ]
region = "homelab-1"
providers = {
nomad = nomad.homelab-1
}
}
module "gateway-mesh" {
source = "../../modules/gateway-mesh"
datacenters = [ "homelab-1" ]
providers = {
nomad = nomad.homelab-1
}
}

View file

@ -0,0 +1,11 @@
resource "nomad_job" "reicio" {
jobspec = file("${path.module}/job/reicio.hcl")
hcl2 {
enabled = true
vars = {
flake_ref = "${var.flake_host}?rev=${var.flake_rev}"
flake_sha = var.flake_sha
}
}
}

View file

@ -1,13 +1,4 @@
variable "flake_ref" {
type = string
}
variable "flake_sha" {
type = string
}
resource "nomad_volume" "syncthing-data" { resource "nomad_volume" "syncthing-data" {
# depends_on = [data.nomad_plugin.nomad-driver-containerd]
type = "csi" type = "csi"
plugin_id = "nfs" plugin_id = "nfs"
volume_id = "syncthing-data" volume_id = "syncthing-data"
@ -20,7 +11,7 @@ resource "nomad_volume" "syncthing-data" {
} }
context = { context = {
server = "blowhole.in.redalder.org" server = "blowhole.hosts.in.redalder.org"
share = "/var/nfs/syncthing/data" share = "/var/nfs/syncthing/data"
} }
@ -31,7 +22,6 @@ resource "nomad_volume" "syncthing-data" {
} }
resource "nomad_volume" "syncthing-storage" { resource "nomad_volume" "syncthing-storage" {
# depends_on = [data.nomad_plugin.nomad-driver-containerd]
type = "csi" type = "csi"
plugin_id = "nfs" plugin_id = "nfs"
volume_id = "syncthing-storage" volume_id = "syncthing-storage"
@ -44,7 +34,7 @@ resource "nomad_volume" "syncthing-storage" {
} }
context = { context = {
server = "blowhole.in.redalder.org" server = "blowhole.hosts.in.redalder.org"
share = "/var/nfs/syncthing/storage" share = "/var/nfs/syncthing/storage"
} }
@ -55,7 +45,6 @@ resource "nomad_volume" "syncthing-storage" {
} }
resource "nomad_volume" "syncthing-config" { resource "nomad_volume" "syncthing-config" {
# depends_on = [data.nomad_plugin.nomad-driver-containerd]
type = "csi" type = "csi"
plugin_id = "nfs" plugin_id = "nfs"
volume_id = "syncthing-config" volume_id = "syncthing-config"
@ -68,7 +57,7 @@ resource "nomad_volume" "syncthing-config" {
} }
context = { context = {
server = "blowhole.in.redalder.org" server = "blowhole.hosts.in.redalder.org"
share = "/var/nfs/syncthing/config" share = "/var/nfs/syncthing/config"
} }
@ -79,12 +68,12 @@ resource "nomad_volume" "syncthing-config" {
} }
resource "nomad_job" "syncthing" { resource "nomad_job" "syncthing" {
jobspec = file("${path.module}/nomad.hcl") jobspec = file("${path.module}/job/syncthing.hcl")
hcl2 { hcl2 {
enabled = true enabled = true
vars = { vars = {
flake_ref = var.flake_ref flake_ref = "${var.flake_host}?rev=${var.flake_rev}"
flake_sha = var.flake_sha flake_sha = var.flake_sha
} }
} }

View file

@ -0,0 +1,11 @@
resource "nomad_job" "website" {
jobspec = file("${path.module}/job/website.hcl")
hcl2 {
enabled = true
vars = {
flake_ref = "${var.flake_host}?rev=${var.flake_rev}"
flake_sha = var.flake_sha
}
}
}

33
nomad/regions/main.tf Normal file
View file

@ -0,0 +1,33 @@
variable "flake_rev" {
type = string
}
variable "flake_sha" {
type = string
}
variable "flake_host" {
type = string
}
variable "flake_host_alt" {
type = string
}
module "do-1" {
source = "./do-1"
flake_rev = var.flake_rev
flake_host = var.flake_host
flake_host_alt = var.flake_host_alt
flake_sha = var.flake_sha
}
module "homelab-1" {
source = "./homelab-1"
flake_rev = var.flake_rev
flake_host = var.flake_host
flake_host_alt = var.flake_host_alt
flake_sha = var.flake_sha
}