feat: initial migration

This commit is contained in:
Peter 2024-09-09 13:46:50 +02:00
parent 8b19dc5f24
commit f29ef0bdb5
Signed by: prskr
GPG key ID: F56BED6903BC5E37
19 changed files with 956 additions and 7 deletions

16
.editorconfig Normal file
View file

@ -0,0 +1,16 @@
# EditorConfig (https://editorconfig.org); root = true stops editors from
# searching parent directories for further .editorconfig files.
root = true
# Defaults for every file in the repository.
[*]
indent_style = space
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
max_line_length = 120
# OpenTofu/Terraform files: keep trailing whitespace
# (reason not stated here — presumably heredoc content; verify before changing).
[*.tf]
trim_trailing_whitespace = false
# YAML and Jinja2-templated YAML use 2-space indentation.
[*.{yml,yaml,yml.j2,yaml.j2}]
indent_size = 2
# NOTE(review): redundant — insert_final_newline is already true via [*].
insert_final_newline = true

8
.gitignore vendored
View file

@ -10,13 +10,6 @@
crash.log
crash.*.log
# Exclude all .tfvars files, which are likely to contain sensitive data, such as
# password, private keys, and other secrets. These should not be part of version
# control as they are data points which are potentially sensitive and subject
# to change depending on the environment.
*.tfvars
*.tfvars.json
# Ignore override files as they are usually used to override resources locally and so
# are not checked in
override.tf
@ -37,3 +30,4 @@ override.tf.json
.terraformrc
terraform.rc
.ssh/

112
.terraform.lock.hcl Normal file
View file

@ -0,0 +1,112 @@
# This file is maintained automatically by "tofu init".
# Manual edits may be lost in future updates.
provider "registry.opentofu.org/cloudflare/cloudflare" {
version = "4.40.0"
constraints = "4.40.0"
hashes = [
"h1:oWcWlZe52ZRyLQciNe94RaWzhHifSTu03nlK0uL7rlM=",
"zh:01742e5946f936548f8e42120287ffc757abf97e7cbbe34e25c266a438fb54fd",
"zh:08d81f5a5aab4cc269f983b8c6b5be0e278105136aca9681740802619577371f",
"zh:0d75131ba70902cfc94a7a5900369bdde56528b2aad6e10b164449cc97d57396",
"zh:3890a715a012e197541daacdacb8cceec6d364814daa4640ddfe98a8ba9036cb",
"zh:58254ce5ebe1faed4664df86210c39d660bcdc60280f17b25fe4d4dbea21ea8c",
"zh:6b0abc1adbc2edee79368ce9f7338ebcb5d0bf941e8d7d9ac505b750f20f80a2",
"zh:81cc415d1477174a1ca288d25fdb57e5ee488c2d7f61f265ef995b255a53b0ce",
"zh:8680140c7fe5beaefe61c5cfa471bf88422dc0c0f05dad6d3cb482d4ffd22be4",
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
"zh:a491d26236122ccb83dac8cb490d2c0aa1f4d3a0b4abe99300fd49b1a624f42f",
"zh:a70d9c469dc8d55715ba77c9d1a4ede1fdebf79e60ee18438a0844868db54e0d",
"zh:a7fcb7d5c4222e14ec6d9a15adf8b9a083d84b102c3d0e4a0d102df5a1360b62",
"zh:b4f9677174fabd199c8ebd2e9e5eb3528cf887e700569a4fb61eef4e070cec5e",
"zh:c27f0f7519221d75dae4a3787a59e05acd5cc9a0d30a390eff349a77d20d52e6",
"zh:db00d8605dbf43ca42fe1481a6c67fdcaa73debb7d2a0f613cb95ae5c5e7150e",
]
}
provider "registry.opentofu.org/hashicorp/local" {
version = "2.5.1"
hashes = [
"h1:87L+rpGao062xifb1VuG9YVFwp9vbDP6G2fgfYxUkQs=",
"zh:031c2c2070672b7e78e0aa15560839278dc57fe7cf1e58a617ac13c67b31d5fb",
"zh:1ef64ea4f8382cd538a76f3d319f405d18130dc3280f1c16d6aaa52a188ecaa4",
"zh:422ce45691b2f384dbd4596fdc8209d95cb43d85a82aaa0173089d38976d6e96",
"zh:7415fbd8da72d9363ba55dd8115837714f9534f5a9a518ec42268c2da1b9ed2f",
"zh:92aa22d071339c8ef595f18a9f9245c287266c80689f5746b26e10eaed04d542",
"zh:9cd0d99f5d3be835d6336c19c4057af6274e193e677ecf6370e5b0de12b4aafe",
"zh:a8c1525b389be5809a97f02aa7126e491ba518f97f57ed3095a3992f2134bb8f",
"zh:b336fa75f72643154b07c09b3968e417a41293358a54fe03efc0db715c5451e6",
"zh:c66529133599a419123ad2e42874afbd9aba82bd1de2b15cc68d2a1e665d4c8e",
"zh:c7568f75ba6cb7c3660b69eaab8b0e4278533bd9a7a4c33ee6590cc7e69743ea",
]
}
provider "registry.opentofu.org/hashicorp/null" {
version = "3.2.2"
constraints = "~> 3.2.1"
hashes = [
"h1:sU0t6ANQ4IfEwZbbBmcNeOCg2CDCViVb7L7QVfIHrCs=",
"zh:00e5877d19fb1c1d8c4b3536334a46a5c86f57146fd115c7b7b4b5d2bf2de86d",
"zh:1755c2999e73e4d73f9de670c145c9a0dc5a373802799dff06a0e9c161354163",
"zh:2b29d706353bc9c4edda6a2946af3322abe94372ffb421d81fa176f1e57e33be",
"zh:34f65259c6d2bd51582b6da536e782b181b23725782b181193b965f519fbbacd",
"zh:370f6eb744475926a1fa7464d82d46ad83c2e1148b4b21681b4cec4d75b97969",
"zh:5950bdb23b4fcc6431562d7eba3dea37844aa4220c4da2eb898ae3e4d1b64ec4",
"zh:8f3d5c8d4b9d497fec36953a227f80c76d37fc8431b683a23fb1c42b9cccbf8a",
"zh:8f6eb5e65c047bf490ad3891efecefc488503b65898d4ee106f474697ba257d7",
"zh:a7040eed688316fe00379574c72bb8c47dbe2638b038bb705647cbf224de8f72",
"zh:e561f28df04d9e51b75f33004b7767a53c45ad96e3375d86181ba1363bffbc77",
]
}
provider "registry.opentofu.org/hashicorp/tls" {
version = "4.0.5"
hashes = [
"h1:LWGTWAUrC+/iTsNq0vxANvGOp+7Jnl4wAnSOW2Shqjc=",
"zh:05a7dc3ac92005485714f87541ad6d0d478988b478c5774227a7d39b01660050",
"zh:547e0def44080456169bf77c21037aa6dc9e7f3e644a8f6a2c5fc3e6c15cf560",
"zh:6842b03d050ae1a4f1aaed2a2b1ca707eae84ae45ae492e4bb57c3d48c26e1f1",
"zh:6ced0a9eaaba12377f3a9b08df2fd9b83ae3cb357f859eb6aecf24852f718d9a",
"zh:766bcdf71a7501da73d4805d05764dcb7c848619fa7c04b3b9bd514e5ce9e4aa",
"zh:84cc8617ce0b9a3071472863f43152812e5e8544802653f636c866ef96f1ed34",
"zh:b1939e0d44c89315173b78228c1cf8660a6924604e75ced7b89e45196ce4f45e",
"zh:ced317916e13326766427790b1d8946c4151c4f3b0efd8f720a3bc24abe065fa",
"zh:ec9ff3412cf84ba81ca88328b62c17842b803ef406ae19152c13860b356b259c",
"zh:ff064f0071e98702e542e1ce00c0465b7cd186782fe9ccab8b8830cac0f10dd4",
]
}
provider "registry.opentofu.org/hetznercloud/hcloud" {
version = "1.48.0"
constraints = "1.48.0"
hashes = [
"h1:sbjxzMtxkLOkhc1mbgVOmG7sCF7WZgTgRQqWPG/fld4=",
"zh:19d38d046e26153edcdd36ce8c0e16198aa9dea5186559651c4a75c455390573",
"zh:3cb7c453067bcabed68275f812100685fc2f753f37c0e620d3358e642833b5f0",
"zh:42cabdbb55dba02816be8d9d3fc30f51d610516cc54c3f057e6bb3ffc960b550",
"zh:486aaa88c6c9af37f07ffea4b54a7dbd11e9faee09f4ed3f2dbcb2d94064427a",
"zh:69b1a9dc867d9beac752f42501f465ea22d3fbc8af8b3a7190b6aa50fcc0db51",
"zh:7422b2ec1188d9e70c3ee34ff201eb12809c0602a009224f7cea6940cce64567",
"zh:7e31665f004a4d0055f0b1b0c0f4d36039c11bb789fc7c07fc9fb54d0d38d751",
"zh:866eb35b5ca82566f7793ec88dc135c6476f33ea0f7a7f10be9768ba3408e791",
"zh:961efe244a5163a3369817bdd1092aae2e58391d7e21929fab56473d62385d1d",
"zh:a08a965235e6db0233730b93a024e2b8a8c1567dd453eb0aa4aec59b9ed91558",
"zh:c031636938f665629ef3d48d771b6037571ddb886366ade241ed19551aaea24f",
"zh:cf8fc251e4ae701d5f2503f5d1b9f7e5f804f676a1b9b2d88a59930d6b7a9054",
"zh:d5fa2cc80a6361d92c5c725f677f93de5d98c9d644ac978f083a06a7381dda1d",
"zh:ecef5c1e59d1c6cde6aee407b79aecd76d6c129dcec4f67666085f0403a0f46a",
]
}
provider "registry.opentofu.org/poseidon/ct" {
version = "0.13.0"
constraints = "0.13.0"
hashes = [
"h1:ztsY6QOYlAx/Tz2tGsIP3vlruN1xcDPRZXBh65KkW8c=",
"zh:24d86adcba92ad0f13870d5e0d217c395aa90ff1e9234fe0c9b7c6eb65abb3a8",
"zh:317eeadf92d220fe546be624a9002190edeb623ac76ae7f6a93abd9fe1be65fd",
"zh:361dbff802ccbd94b87c9d77c0d9db9bdf4d5d408f8cf05e4dae203e60b310ca",
"zh:3b25cb8a0327886aa30c273561ecea3315cc4d729677cd6528ed1339486475da",
"zh:63455a68fee4ba0c9b131eb7e267eb17707184c55a5feb9e2bad2d9de5889d6a",
]
}

17
buckets.tf Normal file
View file

@ -0,0 +1,17 @@
# Cloudflare R2 buckets, all created with the Western Europe ("WEUR")
# location hint.

# Object storage for the CSI driver.
resource "cloudflare_r2_bucket" "csi" {
  account_id = var.cloudflare_account_id
  name       = "csi"
  location   = "WEUR"
}

# Object storage for Gitea.
resource "cloudflare_r2_bucket" "gitea" {
  account_id = var.cloudflare_account_id
  name       = "gitea"
  location   = "WEUR"
}

# Remote state bucket — matches the `bucket = "tfstate"` s3 backend
# configured in versions.tf.
resource "cloudflare_r2_bucket" "tfstate" {
  account_id = var.cloudflare_account_id
  name       = "tfstate"
  location   = "WEUR"
}

View file

@ -0,0 +1,7 @@
# Butane snippet (Flatcar variant): authorizes SSH keys for the default
# "core" user. ${ssh_keys} is substituted by templatefile() with a
# jsonencode()d list of public keys (see the ct_config data sources).
# NOTE(review): nesting restored — indentation was lost in extraction.
variant: flatcar
version: 1.1.0
passwd:
  users:
    - name: core
      ssh_authorized_keys: ${ssh_keys}

10
configs/cp/cp-config.yaml Normal file
View file

@ -0,0 +1,10 @@
# K3s control-plane configuration drop-in (presumably merged into the K3s
# server config — confirm against the deployment). Binds the component
# metrics endpoints to all interfaces and exempts the health/metrics paths
# from authorization so they can be scraped.
kube-controller-manager-arg:
- "bind-address=0.0.0.0"
- "authorization-always-allow-paths=/healthz,/readyz,/livez,/metrics"
kube-proxy-arg:
- "metrics-bind-address=0.0.0.0:10249"
kube-scheduler-arg:
- "bind-address=0.0.0.0"
- "authorization-always-allow-paths=/healthz,/readyz,/livez,/metrics"
# Controller manager exposes etcd/SQLite metrics
etcd-expose-metrics: true

View file

@ -0,0 +1,70 @@
# Butane config for K3s control-plane nodes (rendered by data.ct_config
# "machine-ignitions-cp"; template variables supplied via templatefile()).
# NOTE(review): nesting restored — indentation was lost in extraction.
variant: flatcar
version: 1.1.0
systemd:
  units:
    # Litestream restores the K3s SQLite state DB from the s3 replica on
    # first boot, then continuously replicates it (config: /etc/litestream.yml).
    - name: litestream.service
      enabled: true
      contents: |
        [Unit]
        Description=Litestream
        [Service]
        Restart=always
        TimeoutSec=1800
        TimeoutStartSec=180
        ExecStartPre=/bin/bash -c "mkdir -p /opt/litestream && curl -L https://github.com/benbjohnson/litestream/releases/download/${litestream_version}/litestream-${litestream_version}-linux-arm64.tar.gz | tar -xvz -C /opt/litestream/"
        ExecStartPre=/opt/litestream/litestream restore -replica s3 -if-db-not-exists -if-replica-exists /var/lib/rancher/k3s/server/db/state.db
        ExecStart=/opt/litestream/litestream replicate
        [Install]
        WantedBy=multi-user.target
    # Runs the K3s installer once (skipped when /opt/k3s/bin/k3s exists) after
    # the data volume is mounted and litestream has restored the state DB.
    - name: k3s-install.service
      enabled: true
      contents: |
        [Unit]
        Description=Run K3s script
        Wants = network-online.target
        After = network.target network-online.target litestream.service
        ConditionPathExists=/opt/k3s-install.sh
        ConditionPathExists=/mnt/HC_Volume_${volume_id}
        ConditionPathExists=!/opt/k3s/bin/k3s
        [Service]
        Type=forking
        Restart=always
        TimeoutSec=1800
        # NOTE(review): the original set TimeoutStartSec twice (120, then 180);
        # systemd applies the last occurrence, so only 180 is kept here.
        TimeoutStartSec=180
        ExecStartPre=-/sbin/modprobe br_netfilter
        ExecStartPre=-/sbin/modprobe overlay
        ExecStartPre=mkdir -p /opt/k3s
        LimitNOFILE=1048576
        LimitNPROC=infinity
        LimitCORE=infinity
        TasksMax=infinity
        RemainAfterExit=yes
        KillMode=process
        Environment="K3S_TOKEN=${k3s_token}"
        Environment="INSTALL_K3S_VERSION=${k3s_version}"
        Environment="INSTALL_K3S_BIN_DIR=/opt/k3s"
        Environment="INSTALL_K3S_EXEC=server --data-dir /mnt/HC_Volume_${volume_id}/k3s --advertise-address 172.23.2.10 --node-ip ${node_ip} --node-taint=node-type=k3s-controlplane:NoSchedule %{for san in k3s_sans }--tls-san='${san}' %{endfor} --disable-cloud-controller --disable servicelb --kubelet-arg=cloud-provider=external"
        ExecStart=/usr/bin/sh -c "/opt/k3s-install.sh"
        [Install]
        WantedBy=multi-user.target
storage:
  files:
    - path: /etc/hostname
      mode: 0644
      contents:
        inline: ${host}
    - path: /opt/k3s-install.sh
      mode: 0777
      contents:
        source: https://get.k3s.io
    # Litestream config, injected base64-encoded from the Terraform template.
    - path: /etc/litestream.yml
      mode: 0644
      contents:
        source: data:;base64,${litestream_config}

13
configs/cp/litestream.yml Normal file
View file

@ -0,0 +1,13 @@
# Litestream configuration template; ${...} placeholders are filled by
# templatefile() in k8s_control_plane.tf with the k3s_backup_* variables.
# NOTE(review): nesting restored — indentation was lost in extraction.
access-key-id: ${accessKey}
secret-access-key: ${secretKey}
dbs:
  # The K3s server's embedded SQLite state database.
  - path: /var/lib/rancher/k3s/server/db/state.db
    replicas:
      - name: Garage
        type: s3
        endpoint: ${endpoint}
        bucket: k3s
        region: hel1
        retention: 24h
        snapshot-interval: 15m

44
configs/cp/traefik.yaml Normal file
View file

@ -0,0 +1,44 @@
# HelmChartConfig overriding the Traefik chart deployed by K3s.
# NOTE(review): nesting (incl. the valuesContent literal block) restored —
# indentation was lost in extraction; verify against a rendered copy.
apiVersion: helm.cattle.io/v1
kind: HelmChartConfig
metadata:
  name: traefik
  namespace: kube-system
spec:
  chart: traefik
  repo: https://traefik.github.io/charts
  version: 26.0.0
  valuesContent: |-
    ports:
      traefik:
        port: 9000
        expose: false
      web:
        nodePort: 32080
        forwardedHeaders:
          insecure: true
      websecure:
        expose: true
    service:
      type: LoadBalancer
      annotations:
        load-balancer.hetzner.cloud/location: "hel1"
    experimental:
      kubernetesGateway:
        enabled: true
    providers:
      kubernetesIngress:
        publishedService:
          enabled: true
        allowExternalNameServices: true
      kubernetesCRD:
        enabled: true
        allowExternalNameServices: true
    metrics:
      prometheus:
        serviceMonitor:
          interval: 30s
          scrapeTimeout: 5s
          additionalLabels:
            prometheus: default
        service:
          enabled: true

View file

@ -0,0 +1,37 @@
# Butane config for K3s worker (agent) nodes, rendered by
# data.ct_config "machine-ignitions".
# NOTE(review): nesting restored — indentation was lost in extraction.
variant: flatcar
version: 1.1.0
systemd:
  units:
    # Runs the K3s installer once (skipped when /opt/bin/k3s exists), joining
    # the agent to the control plane at 172.23.2.10.
    - name: k3s-install.service
      enabled: true
      contents: |
        [Unit]
        Description=Run K3s script
        Wants = network-online.target
        After = network.target network-online.target
        ConditionPathExists=/opt/k3s-install.sh
        ConditionPathExists=!/opt/bin/k3s
        [Service]
        Type=forking
        TimeoutStartSec=180
        RemainAfterExit=yes
        KillMode=process
        Environment="K3S_URL=https://172.23.2.10:6443"
        Environment="K3S_TOKEN=${k3s_token}"
        Environment="INSTALL_K3S_VERSION=${k3s_version}"
        Environment="INSTALL_K3S_EXEC=agent --node-ip=${node_ip} --kubelet-arg --cloud-provider=external --node-label k8s.icb4dc0.de/storage-node=${tostring(storage_node)}"
        ExecStart=/usr/bin/sh -c "/opt/k3s-install.sh"
        [Install]
        WantedBy=multi-user.target
storage:
  files:
    - path: /etc/hostname
      mode: 0644
      contents:
        inline: ${host}
    - path: /opt/k3s-install.sh
      mode: 0777
      contents:
        source: https://get.k3s.io

53
dns.tf Normal file
View file

@ -0,0 +1,53 @@
# DNS zone and records for icb4dc0.de.
resource "cloudflare_zone" "icb4dc0de" {
  account_id = var.cloudflare_account_id
  zone       = "icb4dc0.de"

  lifecycle {
    # Account moves are managed outside of this configuration.
    ignore_changes = [account_id]
  }
}

# iCloud custom email domain: MX pair, domain-verification TXT, SPF and DKIM.
resource "cloudflare_record" "mx_primary" {
  zone_id  = cloudflare_zone.icb4dc0de.id
  name     = "@"
  type     = "MX"
  content  = "mx01.mail.icloud.com"
  priority = 10
}

resource "cloudflare_record" "mx_secondary" {
  zone_id  = cloudflare_zone.icb4dc0de.id
  name     = "@"
  type     = "MX"
  content  = "mx02.mail.icloud.com"
  priority = 10
}

resource "cloudflare_record" "apple_proof" {
  zone_id = cloudflare_zone.icb4dc0de.id
  name    = "@"
  type    = "TXT"
  content = "apple-domain=chwbVvzH8hWIgg1l"
}

resource "cloudflare_record" "keybase_proof" {
  zone_id = cloudflare_zone.icb4dc0de.id
  name    = "@"
  type    = "TXT"
  content = "keybase-site-verification=WDQoLtW22epD7eQnts6rPKJBGA0lD6jSI6m0bGMYWag"
}

resource "cloudflare_record" "apple_spf" {
  zone_id = cloudflare_zone.icb4dc0de.id
  name    = "@"
  type    = "TXT"
  # The escaped quotes are part of the record content as stored.
  content = "\"v=spf1 include:icloud.com ~all\""
}

resource "cloudflare_record" "apple_sig_domainkey" {
  zone_id = cloudflare_zone.icb4dc0de.id
  name    = "sig1._domainkey"
  type    = "CNAME"
  content = "sig1.dkim.icb4dc0.de.at.icloudmailadmin.com"
}

161
k8s_control_plane.tf Normal file
View file

@ -0,0 +1,161 @@
# A control-plane K3s version bump replaces every control-plane server
# (referenced from replace_triggered_by below).
resource "null_resource" "cp-config" {
  triggers = {
    version = var.control_plane_k3s_version
  }
}

# Bumping `generation` in the k3s_control_plane variable forces a rebuild.
resource "null_resource" "control_plane_generation" {
  for_each = var.k3s_control_plane

  triggers = {
    timestamp = "${each.value.generation}"
  }
}

# Persistent volume for the K3s data dir so cluster state survives
# server replacement; deletion-protected.
resource "hcloud_volume" "cp-k3s-storage" {
  for_each = var.k3s_control_plane

  name              = "${each.key}-k3s-storage"
  size              = 15
  format            = "ext4"
  delete_protection = true
}

resource "hcloud_server" "control-plane" {
  for_each = var.k3s_control_plane

  name        = each.key
  server_type = each.value.server_type
  location    = each.value.location
  # The Ubuntu image is only a bootstrap vehicle: the server boots into the
  # rescue system and flatcar-install overwrites the disk below.
  image   = "ubuntu-22.04"
  backups = false

  lifecycle {
    replace_triggered_by = [
      null_resource.cp-config,
      null_resource.control_plane_generation
    ]
  }

  ssh_keys = [
    hcloud_ssh_key.provisioning_key.id,
    hcloud_ssh_key.default.id
  ]

  labels = {
    "node_type" = "control-plane"
    "cluster"   = "icb4dc0.de"
  }

  network {
    network_id = hcloud_network.k8s_net.id
    ip         = each.value.private_ip
    alias_ips  = each.value.alias_ips
  }

  public_net {
    ipv4_enabled = true
    ipv6_enabled = true
  }

  # boot into rescue OS
  rescue = "linux64"

  connection {
    host        = self.ipv4_address
    private_key = tls_private_key.provisioning.private_key_pem
    timeout     = "5m"
  }

  # Upload the rendered Ignition config for flatcar-install.
  provisioner "file" {
    content     = data.ct_config.machine-ignitions-cp[each.key].rendered
    destination = "/root/ignition.json"
  }

  # Install Flatcar from the rescue system. The reboot drops the SSH
  # connection, hence on_failure = continue.
  provisioner "remote-exec" {
    inline = [
      "set -ex",
      "apt-get install -y gawk",
      "curl -fsSLO --retry-delay 1 --retry 60 --retry-connrefused --retry-max-time 60 --connect-timeout 20 https://raw.githubusercontent.com/flatcar/init/flatcar-master/bin/flatcar-install",
      "chmod +x flatcar-install",
      "./flatcar-install -s -i /root/ignition.json -C ${var.flatcar_release_channel}",
      "reboot",
    ]
    on_failure = continue
  }

  # After the reboot, connect to the installed Flatcar as 'core' and set
  # the hostname.
  provisioner "remote-exec" {
    connection {
      host        = self.ipv4_address
      private_key = tls_private_key.provisioning.private_key_pem
      timeout     = "3m"
      user        = "core"
    }

    inline = [
      "sudo hostnamectl set-hostname ${self.name}",
    ]
  }
}

resource "hcloud_volume_attachment" "cp-k3s-storage" {
  for_each = var.k3s_control_plane

  volume_id = hcloud_volume.cp-k3s-storage[each.key].id
  server_id = hcloud_server.control-plane[each.key].id
  automount = true
}

# Public DNS records (<node>.k8s.icb4dc0.de) for each control-plane node.
resource "cloudflare_record" "cp-host-ipv4" {
  for_each   = var.k3s_control_plane
  depends_on = [hcloud_server.control-plane]

  zone_id = cloudflare_zone.icb4dc0de.id
  name    = "${each.key}.k8s"
  type    = "A"
  content = hcloud_server.control-plane[each.key].ipv4_address
}

resource "cloudflare_record" "cp-host-ipv6" {
  for_each   = var.k3s_control_plane
  depends_on = [hcloud_server.control-plane]

  zone_id = cloudflare_zone.icb4dc0de.id
  name    = "${each.key}.k8s"
  type    = "AAAA"
  content = hcloud_server.control-plane[each.key].ipv6_address
}

# Renders configs/cp/k3s-flatcar.yaml (Butane) into Ignition JSON; the
# litestream config is rendered separately and embedded base64-encoded.
data "ct_config" "machine-ignitions-cp" {
  for_each = var.k3s_control_plane

  strict = true
  content = templatefile(
    "${path.module}/configs/cp/k3s-flatcar.yaml",
    {
      "host"               = "${each.key}"
      "k3s_token"          = "${var.k3s_token}"
      "litestream_version" = "${var.litestream_version}",
      "litestream_config" = base64encode(
        templatefile(
          "${path.module}/configs/cp/litestream.yml",
          {
            "accessKey" = var.k3s_backup_access_key,
            "secretKey" = var.k3s_backup_secret_key,
            "endpoint"  = var.k3s_backup_endpoint
          }
        )
      )
      "node_ip"     = "${each.value.private_ip}"
      "k3s_version" = "${var.control_plane_k3s_version}",
      "k3s_sans"    = var.k3s_sans,
      "volume_id"   = hcloud_volume.cp-k3s-storage[each.key].id
    }
  )

  snippets = [
    templatefile(
      "${path.module}/configs/core-user.yaml.tmpl",
      {
        ssh_keys = jsonencode(concat(var.ssh_keys, [tls_private_key.provisioning.public_key_openssh]))
      }
    )
  ]
}

164
k8s_flatcar_machines.tf Normal file
View file

@ -0,0 +1,164 @@
# A worker K3s version bump replaces every worker server
# (referenced from replace_triggered_by below).
resource "null_resource" "worker-config" {
  triggers = {
    version = var.worker_k3s_version
  }
}

# Provisioning key pair: generated in state, uploaded to Hetzner, and
# written to .ssh/ (gitignored) for manual debugging.
resource "tls_private_key" "provisioning" {
  algorithm = "RSA"
  rsa_bits  = 4096
}

resource "hcloud_ssh_key" "provisioning_key" {
  name       = "Provisioning key for hcloud cluster"
  public_key = tls_private_key.provisioning.public_key_openssh
}

resource "local_file" "provisioning_key" {
  filename             = "${path.module}/.ssh/provisioning_private_key.pem"
  content              = tls_private_key.provisioning.private_key_pem
  directory_permission = "0700"
  file_permission      = "0400"
}

resource "local_file" "provisioning_key_pub" {
  filename             = "${path.module}/.ssh/provisioning_key.pub"
  content              = tls_private_key.provisioning.public_key_openssh
  directory_permission = "0700"
  file_permission      = "0440"
}

# Bumping `generation` in the k3s_workers variable forces a rebuild of a
# single worker.
resource "null_resource" "machine_generation" {
  for_each = var.k3s_workers

  triggers = {
    timestamp = "${each.value.generation}"
  }
}

# Spread workers across distinct physical hosts.
resource "hcloud_placement_group" "k3s_machines" {
  name = "k3s-machines"
  type = "spread"

  labels = {
    "cluster" = "icb4dc0.de"
  }
}

resource "hcloud_server" "machine" {
  for_each = var.k3s_workers

  name        = each.key
  server_type = each.value.server_type
  location    = each.value.location
  # Bootstrap image only: the server boots into the rescue system and
  # flatcar-install overwrites the disk below.
  image              = "ubuntu-22.04"
  placement_group_id = hcloud_placement_group.k3s_machines.id
  backups            = false

  lifecycle {
    replace_triggered_by = [
      null_resource.worker-config,
      null_resource.machine_generation[each.key]
    ]
  }

  ssh_keys = [
    hcloud_ssh_key.provisioning_key.id,
    hcloud_ssh_key.default.id
  ]

  labels = {
    "node_type" = "worker"
    "cluster"   = "icb4dc0.de"
  }

  network {
    network_id = hcloud_network.k8s_net.id
    ip         = each.value.private_ip
  }

  # Workers get a public IPv4 but no IPv6.
  public_net {
    ipv4_enabled = true
    ipv6_enabled = false
  }

  # boot into rescue OS
  rescue = "linux64"

  connection {
    host        = self.ipv4_address
    agent       = false
    private_key = tls_private_key.provisioning.private_key_pem
    timeout     = "5m"
  }

  provisioner "file" {
    source      = "${path.module}/configs/cp/traefik.yaml"
    destination = "/root/traefik.yaml"
  }

  provisioner "file" {
    content     = data.ct_config.machine-ignitions[each.key].rendered
    destination = "/root/ignition.json"
  }

  # Install Flatcar from the rescue system. The reboot drops the SSH
  # connection, hence on_failure = continue.
  provisioner "remote-exec" {
    inline = [
      "set -ex",
      "apt-get install -y gawk",
      "curl -fsSLO --retry-delay 1 --retry 60 --retry-connrefused --retry-max-time 60 --connect-timeout 20 https://raw.githubusercontent.com/flatcar/init/flatcar-master/bin/flatcar-install",
      "chmod +x flatcar-install",
      "./flatcar-install -s -i /root/ignition.json -C ${var.flatcar_release_channel}",
      "reboot",
    ]
    on_failure = continue
  }

  provisioner "remote-exec" {
    connection {
      host        = self.ipv4_address
      private_key = tls_private_key.provisioning.private_key_pem
      timeout     = "3m"
      user        = "core"
    }

    inline = [
      "sudo hostnamectl set-hostname ${self.name}",
    ]
  }
}

# Best-effort `kubectl drain` before a worker is destroyed/replaced.
resource "null_resource" "machine-drainable" {
  for_each = var.k3s_workers

  lifecycle {
    replace_triggered_by = [hcloud_server.machine[each.key]]
  }

  provisioner "local-exec" {
    when       = destroy
    on_failure = continue
    command    = "kubectl drain --delete-emptydir-data=true --ignore-daemonsets=true ${each.key}"
  }
}

# Renders configs/workers/k3s-flatcar.yaml (Butane) into Ignition JSON.
data "ct_config" "machine-ignitions" {
  for_each = var.k3s_workers

  strict = true
  content = templatefile(
    "${path.module}/configs/workers/k3s-flatcar.yaml",
    {
      "host"         = "${each.key}"
      "k3s_token"    = "${var.k3s_token}"
      "node_ip"      = "${each.value.private_ip}"
      "k3s_version"  = "${var.worker_k3s_version}"
      "storage_node" = each.value.storage_node
    }
  )

  snippets = [
    templatefile(
      "${path.module}/configs/core-user.yaml.tmpl",
      {
        ssh_keys = jsonencode(concat(var.ssh_keys, [tls_private_key.provisioning.public_key_openssh]))
      }
    )
  ]
}

21
k8s_network.tf Normal file
View file

@ -0,0 +1,21 @@
# Private network shared by all cluster nodes.
resource "hcloud_network" "k8s_net" {
  name     = "k8s-net"
  ip_range = "172.16.0.0/12"
}

# Node subnet; the private_ip values in vms.auto.tfvars come from this range.
resource "hcloud_network_subnet" "k8s_internal" {
  network_id   = hcloud_network.k8s_net.id
  type         = "cloud"
  network_zone = "eu-central"
  ip_range     = "172.23.2.0/23"
}

# Management keys; "default" is attached to every server alongside the
# generated provisioning key.
resource "hcloud_ssh_key" "default" {
  name       = "Default Management"
  public_key = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKfHZaI0F5GjAcrM8hjWqwMfULDkAZ2TOIBTQtRocg1F id_ed25519"
}

resource "hcloud_ssh_key" "yubikey" {
  name       = "Yubikey"
  public_key = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDQoNCLuHgcaDn4JTjCeQKJsIsYU0Jmub5PUMzIIZbUBb+TGMh6mCAY/UbYaq/n4jVnskXopzPGJbx4iPBG5HrNzqYZqMjkk8uIeeT0mdIcNv9bXxuCxCH1iHZF8LlzIZCmQ0w3X6VQ1izcJgvjrAYzbHN3gqCHOXtNkqIUkwaadIWCEjg33OVSlM4yrIDElr6+LHzv84VNh/PhemixCVVEMJ83GjhDtpApMg9WWW3es6rpJn4TlYEMV+aPNU4ZZEWFen/DFBKoX+ulkiJ8CwpY3eJxSzlBijs5ZKH89OOk/MXN1lnREElFqli+jE8EbZKQzi59Zmx8ZOb52qVNot8XZT0Un4EttAIEeE8cETqUC4jK+6RbUrsXtclVbU9i57LWRpl65LYSIJEFmkTxvYdkPXqGbvlW024IjgSo8kds121w95+Rpo6419cSYsQWowS8+aXfEv2Q8SE81QH7ObYfWFXsPBAmmNleQNN3E5HOoaxpWQjv3aTUGuxm4PCzKLdP0LsHmTfGJB7Priaj+9i8xLjDWe7zXDde2Gp9FmdedDr06uEkVSRFnS35Dwfd7M7xP6NsilfMOdWzJWWy/BAYxtnWcrEFxhaEr4vgs8Ub+KBtKhr740x3Mr8up+mythConAs4LOj37lWK4kJ8cI7TXjcSJi9nTIPd39us7tp3Aw=="
}

7
main.tf Normal file
View file

@ -0,0 +1,7 @@
# Provider configuration; credentials are injected via TF_VAR_* environment
# variables (see tf.sh).
provider "hcloud" {
  token = var.hcloud_token
}

provider "cloudflare" {
  api_token = var.cloudflare_api_token
}

14
tf.sh Executable file
View file

@ -0,0 +1,14 @@
#!/usr/bin/env bash
# Load secrets from the rbw (Bitwarden CLI) vault into the environment
# expected by this OpenTofu configuration, then invoke tofu with all
# arguments forwarded.
#
# Fail fast: abort on any failed lookup instead of silently exporting
# empty values. Plain assignments (not `export VAR=$(...)`) are used so a
# command substitution's non-zero exit status is not masked.
set -euo pipefail

# s3-backend credentials for the R2 state bucket (see versions.tf).
AWS_ACCESS_KEY="$(rbw get -f username "CloudFlare TFState")"
AWS_SECRET_KEY="$(rbw get "CloudFlare TFState")"
HETZNER_DNS_API_TOKEN="$(rbw get -f "API Token" "Hetzner DNS")"

# TF_VAR_* inputs consumed by vars.tf.
TF_VAR_hcloud_token="$(rbw get "HCloud API")"
TF_VAR_k3s_token="$(rbw get "K3s Token")"
TF_VAR_k3s_backup_access_key="$(rbw get -f username "K3s Backup")"
TF_VAR_k3s_backup_secret_key="$(rbw get "K3s Backup")"
TF_VAR_k3s_backup_endpoint="$(rbw get -f Endpoint "K3s Backup")"
TF_VAR_cloudflare_api_token="$(rbw get -f "DNS API Token" "CloudFlare")"
TF_VAR_cloudflare_account_id="$(rbw get -f "Account ID" "CloudFlare")"

export AWS_ACCESS_KEY AWS_SECRET_KEY HETZNER_DNS_API_TOKEN
export TF_VAR_hcloud_token TF_VAR_k3s_token
export TF_VAR_k3s_backup_access_key TF_VAR_k3s_backup_secret_key TF_VAR_k3s_backup_endpoint
export TF_VAR_cloudflare_api_token TF_VAR_cloudflare_account_id

# "$@" (quoted) forwards arguments verbatim; the original's unquoted $@
# word-split arguments containing spaces.
tofu "$@"

102
vars.tf Normal file
View file

@ -0,0 +1,102 @@
# Hetzner Cloud API token.
variable "hcloud_token" {
  type      = string
  sensitive = true
}

variable "cloudflare_api_token" {
  type      = string
  sensitive = true
}

variable "cloudflare_account_id" {
  type      = string
  sensitive = true
}

# Shared join token used by both K3s server and agents (K3S_TOKEN).
variable "k3s_token" {
  type      = string
  sensitive = true
}

# Credentials and endpoint for the Litestream s3 replica of the K3s
# state database (see configs/cp/litestream.yml).
variable "k3s_backup_access_key" {
  sensitive = true
  type      = string
}

variable "k3s_backup_secret_key" {
  sensitive = true
  type      = string
}

variable "k3s_backup_endpoint" {
  type = string
}

variable "litestream_version" {
  type    = string
  default = "v0.3.13"
}

# Bumping either version replaces the affected servers (see the
# null_resource triggers in k8s_control_plane.tf / k8s_flatcar_machines.tf).
variable "control_plane_k3s_version" {
  type    = string
  default = "v1.30.4+k3s1"
}

variable "worker_k3s_version" {
  type    = string
  default = "v1.30.4+k3s1"
}

# Additional TLS SANs for the K3s API server certificate.
variable "k3s_sans" {
  type = list(string)
}

variable "garage_storage" {
  description = "Config of Garage storage"
  type = object({
    size     = number
    location = string
  })
  default = {
    size     = 20
    location = "hel1"
  }
}

# Control-plane nodes, keyed by host name; bump `generation` to rebuild one.
variable "k3s_control_plane" {
  type = map(object({
    server_type = string
    generation  = number
    private_ip  = string
    location    = string
    alias_ips   = set(string)
  }))
}

# Worker nodes, keyed by host name; bump `generation` to rebuild one.
variable "k3s_workers" {
  type = map(object({
    server_type  = string
    generation   = number
    private_ip   = string
    location     = string
    storage_node = bool
  }))
}

variable "ssh_keys" {
  type        = list(string)
  default     = []
  description = "Additional SSH public keys for user 'core'."
}

variable "flatcar_release_channel" {
  type        = string
  description = "Release channel"
  default     = "stable"

  validation {
    condition     = contains(["lts", "stable", "beta", "alpha"], var.flatcar_release_channel)
    error_message = "release_channel must be lts, stable, beta, or alpha."
  }
}

38
versions.tf Normal file
View file

@ -0,0 +1,38 @@
terraform {
  required_version = ">= 0.14"

  # Remote state in a Cloudflare R2 bucket (S3-compatible). The skip_* and
  # use_path_style flags are required because R2 is not real AWS S3.
  backend "s3" {
    bucket                      = "tfstate"
    key                         = "terraform.tfstate"
    region                      = "us-east-1"
    endpoint                    = "https://2df513adaee2eeae12106af900bed297.r2.cloudflarestorage.com"
    skip_metadata_api_check     = true
    skip_region_validation      = true
    skip_credentials_validation = true
    use_path_style              = true
    skip_s3_checksum            = true
  }

  required_providers {
    hcloud = {
      source  = "hetznercloud/hcloud"
      version = "1.48.0"
    }
    cloudflare = {
      source  = "cloudflare/cloudflare"
      version = "4.40.0"
    }
    ct = {
      source  = "poseidon/ct"
      version = "0.13.0"
    }
    null = {
      source  = "hashicorp/null"
      version = "~> 3.2.2"
    }
    # local and tls were previously resolved only implicitly (used by
    # local_file / tls_private_key; both appear unconstrained in
    # .terraform.lock.hcl). Pinned here so `tofu init` stays reproducible.
    local = {
      source  = "hashicorp/local"
      version = "~> 2.5"
    }
    tls = {
      source  = "hashicorp/tls"
      version = "~> 4.0"
    }
  }
}

69
vms.auto.tfvars Normal file
View file

@ -0,0 +1,69 @@
# Node inventory. Bump a node's `generation` to force its rebuild
# (wired to replace_triggered_by via null_resource triggers).

# Control plane: a single ARM (cax11) server in Helsinki at the fixed
# private IP the agents join (172.23.2.10).
k3s_control_plane = {
  "cp1-cax11-hel1" = {
    server_type = "cax11",
    generation  = 5
    private_ip  = "172.23.2.10"
    location    = "hel1"
    alias_ips   = []
  }
}

# Extra TLS SANs for the K3s API server certificate.
k3s_sans = [
  "127.0.0.1",
  "2a01:4f9:c012:7d4b::1",
  "k8s.icb4dc0.de"
]

# Workers; storage_node controls the k8s.icb4dc0.de/storage-node label.
k3s_workers = {
  "w1-cx22-hel1" = {
    server_type = "cx22"
    generation  = 1
    private_ip  = "172.23.2.20"
    location    = "hel1"
    storage_node = false
  }
  "w2-cax11-hel1" = {
    server_type = "cax11"
    generation  = 1
    private_ip  = "172.23.2.21"
    location    = "hel1"
    storage_node = true
  }
  "w3-cax11-hel1" = {
    server_type = "cax11"
    generation  = 1
    private_ip  = "172.23.2.22"
    location    = "hel1"
    storage_node = true
  }
  "w4-cax11-hel1" = {
    server_type = "cax11"
    generation  = 1
    private_ip  = "172.23.2.23"
    location    = "hel1"
    storage_node = true
  }
  "w5-cax11-hel1" = {
    server_type = "cax11"
    generation  = 1
    private_ip  = "172.23.2.24"
    location    = "hel1"
    storage_node = true
  }
  "w6-cax11-hel1" = {
    server_type = "cax11"
    generation  = 1
    private_ip  = "172.23.2.25"
    location    = "hel1"
    storage_node = true
  }
}

ssh_keys = ["ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDQoNCLuHgcaDn4JTjCeQKJsIsYU0Jmub5PUMzIIZbUBb+TGMh6mCAY/UbYaq/n4jVnskXopzPGJbx4iPBG5HrNzqYZqMjkk8uIeeT0mdIcNv9bXxuCxCH1iHZF8LlzIZCmQ0w3X6VQ1izcJgvjrAYzbHN3gqCHOXtNkqIUkwaadIWCEjg33OVSlM4yrIDElr6+LHzv84VNh/PhemixCVVEMJ83GjhDtpApMg9WWW3es6rpJn4TlYEMV+aPNU4ZZEWFen/DFBKoX+ulkiJ8CwpY3eJxSzlBijs5ZKH89OOk/MXN1lnREElFqli+jE8EbZKQzi59Zmx8ZOb52qVNot8XZT0Un4EttAIEeE8cETqUC4jK+6RbUrsXtclVbU9i57LWRpl65LYSIJEFmkTxvYdkPXqGbvlW024IjgSo8kds121w95+Rpo6419cSYsQWowS8+aXfEv2Q8SE81QH7ObYfWFXsPBAmmNleQNN3E5HOoaxpWQjv3aTUGuxm4PCzKLdP0LsHmTfGJB7Priaj+9i8xLjDWe7zXDde2Gp9FmdedDr06uEkVSRFnS35Dwfd7M7xP6NsilfMOdWzJWWy/BAYxtnWcrEFxhaEr4vgs8Ub+KBtKhr740x3Mr8up+mythConAs4LOj37lWK4kJ8cI7TXjcSJi9nTIPd39us7tp3Aw== cardno:24_781_961"]

flatcar_release_channel = "stable"