# Re-provision workers whenever the desired k3s version changes
# (consumed via replace_triggered_by on hcloud_server.machine).
resource "null_resource" "worker-config" {
  triggers = {
    version = var.worker_k3s_version
  }
}

# Throwaway SSH keypair used only for provisioning access to the machines.
resource "tls_private_key" "provisioning" {
  algorithm = "RSA"
  rsa_bits  = 4096
}

resource "hcloud_ssh_key" "provisioning_key" {
  name       = "Provisioning key for hcloud cluster"
  public_key = tls_private_key.provisioning.public_key_openssh
}

# Persist the provisioning keypair locally so an operator can SSH in manually.
resource "local_file" "provisioning_key" {
  filename             = "${path.module}/.ssh/provisioning_private_key.pem"
  content              = tls_private_key.provisioning.private_key_pem
  directory_permission = "0700"
  file_permission      = "0400"
}

resource "local_file" "provisioning_key_pub" {
  filename             = "${path.module}/.ssh/provisioning_key.pub"
  content              = tls_private_key.provisioning.public_key_openssh
  directory_permission = "0700"
  file_permission      = "0440"
}

# Per-machine replacement trigger: bumping `generation` in var.k3s_workers
# forces the corresponding server to be rebuilt.
resource "null_resource" "machine_generation" {
  for_each = var.k3s_workers

  triggers = {
    # triggers is map(string); the generation value is converted implicitly.
    timestamp = each.value.generation
  }
}

# Spread placement so worker VMs land on distinct physical hosts.
resource "hcloud_placement_group" "k3s_machines" {
  name = "k3s-machines"
  type = "spread"
  labels = {
    "cluster" = "icb4dc0.de"
  }
}

# Worker VM: boots Ubuntu in rescue mode, installs Flatcar to disk with the
# rendered Ignition config, then reboots into the installed OS.
resource "hcloud_server" "machine" {
  for_each = var.k3s_workers

  name               = each.key
  server_type        = each.value.server_type
  location           = each.value.location
  image              = "ubuntu-22.04"
  placement_group_id = hcloud_placement_group.k3s_machines.id
  backups            = false

  lifecycle {
    # Rebuild on k3s version bumps or per-machine generation bumps.
    replace_triggered_by = [
      null_resource.worker-config,
      null_resource.machine_generation[each.key]
    ]
  }

  ssh_keys = [
    hcloud_ssh_key.provisioning_key.id,
    hcloud_ssh_key.default.id
  ]

  labels = {
    "node_type" = "worker"
    "cluster"   = "icb4dc0.de"
  }

  network {
    network_id = hcloud_network.k8s_net.id
    ip         = each.value.private_ip
  }

  public_net {
    ipv4_enabled = true
    ipv6_enabled = false
  }

  # boot into rescue OS
  rescue = "linux64"

  # Connection used by the provisioners below while in the rescue OS
  # (default user is root).
  connection {
    host        = self.ipv4_address
    agent       = false
    private_key = tls_private_key.provisioning.private_key_pem
    timeout     = "5m"
  }

  # NOTE(review): this copies a control-plane ("cp") Traefik config onto a
  # worker node and nothing in the install steps below consumes it — confirm
  # it is still needed here.
  provisioner "file" {
    source      = "${path.module}/configs/cp/traefik.yaml"
    destination = "/root/traefik.yaml"
  }

  provisioner "file" {
    content     = data.ct_config.machine-ignitions[each.key].rendered
    destination = "/root/ignition.json"
  }

  # From the rescue OS: fetch the flatcar-install script, install Flatcar to
  # disk with the Ignition config, then reboot. on_failure = continue because
  # the reboot drops the SSH connection, which would otherwise fail the apply.
  provisioner "remote-exec" {
    inline = [
      "set -ex",
      "apt-get install -y gawk",
      "curl -fsSLO --retry-delay 1 --retry 60 --retry-connrefused --retry-max-time 60 --connect-timeout 20 https://raw.githubusercontent.com/flatcar/init/flatcar-master/bin/flatcar-install",
      "chmod +x flatcar-install",
      "./flatcar-install -s -i /root/ignition.json -C ${var.flatcar_release_channel}",
      "reboot",
    ]

    on_failure = continue
  }

  # After rebooting into Flatcar, reconnect as the `core` user and set the
  # hostname to the machine's key.
  provisioner "remote-exec" {
    connection {
      host        = self.ipv4_address
      private_key = tls_private_key.provisioning.private_key_pem
      timeout     = "3m"
      user        = "core"
    }

    inline = [
      "sudo hostnamectl set-hostname ${self.name}",
    ]
  }
}

# Best-effort drain of a node before its server is destroyed/replaced.
# Destroy-time provisioners may only reference self, count.index, or each.key.
resource "null_resource" "machine-drainable" {
  for_each = var.k3s_workers

  lifecycle {
    replace_triggered_by = [hcloud_server.machine[each.key]]
  }

  provisioner "local-exec" {
    when       = destroy
    on_failure = continue
    command    = "kubectl drain --delete-emptydir-data=true --ignore-daemonsets=true ${each.key}"
  }
}

# Render per-node Ignition configs from the Container Linux Config template,
# with the core user's SSH keys (including the provisioning key) appended.
data "ct_config" "machine-ignitions" {
  for_each = var.k3s_workers

  strict = true
  content = templatefile(
    "${path.module}/configs/workers/k3s-flatcar.yaml",
    {
      "host"         = each.key
      "k3s_token"    = var.k3s_token
      "node_ip"      = each.value.private_ip
      "k3s_version"  = var.worker_k3s_version
      "storage_node" = each.value.storage_node
    }
  )
  snippets = [
    templatefile(
      "${path.module}/configs/core-user.yaml.tmpl",
      {
        ssh_keys = jsonencode(concat(var.ssh_keys, [tls_private_key.provisioning.public_key_openssh]))
      }
    )
  ]
}