chore: add talos reset task
Signed-off-by: Devin Buhl <[email protected]>
onedr0p committed Mar 13, 2024
1 parent ac7e240 commit bc42350
Showing 1 changed file with 63 additions and 37 deletions.
100 changes: 63 additions & 37 deletions .taskfiles/Talos/Taskfile.yaml
@@ -96,39 +96,24 @@ tasks:
- test -f {{.KUBERNETES_DIR}}/{{.cluster}}/bootstrap/talos/matchbox/assets/worker.secret.sops.yaml
- talosctl --context {{.cluster}} --nodes {{.node}} get machineconfig >/dev/null 2>&1

bootstrap-matchbox:
desc: Bootstrap required Matchbox configuration to Vyos for PXE Boot
dir: "{{.KUBERNETES_DIR}}/{{.cluster}}/bootstrap/talos/matchbox"
cmds:
- ssh -l {{.VYOS_USER}} {{.VYOS_ADDR}} "sudo mkdir -p {{.VYOS_MATCHBOX_DIR}}/{groups,profiles,assets}"
- ssh -l {{.VYOS_USER}} {{.VYOS_ADDR}} "sudo chown -R {{.VYOS_USER}}:users {{.VYOS_MATCHBOX_DIR}}/{groups,profiles,assets}"
- for: ["kernel-amd64", "initramfs-amd64.xz"]
cmd: |
curl -skL https://factory.talos.dev/image/{{.TALOS_SCHEMATIC_ID}}/{{.TALOS_VERSION}}/{{.ITEM}} | \
curl -skT - -u "{{.VYOS_USER}}:" \
sftp://{{.VYOS_ADDR}}/{{.VYOS_MATCHBOX_ASSETS_DIR}}/{{.ITEM}}
- for: ["controller.secret.sops.yaml", "worker.secret.sops.yaml"]
cmd: |
sops -d assets/{{.ITEM}} | \
envsubst | curl -skT - -u "{{.VYOS_USER}}:" \
sftp://{{.VYOS_ADDR}}/{{.VYOS_MATCHBOX_ASSETS_DIR}}/{{.ITEM | replace ".secret.sops.yaml" ".yaml"}}
- find ./groups -type f | xargs -I{} curl -skT {} -u "{{.VYOS_USER}}:" sftp://{{.VYOS_ADDR}}/{{.VYOS_MATCHBOX_GROUPS_DIR}}/
- find ./profiles -type f | xargs -I{} curl -skT {} -u "{{.VYOS_USER}}:" sftp://{{.VYOS_ADDR}}/{{.VYOS_MATCHBOX_PROFILES_DIR}}/
- ssh -l {{.VYOS_USER}} {{.VYOS_ADDR}} -t /opt/vyatta/bin/vyatta-op-cmd-wrapper "restart container matchbox"
- curl --silent --output /dev/null --connect-timeout 10 --retry 10 --retry-delay 2 http://{{.VYOS_MATCHBOX_ADDR}}/assets/controller.yaml
env:
TALOS_VERSION: "{{.TALOS_VERSION}}"
TALOS_SCHEMATIC_ID: "{{.TALOS_SCHEMATIC_ID}}"
KUBERNETES_VERSION: "{{.KUBERNETES_VERSION}}"
upgrade:
desc: Upgrade Talos on a node
cmd: bash {{.TALOS_SCRIPTS_DIR}}/upgrade.sh "{{.cluster}}" "{{.node}}" "{{.TALOS_SCHEMATIC_ID}}:{{.TALOS_VERSION}}" "{{.rollout}}"
vars:
rollout: '{{.rollout | default "false"}}'
requires:
vars: ["cluster"]
vars: ["cluster", "node"]
preconditions:
- test -f {{.KUBERNETES_DIR}}/{{.cluster}}/talosconfig
- talosctl --context {{.cluster}} config info >/dev/null 2>&1
- talosctl --context {{.cluster}} --nodes {{.node}} get machineconfig >/dev/null 2>&1

upgrade-rollout:
desc: Rollout Talos upgrade on all nodes
cmds:
- flux --context {{.cluster}} suspend kustomization --all
- kubectl cnpg --context {{.cluster}} maintenance set --reusePVC --all-namespaces
- for: { var: nodes }
- for: { var: nodes, split: "," }
task: upgrade
vars:
cluster: "{{.cluster}}"
@@ -141,34 +126,75 @@ tasks:
cluster: "{{.cluster}}"
vars:
nodes:
sh: talosctl --context {{.cluster}} config info --output json | jq --join-output '[.nodes[]] | join(" ")'
sh: talosctl --context {{.cluster}} config info --output json | jq --join-output '[.nodes[]] | join(",")'
requires:
vars: ["cluster"]
preconditions:
- test -f {{.KUBERNETES_DIR}}/{{.cluster}}/talosconfig
- talosctl --context {{.cluster}} config info >/dev/null 2>&1
- talosctl --context {{.cluster}} --nodes {{.nodes}} get machineconfig >/dev/null 2>&1

upgrade:
desc: Upgrade Talos on a node
cmd: bash {{.TALOS_SCRIPTS_DIR}}/upgrade.sh "{{.cluster}}" "{{.node}}" "{{.TALOS_SCHEMATIC_ID}}:{{.TALOS_VERSION}}" "{{.rollout}}"
upgrade-k8s:
desc: Upgrade the clusters k8s version
cmd: talosctl --context {{.cluster}} --nodes {{.controller}} upgrade-k8s --to {{.KUBERNETES_VERSION}}
vars:
rollout: '{{.rollout | default "false"}}'
controller:
sh: talosctl --context {{.cluster}} config info --output json | jq --raw-output '.endpoints[0]'
requires:
vars: ["cluster"]
preconditions:
- test -f {{.KUBERNETES_DIR}}/{{.cluster}}/talosconfig
- talosctl --context {{.cluster}} config info >/dev/null 2>&1
- talosctl --context {{.cluster}} --nodes {{.node}} get machineconfig >/dev/null 2>&1

reset-node:
desc: Reset a Talos node and shut it down
prompt: Reset Talos '{{.node}}' node on the '{{.cluster}}' cluster ... continue?
cmd: talosctl --context {{.cluster}} reset --nodes {{.node}} --graceful=false
requires:
vars: ["cluster", "node"]
preconditions:
- test -f {{.KUBERNETES_DIR}}/{{.cluster}}/talosconfig
- talosctl --context {{.cluster}} config info >/dev/null 2>&1
- talosctl --context {{.cluster}} --nodes {{.node}} get machineconfig >/dev/null 2>&1

upgrade-k8s:
desc: Upgrade the clusters k8s version
cmd: talosctl --context {{.cluster}} --nodes {{.controller}} upgrade-k8s --to {{.KUBERNETES_VERSION}}
reset-cluster:
desc: Reset all the Talos nodes and shut 'em down
prompt: Reset Talos on the '{{.cluster}}' cluster ... continue?
cmd: talosctl --context {{.cluster}} reset --nodes {{.nodes}} --graceful=false
vars:
controller:
sh: talosctl --context {{.cluster}} config info --output json | jq --raw-output '.endpoints[0]'
nodes:
sh: talosctl --context {{.cluster}} config info --output json | jq --join-output '[.nodes[]] | join(",")'
requires:
vars: ["cluster"]
preconditions:
- test -f {{.KUBERNETES_DIR}}/{{.cluster}}/talosconfig
- talosctl --context {{.cluster}} config info >/dev/null 2>&1
- talosctl --context {{.cluster}} --nodes {{.node}} get machineconfig >/dev/null 2>&1
- talosctl --context {{.cluster}} --nodes {{.nodes}} get machineconfig >/dev/null 2>&1

bootstrap-matchbox:
desc: Bootstrap required Matchbox configuration to Vyos for PXE Boot
dir: "{{.KUBERNETES_DIR}}/{{.cluster}}/bootstrap/talos/matchbox"
cmds:
- ssh -l {{.VYOS_USER}} {{.VYOS_ADDR}} "sudo mkdir -p {{.VYOS_MATCHBOX_DIR}}/{groups,profiles,assets}"
- ssh -l {{.VYOS_USER}} {{.VYOS_ADDR}} "sudo chown -R {{.VYOS_USER}}:users {{.VYOS_MATCHBOX_DIR}}/{groups,profiles,assets}"
- for: ["kernel-amd64", "initramfs-amd64.xz"]
cmd: |
curl -skL https://factory.talos.dev/image/{{.TALOS_SCHEMATIC_ID}}/{{.TALOS_VERSION}}/{{.ITEM}} | \
curl -skT - -u "{{.VYOS_USER}}:" \
sftp://{{.VYOS_ADDR}}/{{.VYOS_MATCHBOX_ASSETS_DIR}}/{{.ITEM}}
- for: ["controller.secret.sops.yaml", "worker.secret.sops.yaml"]
cmd: |
sops -d assets/{{.ITEM}} | \
envsubst | curl -skT - -u "{{.VYOS_USER}}:" \
sftp://{{.VYOS_ADDR}}/{{.VYOS_MATCHBOX_ASSETS_DIR}}/{{.ITEM | replace ".secret.sops.yaml" ".yaml"}}
- find ./groups -type f | xargs -I{} curl -skT {} -u "{{.VYOS_USER}}:" sftp://{{.VYOS_ADDR}}/{{.VYOS_MATCHBOX_GROUPS_DIR}}/
- find ./profiles -type f | xargs -I{} curl -skT {} -u "{{.VYOS_USER}}:" sftp://{{.VYOS_ADDR}}/{{.VYOS_MATCHBOX_PROFILES_DIR}}/
- ssh -l {{.VYOS_USER}} {{.VYOS_ADDR}} -t /opt/vyatta/bin/vyatta-op-cmd-wrapper "restart container matchbox"
- curl --silent --output /dev/null --connect-timeout 10 --retry 10 --retry-delay 2 http://{{.VYOS_MATCHBOX_ADDR}}/assets/controller.yaml
env:
TALOS_VERSION: "{{.TALOS_VERSION}}"
TALOS_SCHEMATIC_ID: "{{.TALOS_SCHEMATIC_ID}}"
KUBERNETES_VERSION: "{{.KUBERNETES_VERSION}}"
requires:
vars: ["cluster"]
