Browse Source

🚀

master
Jan Koppe 1 year ago
commit
1115ab329e
Signed by: j GPG Key ID: BE935B0735A2129B
21 changed files with 249 additions and 0 deletions
  1. 4
      .gitignore
  2. 8
      ansible/ansible.cfg
  3. 8
      ansible/hosts
  4. 20
      ansible/provision.yaml
  5. 20
      ansible/roles/docker/tasks/main.yaml
  6. 3
      ansible/roles/swarm-init/meta/main.yaml
  7. 28
      ansible/roles/swarm-init/tasks/main.yaml
  8. 3
      ansible/roles/swarm-manager/meta/main.yaml
  9. 9
      ansible/roles/swarm-manager/tasks/main.yaml
  10. 3
      ansible/roles/swarm-worker/meta/main.yaml
  11. 9
      ansible/roles/swarm-worker/tasks/main.yaml
  12. 22
      ansible_inventory.tf
  13. 22
      cloud-init.tf
  14. 29
      domain.tf
  15. 5
      files/ansible_hosts.tpl
  16. 3
      files/meta_data.cfg
  17. 25
      files/user_data.cfg
  18. 3
      provider.tf
  19. 5
      template.tf
  20. 14
      variables.tf
  21. 6
      volume.tf

4
.gitignore

@ -0,0 +1,4 @@
# Local Ubuntu cloud image used as the libvirt base volume (see template.tf).
files/ubuntu.img
# Terraform working directory and local state files.
.terraform
*.tfstate
*.tfstate.backup

8
ansible/ansible.cfg

@ -0,0 +1,8 @@
[defaults]
# Inventory file generated by Terraform (see ansible_inventory.tf).
inventory = ./hosts
# Nodes are freshly provisioned VMs with new host keys; skip verification.
host_key_checking = False
# "deploy" user is created via cloud-init (files/user_data.cfg).
remote_user = deploy
forks = 20
[ssh_connection]
# Reduce the number of SSH round-trips per task.
pipelining = True

8
ansible/hosts

@ -0,0 +1,8 @@
# Bootstrap inventory; Terraform overwrites this file from
# files/ansible_hosts.tpl (see ansible_inventory.tf).
[managers]
swarm-1
swarm-2
swarm-3
# Workers are appended by Terraform when node_count > 3.
[workers]

20
ansible/provision.yaml

@ -0,0 +1,20 @@
---
# Provision a Docker Swarm cluster.
# Play order matters: Docker is installed everywhere first, then the
# swarm is bootstrapped on the first manager (which publishes the join
# tokens as host facts) before the remaining nodes join.

# Install Docker on every node.
- hosts: all
  become: true
  roles:
    - docker

# Initialize the swarm on the first manager only.
- hosts: managers[0]
  become: true
  roles:
    - swarm-init

# Join the remaining managers (pattern excludes the first manager).
- hosts: managers:!managers[0]
  become: true
  roles:
    - swarm-manager

# Join all worker nodes.
- hosts: workers
  become: true
  roles:
    - swarm-worker

20
ansible/roles/docker/tasks/main.yaml

@ -0,0 +1,20 @@
---
# Install Docker CE from the upstream Docker apt repository.

- name: install python3 docker wrapper
  # Needed so Ansible docker_* modules can run on the target host.
  apt:
    name:
      - python3-docker
    state: present

- name: install docker apt gpg key
  apt_key:
    url: https://download.docker.com/linux/ubuntu/gpg

- name: install docker apt repository
  apt_repository:
    repo: deb [arch=amd64] https://download.docker.com/linux/ubuntu bionic stable

- name: install docker
  apt:
    name:
      - docker-ce
      - docker-ce-cli
      - containerd.io
    state: present
    # The Docker repo was just added; refresh the index so the
    # docker-ce packages are resolvable on a fresh host.
    update_cache: true

3
ansible/roles/swarm-init/meta/main.yaml

@ -0,0 +1,3 @@
---
# Ensure Docker is installed before this role's tasks run.
dependencies:
- docker

28
ansible/roles/swarm-init/tasks/main.yaml

@ -0,0 +1,28 @@
---
# Bootstrap the swarm on this node (the first manager) and expose the
# join tokens as host facts for the swarm-manager/swarm-worker roles
# to read via hostvars.

- name: get node status
  # The {{ '{{' }} escaping emits literal Go-template braces for docker's -f flag.
  command: "docker info -f '{{ '{{' }}.Swarm.LocalNodeState{{ '}}' }}'"
  register: local_node_state
  changed_when: false

- name: initialize swarm
  # Only initialize once; an already-joined node reports "active".
  when: local_node_state.stdout == "inactive"
  command: "docker swarm init --advertise-addr {{ hostvars[inventory_hostname]['ansible_default_ipv4']['address'] }}"

- name: get join token for managers
  command: "docker swarm join-token -q manager"
  register: token_managers
  changed_when: false

- name: set join token for managers as host fact
  set_fact:
    swarm_token_managers: "{{ token_managers.stdout }}"

- name: get join token for workers
  command: "docker swarm join-token -q worker"
  register: token_workers
  changed_when: false

- name: set join token for workers as host fact
  set_fact:
    swarm_token_workers: "{{ token_workers.stdout }}"

3
ansible/roles/swarm-manager/meta/main.yaml

@ -0,0 +1,3 @@
---
# Ensure Docker is installed before this role's tasks run.
dependencies:
- docker

9
ansible/roles/swarm-manager/tasks/main.yaml

@ -0,0 +1,9 @@
---
# Join this node to the swarm as a manager, using the token and address
# published as host facts by the swarm-init role on the first manager.

- name: get node status
  # The {{ '{{' }} escaping emits literal Go-template braces for docker's -f flag.
  command: "docker info -f '{{ '{{' }}.Swarm.LocalNodeState{{ '}}' }}'"
  register: local_node_state
  changed_when: false

- name: join swarm as manager
  # Skip nodes that are already part of a swarm.
  when: local_node_state.stdout == "inactive"
  command: "docker swarm join --advertise-addr {{ hostvars[inventory_hostname]['ansible_default_ipv4']['address'] }} --token {{ hostvars[groups['managers'][0]]['swarm_token_managers'] }} {{ hostvars[groups['managers'][0]]['ansible_default_ipv4']['address'] }}:2377"

3
ansible/roles/swarm-worker/meta/main.yaml

@ -0,0 +1,3 @@
---
# Ensure Docker is installed before this role's tasks run.
dependencies:
- docker

9
ansible/roles/swarm-worker/tasks/main.yaml

@ -0,0 +1,9 @@
---
# Join this node to the swarm as a worker, using the token and address
# published as host facts by the swarm-init role on the first manager.

- name: get node status
  # The {{ '{{' }} escaping emits literal Go-template braces for docker's -f flag.
  command: "docker info -f '{{ '{{' }}.Swarm.LocalNodeState{{ '}}' }}'"
  register: local_node_state
  changed_when: false

- name: join swarm as worker
  # Skip nodes that are already part of a swarm.
  when: local_node_state.stdout == "inactive"
  command: "docker swarm join --advertise-addr {{ hostvars[inventory_hostname]['ansible_default_ipv4']['address'] }} --token {{ hostvars[groups['managers'][0]]['swarm_token_workers'] }} {{ hostvars[groups['managers'][0]]['ansible_default_ipv4']['address'] }}:2377"

22
ansible_inventory.tf

@ -0,0 +1,22 @@
# Render the Ansible inventory from the created domains and write it to
# ansible/hosts whenever the rendered content changes.
# Uses Terraform 0.12 syntax consistently with the rest of the module
# (the original mixed deprecated 0.11-style quoted depends_on and
# interpolation-only "${...}" expressions).

data "template_file" "inventory" {
  template = file("${path.module}/files/ansible_hosts.tpl")

  depends_on = [
    libvirt_domain.domain,
  ]

  vars = {
    # The first three nodes are swarm managers; the rest are workers.
    managers = join("\n", slice(libvirt_domain.domain.*.name, 0, 3))
    workers  = join("\n", slice(libvirt_domain.domain.*.name, 3, length(libvirt_domain.domain.*.name)))
  }
}

resource "null_resource" "cmd" {
  triggers = {
    # Re-run the provisioner whenever the rendered inventory changes.
    template_rendered = data.template_file.inventory.rendered
  }

  provisioner "local-exec" {
    command = "echo '${data.template_file.inventory.rendered}' > ansible/hosts"
  }
}

22
cloud-init.tf

@ -0,0 +1,22 @@
# Per-node cloud-init seed ISOs: a shared user_data template (with the
# local SSH public key injected) plus a per-node meta_data that sets
# the hostname.

data "template_file" "user_data" {
  count    = var.node_count
  template = file("${path.module}/files/user_data.cfg")

  vars = {
    pubkey = file("~/.ssh/id_rsa.pub")
  }
}

data "template_file" "meta_data" {
  count    = var.node_count
  template = file("${path.module}/files/meta_data.cfg")

  vars = {
    hostname = "swarm-${count.index + 1}"
  }
}

resource "libvirt_cloudinit_disk" "cidata" {
  count     = var.node_count
  name      = "swarm-${count.index + 1}-cidata.iso"
  user_data = data.template_file.user_data[count.index].rendered
  meta_data = data.template_file.meta_data[count.index].rendered
}
}

29
domain.tf

@ -0,0 +1,29 @@
# One libvirt VM per swarm node, booted from its cloned root volume
# with the matching cloud-init seed ISO attached.
resource "libvirt_domain" "domain" {
  count  = var.node_count
  name   = "swarm-${count.index + 1}"
  vcpu   = var.node_vcpu
  memory = var.node_memory

  disk {
    volume_id = libvirt_volume.root[count.index].id
  }

  cloudinit = libvirt_cloudinit_disk.cidata[count.index].id

  # Serial console (e.g. for `virsh console`).
  console {
    type        = "pty"
    target_port = "0"
    target_type = "serial"
  }

  # Additional virtio console.
  console {
    type        = "pty"
    target_type = "virtio"
    target_port = "1"
  }

  # Attach each node to the host bridge br0.
  network_interface {
    bridge = "br0"
  }
}

5
files/ansible_hosts.tpl

@ -0,0 +1,5 @@
# Inventory template rendered by Terraform (ansible_inventory.tf);
# ${managers}/${workers} expand to newline-joined hostname lists.
[managers]
${managers}
[workers]
${workers}

3
files/meta_data.cfg

@ -0,0 +1,3 @@
#cloud-config
# NoCloud meta-data template: Terraform substitutes ${hostname}
# per node (see cloud-init.tf).
local-hostname: ${hostname}

25
files/user_data.cfg

@ -0,0 +1,25 @@
#cloud-config
# vim: syntax=yaml
# Cloud-init user-data shared by all nodes; Terraform substitutes
# ${pubkey} with the local SSH public key (see cloud-init.tf).
debug: True
disable_root: False
ssh_deletekeys: False
ssh_pwauth: True
timezone: Europe/Berlin
# Upgrade all packages on first boot, rebooting if required.
package_upgrade: True
package_reboot_if_required: True
packages:
  - python3
users:
  - name: deploy
    primary_group: deploy
    groups: sudo
    shell: /bin/bash
    uid: 1000
    lock_passwd: False
    # NOTE(review): MD5-crypt ($1$) password hash committed to the repo;
    # consider a SHA-512 ($6$) hash or key-only authentication.
    passwd: "$1$sEv2FKoG$2SAYYCepb/A3cvs/dYl8d."
    ssh_authorized_keys:
      - "${pubkey}"

3
provider.tf

@ -0,0 +1,3 @@
# libvirt provider, connecting to the remote hypervisor "luna" over SSH.
provider "libvirt" {
  uri = "qemu+ssh://j@luna/system"
}

5
template.tf

@ -0,0 +1,5 @@
# Base image volume created from the local Ubuntu cloud image; per-node
# root disks reference it via base_volume_id (see volume.tf).
resource "libvirt_volume" "template" {
  name   = "swarm-template.qcow2"
  source = "${path.module}/files/ubuntu.img"
  format = "qcow2"
}

14
variables.tf

@ -0,0 +1,14 @@
# Cluster sizing knobs.

variable "node_count" {
  description = "Number of swarm nodes to create (the first three become managers; see ansible_inventory.tf)."
  type        = number
  default     = 3
}

variable "node_vcpu" {
  description = "Virtual CPUs per node."
  type        = number
  default     = 8
}

variable "node_memory" {
  description = "Memory per node, in MiB (libvirt provider convention)."
  type        = number
  default     = 4096
}

6
volume.tf

@ -0,0 +1,6 @@
# Per-node root disk, backed by the shared template volume (template.tf).
resource "libvirt_volume" "root" {
  count          = var.node_count
  name           = "swarm-${count.index + 1}-root.qcow2"
  base_volume_id = libvirt_volume.template.id
  # 20 GiB. The provider's size attribute is a number of bytes;
  # the original passed it as a string.
  size = 21474836480
}
Loading…
Cancel
Save