Compare commits

31 Commits

Author SHA1 Message Date
Nathaniel Landau
5e35cf4400 fix: add ladders 2023-11-08 13:50:37 -05:00
Nathaniel Landau
7f94a62989 fix(sabnzbd): increase memory/cpu and add cron docker plugin 2023-10-22 16:52:40 -04:00
Nathaniel Landau
95f4c129ba build(deps): update dependencies 2023-10-21 22:24:01 -04:00
Nathaniel Landau
9a46bc9ebc feat(sabnzbd): add nomad job for sabnzbd 2023-10-21 22:19:23 -04:00
Nathaniel Landau
5b426da3ae feat: deprecate consul and use nomad service discovery 2023-10-21 22:18:23 -04:00
Nathaniel Landau
041649cc5e build(deps): bump dependencies 2023-09-15 16:34:28 -04:00
Nathaniel Landau
ce0cb6c5f1 fix(valentina): add new env vars 2023-09-03 15:50:24 -04:00
Nathaniel Landau
98d9a5a86f build(precommit): add typos checks 2023-08-30 08:34:07 -04:00
Nathaniel Landau
f7ba237d0d build(deps): update dependencies 2023-08-28 09:14:37 -04:00
Nathaniel Landau
e134616692 fix(recyclarr): migrate to v5 2023-08-28 08:49:03 -04:00
Nathaniel Landau
9194190591 build(deps): bump dependencies 2023-08-08 09:30:49 -04:00
Nathaniel Landau
2bb55f3d51 fix(valentina): update environment variables 2023-08-08 09:26:58 -04:00
Nathaniel Landau
7365e8b3d6 fix(ansible): update transfer method config 2023-08-08 09:25:55 -04:00
Nathaniel Landau
87c2a4e1b4 fix(consul): pull image from hashicorp/consul 2023-08-08 09:25:04 -04:00
Nathaniel Landau
edd9704258 feat: add valentina discord bot 2023-06-18 13:27:38 -04:00
Nathaniel Landau
cb4a0e9f8a build: add exclude_paths to ansible-lint 2023-05-24 14:30:01 -04:00
Nathaniel Landau
57c1a42f66 fix(nomad): bump to v1.5.6 2023-05-22 10:03:11 -04:00
Nathaniel Landau
8499f5029b fix(proxy): update traefik version and bump RAM allocation 2023-05-22 09:34:19 -04:00
Nathaniel Landau
47288456a5 build(deps): bump dependencies 2023-05-22 09:33:10 -04:00
Nathaniel Landau
0f35061a2c fix(recyclarr): move config to Nomad task template 2023-04-25 11:40:17 -04:00
Nathaniel Landau
2842e27282 build(deps): bump dependencies 2023-04-25 11:32:55 -04:00
Nathaniel Landau
d36212b7d7 style: pass ansible-lint 2023-04-25 11:32:29 -04:00
Nathaniel Landau
76f4af703e build: poetry venv in project folder 2023-03-31 10:14:14 -04:00
Nathaniel Landau
9bb7eeb439 fix: bump versions 2023-03-29 16:18:26 -04:00
Nathaniel Landau
5526024244 fix(nomad): run nomad as root user to enable docker plugin on raspberry pis
Nomad runs as root rather than as the nomad user because the Docker driver does not start when cgroups v2 is enabled. More info: https://github.com/hashicorp/nomad/pull/16063
2023-03-16 22:44:52 -04:00
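For context, the workaround in this commit amounts to overriding the service user in Nomad's systemd unit. A minimal shell sketch of the idea (the drop-in path is an assumption; the playbook's actual change may template the unit file directly):

# Hypothetical sketch: run the Nomad agent as root via a systemd drop-in
# so the Docker task driver starts on cgroups-v2 hosts.
sudo mkdir -p /etc/systemd/system/nomad.service.d
printf '[Service]\nUser=root\nGroup=root\n' | sudo tee /etc/systemd/system/nomad.service.d/override.conf
sudo systemctl daemon-reload && sudo systemctl restart nomad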
Nathaniel Landau
ec52175c5c fix(nomad): authelia requires additional capabilities 2023-03-16 21:48:38 -04:00
Nathaniel Landau
be56f2a308 fix: revert to nomad v1.4.6 2023-03-13 21:29:09 -04:00
Nathaniel Landau
a757ff0cf2 build: add ignore file for ansible-lint 2023-03-13 10:25:20 -04:00
Nathaniel Landau
d6c155bef1 fix: bump versions 2023-03-12 16:52:16 -04:00
Nathaniel Landau
440d570c87 fix: bump authelia version 2023-02-12 14:41:05 -05:00
Nathaniel Landau
049267cec7 ci: add poe pb to run playbook 2023-02-12 14:40:47 -05:00
68 changed files with 3117 additions and 3026 deletions

32
.ansible-lint-ignore Normal file
View File

@@ -0,0 +1,32 @@
# This file contains ignored rule violations for ansible-lint
handlers/main.yml ignore-errors
handlers/main.yml name[casing]
main.yml name[casing]
main.yml name[missing]
tasks/backups.yml name[casing]
tasks/cluster_storage.yml name[casing]
tasks/consul.yml command-instead-of-module
tasks/consul.yml name[template]
tasks/consul.yml no-changed-when
tasks/debug.yml name[casing]
tasks/docker.yml name[casing]
tasks/docker.yml no-changed-when
tasks/interpolated_variables.yml name[casing]
tasks/logrotate.yml ignore-errors
tasks/logrotate.yml name[casing]
tasks/nomad.yml name[casing]
tasks/nomad.yml name[template]
tasks/orchestration_jobs.yml name[casing]
tasks/packages.yml ignore-errors
tasks/packages.yml name[casing]
tasks/pull_repositories.yml name[casing]
tasks/pull_repositories.yml no-changed-when
tasks/sanity.yml name[casing]
tasks/service_prometheus_nodeExporter.yml name[casing]
tasks/service_prometheus_nodeExporter.yml no-changed-when
tasks/tdarr.yml name[casing]
tasks/tdarr.yml no-changed-when
tasks/telegraf.yml name[casing]
tasks/telegraf.yml name[template]
tasks/telegraf.yml package-latest
vault.yml yaml[document-start]
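Each line above pairs a file path with a rule ID to skip; ansible-lint discovers the .ansible-lint-ignore file automatically. The linter is invoked the same way the poe lint task later in this diff does:

# Run the linter with this repository's config; the ignore file is picked up implicitly.
poetry run ansible-lint --force-color --config-file .ansible-lint.yml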

.ansible-lint.yml
View File

@@ -10,9 +10,10 @@ exclude_paths:
- galaxy-roles/
- .cz.yaml
- vault.yml
- .venv/
- ansible_collections/
skip_list:
- command-instead-of-shell
- name[template]
- ignore-errors
- meta-incorrect
@@ -21,10 +22,11 @@ skip_list:
- role-name
- unnamed-task
- var-naming
- name[casing]
- latest[git]
warn_list:
- experimental
- risky-file-permissions
- command-instead-of-module
- no-changed-when
- command-instead-of-shell

.pre-commit-config.yaml
View File

@@ -1,12 +1,12 @@
---
repos:
- repo: "https://github.com/commitizen-tools/commitizen"
rev: v2.40.0
rev: 3.12.0
hooks:
- id: "commitizen"
- repo: "https://github.com/pre-commit/pre-commit-hooks"
rev: v4.4.0
rev: v4.5.0
hooks:
- id: check-added-large-files
- id: check-ast
@@ -31,7 +31,7 @@ repos:
args: [--markdown-linebreak-ext=md]
- repo: "https://github.com/adrienverge/yamllint.git"
rev: v1.29.0
rev: v1.32.0
hooks:
- id: yamllint
files: \.(yaml|yml)$
@@ -43,6 +43,11 @@ repos:
)\.(yaml|yml)$
entry: yamllint --strict --config-file .yamllint.yml
- repo: "https://github.com/crate-ci/typos"
rev: v1.16.23
hooks:
- id: typos
- repo: local
hooks:
- id: vault-pre-commit
@@ -50,10 +55,14 @@ repos:
entry: scripts/ansible-vault-precommit.sh
language: system
# This calls a custom script. Remove if you don't need it.
- id: stopwords
name: check stopwords
entry: scripts/stopwords.sh
name: stopwords
entry: git-stopwords
# args: ["-v"]
language: system
pass_filenames: true
types: [text]
- id: ansible-lint
name: running ansible-lint
@@ -68,12 +77,6 @@ repos:
files: \.sh\.j2$
entry: shellcheck -x --exclude=1009,1054,1056,1072,1073,1083,2001,2148
- id: "run-shellscripts-bats-tests"
name: run bats unit tests
language: system
files: \.bats$
entry: bats -t
- id: "ansible-encryption-check"
name: Ansible Encryption Check
language: system
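As a usage note for the hook set above (standard pre-commit CLI, not repository-specific):

# Install the git hook once, then exercise every hook against the full tree.
pre-commit install
pre-commit run --all-files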

8
.typos.toml Normal file
View File

@@ -0,0 +1,8 @@
[default]
default.locale = "en_us"
[default.extend-words]
Hashi = "Hashi" # Hashicorpt
[files]
extend-exclude = ["galaxy-roles/"]
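The new typos hook can also be run standalone; a minimal invocation, assuming the pinned typos CLI from the dev dependencies later in this diff:

# Scan the repository for typos using the config above.
poetry run typos --config .typos.toml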

ansible.cfg
View File

@@ -9,4 +9,4 @@ display_skipped_hosts = False
vault_password_file = ./.password_file
[ssh_connection]
scp_if_ssh = True
transfer_method = smart

default_variables.yml
View File

@@ -1,14 +1,17 @@
---
# ---------------------------------- SOFTWARE VERSIONS
authelia_version: 4.37.3
consul_version: 1.14.2
influxdb_version: 1.8.10
nomad_version: 1.4.3
prometheus_verssion: 1.1.2
authelia_version: 4.37.5
consul_version: 1.16.1
influxdb_version: 1.11.1
nomad_version: 1.6.2
prometheus_verssion: 2.46.0
recyclarr_version: 5.3.1
speedtest_cli_version: 1.2.0
tdarr_installer_version: 2.00.13
telegraf_version: 1.25.0
traefik_version: "v2.9.6"
telegraf_version: 1.27.2
traefik_version: "v2.10.4"
valentina_version: 2.0.0
backup_mongodb_version: "v1.0.0"
# ---------------------------------- SERVICE STATIC PORT MAPPINGS
authelia_port: "9091"
@@ -99,6 +102,7 @@ apt_packages_list:
- logrotate
- lsof
- nano
- netcat
- net-tools
- nmap
- openssh-server

handlers/main.yml
View File

@@ -3,80 +3,96 @@
- name: Mount shared storage on Mac
become: true
ansible.builtin.command:
cmd: automount -cv
cmd: automount -cv
register: automount_output
failed_when: automount_output.rc > 0
changed_when: automount_output.rc == 0
when:
- "'macs' in group_names"
- not ansible_check_mode
- "'macs' in group_names"
- not ansible_check_mode
listen: "mac_run_automount"
- name: Mount and unmount shared storage on Mac
become: true
ansible.builtin.command:
cmd: automount -cvu
cmd: automount -cvu
register: automount_output
failed_when: automount_output.rc > 0
changed_when: automount_output.rc == 0
when:
- "'macs' in group_names"
- not ansible_check_mode
- "'macs' in group_names"
- not ansible_check_mode
listen: "mac_run_automount_unmount"
##################################### TELEGRAF
- name: (Re)Start telegraf (Debian)
become: true
ansible.builtin.service:
name: telegraf
state: restarted
name: telegraf
state: restarted
register: telegraf_service
failed_when: telegraf_service.rc > 0
changed_when: telegraf_service.rc == 0
when:
- ansible_os_family == 'Debian'
- ansible_os_family == 'Debian'
listen: restart_telegraf
- name: (Re)Start telegraf
ansible.builtin.shell:
cmd: /usr/local/bin/brew services restart telegraf
executable: /usr/local/bin/bash
cmd: /usr/local/bin/brew services restart telegraf
executable: /usr/local/bin/bash
ignore_errors: true
register: telegraf_service
failed_when: telegraf_service.rc > 0
changed_when: telegraf_service.rc == 0
when:
- ansible_os_family == 'Darwin'
- ansible_os_family == 'Darwin'
listen: restart_telegraf
##################################### NOMAD
- name: restart nomad (Debian)
- name: Restart nomad (Debian)
become: true
ansible.builtin.systemd:
name: nomad
enabled: true
state: restarted
name: nomad
enabled: true
state: restarted
register: nomad_service
failed_when: nomad_service.rc > 0
changed_when: nomad_service.rc == 0
when:
- ansible_os_family == 'Debian'
- "'nostart' not in ansible_run_tags"
- ansible_os_family == 'Debian'
- "'nostart' not in ansible_run_tags"
listen: "restart nomad"
- name: "unload nomad agent (MacOSX)"
- name: "Unload nomad agent (MacOSX)"
ansible.builtin.command:
cmd: "launchctl unload -w {{ nomad_plist_macos }}"
failed_when: false
cmd: "launchctl unload -w {{ nomad_plist_macos }}"
register: nomad_service
changed_when: nomad_service.rc == 0
failed_when: nomad_service.rc > 0
when:
- ansible_os_family == 'Darwin'
- "'nostart' not in ansible_run_tags"
- ansible_os_family == 'Darwin'
- "'nostart' not in ansible_run_tags"
listen: "restart nomad"
- name: "load the nomad agent (MacOSX)"
- name: "Load the nomad agent (MacOSX)"
ansible.builtin.command:
cmd: "launchctl load -w {{ nomad_plist_macos }}"
cmd: "launchctl load -w {{ nomad_plist_macos }}"
register: nomad_service
changed_when: nomad_service.rc == 0
failed_when: nomad_service.rc > 0
when:
- ansible_os_family == 'Darwin'
- "'nostart' not in ansible_run_tags"
- ansible_os_family == 'Darwin'
- "'nostart' not in ansible_run_tags"
listen: "restart nomad"
- name: "ensure nomad is really running"
- name: "Ensure nomad is really running"
ansible.builtin.shell:
cmd: "sleep 10 && /usr/local/bin/nomad node status -self -short | grep {{ inventory_hostname }}"
cmd: "set -o pipefail && sleep 10 && /usr/local/bin/nomad node status -self -short | grep {{ inventory_hostname }}"
register: node_status_response
failed_when: node_status_response.rc > 0
changed_when: false
changed_when: node_status_response.rc == 0
when: "'nostart' not in ansible_run_tags"
listen: "restart nomad"
# - name: "Ensure sure Nomad service is really running"

inventory.yml
View File

@@ -42,7 +42,7 @@ all:
pis:
hosts:
rpi1:
ansible_host: 10.0.30.91
ansible_host: "{{ rpi1_ip_address }}"
ansible_user: "{{ pi_username }}"
ansible_become_pass: "{{ pi_become_pass }}"
ansible_ssh_private_key_file: "{{ ssh_key_location }}/rpi1"
@@ -58,7 +58,7 @@ all:
manage_apt_packages_list: true
ansible_ssh_extra_args: "-o IdentitiesOnly=yes"
rpi2:
ansible_host: 10.0.30.92
ansible_host: "{{ rpi2_ip_address }}"
ansible_user: "{{ pi_username }}"
ansible_become_pass: "{{ pi_become_pass }}"
ansible_ssh_private_key_file: "{{ ssh_key_location }}/rpi2"
@@ -72,7 +72,7 @@ all:
manage_apt_packages_list: true
ansible_ssh_extra_args: "-o IdentitiesOnly=yes"
rpi3:
ansible_host: 10.0.30.93
ansible_host: "{{ rpi3_ip_address }}"
ansible_user: "{{ pi_username }}"
ansible_become_pass: "{{ pi_become_pass }}"
ansible_ssh_private_key_file: "{{ ssh_key_location }}/rpi3"
@@ -86,7 +86,7 @@ all:
manage_apt_packages_list: true
ansible_ssh_extra_args: "-o IdentitiesOnly=yes"
rpi4:
ansible_host: 10.0.30.94
ansible_host: "{{ rpi4_ip_address }}"
ansible_user: "{{ pi_username }}"
ansible_become_pass: "{{ pi_become_pass }}"
ansible_ssh_private_key_file: "{{ ssh_key_location }}/rpi4"
@@ -102,7 +102,7 @@ all:
macs:
hosts:
macmini:
ansible_host: 10.0.0.4
ansible_host: "{{ macmini_ip_address }}"
ansible_user: "{{ my_username }}"
ansible_become_pass: "{{ mac_become_pass }}"
ansible_ssh_private_key_file: "{{ ssh_key_location }}/macMini"
@@ -117,7 +117,7 @@ all:
manage_homebrew_package_list: true
ansible_ssh_extra_args: "-o IdentitiesOnly=yes"
imac:
ansible_host: 10.0.0.25
ansible_host: "{{ imac_ip_address }}"
ansible_user: "{{ my_username }}"
ansible_become_pass: "{{ mac_become_pass }}"
ansible_ssh_private_key_file: "{{ ssh_key_location }}/imac"
@@ -129,7 +129,7 @@ all:
is_shared_storage_client: true
ansible_ssh_extra_args: "-o IdentitiesOnly=yes"
skimmbook:
ansible_host: 10.0.0.21
ansible_host: "{{ skimmbook_ip_address }}"
ansible_user: "{{ my_username }}"
ansible_become_pass: "{{ mac_become_pass }}"
ansible_ssh_private_key_file: "{{ ssh_key_location }}/skimmbook"
@@ -140,22 +140,11 @@ all:
is_tdarr_node: true
is_shared_storage_client: true
ansible_ssh_extra_args: "-o IdentitiesOnly=yes"
vpnmac:
ansible_host: 10.0.90.2
ansible_user: "{{ my_username }}"
ansible_become_pass: "{{ mac_become_pass }}"
ansible_ssh_private_key_file: "{{ ssh_key_location }}/skimmbook"
ansible_python_interpreter: "/Users/natelandau/.pyenv/shims/python"
ansible_port: 22
mac_arm: true
manage_homebrew_package_list: true
is_tdarr_node: true
ansible_ssh_extra_args: "-o IdentitiesOnly=yes"
nas:
hosts:
synology:
ansible_host: 10.0.0.6
synology_second_ip: 10.0.30.6
ansible_host: "{{ synology_ip_address_1 }}"
synology_second_ip: "{{ synology_ip_address_2 }}"
ansible_user: "{{ my_username }}"
ansible_become_pass: "{{ synology_become_pass }}"
ansible_ssh_private_key_file: "{{ ssh_key_location }}/synology"
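The hard-coded addresses removed above now resolve from variables stored in the encrypted vault.yml. They can be edited with the standard ansible-vault CLI, using the password file configured in ansible.cfg:

# Edit the vaulted values (e.g. rpi1_ip_address) referenced by the inventory.
ansible-vault edit vault.yml --vault-password-file .password_file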

138
main.yml
View File

@@ -1,79 +1,79 @@
---
- hosts: all
name: "Running playbook"
- name: "Running playbook"
hosts: all
serial: 1
vars_files:
- default_variables.yml
- vault.yml
- default_variables.yml
- vault.yml
pre_tasks:
- name: Run sanity checks
ansible.builtin.import_tasks: tasks/sanity.yml
tags: ["always", "sanity"]
- name: populate service facts
ansible.builtin.service_facts:
tags: ["nomad", "consul"]
- name: Run debug tasks
ansible.builtin.import_tasks: tasks/debug.yml
tags: [never, debug]
- name: populate device specific variables
ansible.builtin.import_tasks: tasks/interpolated_variables.yml
tags: ["always"]
- name: Ensure we have up-to-date packages
ansible.builtin.import_tasks: tasks/packages.yml
tags: ["packages", "update"]
- name: Set clean nomad_jobs_dir variable
ansible.builtin.set_fact:
clean_nomad_jobs: true
tags: ["never", "clean"]
- name: Run sanity checks
ansible.builtin.import_tasks: tasks/sanity.yml
tags: ["always", "sanity"]
- name: Populate service facts
ansible.builtin.service_facts:
tags: ["nomad", "consul"]
- name: Run debug tasks
ansible.builtin.import_tasks: tasks/debug.yml
tags: [never, debug]
- name: Populate device specific variables
ansible.builtin.import_tasks: tasks/interpolated_variables.yml
tags: ["always"]
- name: Ensure we have up-to-date packages
ansible.builtin.import_tasks: tasks/packages.yml
tags: ["packages", "update"]
- name: Set clean nomad_jobs_dir variable
ansible.builtin.set_fact:
clean_nomad_jobs: true
tags: ["never", "clean"]
tasks:
- name: Configure cluster NFS mounts
ansible.builtin.import_tasks: tasks/cluster_storage.yml
tags: ["storage"]
when:
- is_nomad_client or is_nomad_server or is_shared_storage_client
- name: Install Docker
ansible.builtin.import_tasks: tasks/docker.yml
tags: ["docker"]
when: "'nas' not in group_names"
- name: Install and Upgrade Consul
ansible.builtin.import_tasks: tasks/consul.yml
tags: ["consul"]
when: is_consul_client or is_consul_server
- name: Install and Upgrade Nomad
ansible.builtin.import_tasks: tasks/nomad.yml
tags: ["nomad"]
when: is_nomad_client or is_nomad_server
- name: Orchestration Jobs
ansible.builtin.import_tasks: tasks/orchestration_jobs.yml
tags: ["jobs", "update"]
- name: Prometheus Node Exporter
ansible.builtin.import_tasks: tasks/service_prometheus_nodeExporter.yml
tags: ["prometheus_exporter"]
when:
- is_prometheus_node
- "'pis' in group_names"
- name: Install backup scripts
ansible.builtin.import_tasks: tasks/backups.yml
tags: ["backup", "backups"]
when: is_nomad_client or is_nomad_server
- name: Install and configure Telegraf
ansible.builtin.import_tasks: tasks/telegraf.yml
tags: ["telegraf"]
when: is_telegraf_client
- name: Pull repositories
ansible.builtin.import_tasks: tasks/pull_repositories.yml
tags: ["never", "update", "repos"]
- name: Configure log rotate
ansible.builtin.import_tasks: tasks/logrotate.yml
tags: ["logrotate"]
when: is_cluster_leader
- name: Install and configure tdarr
ansible.builtin.import_tasks: tasks/tdarr.yml
tags: ["tdarr"]
when: is_tdarr_server or is_tdarr_node
- name: Configure cluster NFS mounts
ansible.builtin.import_tasks: tasks/cluster_storage.yml
tags: ["storage"]
when:
- is_nomad_client or is_nomad_server or is_shared_storage_client
- name: Install Docker
ansible.builtin.import_tasks: tasks/docker.yml
tags: ["docker"]
when: "'nas' not in group_names"
- name: Install and Upgrade Consul
ansible.builtin.import_tasks: tasks/consul.yml
tags: ["consul"]
when: is_consul_client or is_consul_server
- name: Install and Upgrade Nomad
ansible.builtin.import_tasks: tasks/nomad.yml
tags: ["nomad"]
when: is_nomad_client or is_nomad_server
- name: Orchestration Jobs
ansible.builtin.import_tasks: tasks/orchestration_jobs.yml
tags: ["jobs", "update"]
- name: Prometheus Node Exporter
ansible.builtin.import_tasks: tasks/service_prometheus_nodeExporter.yml
tags: ["prometheus_exporter"]
when:
- is_prometheus_node
- "'pis' in group_names"
- name: Install backup scripts
ansible.builtin.import_tasks: tasks/backups.yml
tags: ["backup", "backups"]
when: is_nomad_client or is_nomad_server
- name: Install and configure Telegraf
ansible.builtin.import_tasks: tasks/telegraf.yml
tags: ["telegraf"]
when: is_telegraf_client
- name: Pull repositories
ansible.builtin.import_tasks: tasks/pull_repositories.yml
tags: ["never", "update", "repos"]
- name: Configure log rotate
ansible.builtin.import_tasks: tasks/logrotate.yml
tags: ["logrotate"]
when: is_cluster_leader
- name: Install and configure tdarr
ansible.builtin.import_tasks: tasks/tdarr.yml
tags: ["tdarr"]
when: is_tdarr_server or is_tdarr_node
handlers:
- ansible.builtin.import_tasks: handlers/main.yml
- ansible.builtin.import_tasks: handlers/main.yml
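Given the tag layout above, targeted runs are straightforward; for example, with standard ansible-playbook flags:

# Run only the nomad-related tasks across the inventory.
ansible-playbook main.yml -i inventory.yml --vault-password-file .password_file --tags nomad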

1159
poetry.lock generated

File diff suppressed because it is too large

2
poetry.toml Normal file
View File

@@ -0,0 +1,2 @@
[virtualenvs]
in-project = true

pyproject.toml
View File

@@ -7,18 +7,26 @@
version = "0.2.0"
[tool.poetry.dependencies]
ansible = "^7.2.0"
ansible-lint = { version = "^6.12.1", markers = "platform_system != 'Windows'" }
ansible = "^8.6.0"
ansible-lint = { version = "^6.18.0", markers = "platform_system != 'Windows'" }
commitizen = "^2.40.0"
poethepoet = "^0.18.1"
pre-commit = "^3.0.4"
pre-commit = "^3.3.3"
python = "^3.9"
yamllint = "^1.29.0"
yamllint = "^1.32.0"
[tool.poetry.group.dev.dependencies]
black = "^23.11.0"
sh = "^2.0.6"
typos = "^1.16.23"
[build-system]
build-backend = "poetry.core.masonry.api"
requires = ["poetry-core"]
[tool.black]
line-length = 100
[tool.commitizen]
bump_message = "bump(release): v$current_version → v$new_version"
tag_format = "v$version"
@@ -27,11 +35,18 @@
version_files = ["pyproject.toml:version"]
[tool.poe.tasks]
pb = """
ansible-playbook
--vault-password-file .password_file
main.yml
-i inventory.yml
"""
[tool.poe.tasks.lint]
help = "Run linters"
[[tool.poe.tasks.lint.sequence]]
shell = "yamllint --strict --config-file .yamllint.yml tasks/ handlers/ main.yml inventory.yml default_variables.yml"
cmd = "yamllint --strict --config-file .yamllint.yml tasks/ handlers/ main.yml inventory.yml default_variables.yml"
[[tool.poe.tasks.lint.sequence]]
shell = "ansible-lint --force-color --config-file .ansible-lint.yml"
cmd = "ansible-lint --force-color --config-file .ansible-lint.yml"

scripts/stopwords.sh
View File

@@ -1,821 +0,0 @@
#!/usr/bin/env bash
# shellcheck disable=SC2317
_mainScript_() {
_customStopWords_() {
# DESC: Check if any specified stop words are in the commit diff. If found, the pre-commit hook will exit with a non-zero exit code.
# ARGS:
# $1 (Required): Path to file
# OUTS:
# 0: Success
# 1: Failure
# USAGE:
# _customStopWords_ "/path/to/file.sh"
# NOTE:
# Requires a plaintext stopword file located at
# `~/.git_stop_words` containing one stopword per line.
[[ $# == 0 ]] && fatal "Missing required argument to ${FUNCNAME[0]}"
local _gitDiffTmp
local FILE_TO_CHECK="${1}"
_gitDiffTmp="${TMP_DIR}/${RANDOM}.${RANDOM}.${RANDOM}.diff.txt"
if [ -f "${STOP_WORD_FILE}" ]; then
if [[ $(basename "${STOP_WORD_FILE}") == "$(basename "${FILE_TO_CHECK}")" ]]; then
debug "$(basename "${1}"): Don't check stop words file for stop words."
return 0
fi
debug "$(basename "${FILE_TO_CHECK}"): Checking for stop words..."
# remove blank lines from stopwords file
sed '/^$/d' "${STOP_WORD_FILE}" >"${TMP_DIR}/pattern_file.txt"
# Check for stopwords
if git diff --cached -- "${FILE_TO_CHECK}" | grep -i -q "new file mode"; then
if grep -i --file="${TMP_DIR}/pattern_file.txt" "${FILE_TO_CHECK}"; then
return 1
else
return 0
fi
else
# Add diff to a temporary file
git diff --cached -- "${FILE_TO_CHECK}" | grep '^+' >"${_gitDiffTmp}"
if grep -i --file="${TMP_DIR}/pattern_file.txt" "${_gitDiffTmp}"; then
return 1
else
return 0
fi
fi
else
notice "Could not find git stopwords file expected at '${STOP_WORD_FILE}'. Continuing..."
return 0
fi
}
# Don't lint binary files
if [[ ${ARGS[0]} =~ \.(jpg|jpeg|gif|png|exe|zip|gzip|tiff|tar|dmg|ttf|otf|m4a|mp3|mkv|mov|avi|eot|svg|woff2?|aac|wav|flac|pdf|doc|xls|ppt|7z|bin|dmg|dat|sql|ico|mpe?g)$ ]]; then
_safeExit_ 0
fi
if ! _customStopWords_ "${ARGS[0]}"; then
error "Stop words found in ${ARGS[0]}"
_safeExit_ 1
fi
}
# end _mainScript_
# ################################## Flags and defaults
# Required variables
LOGFILE="${HOME}/logs/$(basename "$0").log"
QUIET=false
LOGLEVEL=ERROR
VERBOSE=false
FORCE=false
DRYRUN=false
declare -a ARGS=()
# Script specific
LOGLEVEL=NONE
STOP_WORD_FILE="${HOME}/.git_stop_words"
shopt -s nocasematch
# ################################## Custom utility functions (Pasted from repository)
# ################################## Functions required for this template to work
_setColors_() {
# DESC:
# Sets colors use for alerts.
# ARGS:
# None
# OUTS:
# None
# USAGE:
# printf "%s\n" "${blue}Some text${reset}"
if tput setaf 1 >/dev/null 2>&1; then
bold=$(tput bold)
underline=$(tput smul)
reverse=$(tput rev)
reset=$(tput sgr0)
if [[ $(tput colors) -ge 256 ]] >/dev/null 2>&1; then
white=$(tput setaf 231)
blue=$(tput setaf 38)
yellow=$(tput setaf 11)
green=$(tput setaf 82)
red=$(tput setaf 9)
purple=$(tput setaf 171)
gray=$(tput setaf 250)
else
white=$(tput setaf 7)
blue=$(tput setaf 38)
yellow=$(tput setaf 3)
green=$(tput setaf 2)
red=$(tput setaf 9)
purple=$(tput setaf 13)
gray=$(tput setaf 7)
fi
else
bold="\033[4;37m"
reset="\033[0m"
underline="\033[4;37m"
# shellcheck disable=SC2034
reverse=""
white="\033[0;37m"
blue="\033[0;34m"
yellow="\033[0;33m"
green="\033[1;32m"
red="\033[0;31m"
purple="\033[0;35m"
gray="\033[0;37m"
fi
}
_alert_() {
# DESC:
# Controls all printing of messages to log files and stdout.
# ARGS:
# $1 (required) - The type of alert to print
# (success, header, notice, dryrun, debug, warning, error,
# fatal, info, input)
# $2 (required) - The message to be printed to stdout and/or a log file
# $3 (optional) - Pass '${LINENO}' to print the line number where the _alert_ was triggered
# OUTS:
# stdout: The message is printed to stdout
# log file: The message is printed to a log file
# USAGE:
# [_alertType] "[MESSAGE]" "${LINENO}"
# NOTES:
# - The colors of each alert type are set in this function
# - For specified alert types, the funcstac will be printed
local _color
local _alertType="${1}"
local _message="${2}"
local _line="${3-}" # Optional line number
[[ $# -lt 2 ]] && fatal 'Missing required argument to _alert_'
if [[ -n ${_line} && ${_alertType} =~ ^fatal && ${FUNCNAME[2]} != "_trapCleanup_" ]]; then
_message="${_message} ${gray}(line: ${_line}) $(_printFuncStack_)"
elif [[ -n ${_line} && ${FUNCNAME[2]} != "_trapCleanup_" ]]; then
_message="${_message} ${gray}(line: ${_line})"
elif [[ -z ${_line} && ${_alertType} =~ ^fatal && ${FUNCNAME[2]} != "_trapCleanup_" ]]; then
_message="${_message} ${gray}$(_printFuncStack_)"
fi
if [[ ${_alertType} =~ ^(error|fatal) ]]; then
_color="${bold}${red}"
elif [ "${_alertType}" == "info" ]; then
_color="${gray}"
elif [ "${_alertType}" == "warning" ]; then
_color="${red}"
elif [ "${_alertType}" == "success" ]; then
_color="${green}"
elif [ "${_alertType}" == "debug" ]; then
_color="${purple}"
elif [ "${_alertType}" == "header" ]; then
_color="${bold}${white}${underline}"
elif [ "${_alertType}" == "notice" ]; then
_color="${bold}"
elif [ "${_alertType}" == "input" ]; then
_color="${bold}${underline}"
elif [ "${_alertType}" = "dryrun" ]; then
_color="${blue}"
else
_color=""
fi
_writeToScreen_() {
("${QUIET}") && return 0 # Print to console when script is not 'quiet'
[[ ${VERBOSE} == false && ${_alertType} =~ ^(debug|verbose) ]] && return 0
if ! [[ -t 1 || -z ${TERM-} ]]; then # Don't use colors on non-recognized terminals
_color=""
reset=""
fi
if [[ ${_alertType} == header ]]; then
printf "${_color}%s${reset}\n" "${_message}"
else
printf "${_color}[%7s] %s${reset}\n" "${_alertType}" "${_message}"
fi
}
_writeToScreen_
_writeToLog_() {
[[ ${_alertType} == "input" ]] && return 0
[[ ${LOGLEVEL} =~ (off|OFF|Off) ]] && return 0
if [ -z "${LOGFILE-}" ]; then
LOGFILE="$(pwd)/$(basename "$0").log"
fi
[ ! -d "$(dirname "${LOGFILE}")" ] && mkdir -p "$(dirname "${LOGFILE}")"
[[ ! -f ${LOGFILE} ]] && touch "${LOGFILE}"
# Don't use colors in logs
local _cleanmessage
_cleanmessage="$(printf "%s" "${_message}" | sed -E 's/(\x1b)?\[(([0-9]{1,2})(;[0-9]{1,3}){0,2})?[mGK]//g')"
# Print message to log file
printf "%s [%7s] %s %s\n" "$(date +"%b %d %R:%S")" "${_alertType}" "[$(/bin/hostname)]" "${_cleanmessage}" >>"${LOGFILE}"
}
# Write specified log level data to logfile
case "${LOGLEVEL:-ERROR}" in
ALL | all | All)
_writeToLog_
;;
DEBUG | debug | Debug)
_writeToLog_
;;
INFO | info | Info)
if [[ ${_alertType} =~ ^(error|fatal|warning|info|notice|success) ]]; then
_writeToLog_
fi
;;
NOTICE | notice | Notice)
if [[ ${_alertType} =~ ^(error|fatal|warning|notice|success) ]]; then
_writeToLog_
fi
;;
WARN | warn | Warn)
if [[ ${_alertType} =~ ^(error|fatal|warning) ]]; then
_writeToLog_
fi
;;
ERROR | error | Error)
if [[ ${_alertType} =~ ^(error|fatal) ]]; then
_writeToLog_
fi
;;
FATAL | fatal | Fatal)
if [[ ${_alertType} =~ ^fatal ]]; then
_writeToLog_
fi
;;
OFF | off)
return 0
;;
*)
if [[ ${_alertType} =~ ^(error|fatal) ]]; then
_writeToLog_
fi
;;
esac
} # /_alert_
error() { _alert_ error "${1}" "${2-}"; }
warning() { _alert_ warning "${1}" "${2-}"; }
notice() { _alert_ notice "${1}" "${2-}"; }
info() { _alert_ info "${1}" "${2-}"; }
success() { _alert_ success "${1}" "${2-}"; }
dryrun() { _alert_ dryrun "${1}" "${2-}"; }
input() { _alert_ input "${1}" "${2-}"; }
header() { _alert_ header "${1}" "${2-}"; }
debug() { _alert_ debug "${1}" "${2-}"; }
fatal() {
_alert_ fatal "${1}" "${2-}"
_safeExit_ "1"
}
_printFuncStack_() {
# DESC:
# Prints the function stack in use. Used for debugging, and error reporting.
# ARGS:
# None
# OUTS:
# stdout: Prints [function]:[file]:[line]
# NOTE:
# Does not print functions from the alert class
local _i
declare -a _funcStackResponse=()
for ((_i = 1; _i < ${#BASH_SOURCE[@]}; _i++)); do
case "${FUNCNAME[${_i}]}" in
_alert_ | _trapCleanup_ | fatal | error | warning | notice | info | debug | dryrun | header | success)
continue
;;
*)
_funcStackResponse+=("${FUNCNAME[${_i}]}:$(basename "${BASH_SOURCE[${_i}]}"):${BASH_LINENO[_i - 1]}")
;;
esac
done
printf "( "
printf %s "${_funcStackResponse[0]}"
printf ' < %s' "${_funcStackResponse[@]:1}"
printf ' )\n'
}
_safeExit_() {
# DESC:
# Cleanup and exit from a script
# ARGS:
# $1 (optional) - Exit code (defaults to 0)
# OUTS:
# None
if [[ -d ${SCRIPT_LOCK-} ]]; then
if command rm -rf "${SCRIPT_LOCK}"; then
debug "Removing script lock"
else
warning "Script lock could not be removed. Try manually deleting ${yellow}'${SCRIPT_LOCK}'"
fi
fi
if [[ -n ${TMP_DIR-} && -d ${TMP_DIR-} ]]; then
if [[ ${1-} == 1 && -n "$(ls "${TMP_DIR}")" ]]; then
command rm -r "${TMP_DIR}"
else
command rm -r "${TMP_DIR}"
debug "Removing temp directory"
fi
fi
trap - INT TERM EXIT
exit "${1:-0}"
}
_trapCleanup_() {
# DESC:
# Log errors and cleanup from script when an error is trapped. Called by 'trap'
# ARGS:
# $1: Line number where error was trapped
# $2: Line number in function
# $3: Command executing at the time of the trap
# $4: Names of all shell functions currently in the execution call stack
# $5: Scriptname
# $6: $BASH_SOURCE
# USAGE:
# trap '_trapCleanup_ ${LINENO} ${BASH_LINENO} "${BASH_COMMAND}" "${FUNCNAME[*]}" "${0}" "${BASH_SOURCE[0]}"' EXIT INT TERM SIGINT SIGQUIT SIGTERM ERR
# OUTS:
# Exits script with error code 1
local _line=${1-} # LINENO
local _linecallfunc=${2-}
local _command="${3-}"
local _funcstack="${4-}"
local _script="${5-}"
local _sourced="${6-}"
# Restore the cursor in case 'tput civis' has been used
tput cnorm
if declare -f "fatal" &>/dev/null && declare -f "_printFuncStack_" &>/dev/null; then
_funcstack="'$(printf "%s" "${_funcstack}" | sed -E 's/ / < /g')'"
if [[ ${_script##*/} == "${_sourced##*/}" ]]; then
fatal "${7-} command: '${_command}' (line: ${_line}) [func: $(_printFuncStack_)]"
else
fatal "${7-} command: '${_command}' (func: ${_funcstack} called at line ${_linecallfunc} of '${_script##*/}') (line: ${_line} of '${_sourced##*/}') "
fi
else
printf "%s\n" "Fatal error trapped. Exiting..."
fi
if declare -f _safeExit_ &>/dev/null; then
_safeExit_ 1
else
exit 1
fi
}
_makeTempDir_() {
# DESC:
# Creates a temp directory to house temporary files
# ARGS:
# $1 (Optional) - First characters/word of directory name
# OUTS:
# Sets $TMP_DIR variable to the path of the temp directory
# USAGE:
# _makeTempDir_ "$(basename "$0")"
[ -d "${TMP_DIR-}" ] && return 0
if [ -n "${1-}" ]; then
TMP_DIR="${TMPDIR:-/tmp/}${1}.${RANDOM}.${RANDOM}.$$"
else
TMP_DIR="${TMPDIR:-/tmp/}$(basename "$0").${RANDOM}.${RANDOM}.${RANDOM}.$$"
fi
(umask 077 && mkdir "${TMP_DIR}") || {
fatal "Could not create temporary directory! Exiting."
}
debug "\$TMP_DIR=${TMP_DIR}"
}
# shellcheck disable=SC2120
_acquireScriptLock_() {
# DESC:
# Acquire script lock to prevent running the same script a second time before the
# first instance exits
# ARGS:
# $1 (optional) - Scope of script execution lock (system or user)
# OUTS:
# exports $SCRIPT_LOCK - Path to the directory indicating we have the script lock
# Exits script if lock cannot be acquired
# NOTE:
# If the lock was acquired it's automatically released in _safeExit_()
local _lockDir
if [[ ${1-} == 'system' ]]; then
_lockDir="${TMPDIR:-/tmp/}$(basename "$0").lock"
else
_lockDir="${TMPDIR:-/tmp/}$(basename "$0").${UID}.lock"
fi
if command mkdir "${_lockDir}" 2>/dev/null; then
readonly SCRIPT_LOCK="${_lockDir}"
debug "Acquired script lock: ${yellow}${SCRIPT_LOCK}${purple}"
else
if declare -f "_safeExit_" &>/dev/null; then
error "Unable to acquire script lock: ${yellow}${_lockDir}${red}"
fatal "If you trust the script isn't running, delete the lock dir"
else
printf "%s\n" "ERROR: Could not acquire script lock. If you trust the script isn't running, delete: ${_lockDir}"
exit 1
fi
fi
}
_setPATH_() {
# DESC:
# Add directories to $PATH so script can find executables
# ARGS:
# $@ - One or more paths
# OPTS:
# -x - Fail if directories are not found
# OUTS:
# 0: Success
# 1: Failure
# Adds items to $PATH
# USAGE:
# _setPATH_ "/usr/local/bin" "${HOME}/bin" "$(npm bin)"
[[ $# == 0 ]] && fatal "Missing required argument to ${FUNCNAME[0]}"
local opt
local OPTIND=1
local _failIfNotFound=false
while getopts ":xX" opt; do
case ${opt} in
x | X) _failIfNotFound=true ;;
*)
{
error "Unrecognized option '${1}' passed to _backupFile_" "${LINENO}"
return 1
}
;;
esac
done
shift $((OPTIND - 1))
local _newPath
for _newPath in "$@"; do
if [ -d "${_newPath}" ]; then
if ! printf "%s" "${PATH}" | grep -Eq "(^|:)${_newPath}($|:)"; then
if PATH="${_newPath}:${PATH}"; then
debug "Added '${_newPath}' to PATH"
else
debug "'${_newPath}' already in PATH"
fi
else
debug "_setPATH_: '${_newPath}' already exists in PATH"
fi
else
debug "_setPATH_: can not find: ${_newPath}"
if [[ ${_failIfNotFound} == true ]]; then
return 1
fi
continue
fi
done
return 0
}
_useGNUutils_() {
# DESC:
# Add GNU utilities to PATH to allow consistent use of sed/grep/tar/etc. on MacOS
# ARGS:
# None
# OUTS:
# 0 if successful
# 1 if unsuccessful
# PATH: Adds GNU utilities to the path
# USAGE:
# # if ! _useGNUutils_; then exit 1; fi
# NOTES:
# GNU utilities can be added to MacOS using Homebrew
! declare -f "_setPATH_" &>/dev/null && fatal "${FUNCNAME[0]} needs function _setPATH_"
if _setPATH_ \
"/usr/local/opt/gnu-tar/libexec/gnubin" \
"/usr/local/opt/coreutils/libexec/gnubin" \
"/usr/local/opt/gnu-sed/libexec/gnubin" \
"/usr/local/opt/grep/libexec/gnubin" \
"/usr/local/opt/findutils/libexec/gnubin" \
"/opt/homebrew/opt/findutils/libexec/gnubin" \
"/opt/homebrew/opt/gnu-sed/libexec/gnubin" \
"/opt/homebrew/opt/grep/libexec/gnubin" \
"/opt/homebrew/opt/coreutils/libexec/gnubin" \
"/opt/homebrew/opt/gnu-tar/libexec/gnubin"; then
return 0
else
return 1
fi
}
_homebrewPath_() {
# DESC:
# Add homebrew bin dir to PATH
# ARGS:
# None
# OUTS:
# 0 if successful
# 1 if unsuccessful
# PATH: Adds homebrew bin directory to PATH
# USAGE:
# # if ! _homebrewPath_; then exit 1; fi
! declare -f "_setPATH_" &>/dev/null && fatal "${FUNCNAME[0]} needs function _setPATH_"
if _uname=$(command -v uname); then
if "${_uname}" | tr '[:upper:]' '[:lower:]' | grep -q 'darwin'; then
if _setPATH_ "/usr/local/bin" "/opt/homebrew/bin"; then
return 0
else
return 1
fi
fi
else
if _setPATH_ "/usr/local/bin" "/opt/homebrew/bin"; then
return 0
else
return 1
fi
fi
}
_parseOptions_() {
# DESC:
# Iterates through options passed to script and sets variables. Will break -ab into -a -b
# when needed and --foo=bar into --foo bar
# ARGS:
# $@ from command line
# OUTS:
# Sets array 'ARGS' containing all arguments passed to script that were not parsed as options
# USAGE:
# _parseOptions_ "$@"
# Iterate over options
local _optstring=h
declare -a _options
local _c
local i
while (($#)); do
case $1 in
# If option is of type -ab
-[!-]?*)
# Loop over each character starting with the second
for ((i = 1; i < ${#1}; i++)); do
_c=${1:i:1}
_options+=("-${_c}") # Add current char to options
# If option takes a required argument, and it's not the last char make
# the rest of the string its argument
if [[ ${_optstring} == *"${_c}:"* && -n ${1:i+1} ]]; then
_options+=("${1:i+1}")
break
fi
done
;;
# If option is of type --foo=bar
--?*=*) _options+=("${1%%=*}" "${1#*=}") ;;
# add --endopts for --
--) _options+=(--endopts) ;;
# Otherwise, nothing special
*) _options+=("$1") ;;
esac
shift
done
set -- "${_options[@]-}"
unset _options
# Read the options and set stuff
# shellcheck disable=SC2034
while [[ ${1-} == -?* ]]; do
case $1 in
# Custom options
# Common options
-h | --help)
_usage_
_safeExit_
;;
--loglevel)
shift
LOGLEVEL=${1}
;;
--logfile)
shift
LOGFILE="${1}"
;;
-n | --dryrun) DRYRUN=true ;;
-v | --verbose) VERBOSE=true ;;
-q | --quiet) QUIET=true ;;
--force) FORCE=true ;;
--endopts)
shift
break
;;
*)
if declare -f _safeExit_ &>/dev/null; then
fatal "invalid option: $1"
else
printf "%s\n" "ERROR: Invalid option: $1"
exit 1
fi
;;
esac
shift
done
if [[ -z ${*} || ${*} == null ]]; then
ARGS=()
else
ARGS+=("$@") # Store the remaining user input as arguments.
fi
}
_columns_() {
# DESC:
# Prints a two column output from a key/value pair.
# Optionally pass a number of 2 space tabs to indent the output.
# ARGS:
# $1 (required): Key name (Left column text)
# $2 (required): Long value (Right column text. Wraps around if too long)
# $3 (optional): Number of 2 character tabs to indent the command (default 1)
# OPTS:
# -b Bold the left column
# -u Underline the left column
# -r Reverse background and foreground colors
# OUTS:
# stdout: Prints the output in columns
# NOTE:
# Long text or ANSI colors in the first column may create display issues
# USAGE:
# _columns_ "Key" "Long value text" [tab level]
[[ $# -lt 2 ]] && fatal "Missing required argument to ${FUNCNAME[0]}"
local opt
local OPTIND=1
local _style=""
while getopts ":bBuUrR" opt; do
case ${opt} in
b | B) _style="${_style}${bold}" ;;
u | U) _style="${_style}${underline}" ;;
r | R) _style="${_style}${reverse}" ;;
*) fatal "Unrecognized option '${1}' passed to ${FUNCNAME[0]}. Exiting." ;;
esac
done
shift $((OPTIND - 1))
local _key="${1}"
local _value="${2}"
local _tabLevel="${3-}"
local _tabSize=2
local _line
local _rightIndent
local _leftIndent
if [[ -z ${3-} ]]; then
_tabLevel=0
fi
_leftIndent="$((_tabLevel * _tabSize))"
local _leftColumnWidth="$((30 + _leftIndent))"
if [ "$(tput cols)" -gt 180 ]; then
_rightIndent=110
elif [ "$(tput cols)" -gt 160 ]; then
_rightIndent=90
elif [ "$(tput cols)" -gt 130 ]; then
_rightIndent=60
elif [ "$(tput cols)" -gt 120 ]; then
_rightIndent=50
elif [ "$(tput cols)" -gt 110 ]; then
_rightIndent=40
elif [ "$(tput cols)" -gt 100 ]; then
_rightIndent=30
elif [ "$(tput cols)" -gt 90 ]; then
_rightIndent=20
elif [ "$(tput cols)" -gt 80 ]; then
_rightIndent=10
else
_rightIndent=0
fi
local _rightWrapLength=$(($(tput cols) - _leftColumnWidth - _leftIndent - _rightIndent))
local _first_line=0
while read -r _line; do
if [[ ${_first_line} -eq 0 ]]; then
_first_line=1
else
_key=" "
fi
printf "%-${_leftIndent}s${_style}%-${_leftColumnWidth}b${reset} %b\n" "" "${_key}${reset}" "${_line}"
done <<<"$(fold -w${_rightWrapLength} -s <<<"${_value}")"
}
_usage_() {
cat <<USAGE_TEXT
${bold}$(basename "$0") [OPTION]... [FILE]...${reset}
Custom pre-commit hook script. This script is intended to be used as part of the pre-commit pipeline managed within .pre-commit-config.yaml.
${bold}${underline}Options:${reset}
$(_columns_ -b -- '-h, --help' "Display this help and exit" 2)
$(_columns_ -b -- "--loglevel [LEVEL]" "One of: FATAL, ERROR (default), WARN, INFO, NOTICE, DEBUG, ALL, OFF" 2)
$(_columns_ -b -- "--logfile [FILE]" "Full PATH to logfile. (Default is '\${HOME}/logs/$(basename "$0").log')" 2)
$(_columns_ -b -- "-n, --dryrun" "Non-destructive. Makes no permanent changes." 2)
$(_columns_ -b -- "-q, --quiet" "Quiet (no output)" 2)
$(_columns_ -b -- "-v, --verbose" "Output more information. (Items echoed to 'verbose')" 2)
$(_columns_ -b -- "--force" "Skip all user interaction. Implied 'Yes' to all actions." 2)
${bold}${underline}Example Usage:${reset}
${gray}# Run the script and specify log level and log file.${reset}
$(basename "$0") -vn --logfile "/path/to/file.log" --loglevel 'WARN'
USAGE_TEXT
}
# ################################## INITIALIZE AND RUN THE SCRIPT
# (Comment or uncomment the lines below to customize script behavior)
trap '_trapCleanup_ ${LINENO} ${BASH_LINENO} "${BASH_COMMAND}" "${FUNCNAME[*]}" "${0}" "${BASH_SOURCE[0]}"' EXIT INT TERM SIGINT SIGQUIT SIGTERM
# Trap errors in subshells and functions
set -o errtrace
# Exit on error. Append '||true' if you expect an error
set -o errexit
# Use last non-zero exit code in a pipeline
set -o pipefail
# Confirm we have BASH greater than v4
[ "${BASH_VERSINFO:-0}" -ge 4 ] || {
printf "%s\n" "ERROR: BASH_VERSINFO is '${BASH_VERSINFO:-0}'. This script requires BASH v4 or greater."
exit 1
}
# Make `for f in *.txt` work when `*.txt` matches zero files
shopt -s nullglob globstar
# Set IFS to preferred implementation
IFS=$' \n\t'
# Run in debug mode
# set -o xtrace
# Initialize color constants
_setColors_
# Disallow expansion of unset variables
set -o nounset
# Force arguments when invoking the script
# [[ $# -eq 0 ]] && _parseOptions_ "-h"
# Parse arguments passed to script
_parseOptions_ "$@"
# Create a temp directory '$TMP_DIR'
_makeTempDir_ "$(basename "$0")"
# Acquire script lock
# _acquireScriptLock_
# Add Homebrew bin directory to PATH (MacOS)
# _homebrewPath_
# Source GNU utilities from Homebrew (MacOS)
# _useGNUutils_
# Run the main logic script
_mainScript_
# Exit cleanly
_safeExit_
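For reference, the stop-word file this (now deleted) script read is plain text with one entry per line; a hypothetical way to seed it:

# Hypothetical example: create a stop-word list, one entry per line.
printf 'password\napi_key\ninternal-hostname\n' > ~/.git_stop_words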

150
scripts/update_dependencies.py Executable file
View File

@@ -0,0 +1,150 @@
#!/usr/bin/env python
"""Script to update the pyproject.toml file with the latest versions of the dependencies."""
from pathlib import Path
from textwrap import wrap

try:
    import tomllib
except ModuleNotFoundError:  # pragma: no cover
    import tomli as tomllib  # type: ignore [no-redef]

import sh
from rich.console import Console

console = Console()

def dryrun(msg: str) -> None:
    """Print a message if the dry run flag is set.

    Args:
        msg: Message to print
    """
    console.print(f"[cyan]DRYRUN | {msg}[/cyan]")

def success(msg: str) -> None:
    """Print a success message without using logging.

    Args:
        msg: Message to print
    """
    console.print(f"[green]SUCCESS | {msg}[/green]")

def warning(msg: str) -> None:
    """Print a warning message without using logging.

    Args:
        msg: Message to print
    """
    console.print(f"[yellow]WARNING | {msg}[/yellow]")

def error(msg: str) -> None:
    """Print an error message without using logging.

    Args:
        msg: Message to print
    """
    console.print(f"[red]ERROR | {msg}[/red]")

def notice(msg: str) -> None:
    """Print a notice message without using logging.

    Args:
        msg: Message to print
    """
    console.print(f"[bold]NOTICE | {msg}[/bold]")

def info(msg: str) -> None:
    """Print an info message without using logging.

    Args:
        msg: Message to print
    """
    console.print(f"INFO | {msg}")

def usage(msg: str, width: int = 80) -> None:
    """Print a usage message without using logging.

    Args:
        msg: Message to print
        width (optional): Width of the message
    """
    for _n, line in enumerate(wrap(msg, width=width)):
        if _n == 0:
            console.print(f"[dim]USAGE | {line}")
        else:
            console.print(f"[dim] | {line}")

def debug(msg: str) -> None:
    """Print a debug message without using logging.

    Args:
        msg: Message to print
    """
    console.print(f"[blue]DEBUG | {msg}[/blue]")

def dim(msg: str) -> None:
    """Print a message in dimmed color.

    Args:
        msg: Message to print
    """
    console.print(f"[dim]{msg}[/dim]")

# Load the pyproject.toml file
pyproject = Path(__file__).parents[1] / "pyproject.toml"
if not pyproject.exists():
    console.print("pyproject.toml file not found")
    raise SystemExit(1)

with pyproject.open("rb") as f:
    try:
        data = tomllib.load(f)
    except tomllib.TOMLDecodeError as e:
        raise SystemExit(1) from e

# Get the latest versions of all dependencies
info("Getting latest versions of dependencies...")
packages: dict = {}
for line in sh.poetry("--no-ansi", "show", "--outdated").splitlines():
    package, current, latest = line.split()[:3]
    packages[package] = {"current_version": current, "new_version": latest}

if not packages:
    success("All dependencies are up to date")
    raise SystemExit(0)

dependencies = data["tool"]["poetry"]["dependencies"]
groups = data["tool"]["poetry"]["group"]
for p in dependencies:
    if p in packages:
        notice(
            f"Updating {p} from {packages[p]['current_version']} to {packages[p]['new_version']}"
        )
        sh.poetry("add", f"{p}@latest", _fg=True)
for group in groups:
    for p in groups[group]["dependencies"]:
        if p in packages:
            notice(
                f"Updating {p} from {packages[p]['current_version']} to {packages[p]['new_version']}"
            )
            sh.poetry("add", f"{p}@latest", "--group", group, _fg=True)
sh.poetry("update", _fg=True)
success("All dependencies are up to date")
raise SystemExit(0)
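A plausible way to invoke the script above from the repository root (it shells out to poetry, so it must run inside the project environment):

# Bump every outdated pin in pyproject.toml to its latest version.
poetry run python scripts/update_dependencies.py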

tasks/backups.yml
View File

@@ -6,42 +6,42 @@
# 1. Copies a backup and restore shellscript to /usr/local/bin
# 2. Edits the sudoers file to allow the script to be invoked with sudo privileges
- name: copy backup shellscript to server
- name: Copy backup shellscript to server
become: true
ansible.builtin.template:
src: scripts/service_backups.sh.j2
dest: /usr/local/bin/service_backups
mode: 0755
src: scripts/service_backups.sh.j2
dest: /usr/local/bin/service_backups
mode: 0755
when:
- is_nomad_client or is_nomad_server
- is_nomad_client or is_nomad_server
- name: copy restore shellscript to server
- name: Copy restore shellscript to server
become: true
ansible.builtin.template:
src: scripts/service_restore.sh.j2
dest: /usr/local/bin/service_restore
mode: 0755
src: scripts/service_restore.sh.j2
dest: /usr/local/bin/service_restore
mode: 0755
when:
- is_nomad_client or is_nomad_server
- is_nomad_client or is_nomad_server
- name: ensure nomad user can run sudo with the restore script
- name: Ensure nomad user can run sudo with the restore script
become: true
ansible.builtin.lineinfile:
path: /etc/sudoers
state: present
line: "nomad ALL=(ALL) NOPASSWD: /usr/local/bin/service_backups, /usr/local/bin/service_restore"
validate: "/usr/sbin/visudo -cf %s"
path: /etc/sudoers
state: present
line: "nomad ALL=(ALL) NOPASSWD: /usr/local/bin/service_backups, /usr/local/bin/service_restore"
validate: "/usr/sbin/visudo -cf %s"
when:
- is_nomad_client or is_nomad_server
- "'pis' in group_names"
- is_nomad_client or is_nomad_server
- "'pis' in group_names"
- name: ensure my user can run sudo with the restore script
- name: Ensure my user can run sudo with the restore script
become: true
ansible.builtin.lineinfile:
path: /etc/sudoers
state: present
line: "{{ ansible_user }} ALL=(ALL) NOPASSWD: /usr/local/bin/service_backups, /usr/local/bin/service_restore"
validate: "/usr/sbin/visudo -cf %s"
path: /etc/sudoers
state: present
line: "{{ ansible_user }} ALL=(ALL) NOPASSWD: /usr/local/bin/service_backups, /usr/local/bin/service_restore"
validate: "/usr/sbin/visudo -cf %s"
when:
- is_nomad_client or is_nomad_server
- "'pis' in group_names"
- is_nomad_client or is_nomad_server
- "'pis' in group_names"

tasks/cluster_storage.yml
View File

@@ -6,159 +6,159 @@
- name: "Mount storage on Raspberry Pis"
when: "'pis' in group_names"
block:
- name: ensure local mount points exist
become: true
ansible.builtin.file:
path: "{{ item.local }}"
state: directory
mode: 0777
# owner: "{{ ansible_user_uid }}"
# group: "{{ ansible_user_gid }}"
loop: "{{ rpi_nfs_mounts_list }}"
- name: Ensure local mount points exist
become: true
ansible.builtin.file:
path: "{{ item.local }}"
state: directory
mode: 0777
# owner: "{{ ansible_user_uid }}"
# group: "{{ ansible_user_gid }}"
loop: "{{ rpi_nfs_mounts_list }}"
- name: remove old nfs drives
become: true
ansible.posix.mount:
path: "{{ item.local }}"
src: "{{ item.src }}"
fstype: nfs
opts: defaults,hard,intr,timeo=14
state: absent
loop: "{{ rpi_nfs_mounts_remove }}"
- name: Remove old nfs drives
become: true
ansible.posix.mount:
path: "{{ item.local }}"
src: "{{ item.src }}"
fstype: nfs
opts: defaults,hard,intr,timeo=14
state: absent
loop: "{{ rpi_nfs_mounts_remove }}"
- name: mount all nfs drives
become: true
ansible.posix.mount:
path: "{{ item.local }}"
src: "{{ item.src }}"
fstype: nfs
opts: defaults,hard,intr,timeo=14
state: mounted
boot: true
loop: "{{ rpi_nfs_mounts_list }}"
- name: Mount all nfs drives
become: true
ansible.posix.mount:
path: "{{ item.local }}"
src: "{{ item.src }}"
fstype: nfs
opts: defaults,hard,intr,timeo=14
state: mounted
boot: true
loop: "{{ rpi_nfs_mounts_list }}"
# --------------------------------- Mount on Macs
# https://gist.github.com/l422y/8697518
- name: "Mount storage on Macs"
when: "'macs' in group_names"
block:
- name: create mount_point
become: true
ansible.builtin.file:
path: "{{ mac_storage_mount_point }}"
state: directory
mode: 0755
- name: Create mount_point
become: true
ansible.builtin.file:
path: "{{ mac_storage_mount_point }}"
state: directory
mode: 0755
# I ran into problems getting this to run successfully. If errors occur, add the line manually using:
# $ sudo nano /private/etc/auto_master
# I ran into problems getting this to run successfully. If errors occur, add the line manually using:
# $ sudo nano /private/etc/auto_master
- name: add NFS shared drives to macs
when: mac_autofs_type == 'nfs'
block:
- name: add auto_nfs to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_nfs"
line: "/- auto_nfs -nobrowse,nosuid"
unsafe_writes: true
- name: Add NFS shared drives to macs
when: mac_autofs_type == 'nfs'
block:
- name: Add auto_nfs to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_nfs"
line: "/- auto_nfs -nobrowse,nosuid"
unsafe_writes: true
- name: add mounts to /etc/auto_nfs
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_nfs
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=nfs,bg,intr,noowners,rw,vers=4 nfs://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_nfs_mounts_list if mac_nfs_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: Add mounts to /etc/auto_nfs
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_nfs
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=nfs,bg,intr,noowners,rw,vers=4 nfs://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_nfs_mounts_list if mac_nfs_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: remove old mounts from /etc/auto_nfs
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_nfs
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=nfs,bg,intr,noowners,rw,vers=4 nfs://{{ item.src }}"
state: absent
unsafe_writes: true
mode: 0644
notify: mac_run_automount_unmount
loop: "{{ mac_nfs_mounts_remove if mac_nfs_mounts_remove is iterable else [] }}"
- name: Remove old mounts from /etc/auto_nfs
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_nfs
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=nfs,bg,intr,noowners,rw,vers=4 nfs://{{ item.src }}"
state: absent
unsafe_writes: true
mode: 0644
notify: mac_run_automount_unmount
loop: "{{ mac_nfs_mounts_remove if mac_nfs_mounts_remove is iterable else [] }}"
- name: add AFP shared drives to macs
when: mac_autofs_type == 'afp'
block:
- name: add auto_afp to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_afp"
line: "/- auto_afp -nobrowse,nosuid"
unsafe_writes: true
- name: Add AFP shared drives to macs
when: mac_autofs_type == 'afp'
block:
- name: Add auto_afp to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_afp"
line: "/- auto_afp -nobrowse,nosuid"
unsafe_writes: true
- name: add mounts to /etc/auto_afp
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_afp
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_list if mac_afp_or_smb_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: Add mounts to /etc/auto_afp
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_afp
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_list if mac_afp_or_smb_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: remove mounts from /etc/auto_afp
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_afp
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_remove if mac_afp_or_smb_mounts_remove is iterable else [] }}"
notify: mac_run_automount_unmount
- name: Remove mounts from /etc/auto_afp
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_afp
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_remove if mac_afp_or_smb_mounts_remove is iterable else [] }}"
notify: mac_run_automount_unmount
- name: add SMB shared drives to macs
when: mac_autofs_type == 'smb'
block:
- name: add auto_smb to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_smb"
line: "/- auto_smb -noowners,nosuid"
unsafe_writes: true
- name: Add SMB shared drives to macs
when: mac_autofs_type == 'smb'
block:
- name: Add auto_smb to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_smb"
line: "/- auto_smb -noowners,nosuid"
unsafe_writes: true
- name: add mounts to /etc/auto_smb
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_smb
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=smbfs,soft,noowners,nosuid,rw ://{{ smb_username }}:{{ smb_password }}@{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_list if mac_afp_or_smb_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: Add mounts to /etc/auto_smb
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_smb
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=smbfs,soft,noowners,nosuid,rw ://{{ smb_username }}:{{ smb_password }}@{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_list if mac_afp_or_smb_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: remove mounts from /etc/auto_smb
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_smb
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_remove if mac_afp_or_smb_mounts_remove is iterable else [] }}"
notify: mac_run_automount_unmount
- name: Remove mounts from /etc/auto_smb
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_smb
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_remove if mac_afp_or_smb_mounts_remove is iterable else [] }}"
notify: mac_run_automount_unmount

tasks/consul.yml
View File

@@ -4,356 +4,362 @@
- name: Set variables needed to install consul
block:
- name: "Set variable: check if we have a mounted USB drive (Debian)"
ansible.builtin.stat:
path: "{{ rpi_usb_drive_mount_point }}"
register: have_usb_drive
changed_when: false
when:
- ansible_os_family == 'Debian'
- name: "Set variable: check if we have a mounted USB drive (Debian)"
ansible.builtin.stat:
path: "{{ rpi_usb_drive_mount_point }}"
register: have_usb_drive
changed_when: false
when:
- ansible_os_family == 'Debian'
- name: "Set variable: Use USB drive for consul /opt (Debian)"
ansible.builtin.set_fact:
consul_opt_dir: "{{ rpi_usb_drive_mount_point }}/opt/consul"
when:
- ansible_os_family == 'Debian'
- have_usb_drive.stat.exists
- name: "Set variable: Use USB drive for consul /opt (Debian)"
ansible.builtin.set_fact:
consul_opt_dir: "{{ rpi_usb_drive_mount_point }}/opt/consul"
when:
- ansible_os_family == 'Debian'
- have_usb_drive.stat.exists
- name: "Set variable: Use root disk for consul /opt (Debian)"
ansible.builtin.set_fact:
consul_opt_dir: "/opt/consul"
when:
- ansible_os_family == 'Debian'
- not have_usb_drive.stat.exists
- name: "Set variable: Use root disk for consul /opt (Debian)"
ansible.builtin.set_fact:
consul_opt_dir: "/opt/consul"
when:
- ansible_os_family == 'Debian'
- not have_usb_drive.stat.exists
- name: "Set variable: Use ~/library for /opt files (macOSX)"
ansible.builtin.set_fact:
consul_opt_dir: "/Users/{{ ansible_user }}/Library/consul"
when:
- mac_intel or mac_arm
- name: "Set variable: Use ~/library for /opt files (macOSX)"
ansible.builtin.set_fact:
consul_opt_dir: "/Users/{{ ansible_user }}/Library/consul"
when:
- mac_intel or mac_arm
- name: "Set variable: Use ~/volume1/docker/consul/data for /opt files (synology)"
ansible.builtin.set_fact:
consul_opt_dir: "/volume1/docker/consul/data"
when:
- inventory_hostname == 'synology'
- name: "Set variable: Use ~/volume1/docker/consul/data for /opt files (synology)"
ansible.builtin.set_fact:
consul_opt_dir: "/volume1/docker/consul/data"
when:
- inventory_hostname == 'synology'
- name: "Set variable: Set Consul download Binary (armv7l)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_linux_arm.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Set variable: Set Consul download Binary (armv7l)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_linux_arm.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Set variable: Set Consul download Binary (aarch64)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_linux_arm64.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "Set variable: Set Consul download Binary (aarch64)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_linux_arm64.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "Set variable: Set Consul download Binary (MacOSX)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_darwin_amd64.zip"
when: mac_intel
- name: "Set variable: Set Consul download Binary (MacOSX)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_darwin_amd64.zip"
when: mac_intel
- name: "Set variable: Set Consul download Binary (MacOSX)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_darwin_arm64.zip"
when: mac_arm
- name: "Set variable: Set Consul download Binary (MacOSX)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_darwin_arm64.zip"
when: mac_arm
- name: Assert that we can install Consul
ansible.builtin.assert:
that:
- consul_download_uri is defined
- consul_opt_dir is defined
fail_msg: "Unable to install consul on this host"
when: inventory_hostname != 'synology'
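# NOTE: A hedged alternative to the per-architecture set_fact tasks above would be a single
# task keyed on ansible_architecture; consul_arch_map is hypothetical, not a variable this
# repo defines. A sketch:
- name: "Set variable: Set Consul download binary (sketch, one task for all architectures)"
  ansible.builtin.set_fact:
    consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_{{ consul_arch_map[ansible_architecture] }}.zip"
  vars:
    consul_arch_map:
      armv7l: linux_arm
      aarch64: linux_arm64
      x86_64: darwin_amd64 # assumes x86_64 means an Intel Mac here; Linux x86_64 would need its own entry
      arm64: darwin_arm64
  when: ansible_architecture in consul_arch_map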
- name: "Stop Consul"
block:
- name: "Stop consul systemd service (Debian)"
become: true
ansible.builtin.systemd:
name: consul
state: stopped
when:
- ansible_os_family == 'Debian'
- ansible_facts.services["consul.service"] is defined
- name: "Stop consul systemd service (Debian)"
become: true
ansible.builtin.systemd:
name: consul
state: stopped
when:
- ansible_os_family == 'Debian'
- ansible_facts.services["consul.service"] is defined
- name: "Check if plist file exists (MacOSX)"
ansible.builtin.stat:
path: "{{ consul_plist_macos }}"
register: consul_file
when:
- ansible_os_family == 'Darwin'
- name: "Check if plist file exists (MacOSX)"
ansible.builtin.stat:
path: "{{ consul_plist_macos }}"
register: consul_file
when:
- ansible_os_family == 'Darwin'
- name: "Unload consul agent (MacOSX)"
become: true
ansible.builtin.command:
cmd: "launchctl unload {{ consul_plist_macos }}"
when:
- ansible_os_family == 'Darwin'
- consul_file.stat.exists
- name: "Unload consul agent (MacOSX)"
become: true
ansible.builtin.command:
cmd: "launchctl unload {{ consul_plist_macos }}"
register: consul_unload
failed_when: consul_unload.rc != 0
changed_when: consul_unload.rc == 0
when:
- ansible_os_family == 'Darwin'
- consul_file.stat.exists
- name: "Create 'consul' user and group"
when:
- ansible_os_family == 'Debian'
block:
- name: "Ensure group 'consul' exists (Debian)"
become: true
ansible.builtin.group:
name: consul
state: present
- name: "Ensure group 'consul' exists (Debian)"
become: true
ansible.builtin.group:
name: consul
state: present
- name: "Add the user 'consul' with group 'consul' (Debian)"
become: true
ansible.builtin.user:
name: consul
group: consul
- name: "Add the user 'consul' with group 'consul' (Debian)"
become: true
ansible.builtin.user:
name: consul
group: consul
- name: "Create Consul /opt storage and copy certificates"
block:
- name: "Create {{ consul_opt_dir }} directories"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
recurse: true
mode: 0755
loop:
- "{{ consul_opt_dir }}"
- "{{ consul_opt_dir }}/logs"
- "{{ consul_opt_dir }}/plugins"
- "{{ consul_opt_dir }}/certs"
- name: "Create {{ consul_opt_dir }} directories"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
recurse: true
mode: 0755
loop:
- "{{ consul_opt_dir }}"
- "{{ consul_opt_dir }}/logs"
- "{{ consul_opt_dir }}/plugins"
- "{{ consul_opt_dir }}/certs"
- name: Copy certs to servers
become: true
ansible.builtin.copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: 0755
loop:
- { src: "certs/consul/consul-agent-ca.pem", dest: "{{ consul_opt_dir }}/certs/consul-agent-ca.pem" }
- { src: "certs/consul/{{ datacenter_name }}-server-consul-0.pem", dest: "{{ consul_opt_dir }}/certs/{{ datacenter_name }}-server-consul-0.pem" }
- { src: "certs/consul/{{ datacenter_name }}-server-consul-0-key.pem", dest: "{{ consul_opt_dir }}/certs/{{ datacenter_name }}-server-consul-0-key.pem" }
when:
- is_consul_server
- name: Copy certs to clients
become: true
ansible.builtin.copy:
src: certs/consul/consul-agent-ca.pem
dest: "{{ consul_opt_dir }}/certs/consul-agent-ca.pem"
mode: 0755
when:
- is_consul_client
- not is_consul_server
- name: "Set owner of files to consul:consul (debian)"
become: true
ansible.builtin.file:
path: "{{ consul_opt_dir }}"
owner: consul
group: consul
recurse: true
when:
- ansible_os_family == 'Debian'
- name: "Set owner of files to consul:consul (debian)"
become: true
ansible.builtin.file:
path: "{{ consul_opt_dir }}"
owner: consul
group: consul
recurse: true
when:
- ansible_os_family == 'Debian'
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }}"
become: true
ansible.builtin.file:
path: "{{ consul_opt_dir }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- mac_intel or mac_arm or inventory_hostname == 'synology'
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }}"
become: true
ansible.builtin.file:
path: "{{ consul_opt_dir }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- mac_intel or mac_arm or inventory_hostname == 'synology'
- name: "Template out Consul configuration file"
block:
- name: "Create {{ interpolated_consul_configuration_dir }}"
become: true
ansible.builtin.file:
path: "{{ interpolated_consul_configuration_dir }}"
state: directory
mode: 0755
- name: "Create {{ interpolated_consul_configuration_dir }}"
become: true
ansible.builtin.file:
path: "{{ interpolated_consul_configuration_dir }}"
state: directory
mode: 0755
- name: Copy consul base config file
become: true
ansible.builtin.template:
src: consul.hcl.j2
dest: "{{ interpolated_consul_configuration_dir }}/consul.hcl"
mode: 0644
- name: "Set owner of files to consul:consul (Debian)"
become: true
ansible.builtin.file:
path: "{{ interpolated_consul_configuration_dir }}"
owner: consul
group: consul
recurse: true
when:
- ansible_os_family == 'Debian'
- name: "Set owner of files to consul:consul (Debian)"
become: true
ansible.builtin.file:
path: "{{ interpolated_consul_configuration_dir }}"
owner: consul
group: consul
recurse: true
when:
- ansible_os_family == 'Debian'
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }}"
become: true
ansible.builtin.file:
path: "{{ interpolated_consul_configuration_dir }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- mac_intel or mac_arm or inventory_hostname == 'synology'
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }}"
become: true
ansible.builtin.file:
path: "{{ interpolated_consul_configuration_dir }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- mac_intel or mac_arm or inventory_hostname == 'synology'
- name: "Set owner of root consul dir to {{ ansible_user_uid }}:{{ ansible_user_gid }} (synology)"
become: true
ansible.builtin.file:
path: /volume1/docker/consul/
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- inventory_hostname == 'synology'
- name: "Set owner of root consul dir to {{ ansible_user_uid }}:{{ ansible_user_gid }} (synology)"
become: true
ansible.builtin.file:
path: /volume1/docker/consul/
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- inventory_hostname == 'synology'
- name: "Install Consul binary"
block:
- name: "Set fact: need install consul?"
ansible.builtin.set_fact:
need_consul_install: false
when:
- consul_download_uri is defined
- name: "Set fact: need install consul?"
ansible.builtin.set_fact:
need_consul_install: false
when:
- consul_download_uri is defined
- name: Check if Consul is installed
ansible.builtin.stat:
path: /usr/local/bin/consul
register: consul_binary_file_location
when:
- consul_download_uri is defined
- name: "Set fact: need consul install?"
ansible.builtin.set_fact:
need_consul_install: true
when:
- consul_download_uri is defined
- not consul_binary_file_location.stat.exists
- name: "Set fact: need consul install?"
ansible.builtin.set_fact:
need_consul_install: true
when:
- consul_download_uri is defined
- not consul_binary_file_location.stat.exists
- name: Check current version of Consul
ansible.builtin.shell:
cmd: /usr/local/bin/consul --version | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
changed_when: false
register: installed_consul_version
check_mode: false
when:
- consul_download_uri is defined
- not need_consul_install
- name: "Set fact: need consul install?"
ansible.builtin.set_fact:
need_consul_install: true
when:
- consul_download_uri is defined
- not need_consul_install
- installed_consul_version.stdout is version(consul_version, '<')
- name: "Set fact: need consul install?"
ansible.builtin.set_fact:
need_consul_install: true
when:
- consul_download_uri is defined
- not need_consul_install
- installed_consul_version.stdout is version(consul_version, '<')
- name: Install Consul
become: true
ansible.builtin.unarchive:
src: "{{ consul_download_uri }}"
dest: /usr/local/bin
remote_src: true
when:
- consul_download_uri is defined
- need_consul_install
- name: "Validate consul config"
ansible.builtin.command:
cmd: "/usr/local/bin/consul validate {{ interpolated_consul_configuration_dir }}"
cmd: "/usr/local/bin/consul validate {{ interpolated_consul_configuration_dir }}"
register: consul_config_valid
changed_when: false
failed_when: consul_config_valid.rc != 0
when:
- inventory_hostname != 'synology'
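# NOTE: consul validate checks the whole configuration directory, so every file dropped into
# it is validated together. A sketch (not this repo's approach) that surfaces the error text
# before failing, by deferring the failure to a second task:
- name: "Validate consul config (sketch with error reporting)"
  ansible.builtin.command:
    cmd: "/usr/local/bin/consul validate {{ interpolated_consul_configuration_dir }}"
  register: consul_config_valid
  changed_when: false
  failed_when: false # defer the failure so stderr can be shown first
- name: "Show why validation failed (sketch)"
  ansible.builtin.fail:
    msg: "{{ consul_config_valid.stderr | default('consul validate failed') }}"
  when: consul_config_valid.rc != 0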
- name: "Copy system.d or launchctl service files"
block:
- name: Ensure /Library/LaunchAgents exists (MacOSX)
ansible.builtin.file:
path: "{{ consul_plist_macos | dirname }}"
state: directory
mode: 0755
when:
- ansible_os_family == 'Darwin'
- name: Create Consul launchd service (MacOSX)
ansible.builtin.template:
src: consul.launchd.j2
dest: "{{ consul_plist_macos }}"
mode: 0644
when:
- ansible_os_family == 'Darwin'
- name: Create Consul service (Debian)
become: true
ansible.builtin.template:
src: consul.service.j2
dest: /etc/systemd/system/consul.service
mode: 0644
when:
- ansible_os_family == 'Debian'
- name: "Start Consul"
block:
- name: Load the Consul agent (MacOSX)
ansible.builtin.command:
cmd: "launchctl load -w {{ consul_plist_macos }}"
register: consul_loaded
changed_when: consul_loaded.rc == 0
failed_when: consul_loaded.rc > 0
when:
- mac_intel or mac_arm
- "'nostart' not in ansible_run_tags"
- name: Start Consul (Debian)
become: true
ansible.builtin.systemd:
name: consul
enabled: true
state: started
when:
- ansible_os_family == 'Debian'
- "'nostart' not in ansible_run_tags"
- name: Make sure Consul service is really running
ansible.builtin.command:
cmd: systemctl is-active consul
register: is_consul_really_running
changed_when: false
failed_when: is_consul_really_running.rc != 0
when:
- ansible_os_family == 'Debian'
- "'nostart' not in ansible_run_tags"
- name: "Copy Consul service checks to synology"
when:
- inventory_hostname == 'synology'
block:
- name: Copy config file
ansible.builtin.template:
src: consul_services/consul_synology_checks.json.j2
dest: "{{ interpolated_consul_configuration_dir }}/service_checks.json"
mode: 0644
- name: Reload configuration file
ansible.builtin.uri:
url: "http://{{ synology_second_ip }}:8500/v1/agent/reload"
method: PUT
status_code: 200
ignore_errors: true
check_mode: false
register: consul_agent_reload_http_response
failed_when: consul_agent_reload_http_response.status != 200
- name: Debug when consul agent reload fails
ansible.builtin.debug:
var: consul_agent_reload_http_response.msg
check_mode: false
when: consul_agent_reload_http_response.status != 200


@@ -33,5 +33,5 @@
# when:
# - ansible_facts['system_vendor'] is search("Synology")
- name: "end play"
- name: "End play"
ansible.builtin.meta: end_play


@@ -4,85 +4,91 @@
- name: Check if Docker is already present
ansible.builtin.command:
cmd: docker --version
register: docker_command_result
changed_when: docker_command_result.rc == 1
failed_when: false
- name: Install docker on Debian
when: ansible_os_family == 'Debian'
block:
- name: "Add docker local filesystem storage directory"
ansible.builtin.file:
path: "{{ rpi_localfs_service_storage }}"
mode: 0755
state: directory
- name: "Add docker local filesystem storage directory"
ansible.builtin.file:
path: "{{ rpi_localfs_service_storage }}"
mode: 0755
state: directory
- name: Download Docker install convenience script
ansible.builtin.get_url:
url: "https://get.docker.com/"
dest: /tmp/get-docker.sh
mode: 0775
when: docker_command_result.rc == 1
- name: Run Docker install convenience script
ansible.builtin.command: /tmp/get-docker.sh
environment:
CHANNEL: stable
register: docker_install
failed_when: docker_install.rc > 0
changed_when: docker_install.rc == 0
when: docker_command_result.rc == 1
- name: Make sure Docker CE is installed
ansible.builtin.apt:
name: "docker-ce"
state: present
when: docker_command_result.rc == 1
- name: Ensure Docker is started
ansible.builtin.service:
name: docker
state: started
enabled: true
- name: Ensure docker users are added to the docker group
become: true
ansible.builtin.user:
name: "{{ ansible_user }}"
groups: docker
append: true
when: docker_command_result.rc == 1
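# NOTE: Group membership only applies to new login sessions, so later tasks in the same play
# would still run without the docker group. One common fix (a sketch, not something this
# playbook does) is to drop the current SSH connection so the next task reconnects:
- name: Reset the connection so the new docker group membership takes effect
  ansible.builtin.meta: reset_connection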
- name: Install docker on macOS
when: "'macs' in group_names"
block:
- name: "Add docker directory to ~/Library"
ansible.builtin.file:
path: "{{ mac_localfs_service_storage }}"
mode: 0755
state: directory
- name: "Add docker directory to ~/Library"
ansible.builtin.file:
path: "{{ mac_localfs_service_storage }}"
mode: 0755
state: directory
- name: Install base homebrew packages
community.general.homebrew:
name: docker
state: present
update_homebrew: false
upgrade_all: false
when: docker_command_result.rc == 1
- name: Open docker application
ansible.builtin.command:
cmd: open /Applications/Docker.app
register: docker_open_app
failed_when: docker_open_app.rc > 0
changed_when: docker_open_app.rc == 0
when: docker_command_result.rc == 1
- name: Must install Docker manually
  ansible.builtin.debug:
    msg: |
      Docker must be installed manually on MacOS. Log in to mac to install then rerun playbook
      Be certain to configure the following:
      - run on login
      - add '{{ mac_storage_mount_point }}' to mountable file system directories
  when: docker_command_result.rc == 1
- name: End play
ansible.builtin.meta: end_play
when: docker_command_result.rc == 1


@@ -8,46 +8,46 @@
- name: "Set local filesystem location (pis)"
ansible.builtin.set_fact:
interpolated_localfs_service_storage: "{{ rpi_localfs_service_storage }}"
interpolated_localfs_service_storage: "{{ rpi_localfs_service_storage }}"
changed_when: false
when:
- "'pis' in group_names"
- "'pis' in group_names"
- name: "Set local filesystem location (macs)"
ansible.builtin.set_fact:
interpolated_localfs_service_storage: "{{ mac_localfs_service_storage }}"
interpolated_localfs_service_storage: "{{ mac_localfs_service_storage }}"
changed_when: false
when:
- "'macs' in group_names"
- "'macs' in group_names"
- name: "Set NFS mount location (pis)"
ansible.builtin.set_fact:
interpolated_nfs_service_storage: "{{ rpi_nfs_mount_point }}"
interpolated_nfs_service_storage: "{{ rpi_nfs_mount_point }}"
changed_when: false
when:
- "'pis' in group_names"
- "'pis' in group_names"
- name: "Set NFS mount location location (macs)"
ansible.builtin.set_fact:
interpolated_nfs_service_storage: "{{ mac_storage_mount_point }}"
interpolated_nfs_service_storage: "{{ mac_storage_mount_point }}"
changed_when: false
when:
- "'macs' in group_names"
- "'macs' in group_names"
- name: "set consul configuration directory (synology)"
- name: "Set consul configuration directory (synology)"
ansible.builtin.set_fact:
interpolated_consul_configuration_dir: "{{ synology_consul_configuration_dir }}"
interpolated_consul_configuration_dir: "{{ synology_consul_configuration_dir }}"
when:
- inventory_hostname == 'synology'
- inventory_hostname == 'synology'
- name: "set consul configuration directory (pis)"
- name: "Set consul configuration directory (pis)"
ansible.builtin.set_fact:
interpolated_consul_configuration_dir: "{{ rpi_consul_configuration_dir }}"
interpolated_consul_configuration_dir: "{{ rpi_consul_configuration_dir }}"
when:
- "'pis' in group_names"
- "'pis' in group_names"
- name: "set consul configuration directory (macs)"
- name: "Set consul configuration directory (macs)"
ansible.builtin.set_fact:
interpolated_consul_configuration_dir: "{{ mac_consul_configuration_dir }}"
interpolated_consul_configuration_dir: "{{ mac_consul_configuration_dir }}"
when:
- "'macs' in group_names"
- "'macs' in group_names"


@@ -4,29 +4,29 @@
#
# NOTE: This task exists due to the arillso.logrotate failing completely on macOS
- name: Add service_backups.log to logrotate
become: true
vars:
logrotate_applications:
- name: service_backups
definitions:
- logs:
- "{{ rpi_nfs_mount_point }}/pi-cluster/logs/service_backups.log"
options:
- rotate 1
- size 100k
- missingok
- notifempty
- su root root
- extension .log
- compress
- nodateext
- nocreate
- delaycompress
ansible.builtin.import_role:
name: arillso.logrotate
failed_when: false
ignore_errors: true
when:
- "'macs' not in group_names"
- is_cluster_leader
- "'macs' not in group_names"
- is_cluster_leader
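# NOTE: For reference, the options above correspond roughly to the logrotate definition
# below, written here as a plain copy task; this is a sketch of what arillso.logrotate
# presumably renders, not a file from this repo.
- name: Render the equivalent logrotate config by hand (illustrative sketch)
  become: true
  ansible.builtin.copy:
    dest: /etc/logrotate.d/service_backups
    mode: 0644
    content: |
      {{ rpi_nfs_mount_point }}/pi-cluster/logs/service_backups.log {
          rotate 1
          size 100k
          missingok
          notifempty
          su root root
          extension .log
          compress
          nodateext
          nocreate
          delaycompress
      }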


@@ -4,243 +4,243 @@
- name: "Set variables needed to install Nomad"
block:
- name: "set variable: check if we have a mounted USB drive (Debian)"
ansible.builtin.stat:
path: "{{ rpi_usb_drive_mount_point }}"
register: have_usb_drive
changed_when: false
when:
- ansible_os_family == 'Debian'
- name: "Set variable: check if we have a mounted USB drive (Debian)"
ansible.builtin.stat:
path: "{{ rpi_usb_drive_mount_point }}"
register: have_usb_drive
changed_when: false
when:
- ansible_os_family == 'Debian'
- name: "set variable: Use USB drive for nomad /opt (Debian)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "{{ rpi_usb_drive_mount_point }}/opt/nomad"
when:
- ansible_os_family == 'Debian'
- have_usb_drive.stat.exists
- name: "Set variable: Use USB drive for nomad /opt (Debian)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "{{ rpi_usb_drive_mount_point }}/opt/nomad"
when:
- ansible_os_family == 'Debian'
- have_usb_drive.stat.exists
- name: "set variable: Use root dist for nomad /opt (Debian)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "/opt/nomad"
when:
- ansible_os_family == 'Debian'
- not have_usb_drive.stat.exists
- name: "Set variable: Use root dist for nomad /opt (Debian)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "/opt/nomad"
when:
- ansible_os_family == 'Debian'
- not have_usb_drive.stat.exists
- name: "set variable: Use ~/library for /opt files (macOSX)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "/Users/{{ ansible_user }}/Library/nomad"
when:
- ansible_os_family == 'Darwin'
- name: "Set variable: Use ~/library for /opt files (macOSX)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "/Users/{{ ansible_user }}/Library/nomad"
when:
- ansible_os_family == 'Darwin'
- name: "set variable: Set Nomad download Binary (armv7l)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_linux_arm.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Set variable: Set Nomad download Binary (armv7l)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_linux_arm.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "set variable: Set Nomad download Binary (aarch64)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_linux_arm64.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "Set variable: Set Nomad download Binary (aarch64)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_linux_arm64.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "set variable: Set Nomad download Binary (MacOSX)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_darwin_amd64.zip"
when:
- mac_intel
- name: "Set variable: Set Nomad download Binary (MacOSX)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_darwin_amd64.zip"
when:
- mac_intel
- name: Assert that we can install Nomad
ansible.builtin.assert:
that:
- nomad_download_file_uri is defined
- nomad_opt_dir_location is defined
fail_msg: "Unable to install Nomad on this host"
- name: "Create Nomad user and group (Debian)"
when: ansible_os_family == 'Debian'
block:
- name: "Ensure group 'nomad' exists (Debian)"
become: true
ansible.builtin.group:
name: nomad
state: present
- name: "Ensure group 'nomad' exists (Debian)"
become: true
ansible.builtin.group:
name: nomad
state: present
- name: "Add the user 'nomad' with group 'nomad' (Debian)"
become: true
ansible.builtin.user:
name: nomad
group: nomad
- name: "Add the user 'nomad' with group 'nomad' (Debian)"
become: true
ansible.builtin.user:
name: nomad
group: nomad
- name: "Add user 'nomad' to docker and sudo groups (Debian)"
become: true
ansible.builtin.user:
user: nomad
groups: docker,sudo
append: true
- name: "Add user 'nomad' to docker and sudo groups (Debian)"
become: true
ansible.builtin.user:
user: nomad
groups: docker,sudo
append: true
- name: "Create Nomad /opt storage"
block:
- name: "create {{ nomad_opt_dir_location }} directories"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
recurse: true
mode: 0755
loop:
- "{{ nomad_opt_dir_location }}/logs"
- "{{ nomad_opt_dir_location }}/plugins"
- "{{ nomad_opt_dir_location }}/certs"
- name: "Create {{ nomad_opt_dir_location }} directories"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
recurse: true
mode: 0755
loop:
- "{{ nomad_opt_dir_location }}/logs"
- "{{ nomad_opt_dir_location }}/plugins"
- "{{ nomad_opt_dir_location }}/certs"
- name: Copy server certs
become: true
ansible.builtin.copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: 0755
loop:
- { src: certs/nomad/nomad-ca.pem, dest: "{{ nomad_opt_dir_location }}/certs/nomad-ca.pem" }
- { src: certs/nomad/server.pem, dest: "{{ nomad_opt_dir_location }}/certs/server.pem" }
- { src: certs/nomad/server-key.pem, dest: "{{ nomad_opt_dir_location }}/certs/server-key.pem" }
notify: "restart nomad"
when: is_nomad_server
- name: Copy client certs
become: true
ansible.builtin.copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: 0755
loop:
- { src: certs/nomad/nomad-ca.pem, dest: "{{ nomad_opt_dir_location }}/certs/nomad-ca.pem" }
- { src: certs/nomad/client.pem, dest: "{{ nomad_opt_dir_location }}/certs/client.pem" }
- { src: certs/nomad/client-key.pem, dest: "{{ nomad_opt_dir_location }}/certs/client-key.pem" }
notify: "restart nomad"
when: is_nomad_client
- name: "set owner of files to nomad:nomad (debian)"
become: true
ansible.builtin.file:
path: "{{ nomad_opt_dir_location }}"
owner: nomad
group: nomad
recurse: true
when: ansible_os_family == 'Debian'
- name: "Set owner of files to nomad:nomad (debian)"
become: true
ansible.builtin.file:
path: "{{ nomad_opt_dir_location }}"
owner: nomad
group: nomad
recurse: true
when: ansible_os_family == 'Debian'
- name: "set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }} (MacOSX)"
become: true
ansible.builtin.file:
path: "{{ nomad_opt_dir_location }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when: ansible_os_family != 'Debian'
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }} (MacOSX)"
become: true
ansible.builtin.file:
path: "{{ nomad_opt_dir_location }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when: ansible_os_family != 'Debian'
- name: "Template out the configuration file"
block:
- name: "create {{ nomad_configuration_dir }}"
become: true
ansible.builtin.file:
path: "{{ nomad_configuration_dir }}"
state: directory
mode: 0755
- name: "Create {{ nomad_configuration_dir }}"
become: true
ansible.builtin.file:
path: "{{ nomad_configuration_dir }}"
state: directory
mode: 0755
- name: Copy base config file
become: true
ansible.builtin.template:
src: nomad.hcl.j2
dest: "{{ nomad_configuration_dir }}/nomad.hcl"
mode: 0644
notify: "restart nomad"
- name: "set owner of files to nomad:nomad (Debian)"
become: true
ansible.builtin.file:
path: "{{ nomad_configuration_dir }}"
owner: nomad
group: nomad
recurse: true
when:
- ansible_os_family == 'Debian'
- name: "Set owner of files to nomad:nomad (Debian)"
become: true
ansible.builtin.file:
path: "{{ nomad_configuration_dir }}"
owner: nomad
group: nomad
recurse: true
when:
- ansible_os_family == 'Debian'
- name: Install or Update Nomad
block:
- name: "set fact: do we need a nomad install?"
ansible.builtin.set_fact:
need_nomad_install: false
- name: "Set fact: do we need a nomad install?"
ansible.builtin.set_fact:
need_nomad_install: false
- name: Check if nomad is installed
ansible.builtin.stat:
path: /usr/local/bin/nomad
register: nomad_binary_file_location
- name: "set fact: do we need a nomad install"
ansible.builtin.set_fact:
need_nomad_install: true
when:
- not nomad_binary_file_location.stat.exists
- name: "Set fact: do we need a nomad install"
ansible.builtin.set_fact:
need_nomad_install: true
when:
- not nomad_binary_file_location.stat.exists
- name: Check current version of Nomad
ansible.builtin.shell: /usr/local/bin/nomad --version | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
register: current_nomad_version
check_mode: false
changed_when: false
when:
- not need_nomad_install
- name: "set fact: do we need a nomad install"
ansible.builtin.set_fact:
need_nomad_install: true
when:
- not need_nomad_install
- current_nomad_version.stdout is version(nomad_version, '<')
- name: "Set fact: do we need a nomad install"
ansible.builtin.set_fact:
need_nomad_install: true
when:
- not need_nomad_install
- current_nomad_version.stdout is version(nomad_version, '<')
- name: Install Nomad
become: true
ansible.builtin.unarchive:
src: "{{ nomad_download_file_uri }}"
dest: /usr/local/bin
remote_src: true
notify: "restart nomad"
when:
- need_nomad_install
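# NOTE: The `is version(...)` test gating the install above compares versions semantically
# rather than lexicographically, which is why it is preferred over a plain string compare.
# An illustrative self-check (not part of the playbook):
- name: Demonstrate the version test used above
  ansible.builtin.assert:
    that:
      - "'1.4.9' is version('1.4.10', '<')" # true semantically; a string compare would say otherwise
    quiet: true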
- name: "Copy system.d or launchctrl service files"
block:
- name: Ensure /Library/LaunchAgents exists (MacOSX)
ansible.builtin.file:
path: "{{ nomad_plist_macos | dirname }}"
state: directory
mode: 0755
when:
- ansible_os_family == 'Darwin'
- name: Create nomad launchd service (MacOSX)
ansible.builtin.template:
src: nomad.launchd.j2
dest: "{{ nomad_plist_macos }}"
mode: 0644
notify: "restart nomad"
when:
- ansible_os_family == 'Darwin'
- name: Create nomad service (Debian)
become: true
ansible.builtin.template:
src: nomad.service.j2
dest: /etc/systemd/system/nomad.service
mode: 0644
notify: "restart nomad"
when:
- ansible_os_family == 'Debian'
- name: "start nomad, if stopped"
- name: "Start nomad, if stopped"
ansible.builtin.shell:
cmd: "/usr/local/bin/nomad node status -self -short | grep {{ inventory_hostname }}"
cmd: "/usr/local/bin/nomad node status -self -short | grep {{ inventory_hostname }}"
register: node_status_response
ignore_errors: true
failed_when: false


@@ -10,67 +10,67 @@
- name: "Sync Nomad Jobs"
block:
- name: Remove nomad jobs directory
ansible.builtin.file:
path: "{{ nomad_jobfile_location }}"
state: absent
when:
- is_nomad_client or is_nomad_server or ('macs' in group_names) # unquoted: the quoted form was a literal string and therefore always true
- clean_nomad_jobs
- name: (Re)Create nomad jobs directory
ansible.builtin.file:
path: "{{ nomad_jobfile_location }}"
state: directory
mode: 0755
when:
- is_nomad_client or is_nomad_server or ('macs' in group_names)
- "'nas' not in group_names"
- name: Synchronize nomad job templates (jinja)
ansible.builtin.template:
src: "{{ item }}"
dest: "{{ nomad_jobfile_location }}/{{ item | basename | regex_replace('.j2$', '') }}"
mode: 0644
with_fileglob: "templates/nomad_jobs/*.j2"
when:
- is_nomad_client or is_nomad_server or ('macs' in group_names)
- "'nas' not in group_names"
- name: Synchronize nomad job templates (hcl)
ansible.builtin.template:
src: "{{ item }}"
dest: "{{ nomad_jobfile_location }}/{{ item | basename }}"
mode: 0644
with_fileglob: "templates/nomad_jobs/*.hcl"
when:
- is_nomad_client or is_nomad_server or ('macs' in group_names)
- "'nas' not in group_names"
- name: Ensure we have local storage folders
become: true
ansible.builtin.file:
path: "{{ interpolated_localfs_service_storage }}/{{ item }}"
state: directory
mode: 0777
group: "{{ ansible_user_gid }}"
owner: "{{ ansible_user_uid }}"
when:
- is_nomad_client or is_nomad_server
loop: "{{ service_localfs_dirs }}"
- name: "Sync docker compose files"
- name: Sync docker compose files
when: is_docker_compose_client
block:
- name: Confirm compose file dir exists
ansible.builtin.file:
path: "{{ docker_compose_file_location }}"
state: directory
mode: 0755
- name: Synchronize docker-compose files
ansible.builtin.template:
src: "{{ item }}"
dest: "{{ docker_compose_file_location }}/{{ item | basename | regex_replace('.j2$', '') }}"
mode: 0644
with_fileglob: "../templates/docker_compose_files/*.j2"


@@ -4,64 +4,64 @@
- name: "Update and install APT packages"
when:
- ansible_os_family != 'Darwin'
- manage_apt_packages_list
block:
- name: Update APT package cache
become: true
ansible.builtin.apt:
update_cache: true
cache_valid_time: 3600
- name: "upgrade APT to the latest packages (this may take a while)"
become: true
ansible.builtin.apt:
upgrade: safe
- name: "Upgrade APT to the latest packages (this may take a while)"
become: true
ansible.builtin.apt:
upgrade: safe
- name: "install/upgrade APT packages (this may take a while)"
become: true
ansible.builtin.apt:
pkg: "{{ item }}"
state: present
loop: "{{ apt_packages_list }}"
register: apt_output
- name: "Install/upgrade APT packages (this may take a while)"
become: true
ansible.builtin.apt:
pkg: "{{ item }}"
state: present
loop: "{{ apt_packages_list }}"
register: apt_output
- name: "Update and install Homebrew packages"
when:
- manage_homebrew_package_list
- ansible_os_family == 'Darwin'
block:
- name: Upgrade homebrew and all packages
community.general.homebrew:
update_homebrew: true
upgrade_all: true
register: homebrew_output
ignore_errors: true
- name: Install base homebrew packages
community.general.homebrew:
name: "{{ homebrew_package_list | join(',') }}"
state: present
update_homebrew: false
upgrade_all: false
register: homebrew_output
- name: Homebrew packages updated or installed
ansible.builtin.debug:
msg: "{{ homebrew_output.changed_pkgs }}"
- name: Unchanged homebrew packages
ansible.builtin.debug:
msg: "{{ homebrew_output.unchanged_pkgs }}"
- name: Install homebrew casks
community.general.homebrew_cask:
name: "{{ item }}"
state: present
install_options: "appdir=/Applications"
accept_external_apps: true
upgrade_all: false
update_homebrew: false
greedy: false
loop: "{{ homebrew_casks_list }}"
ignore_errors: true
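# NOTE: community.general.homebrew_cask also accepts a list for name, so the loop above
# could be collapsed into a single call; a sketch with the same options:
- name: Install homebrew casks in one call (sketch)
  community.general.homebrew_cask:
    name: "{{ homebrew_casks_list }}"
    state: present
    install_options: "appdir=/Applications"
    accept_external_apps: true
    upgrade_all: false
    update_homebrew: false
    greedy: false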


@@ -5,36 +5,37 @@
- name: "Check if pull_all_repos exists"
ansible.builtin.stat:
path: "~/bin/pull_all_repos"
path: "~/bin/pull_all_repos"
check_mode: false
register: pull_script_check
- name: "Check if ~/repos exists"
ansible.builtin.stat:
path: "~/repos"
path: "~/repos"
check_mode: false
register: repos_directory_check
- name: "run pull_all_repos script"
- name: "Run pull_all_repos script"
ansible.builtin.command:
cmd: "~/bin/pull_all_repos --directory ~/repos"
cmd: "~/bin/pull_all_repos --directory ~/repos"
register: pull_script_output
when:
- not ansible_check_mode
- pull_script_check.stat.exists
- pull_script_check.stat.executable
- repos_directory_check.stat.isdir is defined
- repos_directory_check.stat.isdir
- repos_directory_check.stat.writeable
failed_when: pull_script_output.rc > 1
changed_when: pull_script_output.rc == 0
- name: "Output from pull_all_repos"
ansible.builtin.debug:
msg: "{{ pull_script_output.stdout }}"
msg: "{{ pull_script_output.stdout }}"
when:
- not ansible_check_mode
- pull_script_check.stat.exists
- pull_script_check.stat.executable
- repos_directory_check.stat.isdir is defined
- repos_directory_check.stat.isdir
- repos_directory_check.stat.writeable
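# NOTE: The two tasks above repeat the same six conditions. A block would let them be stated
# once; this is a sketch of that refactor, not a change this commit makes:
- name: Run and report pull_all_repos (sketch with a shared when)
  when:
    - not ansible_check_mode
    - pull_script_check.stat.exists
    - pull_script_check.stat.executable
    - repos_directory_check.stat.isdir is defined
    - repos_directory_check.stat.isdir
    - repos_directory_check.stat.writeable
  block:
    - name: Run pull_all_repos script
      ansible.builtin.command:
        cmd: "~/bin/pull_all_repos --directory ~/repos"
      register: pull_script_output
      failed_when: pull_script_output.rc > 1
      changed_when: pull_script_output.rc == 0
    - name: Output from pull_all_repos
      ansible.builtin.debug:
        msg: "{{ pull_script_output.stdout }}"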


@@ -1,12 +1,12 @@
---
# TASK DESCRIPTION:
# Always runs first. Confirms we can actually use Ansible
- name: Sanity - user mode
become: false
ansible.builtin.debug:
msg: "sanity check: user mode"
msg: "Sanity check: user mode"
- name: Sanity - become mode
become: true
ansible.builtin.debug:
msg: "sanity check: become mode"
msg: "Sanity check: become mode"


@@ -4,90 +4,92 @@
#
# NOTE: This is deprecated; I no longer use Prometheus and have migrated to Telegraf
- name: Populate service facts
ansible.builtin.service_facts:
- name: Stop node_exporter
become: true
ansible.builtin.systemd:
name: node_exporter
state: stopped
when: ansible_facts.services["node_exporter.service"] is defined
- name: Ensure group "prometheus" exists
become: true
ansible.builtin.group:
name: prometheus
state: present
- name: Add the user 'prometheus' with group 'prometheus'
become: true
ansible.builtin.user:
name: prometheus
group: prometheus
groups: docker
append: true
# --------------- Install or Update Prometheus
- name: "set fact: need to install Prometheus?"
- name: "Set fact: need to install Prometheus?"
ansible.builtin.set_fact:
need_prometheus_install: false
- name: Check if node_exporter is installed
ansible.builtin.stat:
path: /usr/local/bin/node_exporter
register: prometheus_binary_file_location
- name: "set fact: need to install Prometheus?"
- name: "Set fact: need to install Prometheus?"
ansible.builtin.set_fact:
need_prometheus_install: true
when:
- not prometheus_binary_file_location.stat.exists
- name: Check current version of Prometheus
ansible.builtin.shell: /usr/local/bin/node_exporter --version 3>&1 1>&2 2>&3 | head -n1 | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
register: current_prometheus_version
failed_when: false
changed_when: false
check_mode: false
when:
- need_prometheus_install is false
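# NOTE: The 3>&1 1>&2 2>&3 dance above swaps stdout and stderr, presumably because older
# node_exporter builds print the version banner to stderr. Since only the grep result is
# used, merging the streams is an equivalent, simpler alternative (sketch):
- name: Check current version of node_exporter (sketch with a plain redirect)
  ansible.builtin.shell: /usr/local/bin/node_exporter --version 2>&1 | head -n1 | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
  register: current_prometheus_version
  changed_when: false
  failed_when: false
  check_mode: false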
- name: "set fact: need to install Prometheus?"
- name: "Set fact: need to install Prometheus?"
ansible.builtin.set_fact:
need_prometheus_install: true
when:
- need_prometheus_install is false
- current_prometheus_version.stdout != prometheus_verssion
- name: Install node_exporter
become: true
ansible.builtin.unarchive:
src: "https://github.com/prometheus/node_exporter/releases/download/v{{ prometheus_verssion }}/node_exporter-{{ prometheus_verssion }}.linux-armv7.tar.gz"
dest: /usr/local/bin
group: prometheus
owner: prometheus
# reference for extra_opts: https://github.com/ansible/ansible/issues/27081
extra_opts:
- --strip=1
- --no-anchored
- "node_exporter"
remote_src: true
src: "https://github.com/prometheus/node_exporter/releases/download/v{{ prometheus_verssion }}/node_exporter-{{ prometheus_verssion }}.linux-armv7.tar.gz"
dest: /usr/local/bin
group: prometheus
owner: prometheus
# reference for extra_opts: https://github.com/ansible/ansible/issues/27081
extra_opts:
- --strip=1
- --no-anchored
- "node_exporter"
remote_src: true
when:
- need_prometheus_install is true
- name: Create node_exporter service
become: true
ansible.builtin.template:
src: node_exporter.service.j2
dest: /etc/systemd/system/node_exporter.service
mode: 0644
- name: Start node_exporter
become: true
ansible.builtin.systemd:
name: node_exporter
daemon_reload: true
enabled: true
state: started
when:
- "'nostart' not in ansible_run_tags"
- "'nostart' not in ansible_run_tags"


@@ -4,186 +4,187 @@
- name: "Set variables"
block:
- name: "Set tdarr local filesystem location (pis)"
ansible.builtin.set_fact:
interpolated_tdarr_dir: "{{ rpi1_tdarr_file_location }}"
changed_when: false
when:
- "'pis' in group_names"
- name: "Set tdarr local filesystem location (pis)"
ansible.builtin.set_fact:
interpolated_tdarr_dir: "{{ rpi1_tdarr_file_location }}"
changed_when: false
when:
- "'pis' in group_names"
- name: "Set tdarr local filesystem location (macs)"
ansible.builtin.set_fact:
interpolated_tdarr_dir: "{{ mac_tdarr_file_location }}"
changed_when: false
when:
- "'macs' in group_names"
- name: "Set tdarr local filesystem location (macs)"
ansible.builtin.set_fact:
interpolated_tdarr_dir: "{{ mac_tdarr_file_location }}"
changed_when: false
when:
- "'macs' in group_names"
- name: "set variable: Set tdarr download Binary (armv7l)"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/linux_arm/Tdarr_Updater.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Set variable: Set tdarr download Binary (armv7l)"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/linux_arm/Tdarr_Updater.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "set variable: Set tdarr download Binary (MacOSX) - Intel"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/darwin_x64/Tdarr_Updater.zip"
when:
- mac_intel
- name: "Set variable: Set tdarr download Binary (MacOSX) - Intel"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/darwin_x64/Tdarr_Updater.zip"
when:
- mac_intel
- name: "set variable: Set tdarr download Binary (MacOSX) - ARM"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/darwin_arm64/Tdarr_Updater.zip"
when:
- mac_arm
- name: "Set variable: Set tdarr download Binary (MacOSX) - ARM"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/darwin_arm64/Tdarr_Updater.zip"
when:
- mac_arm
- name: "set fact: do we need a tdarr install?"
ansible.builtin.set_fact:
need_tdarr_install: false
- name: "Set fact: do we need a tdarr install?"
ansible.builtin.set_fact:
need_tdarr_install: false
- name: Assert that we can install Tdarr
ansible.builtin.assert:
that:
- tdarr_download_uri is defined
- interpolated_tdarr_dir is defined
fail_msg: "Unable to install Tdarr on this host"
- name: "Install ffmpeg and HandbrakeCLI"
block:
- name: "ensure ffmpeg and handbrake are installed (Debian)"
become: true
ansible.builtin.apt:
pkg: "{{ item }}"
state: present
loop:
- ffmpeg
- handbrake
when: "'pis' in group_names"
- name: "Ensure ffmpeg and handbrake are installed (Debian)"
become: true
ansible.builtin.apt:
pkg: "{{ item }}"
state: present
loop:
- ffmpeg
- handbrake
when: "'pis' in group_names"
- name: "ensure ffmpeg and handbrake are installed (MacOS)"
community.general.homebrew:
name: "{{ item }}"
state: present
update_homebrew: false
upgrade_all: false
loop:
- ffmpeg
- handbrake
when: "'macs' in group_names"
- name: "Ensure ffmpeg and handbrake are installed (MacOS)"
community.general.homebrew:
name: "{{ item }}"
state: present
update_homebrew: false
upgrade_all: false
loop:
- ffmpeg
- handbrake
when: "'macs' in group_names"
- name: "ensure tdarr directory exists"
- name: "Ensure tdarr directory exists"
become: true
ansible.builtin.file:
path: "{{ interpolated_tdarr_dir }}"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
state: directory
path: "{{ interpolated_tdarr_dir }}"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
state: directory
- name: "Install tdarr"
block:
- name: "set_fact: need Tdarr install?"
ansible.builtin.stat:
path: "{{ interpolated_tdarr_dir }}/configs"
register: tdarr_exists
changed_when: false
failed_when: false
- name: "Set fact: need Tdarr install?"
ansible.builtin.stat:
path: "{{ interpolated_tdarr_dir }}/configs"
register: tdarr_exists
changed_when: false
failed_when: false
- name: "set fact: do we need a tdarr install?"
ansible.builtin.set_fact:
need_tdarr_install: true
when: not tdarr_exists.stat.exists
- name: "Set fact: do we need a tdarr install?"
ansible.builtin.set_fact:
need_tdarr_install: true
when: not tdarr_exists.stat.exists
- name: Download tdarr
ansible.builtin.unarchive:
src: "{{ tdarr_download_uri }}"
dest: "{{ interpolated_tdarr_dir }}"
remote_src: true
when: need_tdarr_install
- name: Did tdarr download?
ansible.builtin.stat:
path: "{{ interpolated_tdarr_dir }}/Tdarr_Updater"
register: tdarr_installer_exists
failed_when: not tdarr_installer_exists.stat.exists
when: need_tdarr_install
- name: Ensure correct permissions on Tdarr_Updater
ansible.builtin.file:
path: "{{ interpolated_tdarr_dir }}/Tdarr_Updater"
mode: 0755
when: need_tdarr_install
- name: Install tdarr
ansible.builtin.command:
cmd: "{{ interpolated_tdarr_dir }}/Tdarr_Updater"
register: tdarr_install
failed_when: tdarr_install.rc > 0
changed_when: tdarr_install.rc == 0
when: need_tdarr_install
- name: Ensure correct permissions on server/node executables
ansible.builtin.file:
path: "{{ interpolated_tdarr_dir }}/{{ item }}"
mode: 0755
loop:
- Tdarr_Server/Tdarr_Server
- Tdarr_Node/Tdarr_Node
when: need_tdarr_install
- name: "configure tdarr"
- name: "Configure tdarr"
block:
- name: Update server configuration file
ansible.builtin.template:
src: Tdarr_Server_Config.json.j2
dest: "{{ interpolated_tdarr_dir }}/configs/Tdarr_Server_Config.json"
mode: 0644
when: is_tdarr_server
- name: Update node configuration file
ansible.builtin.template:
src: Tdarr_Node_Config.json.j2
dest: "{{ interpolated_tdarr_dir }}/configs/Tdarr_Node_Config.json"
mode: 0644
when: is_tdarr_node
- name: Check if consul is installed
ansible.builtin.stat:
path: "{{ interpolated_consul_configuration_dir }}"
register: consul_installed
changed_when: false
failed_when: false
when:
- is_tdarr_server
- name: Move consul service config into place
become: true
ansible.builtin.template:
src: consul_services/tdarr_service.json.j2
dest: "{{ interpolated_consul_configuration_dir }}/tdarr_service.json"
mode: 0644
when:
- is_tdarr_server
- consul_installed.stat.exists
- name: Reload consul agent
ansible.builtin.uri:
url: "http://{{ ansible_host }}:8500/v1/agent/reload"
method: PUT
status_code: 200
ignore_errors: true
register: consul_agent_reload_http_response
failed_when: consul_agent_reload_http_response.status != 200
when:
- is_tdarr_server
- consul_installed.stat.exists
- name: Reload consul agent
ansible.builtin.uri:
url: "http://{{ ansible_host }}:8500/v1/agent/reload"
method: PUT
status_code: 200
ignore_errors: true
register: consul_agent_reload_http_response
failed_when: consul_agent_reload_http_response.status != 200
when:
- is_tdarr_server
- consul_installed.stat.exists
- name: debug when consul agent reload fails
ansible.builtin.debug:
var: consul_agent_reload_http_response.msg
when:
- is_tdarr_server
- consul_installed.stat.exists
- consul_agent_reload_http_response.status != 200
- name: Debug when consul agent reload fails
ansible.builtin.debug:
var: consul_agent_reload_http_response.msg
when:
- is_tdarr_server
- consul_installed.stat.exists
- consul_agent_reload_http_response.status != 200
- name: mount shared storage
- name: Mount shared storage
ansible.builtin.import_tasks: cluster_storage.yml


@@ -5,146 +5,146 @@
# --------------------------------- Set variables depending on system type
- name: "Configure variables"
block:
- name: "set variable: telegraph_binary_location (Debian)"
ansible.builtin.set_fact:
telegraph_binary_location: "/usr/bin/telegraf"
when:
- ansible_os_family == 'Debian'
- name: "Set variable: telegraph_binary_location (Debian)"
ansible.builtin.set_fact:
telegraph_binary_location: "/usr/bin/telegraf"
when:
- ansible_os_family == 'Debian'
- name: "set variable: telegraph_binary_location (MacOS)"
ansible.builtin.set_fact:
telegraph_binary_location: "/usr/local/bin/telegraf"
when:
- ansible_os_family == 'Darwin'
- name: "Set variable: telegraph_binary_location (MacOS)"
ansible.builtin.set_fact:
telegraph_binary_location: "/usr/local/bin/telegraf"
when:
- ansible_os_family == 'Darwin'
- name: "set fact: telegraph_config_location (Debian)"
ansible.builtin.set_fact:
telegraph_config_location: "/etc/telegraf"
when:
- ansible_os_family == 'Debian'
- name: "Set fact: telegraph_config_location (Debian)"
ansible.builtin.set_fact:
telegraph_config_location: "/etc/telegraf"
when:
- ansible_os_family == 'Debian'
- name: "set fact: telegraph_config_location (macOS)"
ansible.builtin.set_fact:
telegraph_config_location: "/usr/local/etc"
when:
- ansible_os_family == 'Darwin'
- name: "Set fact: telegraph_config_location (macOS)"
ansible.builtin.set_fact:
telegraph_config_location: "/usr/local/etc"
when:
- ansible_os_family == 'Darwin'
- name: "set fact: telegraph_config_location (macOS)"
ansible.builtin.set_fact:
telegraph_config_location: "/volume1/docker/telegraf/config"
when:
- inventory_hostname == 'synology'
- name: "Set fact: telegraph_config_location (macOS)"
ansible.builtin.set_fact:
telegraph_config_location: "/volume1/docker/telegraf/config"
when:
- inventory_hostname == 'synology'
- name: "Fail if arm Mac (need to update task) or variables not defined"
ansible.builtin.assert:
that:
- telegraph_binary_location is defined
- telegraph_config_location is defined
- not mac_arm
fail_msg: "Unable to install Telegraf on this host"
- name: "Fail if arm Mac (need to update task) or variables not defined"
ansible.builtin.assert:
that:
- telegraph_binary_location is defined
- telegraph_config_location is defined
- not mac_arm
fail_msg: "Unable to install Telegraf on this host"
- name: "set variable: Set speedtest download Binary (armv7l)"
ansible.builtin.set_fact:
speedtest_download_file_uri: "https://install.speedtest.net/app/cli/ookla-speedtest-{{ speedtest_cli_version }}-linux-armhf.tgz"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Set variable: Set speedtest download Binary (armv7l)"
ansible.builtin.set_fact:
speedtest_download_file_uri: "https://install.speedtest.net/app/cli/ookla-speedtest-{{ speedtest_cli_version }}-linux-armhf.tgz"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "set variable: Set speedtest download Binary (aarch64)"
ansible.builtin.set_fact:
speedtest_download_file_uri: "https://install.speedtest.net/app/cli/ookla-speedtest-{{ speedtest_cli_version }}-linux-aarch64.tgz"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "Set variable: Set speedtest download Binary (aarch64)"
ansible.builtin.set_fact:
speedtest_download_file_uri: "https://install.speedtest.net/app/cli/ookla-speedtest-{{ speedtest_cli_version }}-linux-aarch64.tgz"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "Install/upgrade Telegraf"
block:
- name: "set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: false
when: telegraph_binary_location is defined
- name: "Set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: false
when: telegraph_binary_location is defined
- name: Check if telegraf is installed
ansible.builtin.stat:
path: "{{ telegraph_binary_location }}"
check_mode: false
register: telegraf_binary_exists
when: telegraph_binary_location is defined
- name: Check if telegraf is installed
ansible.builtin.stat:
path: "{{ telegraph_binary_location }}"
check_mode: false
register: telegraf_binary_exists
when: telegraph_binary_location is defined
- name: "set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: true
check_mode: false
when:
- telegraph_binary_location is defined
- not telegraf_binary_exists.stat.exists
- name: "Set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: true
check_mode: false
when:
- telegraph_binary_location is defined
- not telegraf_binary_exists.stat.exists
- name: Check current version of telegraf
ansible.builtin.shell: "{{ telegraph_binary_location }} --version | grep -oE '[0-9]+\\.[0-9]+\\.[0-9]+'"
ignore_errors: true
register: current_telegraf_version
check_mode: false
changed_when: false
when:
- not need_telegraf_install
- telegraph_binary_location is defined
- name: Check current version of telegraf
ansible.builtin.shell: "{{ telegraph_binary_location }} --version | grep -oE '[0-9]+\\.[0-9]+\\.[0-9]+'"
ignore_errors: true
register: current_telegraf_version
check_mode: false
changed_when: false
when:
- not need_telegraf_install
- telegraph_binary_location is defined
- name: "set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: true
when:
- telegraph_binary_location is defined
- not need_telegraf_install
- current_telegraf_version.stdout is version(telegraf_version, '<')
- name: "Set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: true
when:
- telegraph_binary_location is defined
- not need_telegraf_install
- current_telegraf_version.stdout is version(telegraf_version, '<')
- name: install telegraf (MacOS)
community.general.homebrew:
name: telegraf
state: present
notify: restart_telegraf
when:
- ansible_os_family == 'Darwin'
- need_telegraf_install
- name: Install telegraf (MacOS)
community.general.homebrew:
name: telegraf
state: present
notify: restart_telegraf
when:
- ansible_os_family == 'Darwin'
- need_telegraf_install
- name: install base apt-transport (Debian)
become: true
ansible.builtin.apt:
pkg: apt-transport-https
state: present
update_cache: true
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Install base apt-transport (Debian)
become: true
ansible.builtin.apt:
pkg: apt-transport-https
state: present
update_cache: true
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Download telegraf GPG key (Debian)
become: true
ansible.builtin.apt_key:
state: present
url: "https://repos.influxdata.com/influxdb.key"
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Download telegraf GPG key (Debian)
become: true
ansible.builtin.apt_key:
state: present
url: "https://repos.influxdata.com/influxdb.key"
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Add telegraf repository to apt (Debian)
become: true
ansible.builtin.apt_repository:
repo: deb https://repos.influxdata.com/debian bullseye stable
state: present
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Add telegraf repository to apt (Debian)
become: true
ansible.builtin.apt_repository:
repo: deb https://repos.influxdata.com/debian bullseye stable
state: present
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: install telegraf (Debian)
become: true
ansible.builtin.apt:
pkg: telegraf
state: latest
update_cache: true
only_upgrade: true
notify: restart_telegraf
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Install telegraf (Debian)
become: true
ansible.builtin.apt:
pkg: telegraf
state: latest
update_cache: true
only_upgrade: true
notify: restart_telegraf
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
# - name: give telegraf access to docker
# become: true
@@ -162,115 +162,115 @@
- name: "Install speedtest"
when: "'pis' in group_names"
block:
- name: "set fact: do we need speedtest installed?"
ansible.builtin.set_fact:
need_speedtest_install: false
- name: "Set fact: do we need speedtest installed?"
ansible.builtin.set_fact:
need_speedtest_install: false
- name: Check if speedtest is installed
ansible.builtin.stat:
path: /usr/local/bin/speedtest
register: speedtest_binary_file_location
- name: Check if speedtest is installed
ansible.builtin.stat:
path: /usr/local/bin/speedtest
register: speedtest_binary_file_location
- name: "set fact: do we need a speedtest install"
ansible.builtin.set_fact:
need_speedtest_install: true
when:
- not speedtest_binary_file_location.stat.exists
- name: "Set fact: do we need a speedtest install"
ansible.builtin.set_fact:
need_speedtest_install: true
when:
- not speedtest_binary_file_location.stat.exists
- name: Check current version of speedtest
ansible.builtin.shell: /usr/local/bin/speedtest --version | head -n1 | awk '{print $4}' | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
register: current_speedtest_version
check_mode: false
changed_when: false
when:
- not need_speedtest_install
- name: Check current version of speedtest
ansible.builtin.shell: /usr/local/bin/speedtest --version | head -n1 | awk '{print $4}' | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
register: current_speedtest_version
check_mode: false
changed_when: false
when:
- not need_speedtest_install
- name: "set fact: do we need a speedtest install"
ansible.builtin.set_fact:
need_speedtest_install: true
when:
- not need_speedtest_install
- current_speedtest_version.stdout is version(speedtest_cli_version, '<')
- name: "Set fact: do we need a speedtest install"
ansible.builtin.set_fact:
need_speedtest_install: true
when:
- not need_speedtest_install
- current_speedtest_version.stdout is version(speedtest_cli_version, '<')
- name: "Install speedtest (pi)"
become: true
ansible.builtin.unarchive:
src: "{{ speedtest_download_file_uri }}"
dest: /usr/local/bin
remote_src: true
when:
- need_speedtest_install
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Install speedtest (pi)"
become: true
ansible.builtin.unarchive:
src: "{{ speedtest_download_file_uri }}"
dest: /usr/local/bin
remote_src: true
when:
- need_speedtest_install
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Configure Telegraf"
block:
- name: "Ensure {{ telegraph_config_location }} exists"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: 0755
loop:
- "{{ telegraph_config_location }}"
- "{{ telegraph_config_location }}/telegraf.d"
- name: "Ensure {{ telegraph_config_location }} exists"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: 0755
loop:
- "{{ telegraph_config_location }}"
- "{{ telegraph_config_location }}/telegraf.d"
- name: template config files to server
become: true
ansible.builtin.template:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: "644"
loop:
- { src: "telegraf/base_config.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.conf" }
- { src: "telegraf/custom_metrics.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/custom_metrics.conf" }
- { src: "telegraf/nomad.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/nomad.conf" }
- { src: "telegraf/docker.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/docker.conf" }
notify: restart_telegraf
- name: Template config files to server
become: true
ansible.builtin.template:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: "644"
loop:
- { src: "telegraf/base_config.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.conf" }
- { src: "telegraf/custom_metrics.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/custom_metrics.conf" }
- { src: "telegraf/nomad.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/nomad.conf" }
- { src: "telegraf/docker.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/docker.conf" }
notify: restart_telegraf
- name: template leader configs (ie, configs that should be placed on a single server)
become: true
ansible.builtin.template:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: "644"
loop:
- { src: "telegraf/leader.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/leader.conf" }
- { src: "telegraf/speedtest.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/speedtest.conf" }
- { src: "telegraf/pingHosts.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/pingHosts.conf" }
when:
- is_cluster_leader
notify: restart_telegraf
- name: Template leader configs (ie, configs that should be placed on a single server)
become: true
ansible.builtin.template:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: "644"
loop:
- { src: "telegraf/leader.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/leader.conf" }
- { src: "telegraf/speedtest.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/speedtest.conf" }
- { src: "telegraf/pingHosts.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/pingHosts.conf" }
when:
- is_cluster_leader
notify: restart_telegraf
- name: Copy custom metrics script
become: true
ansible.builtin.template:
src: "scripts/telegraf_custom_metrics.sh.j2"
dest: "/usr/local/bin/telegraf_custom_metrics.sh"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
when:
- inventory_hostname != 'synology'
- name: Copy custom metrics script
become: true
ansible.builtin.template:
src: "scripts/telegraf_custom_metrics.sh.j2"
dest: "/usr/local/bin/telegraf_custom_metrics.sh"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
when:
- inventory_hostname != 'synology'
- name: Copy speedtest script
become: true
ansible.builtin.template:
src: "scripts/telegraf_speedtest.sh.j2"
dest: "/usr/local/bin/telegraf_speedtest.sh"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
when:
- is_cluster_leader
- name: Copy speedtest script
become: true
ansible.builtin.template:
src: "scripts/telegraf_speedtest.sh.j2"
dest: "/usr/local/bin/telegraf_speedtest.sh"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
when:
- is_cluster_leader
- name: Reset file ownership
become: true
ansible.builtin.file:
path: "{{ telegraph_config_location }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- (ansible_os_family == 'Darwin') or (inventory_hostname == 'synology')
- name: Reset file ownership
become: true
ansible.builtin.file:
path: "{{ telegraph_config_location }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- (ansible_os_family == 'Darwin') or (inventory_hostname == 'synology')


@@ -1,5 +1,5 @@
[Unit]
Description="HashiCorp Consul - A service mesh solution"
Description="hashiCorp Consul - A service mesh solution"
Documentation=https://www.consul.io/
Requires=network-online.target
After=network-online.target


@@ -1,11 +0,0 @@
version: '3.9'
services:
asn-to-ip:
image: ddimick/asn-to-ip:latest
hostname: asn-to-ip
container_name: asn-to-ip
network_mode: "bridge"
ports:
- 5151:5000
restart: unless-stopped


@@ -2,7 +2,7 @@ version: '3.9'
services:
consul:
image: consul:{{ consul_version }}
image: hashicorp/consul:{{ consul_version }}
hostname: consul
container_name: consul
network_mode: "host"


@@ -59,7 +59,7 @@ consul {
{% endif %}
}
# ----------------------------------------- CLient Config
# ----------------------------------------- Client Config
client {
enabled = true
{% if 'pis' in group_names %}
@@ -206,9 +206,9 @@ plugin "raw_exec" {
plugin "docker" {
config {
allow_caps = [ "ALL" ]
allow_caps = ["all"]
allow_privileged = true
extra_labels = ["job_name"]
volumes {
enabled = true
}


@@ -7,9 +7,16 @@ ConditionFileNotEmpty={{ nomad_configuration_dir }}/nomad.hcl
[Service]
{# {% if 'linode' in group_names %} #}
User=nomad
Group=nomad
{# User=nomad #}
{# Group=nomad #}
{# {% endif %} #}
{# NOTE: Nomad is running as root rather than the Nomad user due to the Docker driver not being started when cgroups v2 are enabled.
https://github.com/hashicorp/nomad/pull/16063
#}
User=root
Group=root
ExecReload=/bin/kill -HUP $MAINPID
ExecStart=/usr/local/bin/nomad agent -config {{ nomad_configuration_dir }}
KillMode=process


@@ -57,6 +57,7 @@ job "changedetection" {
service {
port = "webUI"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`changes.{{ homelab_domain_name }}`)",
@@ -75,7 +76,6 @@ job "changedetection" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service

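The recurring one-line additions across the job files below (`provider = "nomad"` in each service block, plus the removal of `ignore_warnings` from each check_restart) are the mechanics of moving service registration off Consul and onto Nomad's built-in service discovery, available since Nomad 1.3. Nomad-native checks only report passing or failing, with no Consul-style warning state, which is presumably why `ignore_warnings` is dropped everywhere. A minimal sketch of the pattern these hunks converge on; the port label, Host rule, and domain are placeholders:

service {
  port     = "web"
  name     = "${NOMAD_JOB_NAME}"
  provider = "nomad" // register in Nomad's service catalog instead of Consul
  tags = [
    "traefik.enable=true",
    "traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`app.example.com`)",
  ]
  check {
    type     = "tcp"
    port     = "web"
    interval = "30s"
    timeout  = "4s"
  }
  check_restart {
    limit = 0
    grace = "1m"
    // ignore_warnings omitted: Nomad-provider checks have no warning state
  }
}

Traefik then discovers these services through its Nomad provider, which is enabled in the reverse-proxy job further down.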

@@ -73,6 +73,7 @@ job "chronograf" {
service {
port = "chronografPort"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -91,7 +92,6 @@ job "chronograf" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service


@@ -63,6 +63,7 @@ job "code" {
service {
port = "port1"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -82,7 +83,6 @@ job "code" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service


@@ -78,6 +78,7 @@ job "diagnostics" {
service {
port = "whoami"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -95,7 +96,6 @@ job "diagnostics" {
check_restart {
limit = 2
grace = "1m"
ignore_warnings = true
}
}
resources {


@@ -54,6 +54,7 @@ job "freshrss" {
service {
port = "port1"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`rss.{{ homelab_domain_name }}`)",
@@ -73,7 +74,6 @@ job "freshrss" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service


@@ -87,6 +87,7 @@ job "grafana" {
service {
port = "http"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -109,7 +110,6 @@ job "grafana" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service


@@ -52,6 +52,7 @@ job "headless-chrome" {
service {
port = "port1"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`chrome.{{ homelab_domain_name }}`)",
@@ -70,7 +71,6 @@ job "headless-chrome" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service


@@ -71,7 +71,7 @@ job "icloud_backup" {
# Drive destination
root: "icloud"
smtp:
# If you want to recieve email notifications about expired/missing 2FA credentials then uncomment
# If you want to receive email notifications about expired/missing 2FA credentials then uncomment
email: "{{ email_smtp_account }}"
# optional, to email address. Default is sender email.
#to: "receiver@test.com"
@@ -120,7 +120,7 @@ job "icloud_backup" {
photos:
destination: "photos"
remove_obsolete: true
sync_inteval: 172800 # 2 days
sync_interval: 172800 # 2 days
filters:
albums:
# - "album1"


@@ -78,6 +78,7 @@ job "influxdb" {
service {
port = "httpAPI"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
check {
type = "tcp"
@@ -89,7 +90,6 @@ job "influxdb" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}


@@ -0,0 +1,94 @@
job "ladder" {
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "service"
// constraint {
// attribute = "${node.unique.name}"
// operator = "regexp"
// value = "rpi(1|2|3)"
// }
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
group "ladder" {
count = 1
restart {
attempts = 0
delay = "30s"
}
network {
port "port1" {
to = "8080"
}
}
task "ladder" {
env {
PUID = "${meta.PUID}"
PGID = "${meta.PGID}"
TZ = "America/New_York"
PORT = "8080"
}
driver = "docker"
config {
image = "ghcr.io/kubero-dev/ladder:latest"
hostname = "${NOMAD_TASK_NAME}"
ports = ["port1"]
image_pull_timeout = "10m"
// volumes = [
// "${meta.nfsStorageRoot}/pi-cluster/${NOMAD_TASK_NAME}:/etc/TEMPLATE/"
// ]
} // docker config
service {
port = "port1"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_TASK_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_TASK_NAME}.service=${NOMAD_TASK_NAME}",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls.certresolver=cloudflare",
]
check {
type = "tcp"
port = "port1"
interval = "30s"
timeout = "4s"
}
check_restart {
limit = 0
grace = "1m"
}
} // service
// resources {
// cpu = 100 # MHz
// memory = 300 # MB
// } // resources
} // task
} // group
} // job


@@ -82,6 +82,7 @@ job "lidarr" {
service {
port = "lidarr"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -100,7 +101,6 @@ job "lidarr" {
check_restart {
limit = 0
grace = "10m"
ignore_warnings = true
}
} // service


@@ -47,6 +47,7 @@ job "loki" {
service {
port = "loki_port"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -66,7 +67,6 @@ job "loki" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service


@@ -67,6 +67,7 @@ job "mealie" {
service {
port = "port1"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -85,7 +86,6 @@ job "mealie" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service


@@ -57,6 +57,7 @@ job "nginx" {
service {
port = "web"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -75,7 +76,6 @@ job "nginx" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service


@@ -55,6 +55,7 @@ job "nzbhydra" {
service {
port = "hydra_port"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`hydra.{{ homelab_domain_name }}`)",
@@ -73,7 +74,6 @@ job "nzbhydra" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service


@@ -54,6 +54,7 @@ job "overseerr" {
service {
port = "overseerr"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -74,7 +75,6 @@ job "overseerr" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service


@@ -37,7 +37,7 @@ job "pihole" {
// }
}
task "await_filesytem" {
task "await_filesystem" {
driver = "docker"
config {
@@ -109,6 +109,7 @@ job "pihole" {
service {
name = "${NOMAD_JOB_NAME}"
port = "web"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`p.{{ homelab_domain_name }}`)",
@@ -118,7 +119,7 @@ job "pihole" {
"traefik.http.routers.${NOMAD_JOB_NAME}.tls.certresolver=cloudflare",
"traefik.http.middlewares.piholeRedirect.redirectregex.regex=^(https?://p\\.{{ homelab_domain_name }})/?$",
"traefik.http.middlewares.piholeRedirect.redirectregex.replacement=$${1}/admin/",
"traefik.http.routers.${NOMAD_JOB_NAME}.middlewares=authelia@file,piholeRedirect"
"traefik.http.routers.${NOMAD_JOB_NAME}.middlewares=piholeRedirect"
]
check {
type = "http"
@@ -130,7 +131,6 @@ job "pihole" {
check_restart {
limit = 3
grace = "10m"
ignore_warnings = false
}
}


@@ -84,6 +84,7 @@ job "prowlarr" {
service {
port = "prowlarr"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -103,7 +104,6 @@ job "prowlarr" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service


@@ -81,6 +81,7 @@ job "radarr" {
service {
port = "radarr"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -100,7 +101,6 @@ job "radarr" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service


@@ -81,6 +81,7 @@ job "readarr" {
service {
port = "readarr"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -100,7 +101,6 @@ job "readarr" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service


@@ -32,104 +32,182 @@ job "recyclarr" {
task "recyclarr" {
env {
TZ = "America/New_York"
TZ = "America/New_York"
RECYCLARR_APP_DATA = "/local"
}
// user = "${meta.PUID}:${meta.PGID}"
driver = "docker"
config {
image = "ghcr.io/recyclarr/recyclarr:2"
image = "ghcr.io/recyclarr/recyclarr:{{ recyclarr_version }}"
hostname = "${NOMAD_TASK_NAME}"
init = true
volumes = [
"${meta.nfsStorageRoot}/pi-cluster/${NOMAD_TASK_NAME}:/config"
]
} // docker config
// template {
// destination = "local/recyclarr.yml"
// env = false
// change_mode = "restart"
// perms = "644"
// data = <<-EOH
// ---
// # yaml-language-server: $schema=https://raw.githubusercontent.com/recyclarr/recyclarr/master/schemas/config-schema.json
template {
destination = "local/recyclarr.yml"
env = false
change_mode = "restart"
perms = "644"
data = <<-EOH
# yaml-language-server: $schema=https://raw.githubusercontent.com/recyclarr/recyclarr/master/schemas/config-schema.json
// # A starter config to use with Recyclarr. Most values are set to "reasonable defaults". Update the
// # values below as needed for your instance. You will be required to update the API Key and URL for
// # each instance you want to use.
// #
// # Many optional settings have been omitted to keep this template simple.
// #
// # For more details on the configuration, see the Configuration Reference on the wiki here:
// # https://github.com/recyclarr/recyclarr/wiki/Configuration-Reference
# A starter config to use with Recyclarr. Most values are set to "reasonable defaults". Update the
# values below as needed for your instance. You will be required to update the API Key and URL for
# each instance you want to use.
#
# Many optional settings have been omitted to keep this template simple. Note that there's no "one
# size fits all" configuration. Please refer to the guide to understand how to build the appropriate
# configuration based on your hardware setup and capabilities.
#
# For any lines that mention uncommenting YAML, you simply need to remove the leading hash (`#`).
# The YAML comments will already be at the appropriate indentation.
#
# For more details on the configuration, see the Configuration Reference on the wiki here:
# https://recyclarr.dev/wiki/reference/config-reference
// # Configuration specific to Sonarr
// sonarr:
// # Set the URL/API Key to your actual instance
# Configuration specific to Sonarr
sonarr:
series:
base_url: https://sonarr.{{ homelab_domain_name }}/
api_key: {{ sonarr_api_key }}
delete_old_custom_formats: true
// {% raw -%}
// - base_url: http://{{ range service "sonarr" }}{{ .Address }}:{{ .Port }}{{ end }}
// api_key: f7e74ba6c80046e39e076a27af5a8444
// {% endraw -%}
# Quality definitions from the guide to sync to Sonarr. Choices: series, anime
quality_definition:
type: series
// # Quality definitions from the guide to sync to Sonarr. Choice: anime, series, hybrid
// quality_definition: series
# Release profiles from the guide to sync to Sonarr v3 (Sonarr v4 does not use this!)
# Use `recyclarr list release-profiles` for values you can put here.
# https://trash-guides.info/Sonarr/Sonarr-Release-Profile-RegEx/
release_profiles:
- trash_ids:
- EBC725268D687D588A20CBC5F97E538B # Low Quality Groups
- 1B018E0C53EC825085DD911102E2CA36 # Release Sources (Streaming Service)
- 71899E6C303A07AF0E4746EFF9873532 # P2P Groups + Repack/Proper
strict_negative_scores: false
// # Release profiles from the guide to sync to Sonarr.
// # You can optionally add tags and make negative scores strictly ignored
// release_profiles:
// # Series
// - trash_ids:
// - EBC725268D687D588A20CBC5F97E538B # Low Quality Groups
// - 1B018E0C53EC825085DD911102E2CA36 # Release Sources (Streaming Service)
// - 71899E6C303A07AF0E4746EFF9873532 # P2P Groups + Repack/Proper
// # Anime (Uncomment below if you want it)
// # - trash_ids:
// # - d428eda85af1df8904b4bbe4fc2f537c # Anime - First release profile
// # - 6cd9e10bb5bb4c63d2d7cd3279924c7b # Anime - Second release profile
- trash_ids:
- 76e060895c5b8a765c310933da0a5357 # Optionals
filter:
include:
- cec8880b847dd5d31d29167ee0112b57 # Golden rule
- 436f5a7d08fbf02ba25cb5e5dfe98e55 # Ignore Dolby Vision without HDR10 fallback.
# - f3f0f3691c6a1988d4a02963e69d11f2 # Ignore The Group -SCENE
# - 5bc23c3a055a1a5d8bbe4fb49d80e0cb # Ignore so called scene releases
- 538bad00ee6f8aced8e0db5218b8484c # Ignore Bad Dual Audio Groups
- 4861d8238f9234606df6721df6e27deb # Ignore AV1
- bc7a6383cbe88c3ee2d6396e1aacc0b3 # Prefer HDR
- 6f2aefa61342a63387f2a90489e90790 # Dislike retags: rartv, rarbg, eztv, TGx
- 19cd5ecc0a24bf493a75e80a51974cdd # Dislike retagged groups
- 6a7b462c6caee4a991a9d8aa38ce2405 # Dislike release ending: en
- 236a3626a07cacf5692c73cc947bc280 # Dislike release containing: 1-
# - fa47da3377076d82d07c4e95b3f13d07 # Prefer Dolby Vision
// # Configuration specific to Radarr.
// radarr:
// # Set the URL/API Key to your actual instance
// {% raw -%}
// - base_url: http://{{ range service "radarr" }}{{ .Address }}:{{ .Port }}{{ end }}
// api_key: f7e74ba6c80046e39e076a27af5a8444
// {% endraw -%}
# Configuration specific to Radarr.
radarr:
movies:
# Set the URL/API Key to your actual instance
base_url: https://radarr.{{ homelab_domain_name }}/
api_key: {{ radarr_api_key }}
delete_old_custom_formats: true
replace_existing_custom_formats: true
// # Which quality definition in the guide to sync to Radarr. Only choice right now is 'movie'
// quality_definition:
// type: movie
# Which quality definition in the guide to sync to Radarr. Only choice right now is 'movie'
quality_definition:
type: movie
preferred_ratio: 0.5
// # Set to 'true' to automatically remove custom formats from Radarr when they are removed from
// # the guide or your configuration. This will NEVER delete custom formats you manually created!
// delete_old_custom_formats: false
quality_profiles:
- name: "720p/1080p"
reset_unmatched_scores: true
- name: "720p/1080p Remux"
reset_unmatched_scores: true
// custom_formats:
// # A list of custom formats to sync to Radarr. Must match the "trash_id" in the guide JSON.
// - trash_ids:
// - ed38b889b31be83fda192888e2286d83 # BR-DISK
// - 90cedc1fea7ea5d11298bebd3d1d3223 # EVO (no WEBDL)
// - 90a6f9a284dff5103f6346090e6280c8 # LQ
// - dc98083864ea246d05a42df0d05f81cc # x265 (720/1080p)
// - b8cd450cbfa689c0259a01d9e29ba3d6 # 3D
custom_formats:
# Use `recyclarr list custom-formats radarr` for values you can put here.
# https://trash-guides.info/Radarr/Radarr-collection-of-custom-formats/
// # Uncomment the below properties to specify one or more quality profiles that should be
// # updated with scores from the guide for each custom format. Without this, custom formats
// # are synced to Radarr but no scores are set in any quality profiles.
// # quality_profiles:
// # - name: Quality Profile 1
// # - name: Quality Profile 2
// # #score: -9999 # Optional score to assign to all CFs. Overrides scores in the guide.
// # #reset_unmatched_scores: true # Optionally set other scores to 0 if they are not listed in 'names' above.
// EOH
// }
- trash_ids:
# Movie versions
- eca37840c13c6ef2dd0262b141a5482f # 4K Remaster
- 570bc9ebecd92723d2d21500f4be314c # Remaster
- 0f12c086e289cf966fa5948eac571f44 # Hybrid
- 9d27d9d2181838f76dee150882bdc58c # Masters of Cinema
- e0c07d59beb37348e975a930d5e50319 # Criterion Collection
- 957d0f44b592285f26449575e8b1167e # Special Edition
- eecf3a857724171f968a66cb5719e152 # IMAX
- 9f6cbff8cfe4ebbc1bde14c7b7bec0de # IMAX Enhanced
# Unwanted
- b8cd450cbfa689c0259a01d9e29ba3d6 # 3D
- ed38b889b31be83fda192888e2286d83 # BR-DISK
- 90a6f9a284dff5103f6346090e6280c8 # LQ
- bfd8eb01832d646a0a89c4deb46f8564 # Upscaled
- 90cedc1fea7ea5d11298bebd3d1d3223 # EVO (no WEBDL)
- 923b6abef9b17f937fab56cfcf89e1f1 # DV (WEBDL)
- b6832f586342ef70d9c128d40c07b872 # Bad Dual Groups
- ae9b7c9ebde1f3bd336a8cbd1ec4c5e5 # No-RlsGroup
- 7357cf5161efbf8c4d5d0c30b4815ee2 # Obfuscated
- 5c44f52a8714fdd79bb4d98e2673be1f # Retags
- c465ccc73923871b3eb1802042331306 # Line/Mic Dubbed
# Misc
- e7718d7a3ce595f289bfee26adc178f5 # Repack/Proper
- ae43b294509409a6a13919dedd4764c4 # Repack2
# HQ Release Groups
- ed27ebfef2f323e964fb1f61391bcb35 # HD Bluray Tier 01
- c20c8647f2746a1f4c4262b0fbbeeeae # HD Bluray Tier 02
- c20f169ef63c5f40c2def54abaf4438e # WEB Tier 01
- 403816d65392c79236dcb6dd591aeda4 # WEB Tier 02
- af94e0fe497124d1f9ce732069ec8c3b # WEB Tier 03
quality_profiles:
- name: "720p/1080p"
- name: "720p/1080p Remux"
# HDR FORMATS
# ########################
- trash_ids:
- 3a3ff47579026e76d6504ebea39390de # Remux Tier 01
- 9f98181fe5a3fbeb0cc29340da2a468a # Remux Tier 02
- e61e28db95d22bedcadf030b8f156d96 # HDR
- 2a4d9069cc1fe3242ff9bdaebed239bb # HDR (undefined)
quality_profiles:
- name: "720p/1080p"
score: -100
- name: "720p/1080p Remux"
# AUDIO FORMATS
# ########################
- trash_ids:
- 6fd7b090c3f7317502ab3b63cc7f51e3 # 6.1 Surround
- e77382bcfeba57cb83744c9c5449b401 # 7.1 Surround
- f2aacebe2c932337fe352fa6e42c1611 # 9.1 Surround
quality_profiles:
- name: "720p/1080p"
score: -50
- name: "720p/1080p Remux"
score: -50
- trash_ids:
- 89dac1be53d5268a7e10a19d3c896826 # 2.0 Stereo
quality_profiles:
- name: "720p/1080p"
score: 120
- trash_ids:
- 77ff61788dfe1097194fd8743d7b4524 # 5.1 Surround
quality_profiles:
- name: "720p/1080p"
score: 80
- name: "720p/1080p Remux"
score: 80
EOH
}
// resources {
// cpu = 100 # MHz
// memory = 300 # MB
// } // resources
resources {
cpu = 100 # MHz
memory = 300 # MB
} // resources
} // task

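The recyclarr rewrite above replaces the commented-out, bind-mounted starter config with an active Nomad template stanza: the YAML is filled in from Ansible variables ({{ sonarr_api_key }}, {{ homelab_domain_name }}, and friends) when the job file is templated, and change_mode = "restart" bounces the task whenever the rendered file changes. A minimal sketch of the stanza shape, with placeholder values in the body:

template {
  destination = "local/recyclarr.yml" // rendered inside the allocation's local/ dir
  env         = false                 // this is a config file, not environment variables
  change_mode = "restart"             // restart the task when the rendered contents change
  perms       = "644"
  data        = <<-EOH
  # Placeholder values; the real job interpolates Ansible vars at deploy time.
  sonarr:
    series:
      base_url: https://sonarr.example.com/
      api_key: REDACTED
  EOH
}

Setting RECYCLARR_APP_DATA = "/local" in the task's env then points the container at that rendered file.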

@@ -193,6 +193,7 @@ job "reverse-proxy" {
service {
port = "authelia-port"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`authelia.{{ homelab_domain_name }}`)",
@@ -215,13 +216,12 @@ job "reverse-proxy" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service
resources {
cpu = 200 # MHz
memory = 110 # MB
memory = 1000 # MB
}
} // task authelia
@@ -238,6 +238,7 @@ job "reverse-proxy" {
service {
port = "whoami"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_TASK_NAME}.{{ homelab_domain_name }}`)",
@@ -245,7 +246,7 @@ job "reverse-proxy" {
"traefik.http.routers.${NOMAD_TASK_NAME}.service=${NOMAD_TASK_NAME}",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls.certresolver=cloudflare",
"traefik.http.routers.${NOMAD_TASK_NAME}.middlewares=authelia@file"
// "traefik.http.routers.${NOMAD_TASK_NAME}.middlewares=authelia@file"
]
check {
type = "http"
@@ -256,7 +257,6 @@ job "reverse-proxy" {
check_restart {
limit = 2
grace = "1m"
ignore_warnings = true
}
}
resources {
@@ -294,18 +294,20 @@ job "reverse-proxy" {
"--providers.file.filename=/local/traefik/siteconfigs.toml",
"--providers.file.watch=true",
"--providers.consulcatalog=true",
"--providers.consulcatalog.endpoint.address=http://consul.service.consul:8500",
"--providers.consulcatalog.endpoint.address=http://${NOMAD_IP_web}:8500",
"--providers.consulcatalog.prefix=traefik",
"--providers.consulcatalog.exposedbydefault=false",
"--metrics=true",
"--metrics.influxdb=true",
"--metrics.influxdb.address=influxdb.service.consul:{{ influxdb_port }}",
"--metrics.influxdb.protocol=http",
"--metrics.influxdb.pushinterval=10s",
"--metrics.influxdb.database=homelab",
"--metrics.influxdb.retentionpolicy=2day",
"--metrics.influxdb.addentrypointslabels=true",
"--metrics.influxdb.addserviceslabels=true",
"--providers.nomad=true",
"--providers.nomad.endpoint.address=http://${NOMAD_IP_web}:4646",
// "--metrics=true",
// "--metrics.influxdb=true",
// "--metrics.influxdb.address=influxdb.service.consul:{{ influxdb_port }}",
// "--metrics.influxdb.protocol=http",
// "--metrics.influxdb.pushinterval=10s",
// "--metrics.influxdb.database=homelab",
// "--metrics.influxdb.retentionpolicy=2day",
// "--metrics.influxdb.addentrypointslabels=true",
// "--metrics.influxdb.addserviceslabels=true",
"--accesslog=true",
"--log=true",
"--log.level=ERROR",
@@ -357,11 +359,13 @@ job "reverse-proxy" {
scheme = "https"
permanent = true
[http.middlewares.authelia.forwardAuth]
address = "http://authelia.service.consul:{{ authelia_port }}/api/verify?rd=https://authelia.{{ homelab_domain_name }}"
address = {% raw %}"http://{{ env "NOMAD_IP_authelia_port" }}:{{ env "NOMAD_PORT_authelia_port" }}{% endraw %}/api/verify?rd=https://authelia.{{ homelab_domain_name }}"
trustForwardHeader = true
authResponseHeaders = ["Remote-User", "Remote-Groups", "Remote-Name", "Remote-Email"]
[http.middlewares.basicauth.basicauth]
usersfile = "/local/traefik/httpasswd"
removeHeader = true
@@ -396,6 +400,7 @@ job "reverse-proxy" {
service {
port = "dashboard"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_TASK_NAME}.{{ homelab_domain_name }}`)",
@@ -416,7 +421,6 @@ job "reverse-proxy" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service

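Two details in the reverse-proxy diff above are easy to miss. First, Traefik's consulcatalog provider gives way to the nomad provider pointed at the local agent on port 4646, while the InfluxDB metrics flags are commented out rather than ported. Second, the authelia forwardAuth middleware can no longer resolve `authelia.service.consul`, so the address is now built from Nomad runtime variables; because both Jinja2 and Nomad templates use `{{ ... }}` delimiters, the job file wraps the Nomad expression in Jinja's `{% raw %}` tag so it survives Ansible's templating pass. After Ansible renders the job, Nomad sees roughly the following (domain is a placeholder):

template {
  destination = "local/traefik/siteconfigs.toml"
  data        = <<-EOH
  [http.middlewares.authelia.forwardAuth]
    # env is resolved by Nomad at task start, e.g. http://10.0.30.12:28412/api/verify?...
    address = "http://{{ env "NOMAD_IP_authelia_port" }}:{{ env "NOMAD_PORT_authelia_port" }}/api/verify?rd=https://authelia.example.com"
    trustForwardHeader = true
  EOH
}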

@@ -0,0 +1,101 @@
job "sabnzbd" {
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "service"
constraint {
attribute = "${node.unique.name}"
operator = "regexp"
value = "macmini"
}
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
group "sabnzbd" {
count = 1
restart {
attempts = 0
delay = "30s"
}
network {
port "http" {
static = "8080"
to = "8080"
}
}
task "sabnzbd" {
env {
PUID = "${meta.PUID}"
PGID = "${meta.PGID}"
TZ = "America/New_York"
DOCKER_MODS = "linuxserver/mods:universal-cron"
}
driver = "docker"
config {
image = "ghcr.io/linuxserver/sabnzbd"
hostname = "${NOMAD_TASK_NAME}"
volumes = [
"${meta.nfsStorageRoot}/pi-cluster/${NOMAD_TASK_NAME}:/config",
"${meta.nfsStorageRoot}/media/downloads/nzb:/nzbd",
"${meta.nfsStorageRoot}/media/downloads/temp:/incomplete-downloads",
"${meta.nfsStorageRoot}/media/downloads/complete:/downloads",
"${meta.nfsStorageRoot}/nate:/nate",
"${meta.nfsStorageRoot}/pi-cluster/${NOMAD_TASK_NAME}/startup-scripts:/custom-cont-init.d"
]
ports = ["http"]
} // docker config
service {
port = "http"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`sab.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_TASK_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_TASK_NAME}.service=${NOMAD_TASK_NAME}",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls.certresolver=cloudflare",
// "traefik.http.routers.${NOMAD_TASK_NAME}.middlewares=authelia@file"
]
check {
type = "tcp"
port = "http"
interval = "30s"
timeout = "4s"
}
check_restart {
limit = 0
grace = "1m"
}
} // service
resources {
cpu = 5000 # MHz
memory = 1000 # MB
} // resources
} // task
} // group
} // job


@@ -82,6 +82,7 @@ job "sonarr" {
service {
port = "sonarr"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -100,7 +101,6 @@ job "sonarr" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service


@@ -66,6 +66,7 @@ job "stash" {
service {
port = "port1"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -73,7 +74,7 @@ job "stash" {
"traefik.http.routers.${NOMAD_JOB_NAME}.service=${NOMAD_JOB_NAME}",
"traefik.http.routers.${NOMAD_JOB_NAME}.tls=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.tls.certresolver=cloudflare",
"traefik.http.routers.${NOMAD_JOB_NAME}.middlewares=authelia@file"
// "traefik.http.routers.${NOMAD_JOB_NAME}.middlewares=authelia@file"
]
check {
@@ -85,7 +86,6 @@ job "stash" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service


@@ -70,6 +70,7 @@ job "syncthing" {
service {
port = "webGUI"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -89,7 +90,6 @@ job "syncthing" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service


@@ -108,7 +108,7 @@ job "TEMPLATE" {
}
}
task "await-TEMPLATEdb" {
task "await-TEMPLATEEdb" {
driver = "docker"
config {
@@ -158,6 +158,7 @@ job "TEMPLATE" {
service {
name = "${NOMAD_TASK_NAME}"
port = "port2"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_TASK_NAME}.{{ homelab_domain_name }}`)",
@@ -178,7 +179,6 @@ job "TEMPLATE" {
check_restart {
limit = 3
grace = "1m"
ignore_warnings = true
}
} // service


@@ -45,9 +45,10 @@ job "TEMPLATE" {
driver = "docker"
config {
image = ""
hostname = "${NOMAD_TASK_NAME}"
volumes = [
image = ""
image_pull_timeout = "10m"
hostname = "${NOMAD_TASK_NAME}"
volumes = [
"${meta.nfsStorageRoot}/pi-cluster/${NOMAD_TASK_NAME}:/etc/TEMPLATE/",
"/etc/timezone:/etc/timezone:ro",
"/etc/localtime:/etc/localtime:ro"
@@ -55,9 +56,10 @@ job "TEMPLATE" {
ports = ["port1"]
} // docker config
service {
service {
port = "port1"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -77,7 +79,6 @@ job "TEMPLATE" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service


@@ -82,6 +82,7 @@ job "TEMPLATE" {
service {
port = "port1"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -102,7 +103,6 @@ job "TEMPLATE" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service


@@ -70,6 +70,7 @@ job "uptimekuma" {
service {
port = "web"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`uptime.{{ homelab_domain_name }}`)",
@@ -88,7 +89,6 @@ job "uptimekuma" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service


@@ -0,0 +1,158 @@
job "valentina" {
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "service"
// constraint {
// attribute = "${node.unique.name}"
// operator = "regexp"
// value = "rpi(1|2|3)"
// }
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
group "valentina" {
count = 1
restart {
attempts = 0
delay = "30s"
}
task "valentina" {
env {
PGID = "${meta.PGID}"
PUID = "${meta.PUID}"
TZ = "America/New_York"
VALENTINA_AWS_ACCESS_KEY_ID = "{{ valentina_aws_access_key_id }}"
VALENTINA_AWS_SECRET_ACCESS_KEY = "{{ valentina_aws_secret_access_key }}"
VALENTINA_DISCORD_TOKEN = "{{ valentina_discord_token }}"
VALENTINA_GUILDS = "{{ valentina_guids }}"
VALENTINA_LOG_LEVEL = "INFO"
VALENTINA_LOG_LEVEL_AWS = "INFO"
VALENTINA_MONGO_DATABASE_NAME = "{{ valentina_mongo_database_name }}"
VALENTINA_MONGO_URI = "{{ valentina_mongo_uri }}"
VALENTINA_OWNER_CHANNELS = "{{ valentina_owner_channels }}"
VALENTINA_OWNER_IDS = "{{ valentina_owner_ids }}"
VALENTINA_S3_BUCKET_NAME = "{{ valentina_s3_bucket_name}}"
VALENTINA_DB_PATH               = "/valentina/valentina.sqlite" # Deprecated
}
driver = "docker"
config {
image = "ghcr.io/natelandau/valentina:v{{ valentina_version }}"
image_pull_timeout = "10m"
hostname = "${NOMAD_TASK_NAME}"
volumes = [
"${meta.nfsStorageRoot}/pi-cluster/${NOMAD_TASK_NAME}:/valentina",
]
} // docker config
// resources {
// cpu = 100 # MHz
// memory = 300 # MB
// } // resources
} // task
} // group
group "mongobackup" {
count = 1
restart {
attempts = 0
delay = "30s"
}
network {
port "port1" {
to = "80"
}
}
constraint {
attribute = "${attr.cpu.arch}"
value = "amd64"
}
task "mongobackup" {
env {
PUID = "${meta.PUID}"
PGID = "${meta.PGID}"
TZ = "America/New_York"
AWS_ACCESS_KEY_ID = "{{ valentina_aws_access_key_id }}"
AWS_S3_BUCKET_NAME = "{{ valentina_s3_bucket_name }}"
AWS_S3_BUCKET_PATH = "db_backups"
AWS_SECRET_ACCESS_KEY = "{{ valentina_aws_secret_access_key }}"
BACKUP_DIR = "/data/db_backups"
CRON_SCHEDULE = "0 2 * * *" # 2am daily
// CRON_SCHEDULE = "*/1 * * * *" # Every minute
DAILY_RETENTION = "7"
DB_NAME = "{{ backup_mongo_db_name }}"
LOG_FILE = "/data/backup_mongodb.log"
LOG_LEVEL = "INFO"
MONGODB_URI = "{{ backup_mongo_mongodb_uri }}"
MONTHLY_RETENTION = "12"
PORT = "80"
STORAGE_LOCATION = "BOTH"
WEEKLY_RETENTION = "4"
YEARLY_RETENTION = "2"
}
driver = "docker"
config {
image = "ghcr.io/natelandau/backup-mongodb:{{ backup_mongodb_version }}"
image_pull_timeout = "10m"
hostname = "${NOMAD_TASK_NAME}"
ports = ["port1"]
volumes = ["${meta.nfsStorageRoot}/pi-cluster/valentina:/data"]
} // docker config
service {
port = "port1"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_TASK_NAME}.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_TASK_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_TASK_NAME}.service=${NOMAD_TASK_NAME}",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls.certresolver=cloudflare",
]
check {
type = "tcp"
port = "port1"
interval = "1m"
timeout = "4s"
}
check_restart {
limit = 0
grace = "1m"
}
} // service
} // task
} // group
} // job


@@ -58,6 +58,7 @@ job "whoogle" {
service {
port = "whoogle"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -77,7 +78,6 @@ job "whoogle" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service


@@ -28,7 +28,7 @@ job "wikijs" {
}
}
task "await_db_filesytem" {
task "await_db_filesystem" {
constraint {
attribute = "${node.unique.name}"
@@ -56,7 +56,7 @@ job "wikijs" {
}
} // /task
task "await_backup_filesytem" {
task "await_backup_filesystem" {
constraint {
attribute = "${node.unique.name}"
@@ -122,6 +122,7 @@ job "wikijs" {
service {
port = "db"
name = "wikijsdb"
provider = "nomad"
check {
type = "tcp"
port = "db"
@@ -131,7 +132,6 @@ job "wikijs" {
check_restart {
limit = 2
grace = "1m"
ignore_warnings = true
}
}
@@ -180,7 +180,7 @@ group "wikijs_app_group" {
}
} // /task
task "await_filesytem" {
task "await_filesystem" {
driver = "docker"
config {
@@ -225,6 +225,7 @@ group "wikijs_app_group" {
service {
port = "http"
name = "wikijs"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.wikijs.rule=Host(`wiki.{{ homelab_domain_name }}`)",
@@ -241,7 +242,6 @@ group "wikijs_app_group" {
check_restart {
limit = 3
grace = "30s"
ignore_warnings = true
}
} // /service

vault.yml

@@ -1,118 +1,200 @@
$ANSIBLE_VAULT;1.1;AES256
35366634393265303030366466303232616338633038313738633637383439356439616536666230
6566643530623337323034306366613935663334313934310a636235653531316237393231376362
33663935366131663137363465666363336630386362313333633762656461636239366234633832
3538353463356335360a323030643238323034343666376230356465396639636563316532373638
35366637366663303164376661636563346330313932343462326239626264633262303739383831
31333134613534643265643433323065303833326662346466633931373337326233303633363032
66333336373865313333626566386665653833343638376264356430383764316134333231366466
62336534666565343839393237356139393738396333393337666631303461373362343664396665
63343161613462653866616566363631346566636639626138316539353362616261666337386635
62356262363564376334336163613035643336656331653562306433363161393435343431663137
66663936623834666364303333386335353961373031383164623766323836383462363231396263
34343662336637633262333530623039376534643966653839346236363166646564613333633366
33363534616466393137366234633030663036613263383733313235353364613864316139356330
38343439346661613136316235326430326437646135636637343665663031393262653661396331
39653739666364666564633364636231323237366265323631333234306631386362666135336461
35646564643631666663336237636435626338346663633038353964303764626236373561323763
34643565656462323764623263383037663735323364396437653332376137356263633963306332
39633339366236313063643665356366346138616434316332643731666634366336623064653361
63393134643630313632396434643131646464323737343133613364333465393834656236616134
30313961346236326563616263373463616432393962663262616232356663636439643731383930
32326664306563623665633164373932356163356361663465363362303661396662386630323137
32383333656435613762393430303163383135393037363763333139633239666639303538623134
32386635663962363939373365613138316435366433303863326561613463306338396136393965
34333961383035383135333561313331336565383031356133626530306163333666333564353262
38646434643234303363383965636339323633326330663736393461383461303661353365663631
33343831356135653139633463336330646634363639326635653863343632663466336639313962
65306438613933386664336138613066326364343738633531356664343664646464396162343861
36663030643762343938633564373531663430303536643665613532313630636461646235666335
62613634656232373936363439363766316561373937386261613861396566303834376134666564
35396330636166316239336433323939363839636361643630353263663233303166313863636364
34363134363161643234643134663361373237316466626363646264643530343064666464393166
66366561356336616663393064376162643731343532663436646432366331643066396232393432
63336633313963383132333639626130623737346137646561303338623136306361656630396364
36306234643161643864313334316634396233313831613830393865353763653963656632363865
34346439356166363839343063313263396437366163343734326162346166353465313163313236
35343531333438303561393137323831303063353466666463303835653630353630393836393236
32643035636335363137303134333735343964646130306339663137646261366635353632613533
35303636373465633831353439376464386132616238613336366134383037376165396365353436
66633937656162346661326136343266313937393436353532656634366535653762633930393239
62383862356165336435616666346238646666613066323262323530356534373262633861646466
66643935363334623264373338663362623439313138666338363732386666383739636162653763
32666439316632653633363266343365393366373834323065353335613563306135383432613433
61633835326565386662313265356536613237313364366163313562393836613061616432316638
33376531663533376435383437393539663565616439666438646232663732663063343666646631
36366364353339323262666630363932616461323833306666616365343530646536326363613232
[Ansible Vault AES256 payload: 271 lines of hex-encoded ciphertext omitted. The file is encrypted with ansible-vault, so the diff body carries no human-readable content.]
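Note: the blob above is the standard Ansible Vault on-disk format, a hexlified AES256 payload that is only meaningful after decryption with the vault password. As a minimal sketch of how such a file is inspected or changed (the file path and password-file location are hypothetical, not taken from this repository), the stock ansible-vault CLI would be used:

    # View the decrypted contents; prompts for the vault password
    ansible-vault view group_vars/all/secrets.yml

    # Edit in place; the file is transparently re-encrypted on save
    ansible-vault edit group_vars/all/secrets.yml --vault-password-file ~/.vault_pass

Because encryption is salted, re-encrypting even an unchanged file rewrites the entire hex payload, which is why vault files show up in diffs as full-file changes like the one above.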