Compare commits

17 Commits

Author SHA1 Message Date
Nathaniel Landau edd9704258 feat: add valentain discord bot 2023-06-18 13:27:38 -04:00
Nathaniel Landau cb4a0e9f8a build: add exclude_paths to ansible-lint 2023-05-24 14:30:01 -04:00
Nathaniel Landau 57c1a42f66 fix(nomad): bump to v1.5.6 2023-05-22 10:03:11 -04:00
Nathaniel Landau 8499f5029b fix(proxy): update traefik version and bump RAM allocation 2023-05-22 09:34:19 -04:00
Nathaniel Landau 47288456a5 build(deps): bump dependencies 2023-05-22 09:33:10 -04:00
Nathaniel Landau 0f35061a2c fix(recyclarr): move config to Nomad task template 2023-04-25 11:40:17 -04:00
Nathaniel Landau 2842e27282 build(deps): bump dependencies 2023-04-25 11:32:55 -04:00
Nathaniel Landau d36212b7d7 style: pass ansible-lint 2023-04-25 11:32:29 -04:00
Nathaniel Landau 76f4af703e build: poetry venv in project folder 2023-03-31 10:14:14 -04:00
Nathaniel Landau 9bb7eeb439 fix: bump versions 2023-03-29 16:18:26 -04:00
Nathaniel Landau 5526024244 fix(nomad): run nomad as root user to enable docker plugin on raspberry pis 2023-03-16 22:44:52 -04:00
    Nomad is running as root rather than the Nomad user due to the Docker driver not being started when cgroups v2 are enabled. More info: https://github.com/hashicorp/nomad/pull/16063
Nathaniel Landau ec52175c5c fix(nomad): authelia requires additional capabilities 2023-03-16 21:48:38 -04:00
Nathaniel Landau be56f2a308 fix: revert to nomad v1.4.6 2023-03-13 21:29:09 -04:00
Nathaniel Landau a757ff0cf2 build: add ignore file for anslible-lint 2023-03-13 10:25:20 -04:00
Nathaniel Landau d6c155bef1 fix: bump versions 2023-03-12 16:52:16 -04:00
Nathaniel Landau 440d570c87 fix: bump authelia version 2023-02-12 14:41:05 -05:00
Nathaniel Landau 049267cec7 ci: add poe pb to run playbook 2023-02-12 14:40:47 -05:00
31 changed files with 2175 additions and 1783 deletions

.ansible-lint-ignore (new file, 32 lines)

@@ -0,0 +1,32 @@
# This file contains ignores rule violations for ansible-lint
handlers/main.yml ignore-errors
handlers/main.yml name[casing]
main.yml name[casing]
main.yml name[missing]
tasks/backups.yml name[casing]
tasks/cluster_storage.yml name[casing]
tasks/consul.yml command-instead-of-module
tasks/consul.yml name[template]
tasks/consul.yml no-changed-when
tasks/debug.yml name[casing]
tasks/docker.yml name[casing]
tasks/docker.yml no-changed-when
tasks/interpolated_variables.yml name[casing]
tasks/logrotate.yml ignore-errors
tasks/logrotate.yml name[casing]
tasks/nomad.yml name[casing]
tasks/nomad.yml name[template]
tasks/orchestration_jobs.yml name[casing]
tasks/packages.yml ignore-errors
tasks/packages.yml name[casing]
tasks/pull_repositories.yml name[casing]
tasks/pull_repositories.yml no-changed-when
tasks/sanity.yml name[casing]
tasks/service_prometheus_nodeExporter.yml name[casing]
tasks/service_prometheus_nodeExporter.yml no-changed-when
tasks/tdarr.yml name[casing]
tasks/tdarr.yml no-changed-when
tasks/telegraf.yml name[casing]
tasks/telegraf.yml name[template]
tasks/telegraf.yml package-latest
vault.yml yaml[document-start]
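
Each line above pairs a file path with an ansible-lint rule ID to ignore in that file. For example, name[casing] fires when a task or handler name does not start with a capital letter, which is why several names later in this compare were capitalized (restart nomad (Debian) → Restart nomad (Debian)). A minimal hypothetical task that would trigger the rule looks like this (illustrative only, not from this repository):

# Hypothetical example only: the lowercase task name trips ansible-lint's name[casing] rule.
- name: install base packages
  ansible.builtin.apt:
    name: git
    state: present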

View File

@@ -10,9 +10,10 @@ exclude_paths:
- galaxy-roles/
- .cz.yaml
- vault.yml
- .venv/
- ansible_collections/
skip_list:
- command-instead-of-shell
- name[template]
- ignore-errors
- meta-incorrect
@@ -21,10 +22,11 @@ skip_list:
- role-name
- unnamed-task
- var-naming
- name[casing]
- latest[git]
warn_list:
- experimental
- risky-file-permissions
- command-instead-of-module
- no-changed-when
- command-instead-of-shell

View File

@@ -1,7 +1,7 @@
---
repos:
- repo: "https://github.com/commitizen-tools/commitizen"
rev: v2.40.0
rev: 3.2.2
hooks:
- id: "commitizen"
@@ -31,7 +31,7 @@ repos:
args: [--markdown-linebreak-ext=md]
- repo: "https://github.com/adrienverge/yamllint.git"
rev: v1.29.0
rev: v1.31.0
hooks:
- id: yamllint
files: \.(yaml|yml)$

View File

@@ -1,14 +1,15 @@
---
# ---------------------------------- SOFTWARE VERSIONS
authelia_version: 4.37.3
consul_version: 1.14.2
authelia_version: 4.37.5
consul_version: 1.15.1
influxdb_version: 1.8.10
nomad_version: 1.4.3
prometheus_verssion: 1.1.2
nomad_version: 1.5.6
prometheus_verssion: 2.42.0
speedtest_cli_version: 1.2.0
tdarr_installer_version: 2.00.13
telegraf_version: 1.25.0
traefik_version: "v2.9.6"
telegraf_version: 1.25.3
traefik_version: "v2.10.1"
valentina_version: 0.3.2
# ---------------------------------- SERVICE STATIC PORT MAPPINGS
authelia_port: "9091"
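
These pinned *_version variables are the values the role's install tasks interpolate when fetching release artifacts. The snippet below is a hypothetical sketch of that pattern, assuming the standard HashiCorp release URL layout; the URL, destination, and architecture are illustrative and not taken from this repository's tasks:

# Hypothetical sketch of how a pinned version variable is typically consumed.
- name: Download the pinned Nomad release
  ansible.builtin.get_url:
    url: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_linux_arm64.zip"
    dest: "/tmp/nomad_{{ nomad_version }}.zip"
    mode: "0644"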

View File

@@ -3,80 +3,96 @@
- name: Mount shared storage on Mac
become: true
ansible.builtin.command:
cmd: automount -cv
cmd: automount -cv
register: automount_output
failed_when: automount_output.rc > 0
changed_when: automount_output.rc == 0
when:
- "'macs' in group_names"
- not ansible_check_mode
- "'macs' in group_names"
- not ansible_check_mode
listen: "mac_run_automount"
- name: Mount and unmount shared storage on Mac
become: true
ansible.builtin.command:
cmd: automount -cvu
cmd: automount -cvu
register: automount_output
failed_when: automount_output.rc > 0
changed_when: automount_output.rc == 0
when:
- "'macs' in group_names"
- not ansible_check_mode
- "'macs' in group_names"
- not ansible_check_mode
listen: "mac_run_automount_unmount"
##################################### TELEGRAF
- name: (Re)Start telegraf (Debian)
become: true
ansible.builtin.service:
name: telegraf
state: restarted
name: telegraf
state: restarted
register: telegraf_service
failed_when: telegraf_service.rc > 0
changed_when: telegraf_service.rc == 0
when:
- ansible_os_family == 'Debian'
- ansible_os_family == 'Debian'
listen: restart_telegraf
- name: (Re)Start telegraf
ansible.builtin.shell:
cmd: /usr/local/bin/brew services restart telegraf
executable: /usr/local/bin/bash
cmd: /usr/local/bin/brew services restart telegraf
executable: /usr/local/bin/bash
ignore_errors: true
register: telegraf_service
failed_when: telegraf_service.rc > 0
changed_when: telegraf_service.rc == 0
when:
- ansible_os_family == 'Darwin'
- ansible_os_family == 'Darwin'
listen: restart_telegraf
##################################### NOMAD
- name: restart nomad (Debian)
- name: Restart nomad (Debian)
become: true
ansible.builtin.systemd:
name: nomad
enabled: true
state: restarted
name: nomad
enabled: true
state: restarted
register: nomad_service
failed_when: nomad_service.rc > 0
changed_when: nomad_service.rc == 0
when:
- ansible_os_family == 'Debian'
- "'nostart' not in ansible_run_tags"
- ansible_os_family == 'Debian'
- "'nostart' not in ansible_run_tags"
listen: "restart nomad"
- name: "unload nomad agent (MacOSX)"
- name: "Unload nomad agent (MacOSX)"
ansible.builtin.command:
cmd: "launchctl unload -w {{ nomad_plist_macos }}"
failed_when: false
cmd: "launchctl unload -w {{ nomad_plist_macos }}"
register: nomad_service
changed_when: nomad_service.rc == 0
failed_when: nomad_service.rc > 0
when:
- ansible_os_family == 'Darwin'
- "'nostart' not in ansible_run_tags"
- ansible_os_family == 'Darwin'
- "'nostart' not in ansible_run_tags"
listen: "restart nomad"
- name: "load the nomad agent (MacOSX)"
- name: "Load the nomad agent (MacOSX)"
ansible.builtin.command:
cmd: "launchctl load -w {{ nomad_plist_macos }}"
cmd: "launchctl load -w {{ nomad_plist_macos }}"
register: nomad_service
changed_when: nomad_service.rc == 0
failed_when: nomad_service.rc > 0
when:
- ansible_os_family == 'Darwin'
- "'nostart' not in ansible_run_tags"
- ansible_os_family == 'Darwin'
- "'nostart' not in ansible_run_tags"
listen: "restart nomad"
- name: "ensure nomad is really running"
- name: "Ensure nomad is really running"
ansible.builtin.shell:
cmd: "sleep 10 && /usr/local/bin/nomad node status -self -short | grep {{ inventory_hostname }}"
cmd: "set -o pipefail && sleep 10 && /usr/local/bin/nomad node status -self -short | grep {{ inventory_hostname }}"
register: node_status_response
failed_when: node_status_response.rc > 0
changed_when: false
changed_when: node_status_response.rc == 0
when: "'nostart' not in ansible_run_tags"
listen: "restart nomad"
# - name: "Ensure sure Nomad service is really running"
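
Worth noting: the Debian restart handler and the three macOS handlers above all subscribe to the same topic via listen: "restart nomad", so a single notification fans out to whichever handlers match the host's conditions. A hypothetical notifying task (illustrative only, not from this playbook) would look like:

# Hypothetical example: a configuration change notifies the "restart nomad" topic,
# and every handler listening on that topic runs after the play's tasks complete.
- name: Template the Nomad agent configuration
  ansible.builtin.template:
    src: nomad.hcl.j2
    dest: /etc/nomad.d/nomad.hcl
    mode: "0640"
  notify: "restart nomad"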

main.yml (134 changed lines)

@@ -4,76 +4,76 @@
serial: 1
vars_files:
- default_variables.yml
- vault.yml
- default_variables.yml
- vault.yml
pre_tasks:
- name: Run sanity checks
ansible.builtin.import_tasks: tasks/sanity.yml
tags: ["always", "sanity"]
- name: populate service facts
ansible.builtin.service_facts:
tags: ["nomad", "consul"]
- name: Run debug tasks
ansible.builtin.import_tasks: tasks/debug.yml
tags: [never, debug]
- name: populate device specific variables
ansible.builtin.import_tasks: tasks/interpolated_variables.yml
tags: ["always"]
- name: Ensure we have up-to-date packages
ansible.builtin.import_tasks: tasks/packages.yml
tags: ["packages", "update"]
- name: Set clean nomad_jobs_dir variable
ansible.builtin.set_fact:
clean_nomad_jobs: true
tags: ["never", "clean"]
- name: Run sanity checks
ansible.builtin.import_tasks: tasks/sanity.yml
tags: ["always", "sanity"]
- name: Populate service facts
ansible.builtin.service_facts:
tags: ["nomad", "consul"]
- name: Run debug tasks
ansible.builtin.import_tasks: tasks/debug.yml
tags: [never, debug]
- name: Populate device specific variables
ansible.builtin.import_tasks: tasks/interpolated_variables.yml
tags: ["always"]
- name: Ensure we have up-to-date packages
ansible.builtin.import_tasks: tasks/packages.yml
tags: ["packages", "update"]
- name: Set clean nomad_jobs_dir variable
ansible.builtin.set_fact:
clean_nomad_jobs: true
tags: ["never", "clean"]
tasks:
- name: Configure cluster NFS mounts
ansible.builtin.import_tasks: tasks/cluster_storage.yml
tags: ["storage"]
when:
- is_nomad_client or is_nomad_server or is_shared_storage_client
- name: Install Docker
ansible.builtin.import_tasks: tasks/docker.yml
tags: ["docker"]
when: "'nas' not in group_names"
- name: Install and Upgrade Consul
ansible.builtin.import_tasks: tasks/consul.yml
tags: ["consul"]
when: is_consul_client or is_consul_server
- name: Install and Upgrade Nomad
ansible.builtin.import_tasks: tasks/nomad.yml
tags: ["nomad"]
when: is_nomad_client or is_nomad_server
- name: Orchestration Jobs
ansible.builtin.import_tasks: tasks/orchestration_jobs.yml
tags: ["jobs", "update"]
- name: Prometheus Node Exporter
ansible.builtin.import_tasks: tasks/service_prometheus_nodeExporter.yml
tags: ["prometheus_exporter"]
when:
- is_prometheus_node
- "'pis' in group_names"
- name: Install backup scripts
ansible.builtin.import_tasks: tasks/backups.yml
tags: ["backup", "backups"]
when: is_nomad_client or is_nomad_server
- name: Install and configure Telegraf
ansible.builtin.import_tasks: tasks/telegraf.yml
tags: ["telegraf"]
when: is_telegraf_client
- name: Pull repositories
ansible.builtin.import_tasks: tasks/pull_repositories.yml
tags: ["never", "update", "repos"]
- name: Configure log rotate
ansible.builtin.import_tasks: tasks/logrotate.yml
tags: ["logrotate"]
when: is_cluster_leader
- name: Install and configure tdarr
ansible.builtin.import_tasks: tasks/tdarr.yml
tags: ["tdarr"]
when: is_tdarr_server or is_tdarr_node
- name: Configure cluster NFS mounts
ansible.builtin.import_tasks: tasks/cluster_storage.yml
tags: ["storage"]
when:
- is_nomad_client or is_nomad_server or is_shared_storage_client
- name: Install Docker
ansible.builtin.import_tasks: tasks/docker.yml
tags: ["docker"]
when: "'nas' not in group_names"
- name: Install and Upgrade Consul
ansible.builtin.import_tasks: tasks/consul.yml
tags: ["consul"]
when: is_consul_client or is_consul_server
- name: Install and Upgrade Nomad
ansible.builtin.import_tasks: tasks/nomad.yml
tags: ["nomad"]
when: is_nomad_client or is_nomad_server
- name: Orchestration Jobs
ansible.builtin.import_tasks: tasks/orchestration_jobs.yml
tags: ["jobs", "update"]
- name: Prometheus Node Exporter
ansible.builtin.import_tasks: tasks/service_prometheus_nodeExporter.yml
tags: ["prometheus_exporter"]
when:
- is_prometheus_node
- "'pis' in group_names"
- name: Install backup scripts
ansible.builtin.import_tasks: tasks/backups.yml
tags: ["backup", "backups"]
when: is_nomad_client or is_nomad_server
- name: Install and configure Telegraf
ansible.builtin.import_tasks: tasks/telegraf.yml
tags: ["telegraf"]
when: is_telegraf_client
- name: Pull repositories
ansible.builtin.import_tasks: tasks/pull_repositories.yml
tags: ["never", "update", "repos"]
- name: Configure log rotate
ansible.builtin.import_tasks: tasks/logrotate.yml
tags: ["logrotate"]
when: is_cluster_leader
- name: Install and configure tdarr
ansible.builtin.import_tasks: tasks/tdarr.yml
tags: ["tdarr"]
when: is_tdarr_server or is_tdarr_node
handlers:
- ansible.builtin.import_tasks: handlers/main.yml
- ansible.builtin.import_tasks: handlers/main.yml

poetry.lock (generated, 369 changed lines)

@@ -1,53 +1,30 @@
# This file is automatically @generated by Poetry and should not be changed by hand.
# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand.
[[package]]
name = "ansible"
version = "7.2.0"
version = "7.5.0"
description = "Radically simple IT automation"
category = "main"
optional = false
python-versions = ">=3.9"
files = [
{file = "ansible-7.2.0-py3-none-any.whl", hash = "sha256:756a6d8fec6c19ccd7a00ad0f0cb18859f4ab77e0c5063f73a115f3fb75284aa"},
{file = "ansible-7.2.0.tar.gz", hash = "sha256:60e2c1a58f1ceb34a190b7c380f7b3386d1e7369061954b1f8b8ca3df76063cc"},
{file = "ansible-7.5.0-py3-none-any.whl", hash = "sha256:a2deadeb8a199abfbd7c1960bc126697be517ac4310b2f59eb2190706e6a2637"},
{file = "ansible-7.5.0.tar.gz", hash = "sha256:4f08ca25bb29005c1afc4125e837882ad7a2c67ff0cc9d1a361b89ad09cf8c44"},
]
[package.dependencies]
ansible-core = ">=2.14.2,<2.15.0"
[[package]]
name = "ansible-compat"
version = "3.0.1"
description = "Ansible compatibility goodies"
category = "main"
optional = false
python-versions = ">=3.8"
files = [
{file = "ansible-compat-3.0.1.tar.gz", hash = "sha256:d7dc5e4b7fade9b7375c568a24475b7be81024cac2a20caef3260ef0b51287b4"},
{file = "ansible_compat-3.0.1-py3-none-any.whl", hash = "sha256:a2a53975e6ea72221d33f82b75332a6416d00a381cb8a84db92fc29935f74bcb"},
]
[package.dependencies]
ansible-core = ">=2.12"
jsonschema = ">=4.6.0"
packaging = "*"
PyYAML = "*"
subprocess-tee = ">=0.4.1"
[package.extras]
docs = ["argparse-manpage", "black", "cairosvg", "markdown-include", "mkdocs", "mkdocs-exclude", "mkdocs-material", "mkdocs-material-extensions", "mkdocstrings", "mkdocstrings-python", "pillow", "pymdown-extensions", "slugify"]
test = ["coverage", "pip-tools", "pytest (>=7.2.0)", "pytest-mock", "pytest-plus"]
ansible-core = ">=2.14.5,<2.15.0"
[[package]]
name = "ansible-core"
version = "2.14.2"
version = "2.14.5"
description = "Radically simple IT automation"
category = "main"
optional = false
python-versions = ">=3.9"
files = [
{file = "ansible-core-2.14.2.tar.gz", hash = "sha256:47f0d4b4125b58edba6435a47f37cbe6a18da54594d18f812958bb0cb58d4e65"},
{file = "ansible_core-2.14.2-py3-none-any.whl", hash = "sha256:d1d44aab18fb5436d94ba77081bcfebece2a87015665e2444aadc16ece230886"},
{file = "ansible-core-2.14.5.tar.gz", hash = "sha256:8c4eed76ce458b4a37334a0802df29488ecf9f8af38c3111069c96b17b205530"},
{file = "ansible_core-2.14.5-py3-none-any.whl", hash = "sha256:bc1755f43bdddac574607e959010f98256ff87068ca7e23be5a9d335f6ebf01e"},
]
[package.dependencies]
@@ -59,101 +36,102 @@ resolvelib = ">=0.5.3,<0.9.0"
[[package]]
name = "ansible-lint"
version = "6.12.1"
version = "6.16.2"
description = "Checks playbooks for practices and behavior that could potentially be improved"
category = "main"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
files = [
{file = "ansible-lint-6.12.1.tar.gz", hash = "sha256:bbb1953aa563baa2587edb6efa94bf4805846ab01fb596e718c48f9a79a99a89"},
{file = "ansible_lint-6.12.1-py3-none-any.whl", hash = "sha256:9b7b3607234ce3bfc288cdd52e4316a11343a08ee2bfbd7328b9bbe5a5795c09"},
{file = "ansible-lint-6.16.2.tar.gz", hash = "sha256:93ad8a04adcda6025d4ff218a10694120fe70cb91da8ac9f85c5dc82b32456e4"},
{file = "ansible_lint-6.16.2-py3-none-any.whl", hash = "sha256:086306d5962639100d03701fce589c85549fe1c23e359abc52c91e07f0108e5b"},
]
[package.dependencies]
ansible-compat = ">=3.0.1"
ansible-core = {version = ">=2.12.0", markers = "python_version >= \"3.9\""}
ansible-core = ">=2.12.0"
black = ">=22.8.0"
filelock = ">=3.3.0"
jsonschema = ">=4.10.0"
packaging = ">=21.3"
pyyaml = ">=5.4.1"
rich = ">=12.0.0"
"ruamel.yaml" = ">=0.17.21,<0.18"
"ruamel.yaml" = ">=0.17.0,<0.18"
subprocess-tee = ">=0.4.1"
wcmatch = ">=8.1.2"
yamllint = ">=1.26.3"
yamllint = ">=1.30.0"
[package.extras]
docs = ["cairosvg", "markdown-exec (>=1.0.0)", "mkdocs (>=1.4.2)", "mkdocs-gen-files (>=0.4.0)", "mkdocs-material (>=9.0.6)", "mkdocs-material-extensions (>=1.1.1)", "mkdocstrings (>=0.20.0)", "mkdocstrings-python (>=0.8.3)", "pillow", "pipdeptree (>=2.3.3)", "pymdown-extensions (>=9.9.2)"]
lock = ["ansible-compat (==3.0.1)", "ansible-core (==2.14.1)", "attrs (==22.2.0)", "black (==22.12.0)", "bracex (==2.3.post1)", "cffi (==1.15.1)", "click (==8.1.3)", "cryptography (==38.0.4)", "filelock (==3.9.0)", "jinja2 (==3.1.2)", "jsonschema (==4.17.3)", "markdown-it-py (==2.1.0)", "markupsafe (==2.1.1)", "mdurl (==0.1.2)", "mypy-extensions (==0.4.3)", "packaging (==22.0)", "pathspec (==0.10.3)", "platformdirs (==2.6.2)", "pycparser (==2.21)", "pygments (==2.13.0)", "pyrsistent (==0.19.3)", "pyyaml (==6.0)", "resolvelib (==0.8.1)", "rich (==13.2.0)", "ruamel-yaml (==0.17.21)", "setuptools (==65.6.3)", "subprocess-tee (==0.4.1)", "tomli (==2.0.1)", "typing-extensions (==4.4.0)", "wcmatch (==8.4.1)", "yamllint (==1.28.0)"]
test = ["black", "coverage-enable-subprocess", "coverage[toml] (>=6.4.4)", "flake8", "flake8-future-annotations", "mypy", "psutil", "pylint", "pytest (>=7.2.0)", "pytest-mock", "pytest-plus (>=0.2)", "pytest-xdist (>=2.1.0)"]
docs = ["mkdocs-ansible[lock] (>=0.1.4)", "pipdeptree (>=2.4.0)"]
lock = ["ansible-core (==2.15.0)", "attrs (==23.1.0)", "black (==23.3.0)", "bracex (==2.3.post1)", "cffi (==1.15.1)", "click (==8.1.3)", "cryptography (==40.0.2)", "filelock (==3.12.0)", "importlib-resources (==5.0.7)", "jinja2 (==3.1.2)", "jsonschema (==4.17.3)", "markdown-it-py (==2.2.0)", "markupsafe (==2.1.2)", "mdurl (==0.1.2)", "mypy-extensions (==1.0.0)", "packaging (==23.1)", "pathspec (==0.11.1)", "platformdirs (==3.5.1)", "pycparser (==2.21)", "pygments (==2.15.1)", "pyrsistent (==0.19.3)", "pyyaml (==6.0)", "rich (==13.3.5)", "ruamel-yaml (==0.17.26)", "subprocess-tee (==0.4.1)", "tomli (==2.0.1)", "typing-extensions (==4.5.0)", "wcmatch (==8.4.1)", "yamllint (==1.31.0)"]
test = ["black", "coverage-enable-subprocess", "coverage[toml] (>=6.4.4)", "jmespath", "mypy", "netaddr", "psutil", "pylint", "pytest (>=7.2.2)", "pytest-mock", "pytest-plus (>=0.2)", "pytest-xdist (>=2.1.0)", "ruamel-yaml-clib", "ruamel.yaml (>=0.17.26,<0.18)", "spdx-tools (>=0.7.1)", "types-jsonschema", "types-pyyaml"]
[[package]]
name = "argcomplete"
version = "2.0.0"
version = "2.0.6"
description = "Bash tab completion for argparse"
category = "main"
optional = false
python-versions = ">=3.6"
files = [
{file = "argcomplete-2.0.0-py2.py3-none-any.whl", hash = "sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e"},
{file = "argcomplete-2.0.0.tar.gz", hash = "sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20"},
{file = "argcomplete-2.0.6-py3-none-any.whl", hash = "sha256:6c2170b3e0ab54683cb28d319b65261bde1f11388be688b68118b7d281e34c94"},
{file = "argcomplete-2.0.6.tar.gz", hash = "sha256:dc33528d96727882b576b24bc89ed038f3c6abbb6855ff9bb6be23384afff9d6"},
]
[package.extras]
test = ["coverage", "flake8", "pexpect", "wheel"]
lint = ["flake8", "mypy"]
test = ["coverage", "flake8", "mypy", "pexpect", "wheel"]
[[package]]
name = "attrs"
version = "22.2.0"
version = "23.1.0"
description = "Classes Without Boilerplate"
category = "main"
optional = false
python-versions = ">=3.6"
python-versions = ">=3.7"
files = [
{file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"},
{file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"},
{file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"},
{file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"},
]
[package.extras]
cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"]
dev = ["attrs[docs,tests]"]
docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"]
tests = ["attrs[tests-no-zope]", "zope.interface"]
tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"]
cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
dev = ["attrs[docs,tests]", "pre-commit"]
docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
tests = ["attrs[tests-no-zope]", "zope-interface"]
tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
[[package]]
name = "black"
version = "23.1.0"
version = "23.3.0"
description = "The uncompromising code formatter."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"},
{file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"},
{file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"},
{file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"},
{file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"},
{file = "black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"},
{file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"},
{file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"},
{file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"},
{file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"},
{file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"},
{file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"},
{file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"},
{file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"},
{file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"},
{file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"},
{file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"},
{file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"},
{file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"},
{file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"},
{file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"},
{file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"},
{file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"},
{file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"},
{file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"},
{file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"},
{file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"},
{file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"},
{file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"},
{file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"},
{file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"},
{file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"},
{file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"},
{file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"},
{file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"},
{file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"},
{file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"},
{file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"},
{file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"},
{file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"},
{file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"},
{file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"},
{file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"},
{file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"},
{file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"},
{file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"},
{file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"},
{file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"},
{file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"},
{file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"},
]
[package.dependencies]
@@ -299,6 +277,9 @@ files = [
{file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},
]
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
[[package]]
name = "colorama"
version = "0.4.6"
@@ -313,14 +294,14 @@ files = [
[[package]]
name = "commitizen"
version = "2.40.0"
version = "2.42.1"
description = "Python commitizen client tool"
category = "main"
optional = false
python-versions = ">=3.6.2,<4.0.0"
files = [
{file = "commitizen-2.40.0-py3-none-any.whl", hash = "sha256:44b589869529c297d4ef594bb7560388d3367b3ae8af36b0664d2f51a28e8f87"},
{file = "commitizen-2.40.0.tar.gz", hash = "sha256:8f1a09589ffb87bb17df17261423e88299bd63432dbfc4e6fc6657fea23dddc0"},
{file = "commitizen-2.42.1-py3-none-any.whl", hash = "sha256:fad7d37cfae361a859b713d4ac591859d5ca03137dd52de4e1bd208f7f45d5dc"},
{file = "commitizen-2.42.1.tar.gz", hash = "sha256:eac18c7c65587061aac6829534907aeb208405b8230bfd35ec08503c228a7f17"},
]
[package.dependencies]
@@ -338,33 +319,31 @@ typing-extensions = ">=4.0.1,<5.0.0"
[[package]]
name = "cryptography"
version = "39.0.1"
version = "40.0.2"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
category = "main"
optional = false
python-versions = ">=3.6"
files = [
{file = "cryptography-39.0.1-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965"},
{file = "cryptography-39.0.1-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc"},
{file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41"},
{file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505"},
{file = "cryptography-39.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6"},
{file = "cryptography-39.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502"},
{file = "cryptography-39.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f"},
{file = "cryptography-39.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106"},
{file = "cryptography-39.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c"},
{file = "cryptography-39.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4"},
{file = "cryptography-39.0.1-cp36-abi3-win32.whl", hash = "sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8"},
{file = "cryptography-39.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac"},
{file = "cryptography-39.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad"},
{file = "cryptography-39.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388"},
{file = "cryptography-39.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336"},
{file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2"},
{file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e"},
{file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0"},
{file = "cryptography-39.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6"},
{file = "cryptography-39.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a"},
{file = "cryptography-39.0.1.tar.gz", hash = "sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695"},
{file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:8f79b5ff5ad9d3218afb1e7e20ea74da5f76943ee5edb7f76e56ec5161ec782b"},
{file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:05dc219433b14046c476f6f09d7636b92a1c3e5808b9a6536adf4932b3b2c440"},
{file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4df2af28d7bedc84fe45bd49bc35d710aede676e2a4cb7fc6d103a2adc8afe4d"},
{file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dcca15d3a19a66e63662dc8d30f8036b07be851a8680eda92d079868f106288"},
{file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:a04386fb7bc85fab9cd51b6308633a3c271e3d0d3eae917eebab2fac6219b6d2"},
{file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:adc0d980fd2760c9e5de537c28935cc32b9353baaf28e0814df417619c6c8c3b"},
{file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d5a1bd0e9e2031465761dfa920c16b0065ad77321d8a8c1f5ee331021fda65e9"},
{file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a95f4802d49faa6a674242e25bfeea6fc2acd915b5e5e29ac90a32b1139cae1c"},
{file = "cryptography-40.0.2-cp36-abi3-win32.whl", hash = "sha256:aecbb1592b0188e030cb01f82d12556cf72e218280f621deed7d806afd2113f9"},
{file = "cryptography-40.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:b12794f01d4cacfbd3177b9042198f3af1c856eedd0a98f10f141385c809a14b"},
{file = "cryptography-40.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:142bae539ef28a1c76794cca7f49729e7c54423f615cfd9b0b1fa90ebe53244b"},
{file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:956ba8701b4ffe91ba59665ed170a2ebbdc6fc0e40de5f6059195d9f2b33ca0e"},
{file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f01c9863da784558165f5d4d916093737a75203a5c5286fde60e503e4276c7a"},
{file = "cryptography-40.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3daf9b114213f8ba460b829a02896789751626a2a4e7a43a28ee77c04b5e4958"},
{file = "cryptography-40.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48f388d0d153350f378c7f7b41497a54ff1513c816bcbbcafe5b829e59b9ce5b"},
{file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c0764e72b36a3dc065c155e5b22f93df465da9c39af65516fe04ed3c68c92636"},
{file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cbaba590180cba88cb99a5f76f90808a624f18b169b90a4abb40c1fd8c19420e"},
{file = "cryptography-40.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7a38250f433cd41df7fcb763caa3ee9362777fdb4dc642b9a349721d2bf47404"},
{file = "cryptography-40.0.2.tar.gz", hash = "sha256:c33c0d32b8594fa647d2e01dbccc303478e16fdd7cf98652d5b3ed11aa5e5c99"},
]
[package.dependencies]
@@ -373,10 +352,10 @@ cffi = ">=1.12"
[package.extras]
docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"]
pep8test = ["black", "check-manifest", "mypy", "ruff", "types-pytz", "types-requests"]
pep8test = ["black", "check-manifest", "mypy", "ruff"]
sdist = ["setuptools-rust (>=0.11.4)"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist", "pytz"]
test = ["iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist"]
test-randomorder = ["pytest-randomly"]
tox = ["tox"]
@@ -406,30 +385,30 @@ files = [
[[package]]
name = "filelock"
version = "3.9.0"
version = "3.12.0"
description = "A platform independent file lock."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "filelock-3.9.0-py3-none-any.whl", hash = "sha256:f58d535af89bb9ad5cd4df046f741f8553a418c01a7856bf0d173bbc9f6bd16d"},
{file = "filelock-3.9.0.tar.gz", hash = "sha256:7b319f24340b51f55a2bf7a12ac0755a9b03e718311dac567a0f4f7fabd2f5de"},
{file = "filelock-3.12.0-py3-none-any.whl", hash = "sha256:ad98852315c2ab702aeb628412cbf7e95b7ce8c3bf9565670b4eaecf1db370a9"},
{file = "filelock-3.12.0.tar.gz", hash = "sha256:fc03ae43288c013d2ea83c8597001b1129db351aad9c57fe2409327916b8e718"},
]
[package.extras]
docs = ["furo (>=2022.12.7)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"]
testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"]
docs = ["furo (>=2023.3.27)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"]
testing = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"]
[[package]]
name = "identify"
version = "2.5.17"
version = "2.5.24"
description = "File identification library for Python"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "identify-2.5.17-py2.py3-none-any.whl", hash = "sha256:7d526dd1283555aafcc91539acc061d8f6f59adb0a7bba462735b0a318bff7ed"},
{file = "identify-2.5.17.tar.gz", hash = "sha256:93cc61a861052de9d4c541a7acb7e3dcc9c11b398a2144f6e52ae5285f5f4f06"},
{file = "identify-2.5.24-py2.py3-none-any.whl", hash = "sha256:986dbfb38b1140e763e413e6feb44cd731faf72d1909543178aa79b0e258265d"},
{file = "identify-2.5.24.tar.gz", hash = "sha256:0aac67d5b4812498056d28a9a512a483f5085cc28640b02b258a59dac34301d4"},
]
[package.extras]
@@ -475,24 +454,24 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-
[[package]]
name = "markdown-it-py"
version = "2.1.0"
version = "2.2.0"
description = "Python port of markdown-it. Markdown parsing, done right!"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "markdown-it-py-2.1.0.tar.gz", hash = "sha256:cf7e59fed14b5ae17c0006eff14a2d9a00ed5f3a846148153899a0224e2c07da"},
{file = "markdown_it_py-2.1.0-py3-none-any.whl", hash = "sha256:93de681e5c021a432c63147656fe21790bc01231e0cd2da73626f1aa3ac0fe27"},
{file = "markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1"},
{file = "markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30"},
]
[package.dependencies]
mdurl = ">=0.1,<1.0"
[package.extras]
benchmarking = ["psutil", "pytest", "pytest-benchmark (>=3.2,<4.0)"]
code-style = ["pre-commit (==2.6)"]
compare = ["commonmark (>=0.9.1,<0.10.0)", "markdown (>=3.3.6,<3.4.0)", "mistletoe (>=0.8.1,<0.9.0)", "mistune (>=2.0.2,<2.1.0)", "panflute (>=2.1.3,<2.2.0)"]
linkify = ["linkify-it-py (>=1.0,<2.0)"]
benchmarking = ["psutil", "pytest", "pytest-benchmark"]
code-style = ["pre-commit (>=3.0,<4.0)"]
compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"]
linkify = ["linkify-it-py (>=1,<3)"]
plugins = ["mdit-py-plugins"]
profiling = ["gprof2dot"]
rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
@@ -584,14 +563,14 @@ files = [
[[package]]
name = "nodeenv"
version = "1.7.0"
version = "1.8.0"
description = "Node.js virtual environment builder"
category = "main"
optional = false
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
files = [
{file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"},
{file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"},
{file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"},
{file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"},
]
[package.dependencies]
@@ -599,14 +578,14 @@ setuptools = "*"
[[package]]
name = "packaging"
version = "23.0"
version = "23.1"
description = "Core utilities for Python packages"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"},
{file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"},
{file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"},
{file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"},
]
[[package]]
@@ -623,31 +602,31 @@ files = [
[[package]]
name = "pathspec"
version = "0.11.0"
version = "0.11.1"
description = "Utility library for gitignore style pattern matching of file paths."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "pathspec-0.11.0-py3-none-any.whl", hash = "sha256:3a66eb970cbac598f9e5ccb5b2cf58930cd8e3ed86d393d541eaf2d8b1705229"},
{file = "pathspec-0.11.0.tar.gz", hash = "sha256:64d338d4e0914e91c1792321e6907b5a593f1ab1851de7fc269557a21b30ebbc"},
{file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"},
{file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"},
]
[[package]]
name = "platformdirs"
version = "2.6.2"
version = "3.5.1"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"},
{file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"},
{file = "platformdirs-3.5.1-py3-none-any.whl", hash = "sha256:e2378146f1964972c03c085bb5662ae80b2b8c06226c54b2ff4aa9483e8a13a5"},
{file = "platformdirs-3.5.1.tar.gz", hash = "sha256:412dae91f52a6f84830f39a8078cecd0e866cb72294a5c66808e74d5e88d251f"},
]
[package.extras]
docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"]
test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.2.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"]
test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
[[package]]
name = "poethepoet"
@@ -670,14 +649,14 @@ poetry-plugin = ["poetry (>=1.0,<2.0)"]
[[package]]
name = "pre-commit"
version = "3.0.4"
version = "3.3.2"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
category = "main"
optional = false
python-versions = ">=3.8"
files = [
{file = "pre_commit-3.0.4-py2.py3-none-any.whl", hash = "sha256:9e3255edb0c9e7fe9b4f328cb3dc86069f8fdc38026f1bf521018a05eaf4d67b"},
{file = "pre_commit-3.0.4.tar.gz", hash = "sha256:bc4687478d55578c4ac37272fe96df66f73d9b5cf81be6f28627d4e712e752d5"},
{file = "pre_commit-3.3.2-py2.py3-none-any.whl", hash = "sha256:8056bc52181efadf4aac792b1f4f255dfd2fb5a350ded7335d251a68561e8cb6"},
{file = "pre_commit-3.3.2.tar.gz", hash = "sha256:66e37bec2d882de1f17f88075047ef8962581f83c234ac08da21a0c58953d1f0"},
]
[package.dependencies]
@@ -689,14 +668,14 @@ virtualenv = ">=20.10.0"
[[package]]
name = "prompt-toolkit"
version = "3.0.36"
version = "3.0.38"
description = "Library for building powerful interactive command lines in Python"
category = "main"
optional = false
python-versions = ">=3.6.2"
python-versions = ">=3.7.0"
files = [
{file = "prompt_toolkit-3.0.36-py3-none-any.whl", hash = "sha256:aa64ad242a462c5ff0363a7b9cfe696c20d55d9fc60c11fd8e632d064804d305"},
{file = "prompt_toolkit-3.0.36.tar.gz", hash = "sha256:3e163f254bef5a03b146397d7c1963bd3e2812f0964bb9a24e6ec761fd28db63"},
{file = "prompt_toolkit-3.0.38-py3-none-any.whl", hash = "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f"},
{file = "prompt_toolkit-3.0.38.tar.gz", hash = "sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b"},
]
[package.dependencies]
@@ -716,14 +695,14 @@ files = [
[[package]]
name = "pygments"
version = "2.14.0"
version = "2.15.1"
description = "Pygments is a syntax highlighting package written in Python."
category = "main"
optional = false
python-versions = ">=3.6"
python-versions = ">=3.7"
files = [
{file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"},
{file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"},
{file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"},
{file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"},
]
[package.extras]
@@ -781,6 +760,13 @@ files = [
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
{file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
{file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
{file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"},
{file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"},
{file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"},
{file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"},
{file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"},
{file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"},
{file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"},
{file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"},
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"},
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"},
@@ -847,37 +833,37 @@ test = ["commentjson", "packaging", "pytest"]
[[package]]
name = "rich"
version = "13.3.1"
version = "13.3.5"
description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
category = "main"
optional = false
python-versions = ">=3.7.0"
files = [
{file = "rich-13.3.1-py3-none-any.whl", hash = "sha256:8aa57747f3fc3e977684f0176a88e789be314a99f99b43b75d1e9cb5dc6db9e9"},
{file = "rich-13.3.1.tar.gz", hash = "sha256:125d96d20c92b946b983d0d392b84ff945461e5a06d3867e9f9e575f8697b67f"},
{file = "rich-13.3.5-py3-none-any.whl", hash = "sha256:69cdf53799e63f38b95b9bf9c875f8c90e78dd62b2f00c13a911c7a3b9fa4704"},
{file = "rich-13.3.5.tar.gz", hash = "sha256:2d11b9b8dd03868f09b4fffadc84a6a8cda574e40dc90821bd845720ebb8e89c"},
]
[package.dependencies]
markdown-it-py = ">=2.1.0,<3.0.0"
pygments = ">=2.14.0,<3.0.0"
markdown-it-py = ">=2.2.0,<3.0.0"
pygments = ">=2.13.0,<3.0.0"
[package.extras]
jupyter = ["ipywidgets (>=7.5.1,<9)"]
[[package]]
name = "ruamel-yaml"
version = "0.17.21"
version = "0.17.26"
description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order"
category = "main"
optional = false
python-versions = ">=3"
files = [
{file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"},
{file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"},
{file = "ruamel.yaml-0.17.26-py3-none-any.whl", hash = "sha256:25d0ee82a0a9a6f44683dcf8c282340def4074a4562f3a24f55695bb254c1693"},
{file = "ruamel.yaml-0.17.26.tar.gz", hash = "sha256:baa2d0a5aad2034826c439ce61c142c07082b76f4791d54145e131206e998059"},
]
[package.dependencies]
"ruamel.yaml.clib" = {version = ">=0.2.6", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.11\""}
"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.12\""}
[package.extras]
docs = ["ryd"]
@@ -899,6 +885,9 @@ files = [
{file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a"},
{file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e"},
{file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_12_6_arm64.whl", hash = "sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5"},
{file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94"},
{file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win32.whl", hash = "sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38"},
{file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122"},
{file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072"},
{file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_12_0_arm64.whl", hash = "sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8"},
{file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3"},
@@ -928,21 +917,33 @@ files = [
[[package]]
name = "setuptools"
version = "67.2.0"
version = "67.8.0"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "setuptools-67.2.0-py3-none-any.whl", hash = "sha256:16ccf598aab3b506593c17378473978908a2734d7336755a8769b480906bec1c"},
{file = "setuptools-67.2.0.tar.gz", hash = "sha256:b440ee5f7e607bb8c9de15259dba2583dd41a38879a7abc1d43a71c59524da48"},
{file = "setuptools-67.8.0-py3-none-any.whl", hash = "sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f"},
{file = "setuptools-67.8.0.tar.gz", hash = "sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102"},
]
[package.extras]
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
[[package]]
name = "sh"
version = "2.0.4"
description = "Python subprocess replacement"
category = "dev"
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
{file = "sh-2.0.4-py3-none-any.whl", hash = "sha256:14265a4cd1622429edcf300292ec98193530fb143fe642b3437024eca9bee8c5"},
{file = "sh-2.0.4.tar.gz", hash = "sha256:a18920f0839991bc9dfddb6dcc006c360b1064ba96257359f0ea356e9fa75a60"},
]
[[package]]
name = "subprocess-tee"
version = "0.4.1"
@@ -960,14 +961,14 @@ test = ["enrich (>=1.2.6)", "molecule (>=3.4.0)", "pytest (>=6.2.5)", "pytest-co
[[package]]
name = "termcolor"
version = "2.2.0"
version = "2.3.0"
description = "ANSI color formatting for output in terminal"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "termcolor-2.2.0-py3-none-any.whl", hash = "sha256:91ddd848e7251200eac969846cbae2dacd7d71c2871e92733289e7e3666f48e7"},
{file = "termcolor-2.2.0.tar.gz", hash = "sha256:dfc8ac3f350788f23b2947b3e6cfa5a53b630b612e6cd8965a015a776020b99a"},
{file = "termcolor-2.3.0-py3-none-any.whl", hash = "sha256:3afb05607b89aed0ffe25202399ee0867ad4d3cb4180d98aaf8eefa6a5f7d475"},
{file = "termcolor-2.3.0.tar.gz", hash = "sha256:b5b08f68937f138fe92f6c089b99f1e2da0ae56c52b78bf7075fd95420fd9a5a"},
]
[package.extras]
@@ -987,48 +988,48 @@ files = [
[[package]]
name = "tomlkit"
version = "0.11.6"
version = "0.11.8"
description = "Style preserving TOML library"
category = "main"
optional = false
python-versions = ">=3.6"
python-versions = ">=3.7"
files = [
{file = "tomlkit-0.11.6-py3-none-any.whl", hash = "sha256:07de26b0d8cfc18f871aec595fda24d95b08fef89d147caa861939f37230bf4b"},
{file = "tomlkit-0.11.6.tar.gz", hash = "sha256:71b952e5721688937fb02cf9d354dbcf0785066149d2855e44531ebdd2b65d73"},
{file = "tomlkit-0.11.8-py3-none-any.whl", hash = "sha256:8c726c4c202bdb148667835f68d68780b9a003a9ec34167b6c673b38eff2a171"},
{file = "tomlkit-0.11.8.tar.gz", hash = "sha256:9330fc7faa1db67b541b28e62018c17d20be733177d290a13b24c62d1614e0c3"},
]
[[package]]
name = "typing-extensions"
version = "4.4.0"
version = "4.5.0"
description = "Backported and Experimental Type Hints for Python 3.7+"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
{file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
{file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"},
{file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"},
]
[[package]]
name = "virtualenv"
version = "20.18.0"
version = "20.23.0"
description = "Virtual Python Environment builder"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "virtualenv-20.18.0-py3-none-any.whl", hash = "sha256:9d61e4ec8d2c0345dab329fb825eb05579043766a4b26a2f66b28948de68c722"},
{file = "virtualenv-20.18.0.tar.gz", hash = "sha256:f262457a4d7298a6b733b920a196bf8b46c8af15bf1fd9da7142995eff15118e"},
{file = "virtualenv-20.23.0-py3-none-any.whl", hash = "sha256:6abec7670e5802a528357fdc75b26b9f57d5d92f29c5462ba0fbe45feacc685e"},
{file = "virtualenv-20.23.0.tar.gz", hash = "sha256:a85caa554ced0c0afbd0d638e7e2d7b5f92d23478d05d17a76daeac8f279f924"},
]
[package.dependencies]
distlib = ">=0.3.6,<1"
filelock = ">=3.4.1,<4"
platformdirs = ">=2.4,<3"
filelock = ">=3.11,<4"
platformdirs = ">=3.2,<4"
[package.extras]
docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"]
test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"]
docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"]
test = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=67.7.1)", "time-machine (>=2.9)"]
[[package]]
name = "wcmatch"
@@ -1059,22 +1060,24 @@ files = [
[[package]]
name = "yamllint"
version = "1.29.0"
version = "1.31.0"
description = "A linter for YAML files."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "yamllint-1.29.0-py3-none-any.whl", hash = "sha256:5153bf9f8205aa9dc6af6217e38bd4f5baf09d9a7c6f4ae1e23f90d9c00c49c5"},
{file = "yamllint-1.29.0.tar.gz", hash = "sha256:66a755d5fbcbb8831f1a9568676329b5bac82c37995bcc9afd048b6459f9fa48"},
{file = "yamllint-1.31.0-py3-none-any.whl", hash = "sha256:15f4bdb645e6a4a0a22fe5415bc38b4a934c51419b30104896d2f3f95e329185"},
{file = "yamllint-1.31.0.tar.gz", hash = "sha256:2d83f1d12f733e162a87e06b176149d7bb9c5bae4a9e5fce1c771d7f703f7a65"},
]
[package.dependencies]
pathspec = ">=0.5.3"
pyyaml = "*"
setuptools = "*"
[package.extras]
dev = ["doc8", "flake8", "flake8-import-order", "rstcheck[sphinx]", "sphinx"]
[metadata]
lock-version = "2.0"
python-versions = "^3.9"
content-hash = "5ef9a5432b997e5178ae778d102fda95ca3792c0f21ea9b0b78146490b7ec82f"
content-hash = "1b7a947a7e186e00af3b6884941a659ca25491237343c7daae79df656aa18a7b"

2
poetry.toml Normal file
View File

@@ -0,0 +1,2 @@
[virtualenvs]
in-project = true
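With in-project = true, Poetry creates the project's virtualenv in a .venv/ directory at the repository root instead of its shared cache, which keeps the interpreter easy for editors and linters to find. A minimal sketch of the effect, assuming Poetry is installed:
$ poetry install              # builds the environment from poetry.lock
$ ls -d .venv                 # the virtualenv now lives inside the repo
$ poetry run ansible --version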

pyproject.toml
View File

@@ -7,7 +7,7 @@
version = "0.2.0"
[tool.poetry.dependencies]
ansible = "^7.2.0"
ansible = "^7.5.0"
ansible-lint = { version = "^6.12.1", markers = "platform_system != 'Windows'" }
commitizen = "^2.40.0"
poethepoet = "^0.18.1"
@@ -15,10 +15,17 @@
python = "^3.9"
yamllint = "^1.29.0"
[tool.poetry.group.dev.dependencies]
black = "^23.3.0"
sh = "^2.0.4"
[build-system]
build-backend = "poetry.core.masonry.api"
requires = ["poetry-core"]
[tool.black]
line-length = 100
[tool.commitizen]
bump_message = "bump(release): v$current_version → v$new_version"
tag_format = "v$version"
@@ -27,11 +34,18 @@
version_files = ["pyproject.toml:version"]
[tool.poe.tasks]
pb = """
ansible-playbook
--vault-password-file .password_file
main.yml
-i inventory.yml
"""
[tool.poe.tasks.lint]
help = "Run linters"
[[tool.poe.tasks.lint.sequence]]
shell = "yamllint --strict --config-file .yamllint.yml tasks/ handlers/ main.yml inventory.yml default_variables.yml"
cmd = "yamllint --strict --config-file .yamllint.yml tasks/ handlers/ main.yml inventory.yml default_variables.yml"
[[tool.poe.tasks.lint.sequence]]
shell = "ansible-lint --force-color --config-file .ansible-lint.yml"
cmd = "ansible-lint --force-color --config-file .ansible-lint.yml"

150
scripts/update_dependencies.py Executable file
View File

@@ -0,0 +1,150 @@
#!/usr/bin/env python
"""Script to update the pyproject.toml file with the latest versions of the dependencies."""
from pathlib import Path
from textwrap import wrap
try:
import tomllib
except ModuleNotFoundError: # pragma: no cover
import tomli as tomllib # type: ignore [no-redef]
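# tomllib ships with the standard library on Python 3.11+; older interpreters
# (this project declares python = "^3.9") fall back to the tomli backport.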
import sh
from rich.console import Console
console = Console()
def dryrun(msg: str) -> None:
"""Print a message if the dry run flag is set.
Args:
msg: Message to print
"""
console.print(f"[cyan]DRYRUN | {msg}[/cyan]")
def success(msg: str) -> None:
"""Print a success message without using logging.
Args:
msg: Message to print
"""
console.print(f"[green]SUCCESS | {msg}[/green]")
def warning(msg: str) -> None:
"""Print a warning message without using logging.
Args:
msg: Message to print
"""
console.print(f"[yellow]WARNING | {msg}[/yellow]")
def error(msg: str) -> None:
"""Print an error message without using logging.
Args:
msg: Message to print
"""
console.print(f"[red]ERROR | {msg}[/red]")
def notice(msg: str) -> None:
"""Print a notice message without using logging.
Args:
msg: Message to print
"""
console.print(f"[bold]NOTICE | {msg}[/bold]")
def info(msg: str) -> None:
"""Print a notice message without using logging.
Args:
msg: Message to print
"""
console.print(f"INFO | {msg}")
def usage(msg: str, width: int = 80) -> None:
"""Print a usage message without using logging.
Args:
msg: Message to print
width (optional): Width of the message
"""
for _n, line in enumerate(wrap(msg, width=width)):
if _n == 0:
console.print(f"[dim]USAGE | {line}")
else:
console.print(f"[dim] | {line}")
def debug(msg: str) -> None:
"""Print a debug message without using logging.
Args:
msg: Message to print
"""
console.print(f"[blue]DEBUG | {msg}[/blue]")
def dim(msg: str) -> None:
"""Print a message in dimmed color.
Args:
msg: Message to print
"""
console.print(f"[dim]{msg}[/dim]")
# Load the pyproject.toml file
pyproject = Path(__file__).parents[1] / "pyproject.toml"
if not pyproject.exists():
console.print("pyproject.toml file not found")
raise SystemExit(1)
with pyproject.open("rb") as f:
try:
data = tomllib.load(f)
except tomllib.TOMLDecodeError as e:
raise SystemExit(1) from e
# Get the latest versions of all dependencies
info("Getting latest versions of dependencies...")
packages: dict = {}
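# Each line of `poetry show --outdated` begins with "<package> <current> <latest>",
# which is all the information needed to build the update map below.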
for line in sh.poetry("--no-ansi", "show", "--outdated").splitlines():
package, current, latest = line.split()[:3]
packages[package] = {"current_version": current, "new_version": latest}
if not packages:
success("All dependencies are up to date")
raise SystemExit(0)
dependencies = data["tool"]["poetry"]["dependencies"]
groups = data["tool"]["poetry"]["group"]
for p in dependencies:
if p in packages:
notice(
f"Updating {p} from {packages[p]['current_version']} to {packages[p]['new_version']}"
)
sh.poetry("add", f"{p}@latest", _fg=True)
for group in groups:
for p in groups[group]["dependencies"]:
if p in packages:
notice(
f"Updating {p} from {packages[p]['current_version']} to {packages[p]['new_version']}"
)
sh.poetry("add", f"{p}@latest", "--group", group, _fg=True)
sh.poetry("update", _fg=True)
success("All dependencies are up to date")
raise SystemExit(0)
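A rough sketch of running the new helper, assuming the Poetry environment is installed; note that rich is imported above but is not listed in the pyproject.toml excerpt, so its presence is an assumption:
$ poetry install
$ poetry run python scripts/update_dependencies.py
The script shells out to poetry show --outdated, re-adds each outdated package with poetry add <name>@latest (per group), and finishes with poetry update.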

tasks/backups.yml
View File

@@ -6,42 +6,42 @@
# 1. Copies a backup and restore shellscript to /usr/local/bin
# 2. Edits the sudoers file to allow the script to be invoked with sudo privileges
- name: copy backup shellscript to server
- name: Copy backup shellscript to server
become: true
ansible.builtin.template:
src: scripts/service_backups.sh.j2
dest: /usr/local/bin/service_backups
mode: 0755
src: scripts/service_backups.sh.j2
dest: /usr/local/bin/service_backups
mode: 0755
when:
- is_nomad_client or is_nomad_server
- is_nomad_client or is_nomad_server
- name: copy restore shellscript to server
- name: Copy restore shellscript to server
become: true
ansible.builtin.template:
src: scripts/service_restore.sh.j2
dest: /usr/local/bin/service_restore
mode: 0755
src: scripts/service_restore.sh.j2
dest: /usr/local/bin/service_restore
mode: 0755
when:
- is_nomad_client or is_nomad_server
- is_nomad_client or is_nomad_server
- name: ensure nomad user can run sudo with the restore script
- name: Ensure nomad user can run sudo with the restore script
become: true
ansible.builtin.lineinfile:
path: /etc/sudoers
state: present
line: "nomad ALL=(ALL) NOPASSWD: /usr/local/bin/service_backups, /usr/local/bin/service_restore"
validate: "/usr/sbin/visudo -cf %s"
path: /etc/sudoers
state: present
line: "nomad ALL=(ALL) NOPASSWD: /usr/local/bin/service_backups, /usr/local/bin/service_restore"
validate: "/usr/sbin/visudo -cf %s"
when:
- is_nomad_client or is_nomad_server
- "'pis' in group_names"
- is_nomad_client or is_nomad_server
- "'pis' in group_names"
- name: ensure my user can run sudo with the restore script
- name: Ensure my user can run sudo with the restore script
become: true
ansible.builtin.lineinfile:
path: /etc/sudoers
state: present
line: "{{ ansible_user }} ALL=(ALL) NOPASSWD: /usr/local/bin/service_backups, /usr/local/bin/service_restore"
validate: "/usr/sbin/visudo -cf %s"
path: /etc/sudoers
state: present
line: "{{ ansible_user }} ALL=(ALL) NOPASSWD: /usr/local/bin/service_backups, /usr/local/bin/service_restore"
validate: "/usr/sbin/visudo -cf %s"
when:
- is_nomad_client or is_nomad_server
- "'pis' in group_names"
- is_nomad_client or is_nomad_server
- "'pis' in group_names"

tasks/cluster_storage.yml
View File

@@ -6,159 +6,159 @@
- name: "Mount storage on Raspberry Pis"
when: "'pis' in group_names"
block:
- name: ensure local mount points exist
become: true
ansible.builtin.file:
path: "{{ item.local }}"
state: directory
mode: 0777
# owner: "{{ ansible_user_uid }}"
# group: "{{ ansible_user_gid }}"
loop: "{{ rpi_nfs_mounts_list }}"
- name: Ensure local mount points exist
become: true
ansible.builtin.file:
path: "{{ item.local }}"
state: directory
mode: 0777
# owner: "{{ ansible_user_uid }}"
# group: "{{ ansible_user_gid }}"
loop: "{{ rpi_nfs_mounts_list }}"
- name: remove old nfs drives
become: true
ansible.posix.mount:
path: "{{ item.local }}"
src: "{{ item.src }}"
fstype: nfs
opts: defaults,hard,intr,timeo=14
state: absent
loop: "{{ rpi_nfs_mounts_remove }}"
- name: Remove old nfs drives
become: true
ansible.posix.mount:
path: "{{ item.local }}"
src: "{{ item.src }}"
fstype: nfs
opts: defaults,hard,intr,timeo=14
state: absent
loop: "{{ rpi_nfs_mounts_remove }}"
- name: mount all nfs drives
become: true
ansible.posix.mount:
path: "{{ item.local }}"
src: "{{ item.src }}"
fstype: nfs
opts: defaults,hard,intr,timeo=14
state: mounted
boot: true
loop: "{{ rpi_nfs_mounts_list }}"
- name: Mount all nfs drives
become: true
ansible.posix.mount:
path: "{{ item.local }}"
src: "{{ item.src }}"
fstype: nfs
opts: defaults,hard,intr,timeo=14
state: mounted
boot: true
loop: "{{ rpi_nfs_mounts_list }}"
# --------------------------------- Mount on Macs
# https://gist.github.com/l422y/8697518
- name: "Mount storage on Macs"
when: "'macs' in group_names"
block:
- name: create mount_point
become: true
ansible.builtin.file:
path: "{{ mac_storage_mount_point }}"
state: directory
mode: 0755
- name: Create mount_point
become: true
ansible.builtin.file:
path: "{{ mac_storage_mount_point }}"
state: directory
mode: 0755
# I ran into problems getting this to run successfully. If errors occur, add the line manually using:
# $ sudo nano /private/etc/auto_master
# I ran into problems getting this to run successfully. If errors occur, add the line manually using:
# $ sudo nano /private/etc/auto_master
- name: add NFS shared drives to macs
when: mac_autofs_type == 'nfs'
block:
- name: add auto_nfs to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_nfs"
line: "/- auto_nfs -nobrowse,nosuid"
unsafe_writes: true
- name: Add NFS shared drives to macs
when: mac_autofs_type == 'nfs'
block:
- name: Add auto_nfs to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_nfs"
line: "/- auto_nfs -nobrowse,nosuid"
unsafe_writes: true
- name: add mounts to /etc/auto_nfs
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_nfs
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=nfs,bg,intr,noowners,rw,vers=4 nfs://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_nfs_mounts_list if mac_nfs_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: Add mounts to /etc/auto_nfs
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_nfs
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=nfs,bg,intr,noowners,rw,vers=4 nfs://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_nfs_mounts_list if mac_nfs_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: remove old mounts from /etc/auto_nfs
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_nfs
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=nfs,bg,intr,noowners,rw,vers=4 nfs://{{ item.src }}"
state: absent
unsafe_writes: true
mode: 0644
notify: mac_run_automount_unmount
loop: "{{ mac_nfs_mounts_remove if mac_nfs_mounts_remove is iterable else [] }}"
- name: Remove old mounts from /etc/auto_nfs
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_nfs
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=nfs,bg,intr,noowners,rw,vers=4 nfs://{{ item.src }}"
state: absent
unsafe_writes: true
mode: 0644
notify: mac_run_automount_unmount
loop: "{{ mac_nfs_mounts_remove if mac_nfs_mounts_remove is iterable else [] }}"
- name: add AFP shared drives to macs
when: mac_autofs_type == 'afp'
block:
- name: add auto_afp to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_afp"
line: "/- auto_afp -nobrowse,nosuid"
unsafe_writes: true
- name: Add AFP shared drives to macs
when: mac_autofs_type == 'afp'
block:
- name: Add auto_afp to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_afp"
line: "/- auto_afp -nobrowse,nosuid"
unsafe_writes: true
- name: add mounts to /etc/auto_afp
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_afp
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_list if mac_afp_or_smb_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: Add mounts to /etc/auto_afp
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_afp
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_list if mac_afp_or_smb_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: remove mounts from /etc/auto_afp
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_afp
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_remove if mac_afp_or_smb_mounts_remove is iterable else [] }}"
notify: mac_run_automount_unmount
- name: Remove mounts from /etc/auto_afp
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_afp
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_remove if mac_afp_or_smb_mounts_remove is iterable else [] }}"
notify: mac_run_automount_unmount
- name: add SMB shared drives to macs
when: mac_autofs_type == 'smb'
block:
- name: add auto_smb to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_smb"
line: "/- auto_smb -noowners,nosuid"
unsafe_writes: true
- name: Add SMB shared drives to macs
when: mac_autofs_type == 'smb'
block:
- name: Add auto_smb to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_smb"
line: "/- auto_smb -noowners,nosuid"
unsafe_writes: true
- name: add mounts to /etc/auto_smb
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_smb
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=smbfs,soft,noowners,nosuid,rw ://{{ smb_username }}:{{ smb_password }}@{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_list if mac_afp_or_smb_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: Add mounts to /etc/auto_smb
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_smb
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=smbfs,soft,noowners,nosuid,rw ://{{ smb_username }}:{{ smb_password }}@{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_list if mac_afp_or_smb_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: remove mounts from /etc/auto_smb
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_smb
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_remove if mac_afp_or_smb_mounts_remove is iterable else [] }}"
notify: mac_run_automount_unmount
- name: Remove mounts from /etc/auto_smb
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_smb
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_remove if mac_afp_or_smb_mounts_remove is iterable else [] }}"
notify: mac_run_automount_unmount
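If the automounter does not pick up the new maps on a Mac, a hedged manual check, mirroring what the mac_run_automount handlers are presumed to do:
$ cat /private/etc/auto_master    # should contain the /- auto_nfs, auto_afp, or auto_smb line
$ sudo automount -vc              # flush the cache and reload the maps; shares mount lazily on first access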

tasks/consul.yml
View File

@@ -4,356 +4,362 @@
- name: Set variables needed to install consul
block:
- name: "Set variable: check if we have a mounted USB drive (Debian)"
ansible.builtin.stat:
path: "{{ rpi_usb_drive_mount_point }}"
register: have_usb_drive
changed_when: false
when:
- ansible_os_family == 'Debian'
- name: "Set variable: check if we have a mounted USB drive (Debian)"
ansible.builtin.stat:
path: "{{ rpi_usb_drive_mount_point }}"
register: have_usb_drive
changed_when: false
when:
- ansible_os_family == 'Debian'
- name: "Set variable: Use USB drive for consul /opt (Debian)"
ansible.builtin.set_fact:
consul_opt_dir: "{{ rpi_usb_drive_mount_point }}/opt/consul"
when:
- ansible_os_family == 'Debian'
- have_usb_drive.stat.exists
- name: "Set variable: Use USB drive for consul /opt (Debian)"
ansible.builtin.set_fact:
consul_opt_dir: "{{ rpi_usb_drive_mount_point }}/opt/consul"
when:
- ansible_os_family == 'Debian'
- have_usb_drive.stat.exists
- name: "Set variable: Use root disk for consul /opt (Debian)"
ansible.builtin.set_fact:
consul_opt_dir: "/opt/consul"
when:
- ansible_os_family == 'Debian'
- not have_usb_drive.stat.exists
- name: "Set variable: Use root disk for consul /opt (Debian)"
ansible.builtin.set_fact:
consul_opt_dir: "/opt/consul"
when:
- ansible_os_family == 'Debian'
- not have_usb_drive.stat.exists
- name: "Set variable: Use ~/library for /opt files (macOSX)"
ansible.builtin.set_fact:
consul_opt_dir: "/Users/{{ ansible_user }}/Library/consul"
when:
- mac_intel or mac_arm
- name: "Set variable: Use ~/library for /opt files (macOSX)"
ansible.builtin.set_fact:
consul_opt_dir: "/Users/{{ ansible_user }}/Library/consul"
when:
- mac_intel or mac_arm
- name: "Set variable: Use ~/volume1/docker/consul/data for /opt files (synology)"
ansible.builtin.set_fact:
consul_opt_dir: "/volume1/docker/consul/data"
when:
- inventory_hostname == 'synology'
- name: "Set variable: Use ~/volume1/docker/consul/data for /opt files (synology)"
ansible.builtin.set_fact:
consul_opt_dir: "/volume1/docker/consul/data"
when:
- inventory_hostname == 'synology'
- name: "Set variable: Set Consul download Binary (armv7l)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_linux_arm.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Set variable: Set Consul download Binary (armv7l)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_linux_arm.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Set variable: Set Consul download Binary (aarch64)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_linux_arm64.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "Set variable: Set Consul download Binary (aarch64)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_linux_arm64.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "Set variable: Set Consul download Binary (MacOSX)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_darwin_amd64.zip"
when: mac_intel
- name: "Set variable: Set Consul download Binary (MacOSX)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_darwin_amd64.zip"
when: mac_intel
- name: "Set variable: Set Consul download Binary (MacOSX)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_darwin_arm64.zip"
when: mac_arm
- name: "Set variable: Set Consul download Binary (MacOSX)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_darwin_arm64.zip"
when: mac_arm
- name: Assert that we can install Consul
ansible.builtin.assert:
that:
- consul_download_uri is defined
- consul_opt_dir is defined
fail_msg: "Unable to install consul on this host"
when: inventory_hostname != 'synology'
- name: Assert that we can install Consul
ansible.builtin.assert:
that:
- consul_download_uri is defined
- consul_opt_dir is defined
fail_msg: "Unable to install consul on this host"
when: inventory_hostname != 'synology'
- name: "Stop Consul"
block:
- name: "Stop consul systemd service (Debian)"
become: true
ansible.builtin.systemd:
name: consul
state: stopped
when:
- ansible_os_family == 'Debian'
- ansible_facts.services["consul.service"] is defined
- name: "Stop consul systemd service (Debian)"
become: true
ansible.builtin.systemd:
name: consul
state: stopped
when:
- ansible_os_family == 'Debian'
- ansible_facts.services["consul.service"] is defined
- name: "Check if plist file exists (MacOSX)"
ansible.builtin.stat:
path: "{{ consul_plist_macos }}"
register: consul_file
when:
- ansible_os_family == 'Darwin'
- name: "Check if plist file exists (MacOSX)"
ansible.builtin.stat:
path: "{{ consul_plist_macos }}"
register: consul_file
when:
- ansible_os_family == 'Darwin'
- name: "Unload consul agent (MacOSX)"
become: true
ansible.builtin.command:
cmd: "launchctl unload {{ consul_plist_macos }}"
when:
- ansible_os_family == 'Darwin'
- consul_file.stat.exists
- name: "Unload consul agent (MacOSX)"
become: true
ansible.builtin.command:
cmd: "launchctl unload {{ consul_plist_macos }}"
register: consul_unload
failed_when: consul_unload.rc != 0
changed_when: consul_unload.rc == 0
when:
- ansible_os_family == 'Darwin'
- consul_file.stat.exists
- name: "Create 'consul' user and group"
when:
- ansible_os_family == 'Debian'
- ansible_os_family == 'Debian'
block:
- name: "Ensure group 'consul' exists (Debian)"
become: true
ansible.builtin.group:
name: consul
state: present
- name: "Ensure group 'consul' exists (Debian)"
become: true
ansible.builtin.group:
name: consul
state: present
- name: "Add the user 'consul' with group 'consul' (Debian)"
become: true
ansible.builtin.user:
name: consul
group: consul
- name: "Add the user 'consul' with group 'consul' (Debian)"
become: true
ansible.builtin.user:
name: consul
group: consul
- name: "Create Consul /opt storage and copy certificates"
block:
- name: "Create {{ consul_opt_dir }} directories"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
recurse: true
mode: 0755
loop:
- "{{ consul_opt_dir }}"
- "{{ consul_opt_dir }}/logs"
- "{{ consul_opt_dir }}/plugins"
- "{{ consul_opt_dir }}/certs"
- name: "Create {{ consul_opt_dir }} directories"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
recurse: true
mode: 0755
loop:
- "{{ consul_opt_dir }}"
- "{{ consul_opt_dir }}/logs"
- "{{ consul_opt_dir }}/plugins"
- "{{ consul_opt_dir }}/certs"
- name: Copy certs to servers
become: true
ansible.builtin.copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: 0755
loop:
- { src: "certs/consul/consul-agent-ca.pem", dest: "{{ consul_opt_dir }}/certs/consul-agent-ca.pem" }
- { src: "certs/consul/{{ datacenter_name }}-server-consul-0.pem", dest: "{{ consul_opt_dir }}/certs/{{ datacenter_name }}-server-consul-0.pem" }
- { src: "certs/consul/{{ datacenter_name }}-server-consul-0-key.pem", dest: "{{ consul_opt_dir }}/certs/{{ datacenter_name }}-server-consul-0-key.pem" }
when:
- is_consul_server
- name: Copy certs to servers
become: true
ansible.builtin.copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: 0755
loop:
- { src: "certs/consul/consul-agent-ca.pem", dest: "{{ consul_opt_dir }}/certs/consul-agent-ca.pem" }
- { src: "certs/consul/{{ datacenter_name }}-server-consul-0.pem", dest: "{{ consul_opt_dir }}/certs/{{ datacenter_name }}-server-consul-0.pem" }
- { src: "certs/consul/{{ datacenter_name }}-server-consul-0-key.pem", dest: "{{ consul_opt_dir }}/certs/{{ datacenter_name }}-server-consul-0-key.pem" }
when:
- is_consul_server
- name: Copy certs to clients
become: true
ansible.builtin.copy:
src: certs/consul/consul-agent-ca.pem
dest: "{{ consul_opt_dir }}/certs/consul-agent-ca.pem"
mode: 0755
when:
- is_consul_client
- not is_consul_server
- name: Copy certs to clients
become: true
ansible.builtin.copy:
src: certs/consul/consul-agent-ca.pem
dest: "{{ consul_opt_dir }}/certs/consul-agent-ca.pem"
mode: 0755
when:
- is_consul_client
- not is_consul_server
- name: "Set owner of files to consul:consul (debian)"
become: true
ansible.builtin.file:
path: "{{ consul_opt_dir }}"
owner: consul
group: consul
recurse: true
when:
- ansible_os_family == 'Debian'
- name: "Set owner of files to consul:consul (debian)"
become: true
ansible.builtin.file:
path: "{{ consul_opt_dir }}"
owner: consul
group: consul
recurse: true
when:
- ansible_os_family == 'Debian'
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }}"
become: true
ansible.builtin.file:
path: "{{ consul_opt_dir }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- mac_intel or mac_arm or inventory_hostname == 'synology'
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }}"
become: true
ansible.builtin.file:
path: "{{ consul_opt_dir }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- mac_intel or mac_arm or inventory_hostname == 'synology'
- name: "Template out Consul configuration file"
block:
- name: "Create {{ interpolated_consul_configuration_dir }}"
become: true
ansible.builtin.file:
path: "{{ interpolated_consul_configuration_dir }}"
state: directory
mode: 0755
- name: "Create {{ interpolated_consul_configuration_dir }}"
become: true
ansible.builtin.file:
path: "{{ interpolated_consul_configuration_dir }}"
state: directory
mode: 0755
- name: Copy consul base config file
become: true
ansible.builtin.template:
src: consul.hcl.j2
dest: "{{ interpolated_consul_configuration_dir }}/consul.hcl"
mode: 0644
- name: Copy consul base config file
become: true
ansible.builtin.template:
src: consul.hcl.j2
dest: "{{ interpolated_consul_configuration_dir }}/consul.hcl"
mode: 0644
- name: "Set owner of files to consul:consul (Debian)"
become: true
ansible.builtin.file:
path: "{{ interpolated_consul_configuration_dir }}"
owner: consul
group: consul
recurse: true
when:
- ansible_os_family == 'Debian'
- name: "Set owner of files to consul:consul (Debian)"
become: true
ansible.builtin.file:
path: "{{ interpolated_consul_configuration_dir }}"
owner: consul
group: consul
recurse: true
when:
- ansible_os_family == 'Debian'
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }}"
become: true
ansible.builtin.file:
path: "{{ interpolated_consul_configuration_dir }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- mac_intel or mac_arm or inventory_hostname == 'synology'
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }}"
become: true
ansible.builtin.file:
path: "{{ interpolated_consul_configuration_dir }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- mac_intel or mac_arm or inventory_hostname == 'synology'
- name: "Set owner of root consul dir to {{ ansible_user_uid }}:{{ ansible_user_gid }} (synology)"
become: true
ansible.builtin.file:
path: /volume1/docker/consul/
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- inventory_hostname == 'synology'
- name: "Set owner of root consul dir to {{ ansible_user_uid }}:{{ ansible_user_gid }} (synology)"
become: true
ansible.builtin.file:
path: /volume1/docker/consul/
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- inventory_hostname == 'synology'
- name: "Install Consul binary"
block:
- name: "Set fact: need install consul?"
ansible.builtin.set_fact:
need_consul_install: false
when:
- consul_download_uri is defined
- name: "Set fact: need install consul?"
ansible.builtin.set_fact:
need_consul_install: false
when:
- consul_download_uri is defined
- name: Check if Consul is installed
ansible.builtin.stat:
path: /usr/local/bin/consul
register: consul_binary_file_location
when:
- consul_download_uri is defined
- name: Check if Consul is installed
ansible.builtin.stat:
path: /usr/local/bin/consul
register: consul_binary_file_location
when:
- consul_download_uri is defined
- name: "Set fact: need consul install?"
ansible.builtin.set_fact:
need_consul_install: true
when:
- consul_download_uri is defined
- not consul_binary_file_location.stat.exists
- name: "Set fact: need consul install?"
ansible.builtin.set_fact:
need_consul_install: true
when:
- consul_download_uri is defined
- not consul_binary_file_location.stat.exists
- name: Check current version of Consul
ansible.builtin.shell:
cmd: /usr/local/bin/consul --version | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
changed_when: false
register: installed_consul_version
check_mode: false
when:
- consul_download_uri is defined
- not need_consul_install
- name: Check current version of Consul
ansible.builtin.shell:
cmd: /usr/local/bin/consul --version | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
changed_when: false
register: installed_consul_version
check_mode: false
when:
- consul_download_uri is defined
- not need_consul_install
- name: "Set fact: need consul install?"
ansible.builtin.set_fact:
need_consul_install: true
when:
- consul_download_uri is defined
- not need_consul_install
- installed_consul_version.stdout is version(consul_version, '<')
- name: "Set fact: need consul install?"
ansible.builtin.set_fact:
need_consul_install: true
when:
- consul_download_uri is defined
- not need_consul_install
- installed_consul_version.stdout is version(consul_version, '<')
- name: Install Consul
become: true
ansible.builtin.unarchive:
src: "{{ consul_download_uri }}"
dest: /usr/local/bin
remote_src: true
when:
- consul_download_uri is defined
- need_consul_install
- name: Install Consul
become: true
ansible.builtin.unarchive:
src: "{{ consul_download_uri }}"
dest: /usr/local/bin
remote_src: true
when:
- consul_download_uri is defined
- need_consul_install
- name: "Validate consul config"
ansible.builtin.command:
cmd: "/usr/local/bin/consul validate {{ interpolated_consul_configuration_dir }}"
cmd: "/usr/local/bin/consul validate {{ interpolated_consul_configuration_dir }}"
register: consul_config_valid
changed_when: false
failed_when: consul_config_valid.rc != 0
when:
- inventory_hostname != 'synology'
- inventory_hostname != 'synology'
- name: "Copy system.d or launchctl service files"
block:
- name: Ensure /Library/LaunchAgents exists (MacOSX)
ansible.builtin.file:
path: "{{ consul_plist_macos | dirname }}"
state: directory
mode: 0755
when:
- ansible_os_family == 'Darwin'
- name: Ensure /Library/LaunchAgents exists (MacOSX)
ansible.builtin.file:
path: "{{ consul_plist_macos | dirname }}"
state: directory
mode: 0755
when:
- ansible_os_family == 'Darwin'
- name: Create Consul launchd service (MacOSX)
ansible.builtin.template:
src: consul.launchd.j2
dest: "{{ consul_plist_macos }}"
mode: 0644
when:
- ansible_os_family == 'Darwin'
- name: Create Consul launchd service (MacOSX)
ansible.builtin.template:
src: consul.launchd.j2
dest: "{{ consul_plist_macos }}"
mode: 0644
when:
- ansible_os_family == 'Darwin'
- name: Create Consul service (Debian)
become: true
ansible.builtin.template:
src: consul.service.j2
dest: /etc/systemd/system/consul.service
mode: 0644
when:
- ansible_os_family == 'Debian'
- name: Create Consul service (Debian)
become: true
ansible.builtin.template:
src: consul.service.j2
dest: /etc/systemd/system/consul.service
mode: 0644
when:
- ansible_os_family == 'Debian'
- name: "Start Consul"
block:
- name: Load the Consul agent (MacOSX)
ansible.builtin.command:
cmd: "launchctl load -w {{ consul_plist_macos }}"
when:
- mac_intel or mac_arm
- "'nostart' not in ansible_run_tags"
- name: Load the Consul agent (MacOSX)
ansible.builtin.command:
cmd: "launchctl load -w {{ consul_plist_macos }}"
register: consul_loaded
changed_when: consul_loaded.rc == 0
failed_when: consul_loaded.rc > 0
when:
- mac_intel or mac_arm
- "'nostart' not in ansible_run_tags"
- name: Start Consul (Debian)
become: true
ansible.builtin.systemd:
name: consul
enabled: true
state: started
when:
- ansible_os_family == 'Debian'
- "'nostart' not in ansible_run_tags"
- name: Start Consul (Debian)
become: true
ansible.builtin.systemd:
name: consul
enabled: true
state: started
when:
- ansible_os_family == 'Debian'
- "'nostart' not in ansible_run_tags"
- name: Make sure Consul service is really running
ansible.builtin.command:
cmd: systemctl is-active consul
register: is_consul_really_running
changed_when: false
failed_when: is_consul_really_running.rc != 0
when:
- ansible_os_family == 'Debian'
- "'nostart' not in ansible_run_tags"
- name: Make sure Consul service is really running
ansible.builtin.command:
cmd: systemctl is-active consul
register: is_consul_really_running
changed_when: false
failed_when: is_consul_really_running.rc != 0
when:
- ansible_os_family == 'Debian'
- "'nostart' not in ansible_run_tags"
- name: "Copy Consul service checks to synology"
when:
- inventory_hostname == 'synology'
- inventory_hostname == 'synology'
block:
- name: Copy config file
ansible.builtin.template:
src: consul_services/consul_synology_checks.json.j2
dest: "{{ interpolated_consul_configuration_dir }}/service_checks.json"
mode: 0644
- name: Copy config file
ansible.builtin.template:
src: consul_services/consul_synology_checks.json.j2
dest: "{{ interpolated_consul_configuration_dir }}/service_checks.json"
mode: 0644
- name: Reload configuration file
ansible.builtin.uri:
url: "http://{{ synology_second_ip }}:8500/v1/agent/reload"
method: PUT
status_code: 200
ignore_errors: true
check_mode: false
register: consul_agent_reload_http_response
failed_when: consul_agent_reload_http_response.status != 200
- name: Reload configuration file
ansible.builtin.uri:
url: "http://{{ synology_second_ip }}:8500/v1/agent/reload"
method: PUT
status_code: 200
ignore_errors: true
check_mode: false
register: consul_agent_reload_http_response
failed_when: consul_agent_reload_http_response.status != 200
- name: Debug when consul agent reload fails
ansible.builtin.debug:
var: consul_agent_reload_http_response.msg
check_mode: false
when: consul_agent_reload_http_response.status != 200
- name: Debug when consul agent reload fails
ansible.builtin.debug:
var: consul_agent_reload_http_response.msg
check_mode: false
when: consul_agent_reload_http_response.status != 200
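A hedged sketch for spot-checking a node after these Consul tasks run; the placeholders stand in for host-specific values and are not taken from the playbook output:
$ consul validate <consul configuration dir>              # same check the playbook performs
$ consul members                                          # every server and client should report alive
$ curl -X PUT http://<synology ip>:8500/v1/agent/reload   # manual equivalent of the reload task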

tasks/debug.yml
View File

@@ -33,5 +33,5 @@
# when:
# - ansible_facts['system_vendor'] is search("Synology")
- name: "end play"
- name: "End play"
ansible.builtin.meta: end_play

tasks/docker.yml
View File

@@ -4,85 +4,91 @@
- name: Check if Docker is already present
ansible.builtin.command:
cmd: docker --version
cmd: docker --version
register: docker_command_result
changed_when: docker_command_result.rc == 1
failed_when: false
- name: install docker on Debian
- name: Install docker on Debian
when: ansible_os_family == 'Debian'
block:
- name: "Add docker local filesystem storage directory"
ansible.builtin.file:
path: "{{ rpi_localfs_service_storage }}"
mode: 0755
state: directory
- name: "Add docker local filesystem storage directory"
ansible.builtin.file:
path: "{{ rpi_localfs_service_storage }}"
mode: 0755
state: directory
- name: Download Docker install convenience script
ansible.builtin.get_url:
url: "https://get.docker.com/"
dest: /tmp/get-docker.sh
mode: 0775
when: docker_command_result.rc == 1
- name: Download Docker install convenience script
ansible.builtin.get_url:
url: "https://get.docker.com/"
dest: /tmp/get-docker.sh
mode: 0775
when: docker_command_result.rc == 1
- name: Run Docker install convenience script
ansible.builtin.command: /tmp/get-docker.sh
environment:
CHANNEL: stable
when: docker_command_result.rc == 1
- name: Run Docker install convenience script
ansible.builtin.command: /tmp/get-docker.sh
environment:
CHANNEL: stable
register: docker_install
failed_when: docker_install.rc > 0
changed_when: docker_install.rc == 0
when: docker_command_result.rc == 1
- name: Make sure Docker CE is the version specified
ansible.builtin.apt:
name: "docker-ce"
state: present
when: docker_command_result.rc == 1
- name: Make sure Docker CE is the version specified
ansible.builtin.apt:
name: "docker-ce"
state: present
when: docker_command_result.rc == 1
- name: Ensure Docker is started
ansible.builtin.service:
name: docker
state: started
enabled: true
- name: Ensure Docker is started
ansible.builtin.service:
name: docker
state: started
enabled: true
- name: Ensure docker users are added to the docker group
become: true
ansible.builtin.user:
name: "{{ ansible_user }}"
groups: docker
append: true
when: docker_command_result.rc == 1
- name: Ensure docker users are added to the docker group
become: true
ansible.builtin.user:
name: "{{ ansible_user }}"
groups: docker
append: true
when: docker_command_result.rc == 1
- name: install docker on macOS
- name: Install docker on macOS
when: "'macs' in group_names"
block:
- name: "Add docker directory to ~/Library"
ansible.builtin.file:
path: "{{ mac_localfs_service_storage }}"
mode: 0755
state: directory
- name: "Add docker directory to ~/Library"
ansible.builtin.file:
path: "{{ mac_localfs_service_storage }}"
mode: 0755
state: directory
- name: install base homebrew packages
community.general.homebrew:
name: docker
state: present
update_homebrew: false
upgrade_all: false
when: docker_command_result.rc == 1
- name: Install base homebrew packages
community.general.homebrew:
name: docker
state: present
update_homebrew: false
upgrade_all: false
when: docker_command_result.rc == 1
- name: open docker application
ansible.builtin.command:
cmd: open /Applications/Docker.app
when: docker_command_result.rc == 1
- name: Open docker application
ansible.builtin.command:
cmd: open /Applications/Docker.app
register: docker_open_app
failed_when: docker_open_app.rc > 0
changed_when: docker_open_app.rc == 0
when: docker_command_result.rc == 1
- name: Must install Docker manually
ansible.builtin.debug:
msg: |
Docker must be installed manually on MacOS. Log in to mac to install then rerun playbook
- name: Must install Docker manually
ansible.builtin.debug:
msg: |
Docker must be installed manually on MacOS. Log in to mac to install then rerun playbook
Be certain to configure the following:
- run on login
- add '{{ mac_storage_mount_point }}' to mountable file system directories
when: docker_command_result.rc == 1
Be certain to configure the following:
- run on login
- add '{{ mac_storage_mount_point }}' to mountable file system directories
when: docker_command_result.rc == 1
- name: end play
ansible.builtin.meta: end_play
when: docker_command_result.rc == 1
- name: End play
ansible.builtin.meta: end_play
when: docker_command_result.rc == 1
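For reference, the Debian branch above is roughly equivalent to running Docker's convenience script by hand; a hedged sketch, not the playbook's exact invocation:
$ curl -fsSL https://get.docker.com -o /tmp/get-docker.sh
$ sudo CHANNEL=stable sh /tmp/get-docker.sh
$ sudo usermod -aG docker "$USER"        # mirrors the docker-group task
$ sudo systemctl enable --now docker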

tasks/interpolated_variables.yml
View File

@@ -8,46 +8,46 @@
- name: "Set local filesystem location (pis)"
ansible.builtin.set_fact:
interpolated_localfs_service_storage: "{{ rpi_localfs_service_storage }}"
interpolated_localfs_service_storage: "{{ rpi_localfs_service_storage }}"
changed_when: false
when:
- "'pis' in group_names"
- "'pis' in group_names"
- name: "Set local filesystem location (macs)"
ansible.builtin.set_fact:
interpolated_localfs_service_storage: "{{ mac_localfs_service_storage }}"
interpolated_localfs_service_storage: "{{ mac_localfs_service_storage }}"
changed_when: false
when:
- "'macs' in group_names"
- "'macs' in group_names"
- name: "Set NFS mount location (pis)"
ansible.builtin.set_fact:
interpolated_nfs_service_storage: "{{ rpi_nfs_mount_point }}"
interpolated_nfs_service_storage: "{{ rpi_nfs_mount_point }}"
changed_when: false
when:
- "'pis' in group_names"
- "'pis' in group_names"
- name: "Set NFS mount location location (macs)"
ansible.builtin.set_fact:
interpolated_nfs_service_storage: "{{ mac_storage_mount_point }}"
interpolated_nfs_service_storage: "{{ mac_storage_mount_point }}"
changed_when: false
when:
- "'macs' in group_names"
- "'macs' in group_names"
- name: "set consul configuration directory (synology)"
- name: "Set consul configuration directory (synology)"
ansible.builtin.set_fact:
interpolated_consul_configuration_dir: "{{ synology_consul_configuration_dir }}"
interpolated_consul_configuration_dir: "{{ synology_consul_configuration_dir }}"
when:
- inventory_hostname == 'synology'
- inventory_hostname == 'synology'
- name: "set consul configuration directory (pis)"
- name: "Set consul configuration directory (pis)"
ansible.builtin.set_fact:
interpolated_consul_configuration_dir: "{{ rpi_consul_configuration_dir }}"
interpolated_consul_configuration_dir: "{{ rpi_consul_configuration_dir }}"
when:
- "'pis' in group_names"
- "'pis' in group_names"
- name: "set consul configuration directory (macs)"
- name: "Set consul configuration directory (macs)"
ansible.builtin.set_fact:
interpolated_consul_configuration_dir: "{{ mac_consul_configuration_dir }}"
interpolated_consul_configuration_dir: "{{ mac_consul_configuration_dir }}"
when:
- "'macs' in group_names"
- "'macs' in group_names"

tasks/logrotate.yml
View File

@@ -4,29 +4,29 @@
#
# NOTE: This task exists because the arillso.logrotate role fails completely on macOS
- name: add service_backups.log to logrotate
- name: Add service_backups.log to logrotate
become: true
vars:
logrotate_applications:
- name: service_backups
definitions:
- logs:
- "{{ rpi_nfs_mount_point }}/pi-cluster/logs/service_backups.log"
options:
- rotate 1
- size 100k
- missingok
- notifempty
- su root root
- extension .log
- compress
- nodateext
- nocreate
- delaycompress
logrotate_applications:
- name: service_backups
definitions:
- logs:
- "{{ rpi_nfs_mount_point }}/pi-cluster/logs/service_backups.log"
options:
- rotate 1
- size 100k
- missingok
- notifempty
- su root root
- extension .log
- compress
- nodateext
- nocreate
- delaycompress
ansible.builtin.import_role:
name: arillso.logrotate
name: arillso.logrotate
failed_when: false
ignore_errors: true
when:
- "'macs' not in group_names"
- is_cluster_leader
- "'macs' not in group_names"
- is_cluster_leader
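To dry-run the resulting rotation rule on the cluster leader, a hedged check; the /etc/logrotate.d/service_backups path assumes the arillso.logrotate role names its per-application config after the application:
$ sudo logrotate -d /etc/logrotate.d/service_backups    # -d reports what would rotate without touching the log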

tasks/nomad.yml
View File

@@ -4,243 +4,243 @@
- name: "Set variables needed to install Nomad"
block:
- name: "set variable: check if we have a mounted USB drive (Debian)"
ansible.builtin.stat:
path: "{{ rpi_usb_drive_mount_point }}"
register: have_usb_drive
changed_when: false
when:
- ansible_os_family == 'Debian'
- name: "Set variable: check if we have a mounted USB drive (Debian)"
ansible.builtin.stat:
path: "{{ rpi_usb_drive_mount_point }}"
register: have_usb_drive
changed_when: false
when:
- ansible_os_family == 'Debian'
- name: "set variable: Use USB drive for nomad /opt (Debian)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "{{ rpi_usb_drive_mount_point }}/opt/nomad"
when:
- ansible_os_family == 'Debian'
- have_usb_drive.stat.exists
- name: "Set variable: Use USB drive for nomad /opt (Debian)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "{{ rpi_usb_drive_mount_point }}/opt/nomad"
when:
- ansible_os_family == 'Debian'
- have_usb_drive.stat.exists
- name: "set variable: Use root dist for nomad /opt (Debian)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "/opt/nomad"
when:
- ansible_os_family == 'Debian'
- not have_usb_drive.stat.exists
- name: "Set variable: Use root dist for nomad /opt (Debian)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "/opt/nomad"
when:
- ansible_os_family == 'Debian'
- not have_usb_drive.stat.exists
- name: "set variable: Use ~/library for /opt files (macOSX)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "/Users/{{ ansible_user }}/Library/nomad"
when:
- ansible_os_family == 'Darwin'
- name: "Set variable: Use ~/library for /opt files (macOSX)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "/Users/{{ ansible_user }}/Library/nomad"
when:
- ansible_os_family == 'Darwin'
- name: "set variable: Set Nomad download Binary (armv7l)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_linux_arm.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Set variable: Set Nomad download Binary (armv7l)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_linux_arm.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "set variable: Set Nomad download Binary (aarch64)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_linux_arm64.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "Set variable: Set Nomad download Binary (aarch64)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_linux_arm64.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "set variable: Set Nomad download Binary (MacOSX)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_darwin_amd64.zip"
when:
- mac_intel
- name: "Set variable: Set Nomad download Binary (MacOSX)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_darwin_amd64.zip"
when:
- mac_intel
- name: Assert that we can install Nomad
ansible.builtin.assert:
that:
- nomad_download_file_uri is defined
- nomad_opt_dir_location is defined
fail_msg: "Unable to install Nomad on this host"
- name: Assert that we can install Nomad
ansible.builtin.assert:
that:
- nomad_download_file_uri is defined
- nomad_opt_dir_location is defined
fail_msg: "Unable to install Nomad on this host"
- name: "Create Nomad user and group (Debian)"
when: ansible_os_family == 'Debian'
block:
- name: "Ensure group 'nomad' exists (Debian)"
become: true
ansible.builtin.group:
name: nomad
state: present
- name: "Ensure group 'nomad' exists (Debian)"
become: true
ansible.builtin.group:
name: nomad
state: present
- name: "Add the user 'nomad' with group 'nomad' (Debian)"
become: true
ansible.builtin.user:
name: nomad
group: nomad
- name: "Add the user 'nomad' with group 'nomad' (Debian)"
become: true
ansible.builtin.user:
name: nomad
group: nomad
- name: "Add user 'nomad' to docker and sudo groups (Debian)"
become: true
ansible.builtin.user:
user: nomad
groups: docker,sudo
append: true
- name: "Add user 'nomad' to docker and sudo groups (Debian)"
become: true
ansible.builtin.user:
user: nomad
groups: docker,sudo
append: true
- name: "Create Nomad /opt storage"
block:
- name: "create {{ nomad_opt_dir_location }} directories"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
recurse: true
mode: 0755
loop:
- "{{ nomad_opt_dir_location }}/logs"
- "{{ nomad_opt_dir_location }}/plugins"
- "{{ nomad_opt_dir_location }}/certs"
- name: "Create {{ nomad_opt_dir_location }} directories"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
recurse: true
mode: 0755
loop:
- "{{ nomad_opt_dir_location }}/logs"
- "{{ nomad_opt_dir_location }}/plugins"
- "{{ nomad_opt_dir_location }}/certs"
- name: Copy server certs
become: true
ansible.builtin.copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: 0755
loop:
- { src: certs/nomad/nomad-ca.pem, dest: "{{ nomad_opt_dir_location }}/certs/nomad-ca.pem" }
- { src: certs/nomad/server.pem, dest: "{{ nomad_opt_dir_location }}/certs/server.pem" }
- { src: certs/nomad/server-key.pem, dest: "{{ nomad_opt_dir_location }}/certs/server-key.pem" }
notify: "restart nomad"
when: is_nomad_server
- name: Copy server certs
become: true
ansible.builtin.copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: 0755
loop:
- { src: certs/nomad/nomad-ca.pem, dest: "{{ nomad_opt_dir_location }}/certs/nomad-ca.pem" }
- { src: certs/nomad/server.pem, dest: "{{ nomad_opt_dir_location }}/certs/server.pem" }
- { src: certs/nomad/server-key.pem, dest: "{{ nomad_opt_dir_location }}/certs/server-key.pem" }
notify: "restart nomad"
when: is_nomad_server
- name: Copy client certs
become: true
ansible.builtin.copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: 0755
loop:
- { src: certs/nomad/nomad-ca.pem, dest: "{{ nomad_opt_dir_location }}/certs/nomad-ca.pem" }
- { src: certs/nomad/client.pem, dest: "{{ nomad_opt_dir_location }}/certs/client.pem" }
- { src: certs/nomad/client-key.pem, dest: "{{ nomad_opt_dir_location }}/certs/client-key.pem" }
notify: "restart nomad"
when: is_nomad_client
- name: Copy client certs
become: true
ansible.builtin.copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: 0755
loop:
- { src: certs/nomad/nomad-ca.pem, dest: "{{ nomad_opt_dir_location }}/certs/nomad-ca.pem" }
- { src: certs/nomad/client.pem, dest: "{{ nomad_opt_dir_location }}/certs/client.pem" }
- { src: certs/nomad/client-key.pem, dest: "{{ nomad_opt_dir_location }}/certs/client-key.pem" }
notify: "restart nomad"
when: is_nomad_client
- name: "set owner of files to nomad:nomad (debian)"
become: true
ansible.builtin.file:
path: "{{ nomad_opt_dir_location }}"
owner: nomad
group: nomad
recurse: true
when: ansible_os_family == 'Debian'
- name: "Set owner of files to nomad:nomad (debian)"
become: true
ansible.builtin.file:
path: "{{ nomad_opt_dir_location }}"
owner: nomad
group: nomad
recurse: true
when: ansible_os_family == 'Debian'
- name: "set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }} (MacOSX)"
become: true
ansible.builtin.file:
path: "{{ nomad_opt_dir_location }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when: ansible_os_family != 'Debian'
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }} (MacOSX)"
become: true
ansible.builtin.file:
path: "{{ nomad_opt_dir_location }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when: ansible_os_family != 'Debian'
- name: "Template out the configuration file"
block:
- name: "create {{ nomad_configuration_dir }}"
become: true
ansible.builtin.file:
path: "{{ nomad_configuration_dir }}"
state: directory
mode: 0755
- name: "Create {{ nomad_configuration_dir }}"
become: true
ansible.builtin.file:
path: "{{ nomad_configuration_dir }}"
state: directory
mode: 0755
- name: copy base config file
become: true
ansible.builtin.template:
src: nomad.hcl.j2
dest: "{{ nomad_configuration_dir }}/nomad.hcl"
mode: 0644
notify: "restart nomad"
- name: Copy base config file
become: true
ansible.builtin.template:
src: nomad.hcl.j2
dest: "{{ nomad_configuration_dir }}/nomad.hcl"
mode: 0644
notify: "restart nomad"
- name: "set owner of files to nomad:nomad (Debian)"
become: true
ansible.builtin.file:
path: "{{ nomad_configuration_dir }}"
owner: nomad
group: nomad
recurse: true
when:
- ansible_os_family == 'Debian'
- name: "Set owner of files to nomad:nomad (Debian)"
become: true
ansible.builtin.file:
path: "{{ nomad_configuration_dir }}"
owner: nomad
group: nomad
recurse: true
when:
- ansible_os_family == 'Debian'
- name: Install or Update Nomad
block:
- name: "set fact: do we need a nomad install?"
ansible.builtin.set_fact:
need_nomad_install: false
- name: "Set fact: do we need a nomad install?"
ansible.builtin.set_fact:
need_nomad_install: false
- name: Check if nomad is installed
ansible.builtin.stat:
path: /usr/local/bin/nomad
register: nomad_binary_file_location
- name: Check if nomad is installed
ansible.builtin.stat:
path: /usr/local/bin/nomad
register: nomad_binary_file_location
- name: "set fact: do we need a nomad install"
ansible.builtin.set_fact:
need_nomad_install: true
when:
- not nomad_binary_file_location.stat.exists
- name: "Set fact: do we need a nomad install"
ansible.builtin.set_fact:
need_nomad_install: true
when:
- not nomad_binary_file_location.stat.exists
- name: Check current version of Nomad
ansible.builtin.shell: /usr/local/bin/nomad --version | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
register: current_nomad_version
check_mode: false
changed_when: false
when:
- not need_nomad_install
- name: Check current version of Nomad
ansible.builtin.shell: /usr/local/bin/nomad --version | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
register: current_nomad_version
check_mode: false
changed_when: false
when:
- not need_nomad_install
- name: "set fact: do we need a nomad install"
ansible.builtin.set_fact:
need_nomad_install: true
when:
- not need_nomad_install
- current_nomad_version.stdout is version(nomad_version, '<')
- name: "Set fact: do we need a nomad install"
ansible.builtin.set_fact:
need_nomad_install: true
when:
- not need_nomad_install
- current_nomad_version.stdout is version(nomad_version, '<')
- name: install Nomad
become: true
ansible.builtin.unarchive:
src: "{{ nomad_download_file_uri }}"
dest: /usr/local/bin
remote_src: true
notify: "restart nomad"
when:
- need_nomad_install
- name: Install Nomad
become: true
ansible.builtin.unarchive:
src: "{{ nomad_download_file_uri }}"
dest: /usr/local/bin
remote_src: true
notify: "restart nomad"
when:
- need_nomad_install
- name: "Copy system.d or launchctrl service files"
block:
- name: ensure /Library/LaunchAgents exists (MacOSX)
ansible.builtin.file:
path: "{{ nomad_plist_macos | dirname }}"
state: directory
mode: 0755
when:
- ansible_os_family == 'Darwin'
- name: Ensure /Library/LaunchAgents exists (MacOSX)
ansible.builtin.file:
path: "{{ nomad_plist_macos | dirname }}"
state: directory
mode: 0755
when:
- ansible_os_family == 'Darwin'
- name: create nomad launchd service (MacOSX)
ansible.builtin.template:
src: nomad.launchd.j2
dest: "{{ nomad_plist_macos }}"
mode: 0644
notify: "restart nomad"
when:
- ansible_os_family == 'Darwin'
- name: Create nomad launchd service (MacOSX)
ansible.builtin.template:
src: nomad.launchd.j2
dest: "{{ nomad_plist_macos }}"
mode: 0644
notify: "restart nomad"
when:
- ansible_os_family == 'Darwin'
- name: create nomad service (Debian)
become: true
ansible.builtin.template:
src: nomad.service.j2
dest: /etc/systemd/system/nomad.service
mode: 0644
notify: "restart nomad"
when:
- ansible_os_family == 'Debian'
- name: Create nomad service (Debian)
become: true
ansible.builtin.template:
src: nomad.service.j2
dest: /etc/systemd/system/nomad.service
mode: 0644
notify: "restart nomad"
when:
- ansible_os_family == 'Debian'
- name: "start nomad, if stopped"
- name: "Start nomad, if stopped"
ansible.builtin.shell:
cmd: "/usr/local/bin/nomad node status -self -short | grep {{ inventory_hostname }}"
cmd: "/usr/local/bin/nomad node status -self -short | grep {{ inventory_hostname }}"
register: node_status_response
ignore_errors: true
failed_when: false


@@ -10,67 +10,67 @@
- name: "Sync Nomad Jobs"
block:
- name: Remove nomad jobs directory
ansible.builtin.file:
path: "{{ nomad_jobfile_location }}"
state: absent
when:
- is_nomad_client or is_nomad_server or ("'macs' in group_names")
- clean_nomad_jobs
- name: Remove nomad jobs directory
ansible.builtin.file:
path: "{{ nomad_jobfile_location }}"
state: absent
when:
- is_nomad_client or is_nomad_server or ("'macs' in group_names")
- clean_nomad_jobs
- name: (Re)Create nomad jobs directory
ansible.builtin.file:
path: "{{ nomad_jobfile_location }}"
state: directory
mode: 0755
when:
- is_nomad_client or is_nomad_server or ("'macs' in group_names")
- "'nas' not in group_names"
- name: (Re)Create nomad jobs directory
ansible.builtin.file:
path: "{{ nomad_jobfile_location }}"
state: directory
mode: 0755
when:
- is_nomad_client or is_nomad_server or ("'macs' in group_names")
- "'nas' not in group_names"
- name: synchronize nomad job templates (jinja)
ansible.builtin.template:
src: "{{ item }}"
dest: "{{ nomad_jobfile_location }}/{{ item | basename | regex_replace('.j2$', '') }}"
mode: 0644
with_fileglob: "templates/nomad_jobs/*.j2"
when:
- is_nomad_client or is_nomad_server or ("'macs' in group_names")
- "'nas' not in group_names"
- name: Synchronize nomad job templates (jinja)
ansible.builtin.template:
src: "{{ item }}"
dest: "{{ nomad_jobfile_location }}/{{ item | basename | regex_replace('.j2$', '') }}"
mode: 0644
with_fileglob: "templates/nomad_jobs/*.j2"
when:
- is_nomad_client or is_nomad_server or ("'macs' in group_names")
- "'nas' not in group_names"
- name: synchronize nomad job templates (hcl)
ansible.builtin.template:
src: "{{ item }}"
dest: "{{ nomad_jobfile_location }}/{{ item | basename }}"
mode: 0644
with_fileglob: "templates/nomad_jobs/*.hcl"
when:
- is_nomad_client or is_nomad_server or ("'macs' in group_names")
- "'nas' not in group_names"
- name: Synchronize nomad job templates (hcl)
ansible.builtin.template:
src: "{{ item }}"
dest: "{{ nomad_jobfile_location }}/{{ item | basename }}"
mode: 0644
with_fileglob: "templates/nomad_jobs/*.hcl"
when:
- is_nomad_client or is_nomad_server or ("'macs' in group_names")
- "'nas' not in group_names"
- name: Ensure we have local storage folders
become: true
ansible.builtin.file:
path: "{{ interpolated_localfs_service_storage }}/{{ item }}"
state: directory
mode: 0777
group: "{{ ansible_user_gid }}"
owner: "{{ ansible_user_uid }}"
when:
- is_nomad_client or is_nomad_server
loop: "{{ service_localfs_dirs }}"
- name: Ensure we have local storage folders
become: true
ansible.builtin.file:
path: "{{ interpolated_localfs_service_storage }}/{{ item }}"
state: directory
mode: 0777
group: "{{ ansible_user_gid }}"
owner: "{{ ansible_user_uid }}"
when:
- is_nomad_client or is_nomad_server
loop: "{{ service_localfs_dirs }}"
- name: "Sync docker compose files"
- name: Sync docker compose files
when: is_docker_compose_client
block:
- name: confirm compose file dir exists
ansible.builtin.file:
path: "{{ docker_compose_file_location }}"
state: directory
mode: 0755
- name: Confirm compose file dir exists
ansible.builtin.file:
path: "{{ docker_compose_file_location }}"
state: directory
mode: 0755
- name: synchronize docker-compose files
ansible.builtin.template:
src: "{{ item }}"
dest: "{{ docker_compose_file_location }}/{{ item | basename | regex_replace('.j2$', '') }}"
mode: 0644
with_fileglob: "../templates/docker_compose_files/*.j2"
- name: Synchronize docker-compose files
ansible.builtin.template:
src: "{{ item }}"
dest: "{{ docker_compose_file_location }}/{{ item | basename | regex_replace('.j2$', '') }}"
mode: 0644
with_fileglob: "../templates/docker_compose_files/*.j2"
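Each of the fileglob-driven template tasks above derives its destination filename by taking the basename of the source template and stripping the `.j2` suffix. A small illustration of that filter chain, using a made-up path:

```yaml
# Illustrative only: the path below is hypothetical.
- name: "Example: derive a destination filename from a .j2 template path"
  ansible.builtin.debug:
    msg: "{{ '/some/templates/docker_compose_files/whoami.yml.j2' | basename | regex_replace('.j2$', '') }}"
  # prints "whoami.yml"
```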


@@ -4,64 +4,64 @@
- name: "Update and install APT packages"
when:
- ansible_os_family != 'Darwin'
- manage_apt_packages_list
- ansible_os_family != 'Darwin'
- manage_apt_packages_list
block:
- name: update APT package cache
become: true
ansible.builtin.apt:
update_cache: true
cache_valid_time: 3600
- name: Update APT package cache
become: true
ansible.builtin.apt:
update_cache: true
cache_valid_time: 3600
- name: "upgrade APT to the latest packages (this may take a while)"
become: true
ansible.builtin.apt:
upgrade: safe
- name: "Upgrade APT to the latest packages (this may take a while)"
become: true
ansible.builtin.apt:
upgrade: safe
- name: "install/upgrade APT packages (this may take a while)"
become: true
ansible.builtin.apt:
pkg: "{{ item }}"
state: present
loop: "{{ apt_packages_list }}"
register: apt_output
- name: "Install/upgrade APT packages (this may take a while)"
become: true
ansible.builtin.apt:
pkg: "{{ item }}"
state: present
loop: "{{ apt_packages_list }}"
register: apt_output
- name: "Update and install Homebrew packages"
when:
- manage_homebrew_package_list
- ansible_os_family == 'Darwin'
- manage_homebrew_package_list
- ansible_os_family == 'Darwin'
block:
- name: upgrade homebrew and all packages
community.general.homebrew:
update_homebrew: true
upgrade_all: true
register: homebrew_output
ignore_errors: true
- name: Upgrade homebrew and all packages
community.general.homebrew:
update_homebrew: true
upgrade_all: true
register: homebrew_output
ignore_errors: true
- name: install base homebrew packages
community.general.homebrew:
name: "{{ homebrew_package_list | join(',') }}"
state: present
update_homebrew: false
upgrade_all: false
register: homebrew_output
- name: Install base homebrew packages
community.general.homebrew:
name: "{{ homebrew_package_list | join(',') }}"
state: present
update_homebrew: false
upgrade_all: false
register: homebrew_output
- name: homebrew packages updated or installed
ansible.builtin.debug:
msg: "{{ homebrew_output.changed_pkgs }}"
- name: Homebrew packages updated or installed
ansible.builtin.debug:
msg: "{{ homebrew_output.changed_pkgs }}"
- name: unchanged homebrew packages
ansible.builtin.debug:
msg: "{{ homebrew_output.unchanged_pkgs }}"
- name: Unchanged homebrew packages
ansible.builtin.debug:
msg: "{{ homebrew_output.unchanged_pkgs }}"
- name: install homebrew casks
community.general.homebrew_cask:
name: "{{ item }}"
state: present
install_options: "appdir=/Applications"
accept_external_apps: true
upgrade_all: false
update_homebrew: false
greedy: false
loop: "{{ homebrew_casks_list }}"
ignore_errors: true
- name: Install homebrew casks
community.general.homebrew_cask:
name: "{{ item }}"
state: present
install_options: "appdir=/Applications"
accept_external_apps: true
upgrade_all: false
update_homebrew: false
greedy: false
loop: "{{ homebrew_casks_list }}"
ignore_errors: true


@@ -5,36 +5,37 @@
- name: "Check if pull_all_repos exists"
ansible.builtin.stat:
path: "~/bin/pull_all_repos"
path: "~/bin/pull_all_repos"
check_mode: false
register: pull_script_check
- name: "Check if ~/repos exists"
ansible.builtin.stat:
path: "~/repos"
path: "~/repos"
check_mode: false
register: repos_directory_check
- name: "run pull_all_repos script"
- name: "Run pull_all_repos script"
ansible.builtin.command:
cmd: "~/bin/pull_all_repos --directory ~/repos"
cmd: "~/bin/pull_all_repos --directory ~/repos"
register: pull_script_output
when:
- not ansible_check_mode
- pull_script_check.stat.exists
- pull_script_check.stat.executable
- repos_directory_check.stat.isdir is defined
- repos_directory_check.stat.isdir
- repos_directory_check.stat.writeable
- not ansible_check_mode
- pull_script_check.stat.exists
- pull_script_check.stat.executable
- repos_directory_check.stat.isdir is defined
- repos_directory_check.stat.isdir
- repos_directory_check.stat.writeable
failed_when: pull_script_output.rc > 1
changed_when: pull_script_output.rc == 0
- name: "Output from pull_all_repos"
ansible.builtin.debug:
msg: "{{ pull_script_output.stdout }}"
msg: "{{ pull_script_output.stdout }}"
when:
- not ansible_check_mode
- pull_script_check.stat.exists
- pull_script_check.stat.executable
- repos_directory_check.stat.isdir is defined
- repos_directory_check.stat.isdir
- repos_directory_check.stat.writeable
- not ansible_check_mode
- pull_script_check.stat.exists
- pull_script_check.stat.executable
- repos_directory_check.stat.isdir is defined
- repos_directory_check.stat.isdir
- repos_directory_check.stat.writeable


@@ -1,12 +1,12 @@
---
# TASK DESCRIPTION:
# Always runs first. Confirms we can actually use Ansible
- name: sanity - user mode
- name: Sanity - user mode
become: false
ansible.builtin.debug:
msg: "sanity check: user mode"
msg: "Sanity check: user mode"
- name: sanity - become mode
- name: Sanity - become mode
become: true
ansible.builtin.debug:
msg: "sanity check: become mode"
msg: "Sanity check: become mode"


@@ -4,90 +4,92 @@
#
# NOTE: This is deprecated; I no longer use Prometheus and have migrated to Telegraf
- name: populate service facts
- name: Populate service facts
ansible.builtin.service_facts:
- name: stop node_exporter
- name: Stop node_exporter
become: true
ansible.builtin.systemd:
name: node_exporter
state: stopped
name: node_exporter
state: stopped
when: ansible_facts.services["node_exporter.service"] is defined
- name: Ensure group "prometheus" exists
become: true
ansible.builtin.group:
name: prometheus
state: present
name: prometheus
state: present
- name: Add the user 'prometheus' with group 'prometheus'
become: true
ansible.builtin.user:
name: prometheus
group: prometheus
groups: docker
append: true
name: prometheus
group: prometheus
groups: docker
append: true
# --------------- Install or Update Prometheus
- name: "set fact: need to install Prometheus?"
- name: "Set fact: need to install Prometheus?"
ansible.builtin.set_fact:
need_prometheus_install: false
need_prometheus_install: false
- name: Check if node_exporter is installed
ansible.builtin.stat:
path: /usr/local/bin/node_exporter
path: /usr/local/bin/node_exporter
register: prometheus_binary_file_location
- name: "set fact: need to install Prometheus?"
- name: "Set fact: need to install Prometheus?"
ansible.builtin.set_fact:
need_prometheus_install: true
need_prometheus_install: true
when:
- not prometheus_binary_file_location.stat.exists
- not prometheus_binary_file_location.stat.exists
- name: Check current version of Prometheus
ansible.builtin.shell: /usr/local/bin/node_exporter --version 3>&1 1>&2 2>&3 | head -n1 | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
register: current_prometheus_version
failed_when: false
changed_when: false
check_mode: false
when:
- need_prometheus_install is false
- need_prometheus_install is false
- name: "set fact: need to install Prometheus?"
- name: "Set fact: need to install Prometheus?"
ansible.builtin.set_fact:
need_prometheus_install: true
need_prometheus_install: true
when:
- need_prometheus_install is false
- current_prometheus_version.stdout != prometheus_verssion
- need_prometheus_install is false
- current_prometheus_version.stdout != prometheus_verssion
- name: install node_exporter
- name: Install node_exporter
become: true
ansible.builtin.unarchive:
src: "https://github.com/prometheus/node_exporter/releases/download/v{{ prometheus_verssion }}/node_exporter-{{ prometheus_verssion }}.linux-armv7.tar.gz"
dest: /usr/local/bin
group: prometheus
owner: prometheus
# reference for extra_opts: https://github.com/ansible/ansible/issues/27081
extra_opts:
- --strip=1
- --no-anchored
- "node_exporter"
remote_src: true
src: "https://github.com/prometheus/node_exporter/releases/download/v{{ prometheus_verssion }}/node_exporter-{{ prometheus_verssion }}.linux-armv7.tar.gz"
dest: /usr/local/bin
group: prometheus
owner: prometheus
# reference for extra_opts: https://github.com/ansible/ansible/issues/27081
extra_opts:
- --strip=1
- --no-anchored
- "node_exporter"
remote_src: true
when:
- need_prometheus_install is true
- need_prometheus_install is true
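The `extra_opts` above are handed to tar so that only the `node_exporter` binary lands in `/usr/local/bin`, without the release's top-level directory (see the linked Ansible issue). A hedged sketch of the equivalent manual extraction; the archive path is a placeholder, and the role itself uses `ansible.builtin.unarchive` rather than a raw command:

```yaml
# Illustrative only: shows the tar flags the unarchive options above map to.
- name: "Example: extract just the node_exporter binary, dropping its leading directory"
  become: true
  ansible.builtin.command:
    cmd: tar -xzf /tmp/node_exporter.tar.gz -C /usr/local/bin --strip=1 --no-anchored node_exporter
  changed_when: true
```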
- name: create node_exporter service
- name: Create node_exporter service
become: true
ansible.builtin.template:
src: node_exporter.service.j2
dest: /etc/systemd/system/node_exporter.service
mode: 0644
src: node_exporter.service.j2
dest: /etc/systemd/system/node_exporter.service
mode: 0644
- name: start node_exporter
- name: Start node_exporter
become: true
ansible.builtin.systemd:
name: node_exporter
daemon_reload: true
enabled: true
state: started
name: node_exporter
daemon_reload: true
enabled: true
state: started
when:
- "'nostart' not in ansible_run_tags"
- "'nostart' not in ansible_run_tags"


@@ -4,186 +4,187 @@
- name: "Set variables"
block:
- name: "Set tdarr local filesystem location (pis)"
ansible.builtin.set_fact:
interpolated_tdarr_dir: "{{ rpi1_tdarr_file_location }}"
changed_when: false
when:
- "'pis' in group_names"
- name: "Set tdarr local filesystem location (pis)"
ansible.builtin.set_fact:
interpolated_tdarr_dir: "{{ rpi1_tdarr_file_location }}"
changed_when: false
when:
- "'pis' in group_names"
- name: "Set tdarr local filesystem location (macs)"
ansible.builtin.set_fact:
interpolated_tdarr_dir: "{{ mac_tdarr_file_location }}"
changed_when: false
when:
- "'macs' in group_names"
- name: "Set tdarr local filesystem location (macs)"
ansible.builtin.set_fact:
interpolated_tdarr_dir: "{{ mac_tdarr_file_location }}"
changed_when: false
when:
- "'macs' in group_names"
- name: "set variable: Set tdarr download Binary (armv7l)"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/linux_arm/Tdarr_Updater.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Set variable: Set tdarr download Binary (armv7l)"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/linux_arm/Tdarr_Updater.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "set variable: Set tdarr download Binary (MacOSX) - Intel"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/darwin_x64/Tdarr_Updater.zip"
when:
- mac_intel
- name: "Set variable: Set tdarr download Binary (MacOSX) - Intel"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/darwin_x64/Tdarr_Updater.zip"
when:
- mac_intel
- name: "set variable: Set tdarr download Binary (MacOSX) - ARM"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/darwin_arm64/Tdarr_Updater.zip"
when:
- mac_arm
- name: "Set variable: Set tdarr download Binary (MacOSX) - ARM"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/darwin_arm64/Tdarr_Updater.zip"
when:
- mac_arm
- name: "set fact: do we need a tdarr install?"
ansible.builtin.set_fact:
need_tdarr_install: false
- name: "Set fact: do we need a tdarr install?"
ansible.builtin.set_fact:
need_tdarr_install: false
- name: Assert that we can install Tdarr
ansible.builtin.assert:
that:
- tdarr_download_uri is defined
- interpolated_tdarr_dir is defined
fail_msg: "Unable to install Tdarr on this host"
- name: Assert that we can install Tdarr
ansible.builtin.assert:
that:
- tdarr_download_uri is defined
- interpolated_tdarr_dir is defined
fail_msg: "Unable to install Tdarr on this host"
- name: "Install ffmpeg and HandbrakeCLI"
block:
- name: "ensure ffmpeg and handbrake are installed (Debian)"
become: true
ansible.builtin.apt:
pkg: "{{ item }}"
state: present
loop:
- ffmpeg
- handbrake
when: "'pis' in group_names"
- name: "Ensure ffmpeg and handbrake are installed (Debian)"
become: true
ansible.builtin.apt:
pkg: "{{ item }}"
state: present
loop:
- ffmpeg
- handbrake
when: "'pis' in group_names"
- name: "ensure ffmpeg and handbrake are installed (MacOS)"
community.general.homebrew:
name: "{{ item }}"
state: present
update_homebrew: false
upgrade_all: false
loop:
- ffmpeg
- handbrake
when: "'macs' in group_names"
- name: "Ensure ffmpeg and handbrake are installed (MacOS)"
community.general.homebrew:
name: "{{ item }}"
state: present
update_homebrew: false
upgrade_all: false
loop:
- ffmpeg
- handbrake
when: "'macs' in group_names"
- name: "ensure tdarr directory exists"
- name: "Ensure tdarr directory exists"
become: true
ansible.builtin.file:
path: "{{ interpolated_tdarr_dir }}"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
state: directory
path: "{{ interpolated_tdarr_dir }}"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
state: directory
- name: "Install tdarr"
block:
- name: "set_fact: need Tdarr install?"
ansible.builtin.stat:
path: "{{ interpolated_tdarr_dir }}/configs"
register: tdarr_exists
changed_when: false
failed_when: false
- name: "Set fact: need Tdarr install?"
ansible.builtin.stat:
path: "{{ interpolated_tdarr_dir }}/configs"
register: tdarr_exists
changed_when: false
failed_when: false
- name: "set fact: do we need a tdarr install?"
ansible.builtin.set_fact:
need_tdarr_install: true
when: not tdarr_exists.stat.exists
- name: "Set fact: do we need a tdarr install?"
ansible.builtin.set_fact:
need_tdarr_install: true
when: not tdarr_exists.stat.exists
- name: Download tdarr
ansible.builtin.unarchive:
src: "{{ tdarr_download_uri }}"
dest: "{{ interpolated_tdarr_dir }}"
remote_src: true
when: need_tdarr_install
- name: Download tdarr
ansible.builtin.unarchive:
src: "{{ tdarr_download_uri }}"
dest: "{{ interpolated_tdarr_dir }}"
remote_src: true
when: need_tdarr_install
- name: Did tdarr download?
ansible.builtin.stat:
path: "{{ interpolated_tdarr_dir }}/Tdarr_Updater"
register: tdarr_installer_exists
failed_when: not tdarr_installer_exists.stat.exists
when: need_tdarr_install
- name: Did tdarr download?
ansible.builtin.stat:
path: "{{ interpolated_tdarr_dir }}/Tdarr_Updater"
register: tdarr_installer_exists
failed_when: not tdarr_installer_exists.stat.exists
when: need_tdarr_install
- name: Ensure correct permissions on Tdarr_Updater
ansible.builtin.file:
path: "{{ interpolated_tdarr_dir }}/Tdarr_Updater"
mode: 0755
when: need_tdarr_install
- name: Ensure correct permissions on Tdarr_Updater
ansible.builtin.file:
path: "{{ interpolated_tdarr_dir }}/Tdarr_Updater"
mode: 0755
when: need_tdarr_install
- name: Install tdarr
ansible.builtin.command:
cmd: "{{ interpolated_tdarr_dir }}/Tdarr_Updater"
register: tdarr_install
failed_when: tdarr_install.rc > 0
when: need_tdarr_install
- name: Install tdarr
ansible.builtin.command:
cmd: "{{ interpolated_tdarr_dir }}/Tdarr_Updater"
register: tdarr_install
failed_when: tdarr_install.rc > 0
changed_when: tdarr_install.rc == 0
when: need_tdarr_install
- name: Ensure correct permissions on server/node executables
ansible.builtin.file:
path: "{{ interpolated_tdarr_dir }}/{{ item }}"
mode: 0755
loop:
- Tdarr_Server/Tdarr_Server
- Tdarr_Node/Tdarr_Node
when: need_tdarr_install
- name: Ensure correct permissions on server/node executables
ansible.builtin.file:
path: "{{ interpolated_tdarr_dir }}/{{ item }}"
mode: 0755
loop:
- Tdarr_Server/Tdarr_Server
- Tdarr_Node/Tdarr_Node
when: need_tdarr_install
- name: "configure tdarr"
- name: "Configure tdarr"
block:
- name: update server configuration file
ansible.builtin.template:
src: Tdarr_Server_Config.json.j2
dest: "{{ interpolated_tdarr_dir }}/configs/Tdarr_Server_Config.json"
mode: 0644
when: is_tdarr_server
- name: Update server configuration file
ansible.builtin.template:
src: Tdarr_Server_Config.json.j2
dest: "{{ interpolated_tdarr_dir }}/configs/Tdarr_Server_Config.json"
mode: 0644
when: is_tdarr_server
- name: update node configuration file
ansible.builtin.template:
src: Tdarr_Node_Config.json.j2
dest: "{{ interpolated_tdarr_dir }}/configs/Tdarr_Node_Config.json"
mode: 0644
when: is_tdarr_node
- name: Update node configuration file
ansible.builtin.template:
src: Tdarr_Node_Config.json.j2
dest: "{{ interpolated_tdarr_dir }}/configs/Tdarr_Node_Config.json"
mode: 0644
when: is_tdarr_node
- name: check if consul is installed?
ansible.builtin.stat:
path: "{{ interpolated_consul_configuration_dir }}"
register: consul_installed
changed_when: false
failed_when: false
when:
- is_tdarr_server
- name: Check if consul is installed?
ansible.builtin.stat:
path: "{{ interpolated_consul_configuration_dir }}"
register: consul_installed
changed_when: false
failed_when: false
when:
- is_tdarr_server
- name: move consul service config into place
become: true
ansible.builtin.template:
src: consul_services/tdarr_service.json.j2
dest: "{{ interpolated_consul_configuration_dir }}/tdarr_service.json"
mode: 0644
when:
- is_tdarr_server
- consul_installed.stat.exists
- name: Move consul service config into place
become: true
ansible.builtin.template:
src: consul_services/tdarr_service.json.j2
dest: "{{ interpolated_consul_configuration_dir }}/tdarr_service.json"
mode: 0644
when:
- is_tdarr_server
- consul_installed.stat.exists
- name: Reload consul agent
ansible.builtin.uri:
url: "http://{{ ansible_host }}:8500/v1/agent/reload"
method: PUT
status_code: 200
ignore_errors: true
register: consul_agent_reload_http_response
failed_when: consul_agent_reload_http_response.status != 200
when:
- is_tdarr_server
- consul_installed.stat.exists
- name: Reload consul agent
ansible.builtin.uri:
url: "http://{{ ansible_host }}:8500/v1/agent/reload"
method: PUT
status_code: 200
ignore_errors: true
register: consul_agent_reload_http_response
failed_when: consul_agent_reload_http_response.status != 200
when:
- is_tdarr_server
- consul_installed.stat.exists
- name: debug when consul agent reload fails
ansible.builtin.debug:
var: consul_agent_reload_http_response.msg
when:
- is_tdarr_server
- consul_installed.stat.exists
- consul_agent_reload_http_response.status != 200
- name: Debug when consul agent reload fails
ansible.builtin.debug:
var: consul_agent_reload_http_response.msg
when:
- is_tdarr_server
- consul_installed.stat.exists
- consul_agent_reload_http_response.status != 200
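After the agent reload above, Consul should have picked up the templated tdarr service definition. A hedged way to verify from the same playbook, assuming the agent's HTTP API listens on the default port 8500; the task names below are illustrative and not part of this role:

```yaml
- name: "Example: list the services registered with the local Consul agent"
  ansible.builtin.uri:
    url: "http://{{ ansible_host }}:8500/v1/agent/services"
    return_content: true
  register: consul_agent_services

- name: "Example: show the registered service IDs"
  ansible.builtin.debug:
    msg: "{{ consul_agent_services.json.keys() | list }}"
```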
- name: mount shared storage
- name: Mount shared storage
ansible.builtin.import_tasks: cluster_storage.yml


@@ -5,146 +5,146 @@
# --------------------------------- Set variables depending on system type
- name: "Configure variables"
block:
- name: "set variable: telegraph_binary_location (Debian)"
ansible.builtin.set_fact:
telegraph_binary_location: "/usr/bin/telegraf"
when:
- ansible_os_family == 'Debian'
- name: "Set variable: telegraph_binary_location (Debian)"
ansible.builtin.set_fact:
telegraph_binary_location: "/usr/bin/telegraf"
when:
- ansible_os_family == 'Debian'
- name: "set variable: telegraph_binary_location (MacOS)"
ansible.builtin.set_fact:
telegraph_binary_location: "/usr/local/bin/telegraf"
when:
- ansible_os_family == 'Darwin'
- name: "Set variable: telegraph_binary_location (MacOS)"
ansible.builtin.set_fact:
telegraph_binary_location: "/usr/local/bin/telegraf"
when:
- ansible_os_family == 'Darwin'
- name: "set fact: telegraph_config_location (Debian)"
ansible.builtin.set_fact:
telegraph_config_location: "/etc/telegraf"
when:
- ansible_os_family == 'Debian'
- name: "Set fact: telegraph_config_location (Debian)"
ansible.builtin.set_fact:
telegraph_config_location: "/etc/telegraf"
when:
- ansible_os_family == 'Debian'
- name: "set fact: telegraph_config_location (macOS)"
ansible.builtin.set_fact:
telegraph_config_location: "/usr/local/etc"
when:
- ansible_os_family == 'Darwin'
- name: "Set fact: telegraph_config_location (macOS)"
ansible.builtin.set_fact:
telegraph_config_location: "/usr/local/etc"
when:
- ansible_os_family == 'Darwin'
- name: "set fact: telegraph_config_location (macOS)"
ansible.builtin.set_fact:
telegraph_config_location: "/volume1/docker/telegraf/config"
when:
- inventory_hostname == 'synology'
- name: "Set fact: telegraph_config_location (macOS)"
ansible.builtin.set_fact:
telegraph_config_location: "/volume1/docker/telegraf/config"
when:
- inventory_hostname == 'synology'
- name: "Fail if arm Mac (need to update task) or variables not defined"
ansible.builtin.assert:
that:
- telegraph_binary_location is defined
- telegraph_config_location is defined
- not mac_arm
fail_msg: "Unable to install Telegraf on this host"
- name: "Fail if arm Mac (need to update task) or variables not defined"
ansible.builtin.assert:
that:
- telegraph_binary_location is defined
- telegraph_config_location is defined
- not mac_arm
fail_msg: "Unable to install Telegraf on this host"
- name: "set variable: Set speedtest download Binary (armv7l)"
ansible.builtin.set_fact:
speedtest_download_file_uri: "https://install.speedtest.net/app/cli/ookla-speedtest-{{ speedtest_cli_version }}-linux-armhf.tgz"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Set variable: Set speedtest download Binary (armv7l)"
ansible.builtin.set_fact:
speedtest_download_file_uri: "https://install.speedtest.net/app/cli/ookla-speedtest-{{ speedtest_cli_version }}-linux-armhf.tgz"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "set variable: Set speedtest download Binary (aarch64)"
ansible.builtin.set_fact:
speedtest_download_file_uri: "https://install.speedtest.net/app/cli/ookla-speedtest-{{ speedtest_cli_version }}-linux-aarch64.tgz"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "Set variable: Set speedtest download Binary (aarch64)"
ansible.builtin.set_fact:
speedtest_download_file_uri: "https://install.speedtest.net/app/cli/ookla-speedtest-{{ speedtest_cli_version }}-linux-aarch64.tgz"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "Install/upgrade Telegraf"
block:
- name: "set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: false
when: telegraph_binary_location is defined
- name: "Set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: false
when: telegraph_binary_location is defined
- name: Check if telegraf is installed
ansible.builtin.stat:
path: "{{ telegraph_binary_location }}"
check_mode: false
register: telegraf_binary_exists
when: telegraph_binary_location is defined
- name: Check if telegraf is installed
ansible.builtin.stat:
path: "{{ telegraph_binary_location }}"
check_mode: false
register: telegraf_binary_exists
when: telegraph_binary_location is defined
- name: "set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: true
check_mode: false
when:
- telegraph_binary_location is defined
- not telegraf_binary_exists.stat.exists
- name: "Set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: true
check_mode: false
when:
- telegraph_binary_location is defined
- not telegraf_binary_exists.stat.exists
- name: Check current version of telegraf
ansible.builtin.shell: "{{ telegraph_binary_location }} --version | grep -oE '[0-9]+\\.[0-9]+\\.[0-9]+'"
ignore_errors: true
register: current_telegraf_version
check_mode: false
changed_when: false
when:
- not need_telegraf_install
- telegraph_binary_location is defined
- name: Check current version of telegraf
ansible.builtin.shell: "{{ telegraph_binary_location }} --version | grep -oE '[0-9]+\\.[0-9]+\\.[0-9]+'"
ignore_errors: true
register: current_telegraf_version
check_mode: false
changed_when: false
when:
- not need_telegraf_install
- telegraph_binary_location is defined
- name: "set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: true
when:
- telegraph_binary_location is defined
- not need_telegraf_install
- current_telegraf_version.stdout is version(telegraf_version, '<')
- name: "Set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: true
when:
- telegraph_binary_location is defined
- not need_telegraf_install
- current_telegraf_version.stdout is version(telegraf_version, '<')
- name: install telegraf (MacOS)
community.general.homebrew:
name: telegraf
state: present
notify: restart_telegraf
when:
- ansible_os_family == 'Darwin'
- need_telegraf_install
- name: Install telegraf (MacOS)
community.general.homebrew:
name: telegraf
state: present
notify: restart_telegraf
when:
- ansible_os_family == 'Darwin'
- need_telegraf_install
- name: install base apt-transport (Debian)
become: true
ansible.builtin.apt:
pkg: apt-transport-https
state: present
update_cache: true
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Install base apt-transport (Debian)
become: true
ansible.builtin.apt:
pkg: apt-transport-https
state: present
update_cache: true
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Download telegraf GPG key (Debian)
become: true
ansible.builtin.apt_key:
state: present
url: "https://repos.influxdata.com/influxdb.key"
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Download telegraf GPG key (Debian)
become: true
ansible.builtin.apt_key:
state: present
url: "https://repos.influxdata.com/influxdb.key"
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Add telegraf repository to apt (Debian)
become: true
ansible.builtin.apt_repository:
repo: deb https://repos.influxdata.com/debian bullseye stable
state: present
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Add telegraf repository to apt (Debian)
become: true
ansible.builtin.apt_repository:
repo: deb https://repos.influxdata.com/debian bullseye stable
state: present
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: install telegraf (Debian)
become: true
ansible.builtin.apt:
pkg: telegraf
state: latest
update_cache: true
only_upgrade: true
notify: restart_telegraf
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Install telegraf (Debian)
become: true
ansible.builtin.apt:
pkg: telegraf
state: latest
update_cache: true
only_upgrade: true
notify: restart_telegraf
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
# - name: give telegraf access to docker
# become: true
@@ -162,115 +162,115 @@
- name: "Install speedtest"
when: "'pis' in group_names"
block:
- name: "set fact: do we need speedtest installed?"
ansible.builtin.set_fact:
need_speedtest_install: false
- name: "Set fact: do we need speedtest installed?"
ansible.builtin.set_fact:
need_speedtest_install: false
- name: Check if speedtest is installed
ansible.builtin.stat:
path: /usr/local/bin/speedtest
register: speedtest_binary_file_location
- name: Check if speedtest is installed
ansible.builtin.stat:
path: /usr/local/bin/speedtest
register: speedtest_binary_file_location
- name: "set fact: do we need a speedtest install"
ansible.builtin.set_fact:
need_speedtest_install: true
when:
- not speedtest_binary_file_location.stat.exists
- name: "Set fact: do we need a speedtest install"
ansible.builtin.set_fact:
need_speedtest_install: true
when:
- not speedtest_binary_file_location.stat.exists
- name: Check current version of speedtest
ansible.builtin.shell: /usr/local/bin/speedtest --version | head -n1 | awk '{print $4}' | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
register: current_speedtest_version
check_mode: false
changed_when: false
when:
- not need_speedtest_install
- name: Check current version of speedtest
ansible.builtin.shell: /usr/local/bin/speedtest --version | head -n1 | awk '{print $4}' | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
register: current_speedtest_version
check_mode: false
changed_when: false
when:
- not need_speedtest_install
- name: "set fact: do we need a speedtest install"
ansible.builtin.set_fact:
need_speedtest_install: true
when:
- not need_speedtest_install
- current_speedtest_version.stdout is version(speedtest_cli_version, '<')
- name: "Set fact: do we need a speedtest install"
ansible.builtin.set_fact:
need_speedtest_install: true
when:
- not need_speedtest_install
- current_speedtest_version.stdout is version(speedtest_cli_version, '<')
- name: "Install speedtest (pi)"
become: true
ansible.builtin.unarchive:
src: "{{ speedtest_download_file_uri }}"
dest: /usr/local/bin
remote_src: true
when:
- need_speedtest_install
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Install speedtest (pi)"
become: true
ansible.builtin.unarchive:
src: "{{ speedtest_download_file_uri }}"
dest: /usr/local/bin
remote_src: true
when:
- need_speedtest_install
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Configure Telegraf"
block:
- name: "Ensure {{ telegraph_config_location }} exists"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: 0755
loop:
- "{{ telegraph_config_location }}"
- "{{ telegraph_config_location }}/telegraf.d"
- name: "Ensure {{ telegraph_config_location }} exists"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: 0755
loop:
- "{{ telegraph_config_location }}"
- "{{ telegraph_config_location }}/telegraf.d"
- name: template config files to server
become: true
ansible.builtin.template:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: "644"
loop:
- { src: "telegraf/base_config.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.conf" }
- { src: "telegraf/custom_metrics.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/custom_metrics.conf" }
- { src: "telegraf/nomad.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/nomad.conf" }
- { src: "telegraf/docker.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/docker.conf" }
notify: restart_telegraf
- name: Template config files to server
become: true
ansible.builtin.template:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: "644"
loop:
- { src: "telegraf/base_config.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.conf" }
- { src: "telegraf/custom_metrics.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/custom_metrics.conf" }
- { src: "telegraf/nomad.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/nomad.conf" }
- { src: "telegraf/docker.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/docker.conf" }
notify: restart_telegraf
- name: template leader configs (ie, configs that should be placed on a single server)
become: true
ansible.builtin.template:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: "644"
loop:
- { src: "telegraf/leader.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/leader.conf" }
- { src: "telegraf/speedtest.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/speedtest.conf" }
- { src: "telegraf/pingHosts.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/pingHosts.conf" }
when:
- is_cluster_leader
notify: restart_telegraf
- name: Template leader configs (ie, configs that should be placed on a single server)
become: true
ansible.builtin.template:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: "644"
loop:
- { src: "telegraf/leader.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/leader.conf" }
- { src: "telegraf/speedtest.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/speedtest.conf" }
- { src: "telegraf/pingHosts.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/pingHosts.conf" }
when:
- is_cluster_leader
notify: restart_telegraf
- name: Copy custom metrics script
become: true
ansible.builtin.template:
src: "scripts/telegraf_custom_metrics.sh.j2"
dest: "/usr/local/bin/telegraf_custom_metrics.sh"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
when:
- inventory_hostname != 'synology'
- name: Copy custom metrics script
become: true
ansible.builtin.template:
src: "scripts/telegraf_custom_metrics.sh.j2"
dest: "/usr/local/bin/telegraf_custom_metrics.sh"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
when:
- inventory_hostname != 'synology'
- name: Copy speedtest script
become: true
ansible.builtin.template:
src: "scripts/telegraf_speedtest.sh.j2"
dest: "/usr/local/bin/telegraf_speedtest.sh"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
when:
- is_cluster_leader
- name: Copy speedtest script
become: true
ansible.builtin.template:
src: "scripts/telegraf_speedtest.sh.j2"
dest: "/usr/local/bin/telegraf_speedtest.sh"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
when:
- is_cluster_leader
- name: Reset file ownership
become: true
ansible.builtin.file:
path: "{{ telegraph_config_location }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- (ansible_os_family == 'Darwin') or (inventory_hostname == 'synology')
- name: Reset file ownership
become: true
ansible.builtin.file:
path: "{{ telegraph_config_location }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- (ansible_os_family == 'Darwin') or (inventory_hostname == 'synology')


@@ -206,9 +206,9 @@ plugin "raw_exec" {
plugin "docker" {
config {
allow_caps = [ "ALL" ]
allow_caps = ["all"]
allow_privileged = true
extra_labels = ["job_name"]
volumes {
enabled = true
}


@@ -7,9 +7,16 @@ ConditionFileNotEmpty={{ nomad_configuration_dir }}/nomad.hcl
[Service]
{# {% if 'linode' in group_names %} #}
User=nomad
Group=nomad
{# User=nomad #}
{# Group=nomad #}
{# {% endif %} #}
{# NOTE: Nomad is running as root rather than the Nomad user due to the Docker driver not being started when cgroups v2 are enabled.
https://github.com/hashicorp/nomad/pull/16063
#}
User=root
Group=root
ExecReload=/bin/kill -HUP $MAINPID
ExecStart=/usr/local/bin/nomad agent -config {{ nomad_configuration_dir }}
KillMode=process
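The commented note above is why this unit now runs Nomad as root: with cgroups v2 enabled, the Docker task driver does not start under the dedicated nomad user (see the linked PR). A small, hedged check to see which cgroup hierarchy a host is actually using; the probe and task names are illustrative, not part of this role:

```yaml
- name: "Example: detect the cgroup filesystem type"
  ansible.builtin.command:
    cmd: stat -fc %T /sys/fs/cgroup/
  register: cgroup_fs
  changed_when: false

- name: "Example: report whether cgroups v2 is active"
  ansible.builtin.debug:
    msg: "cgroups v2 active: {{ cgroup_fs.stdout == 'cgroup2fs' }}"
```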


@@ -32,104 +32,177 @@ job "recyclarr" {
task "recyclarr" {
env {
TZ = "America/New_York"
TZ = "America/New_York"
RECYCLARR_APP_DATA = "/local"
}
// user = "${meta.PUID}:${meta.PGID}"
driver = "docker"
config {
image = "ghcr.io/recyclarr/recyclarr:2"
image = "ghcr.io/recyclarr/recyclarr:4"
hostname = "${NOMAD_TASK_NAME}"
init = true
volumes = [
"${meta.nfsStorageRoot}/pi-cluster/${NOMAD_TASK_NAME}:/config"
]
} // docker config
// template {
// destination = "local/recyclarr.yml"
// env = false
// change_mode = "restart"
// perms = "644"
// data = <<-EOH
// ---
// # yaml-language-server: $schema=https://raw.githubusercontent.com/recyclarr/recyclarr/master/schemas/config-schema.json
template {
destination = "local/recyclarr.yml"
env = false
change_mode = "restart"
perms = "644"
data = <<-EOH
# yaml-language-server: $schema=https://raw.githubusercontent.com/recyclarr/recyclarr/master/schemas/config-schema.json
// # A starter config to use with Recyclarr. Most values are set to "reasonable defaults". Update the
// # values below as needed for your instance. You will be required to update the API Key and URL for
// # each instance you want to use.
// #
// # Many optional settings have been omitted to keep this template simple.
// #
// # For more details on the configuration, see the Configuration Reference on the wiki here:
// # https://github.com/recyclarr/recyclarr/wiki/Configuration-Reference
# A starter config to use with Recyclarr. Most values are set to "reasonable defaults". Update the
# values below as needed for your instance. You will be required to update the API Key and URL for
# each instance you want to use.
#
# Many optional settings have been omitted to keep this template simple. Note that there's no "one
# size fits all" configuration. Please refer to the guide to understand how to build the appropriate
# configuration based on your hardware setup and capabilities.
#
# For any lines that mention uncommenting YAML, you simply need to remove the leading hash (`#`).
# The YAML comments will already be at the appropriate indentation.
#
# For more details on the configuration, see the Configuration Reference on the wiki here:
# https://recyclarr.dev/wiki/reference/config-reference
// # Configuration specific to Sonarr
// sonarr:
// # Set the URL/API Key to your actual instance
# Configuration specific to Sonarr
sonarr:
series:
base_url: https://sonarr.{{ homelab_domain_name }}/
api_key: a6cedb325b5645eeb08acb06a42b7746
delete_old_custom_formats: true
// {% raw -%}
// - base_url: http://{{ range service "sonarr" }}{{ .Address }}:{{ .Port }}{{ end }}
// api_key: f7e74ba6c80046e39e076a27af5a8444
// {% endraw -%}
# Quality definitions from the guide to sync to Sonarr. Choices: series, anime
quality_definition:
type: series
// # Quality definitions from the guide to sync to Sonarr. Choice: anime, series, hybrid
// quality_definition: series
# Release profiles from the guide to sync to Sonarr v3 (Sonarr v4 does not use this!)
# Use `recyclarr list release-profiles` for values you can put here.
# https://trash-guides.info/Sonarr/Sonarr-Release-Profile-RegEx/
release_profiles:
- trash_ids:
- EBC725268D687D588A20CBC5F97E538B # Low Quality Groups
- 1B018E0C53EC825085DD911102E2CA36 # Release Sources (Streaming Service)
- 71899E6C303A07AF0E4746EFF9873532 # P2P Groups + Repack/Proper
strict_negative_scores: false
// # Release profiles from the guide to sync to Sonarr.
// # You can optionally add tags and make negative scores strictly ignored
// release_profiles:
// # Series
// - trash_ids:
// - EBC725268D687D588A20CBC5F97E538B # Low Quality Groups
// - 1B018E0C53EC825085DD911102E2CA36 # Release Sources (Streaming Service)
// - 71899E6C303A07AF0E4746EFF9873532 # P2P Groups + Repack/Proper
// # Anime (Uncomment below if you want it)
// # - trash_ids:
// # - d428eda85af1df8904b4bbe4fc2f537c # Anime - First release profile
// # - 6cd9e10bb5bb4c63d2d7cd3279924c7b # Anime - Second release profile
- trash_ids:
- 76e060895c5b8a765c310933da0a5357 # Optionals
filter:
include:
- cec8880b847dd5d31d29167ee0112b57 # Golden rule
- 436f5a7d08fbf02ba25cb5e5dfe98e55 # Ignore Dolby Vision without HDR10 fallback.
# - f3f0f3691c6a1988d4a02963e69d11f2 # Ignore The Group -SCENE
# - 5bc23c3a055a1a5d8bbe4fb49d80e0cb # Ignore so called scene releases
- 538bad00ee6f8aced8e0db5218b8484c # Ignore Bad Dual Audio Groups
- 4861d8238f9234606df6721df6e27deb # Ignore AV1
- bc7a6383cbe88c3ee2d6396e1aacc0b3 # Prefer HDR
- 6f2aefa61342a63387f2a90489e90790 # Dislike retags: rartv, rarbg, eztv, TGx
- 19cd5ecc0a24bf493a75e80a51974cdd # Dislike retagged groups
- 6a7b462c6caee4a991a9d8aa38ce2405 # Dislike release ending: en
- 236a3626a07cacf5692c73cc947bc280 # Dislike release containing: 1-
# - fa47da3377076d82d07c4e95b3f13d07 # Prefer Dolby Vision
// # Configuration specific to Radarr.
// radarr:
// # Set the URL/API Key to your actual instance
// {% raw -%}
// - base_url: http://{{ range service "radarr" }}{{ .Address }}:{{ .Port }}{{ end }}
// api_key: f7e74ba6c80046e39e076a27af5a8444
// {% endraw -%}
# Configuration specific to Radarr.
radarr:
movies:
# Set the URL/API Key to your actual instance
base_url: https://radarr.{{ homelab_domain_name }}/
api_key: 53060417cccf4978bf7384c7869616f1
delete_old_custom_formats: true
// # Which quality definition in the guide to sync to Radarr. Only choice right now is 'movie'
// quality_definition:
// type: movie
# Which quality definition in the guide to sync to Radarr. Only choice right now is 'movie'
quality_definition:
type: movie
preferred_ratio: 0.5
// # Set to 'true' to automatically remove custom formats from Radarr when they are removed from
// # the guide or your configuration. This will NEVER delete custom formats you manually created!
// delete_old_custom_formats: false
custom_formats:
# Use `recyclarr list custom-formats radarr` for values you can put here.
# https://trash-guides.info/Radarr/Radarr-collection-of-custom-formats/
// custom_formats:
// # A list of custom formats to sync to Radarr. Must match the "trash_id" in the guide JSON.
// - trash_ids:
// - ed38b889b31be83fda192888e2286d83 # BR-DISK
// - 90cedc1fea7ea5d11298bebd3d1d3223 # EVO (no WEBDL)
// - 90a6f9a284dff5103f6346090e6280c8 # LQ
// - dc98083864ea246d05a42df0d05f81cc # x265 (720/1080p)
// - b8cd450cbfa689c0259a01d9e29ba3d6 # 3D
- trash_ids:
# Movie versions
- eca37840c13c6ef2dd0262b141a5482f # 4K Remaster
- 570bc9ebecd92723d2d21500f4be314c # Remaster
- 0f12c086e289cf966fa5948eac571f44 # Hybrid
- 9d27d9d2181838f76dee150882bdc58c # Masters of Cinema
- e0c07d59beb37348e975a930d5e50319 # Criterion Collection
- 957d0f44b592285f26449575e8b1167e # Special Edition
- eecf3a857724171f968a66cb5719e152 # IMAX
- 9f6cbff8cfe4ebbc1bde14c7b7bec0de # IMAX Enhanced
# Unwanted
- b8cd450cbfa689c0259a01d9e29ba3d6 # 3D
- ed38b889b31be83fda192888e2286d83 # BR-DISK
- 90a6f9a284dff5103f6346090e6280c8 # LQ
- bfd8eb01832d646a0a89c4deb46f8564 # Upscaled
- 90cedc1fea7ea5d11298bebd3d1d3223 # EVO (no WEBDL)
- 923b6abef9b17f937fab56cfcf89e1f1 # DV (WEBDL)
- b6832f586342ef70d9c128d40c07b872 # Bad Dual Groups
- ae9b7c9ebde1f3bd336a8cbd1ec4c5e5 # No-RlsGroup
- 7357cf5161efbf8c4d5d0c30b4815ee2 # Obfuscated
- 5c44f52a8714fdd79bb4d98e2673be1f # Retags
- c465ccc73923871b3eb1802042331306 # Line/Mic Dubbed
# Misc
- e7718d7a3ce595f289bfee26adc178f5 # Repack/Proper
- ae43b294509409a6a13919dedd4764c4 # Repack2
# HQ Release Groups
- ed27ebfef2f323e964fb1f61391bcb35 # HD Bluray Tier 01
- c20c8647f2746a1f4c4262b0fbbeeeae # HD Bluray Tier 02
- c20f169ef63c5f40c2def54abaf4438e # WEB Tier 01
- 403816d65392c79236dcb6dd591aeda4 # WEB Tier 02
- af94e0fe497124d1f9ce732069ec8c3b # WEB Tier 03
quality_profiles:
- name: "720p/1080p"
reset_unmatched_scores: true
- name: "720p/1080p Remux"
reset_unmatched_scores: true
// # Uncomment the below properties to specify one or more quality profiles that should be
// # updated with scores from the guide for each custom format. Without this, custom formats
// # are synced to Radarr but no scores are set in any quality profiles.
// # quality_profiles:
// # - name: Quality Profile 1
// # - name: Quality Profile 2
// # #score: -9999 # Optional score to assign to all CFs. Overrides scores in the guide.
// # #reset_unmatched_scores: true # Optionally set other scores to 0 if they are not listed in 'names' above.
// EOH
// }
# HDR FORMATS
# ########################
- trash_ids:
- 3a3ff47579026e76d6504ebea39390de # Remux Tier 01
- 9f98181fe5a3fbeb0cc29340da2a468a # Remux Tier 02
- e61e28db95d22bedcadf030b8f156d96 # HDR
- 2a4d9069cc1fe3242ff9bdaebed239bb # HDR (undefined)
quality_profiles:
- name: "720p/1080p"
score: -100
- name: "720p/1080p Remux"
# AUDIO FORMATS
# ########################
- trash_ids:
- 6fd7b090c3f7317502ab3b63cc7f51e3 # 6.1 Surround
- e77382bcfeba57cb83744c9c5449b401 # 7.1 Surround
- f2aacebe2c932337fe352fa6e42c1611 # 9.1 Surround
quality_profiles:
- name: "720p/1080p"
score: -50
- name: "720p/1080p Remux"
score: -50
- trash_ids:
- 89dac1be53d5268a7e10a19d3c896826 # 2.0 Stereo
quality_profiles:
- name: "720p/1080p"
score: 120
- trash_ids:
- 77ff61788dfe1097194fd8743d7b4524 # 5.1 Surround
quality_profiles:
- name: "720p/1080p"
score: 80
- name: "720p/1080p Remux"
score: 80
EOH
}
// resources {
// cpu = 100 # MHz
// memory = 300 # MB
// } // resources
resources {
cpu = 100 # MHz
memory = 300 # MB
} // resources
} // task


@@ -221,7 +221,7 @@ job "reverse-proxy" {
resources {
cpu = 200 # MHz
memory = 110 # MB
memory = 1000 # MB
}
} // task authelia


@@ -0,0 +1,64 @@
job "valentina" {
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "service"
// constraint {
// attribute = "${node.unique.name}"
// operator = "regexp"
// value = "rpi(1|2|3)"
// }
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
group "valentina" {
count = 1
restart {
attempts = 0
delay = "30s"
}
task "valentina" {
env {
PGID = "${meta.PGID}"
PUID = "${meta.PUID}"
TZ = "America/New_York"
VALENTINA_DISCORD_TOKEN = "{{ valentina_discord_token }}"
VALENTINA_GUILDS = "{{ valentina_guids }}"
VALENTINA_LOG_LEVEL = "INFO"
VALENTINA_OWNER_IDS = "{{ valentina_owner_ids }}"
}
driver = "docker"
config {
image = "ghcr.io/natelandau/valentina:v{{valentina_version}}"
hostname = "${NOMAD_TASK_NAME}"
volumes = [
"${meta.nfsStorageRoot}/pi-cluster/${NOMAD_TASK_NAME}:/valentina",
]
} // docker config
// resources {
// cpu = 100 # MHz
// memory = 300 # MB
// } // resources
} // task
} // group
} // job

vault.yml

@@ -1,118 +1,130 @@
$ANSIBLE_VAULT;1.1;AES256
35366634393265303030366466303232616338633038313738633637383439356439616536666230
6566643530623337323034306366613935663334313934310a636235653531316237393231376362
33663935366131663137363465666363336630386362313333633762656461636239366234633832
3538353463356335360a323030643238323034343666376230356465396639636563316532373638
35366637366663303164376661636563346330313932343462326239626264633262303739383831
31333134613534643265643433323065303833326662346466633931373337326233303633363032
66333336373865313333626566386665653833343638376264356430383764316134333231366466
62336534666565343839393237356139393738396333393337666631303461373362343664396665
63343161613462653866616566363631346566636639626138316539353362616261666337386635
62356262363564376334336163613035643336656331653562306433363161393435343431663137
66663936623834666364303333386335353961373031383164623766323836383462363231396263
34343662336637633262333530623039376534643966653839346236363166646564613333633366
33363534616466393137366234633030663036613263383733313235353364613864316139356330
38343439346661613136316235326430326437646135636637343665663031393262653661396331
39653739666364666564633364636231323237366265323631333234306631386362666135336461
35646564643631666663336237636435626338346663633038353964303764626236373561323763
34643565656462323764623263383037663735323364396437653332376137356263633963306332
39633339366236313063643665356366346138616434316332643731666634366336623064653361
63393134643630313632396434643131646464323737343133613364333465393834656236616134
30313961346236326563616263373463616432393962663262616232356663636439643731383930
32326664306563623665633164373932356163356361663465363362303661396662386630323137
32383333656435613762393430303163383135393037363763333139633239666639303538623134
32386635663962363939373365613138316435366433303863326561613463306338396136393965
34333961383035383135333561313331336565383031356133626530306163333666333564353262
38646434643234303363383965636339323633326330663736393461383461303661353365663631
33343831356135653139633463336330646634363639326635653863343632663466336639313962
65306438613933386664336138613066326364343738633531356664343664646464396162343861
36663030643762343938633564373531663430303536643665613532313630636461646235666335
62613634656232373936363439363766316561373937386261613861396566303834376134666564
35396330636166316239336433323939363839636361643630353263663233303166313863636364
34363134363161643234643134663361373237316466626363646264643530343064666464393166
66366561356336616663393064376162643731343532663436646432366331643066396232393432
63336633313963383132333639626130623737346137646561303338623136306361656630396364
36306234643161643864313334316634396233313831613830393865353763653963656632363865
34346439356166363839343063313263396437366163343734326162346166353465313163313236
35343531333438303561393137323831303063353466666463303835653630353630393836393236
32643035636335363137303134333735343964646130306339663137646261366635353632613533
35303636373465633831353439376464386132616238613336366134383037376165396365353436
66633937656162346661326136343266313937393436353532656634366535653762633930393239
62383862356165336435616666346238646666613066323262323530356534373262633861646466
66643935363334623264373338663362623439313138666338363732386666383739636162653763
32666439316632653633363266343365393366373834323065353335613563306135383432613433
61633835326565386662313265356536613237313364366163313562393836613061616432316638
33376531663533376435383437393539663565616439666438646232663732663063343666646631
36366364353339323262666630363932616461323833306666616365343530646536326363613232
65313031333064396662363736316137656161393865383135366539636432386539623837353634
62313638666564396462666334616365323932396236633932613362633166346265613161363863
39396464663966353565393662363633366237323066306436616437363666666635343265666435
32363162643761666639336464383430366565323862353161303338333232326335303462653938
61333162306132633637653736623033373164343463333933666438326534303730613862383035
39393939323561333738653465306165643461366534313537633162313638393630393361623432
34306264316565333334303633323836343162373738636161656565313134643262343533666434
37623962626263353062333939633662316663316238636331646230313861363364326636653365
37343761626437663832346266333634666361333361313638626639633934646335613062626365
31656132643034303032623365613530306436383437633761636238336139373739313836393336
66653066633962333730643034653032626530663731633462393937326236656362356236666333
62343139646139303433393163613037623963633230366236396434643163316664616435386436
61616237366130663662643162613730303033376334333066393432333032613830316262333763
31663936663239653361633634323736306363323864666635633465376363323838326366663630
39383463343038666564653663616161336266313563633731623335373732343732383164623431
39333262346539373937386531386466373863323232653265383064643863303638326566313765
30376565643462643439316431306331346438633331616437613762323138363061336630353661
61653139333962373261323063386231346266323762306433613363643230366265366239623832
38353562393064633537373761643539313234333136333530343536393033313131393932633637
63383066623930376561656436396564353264643630636332653862613933636630333633396130
33346638663033636436626631323330336430313738313465323737386434613538346564633938
62653236643033313062336664323335656132383430313831636334326430383938653762313835
65656430656363653735343738326534616335636130646539363066393436383961316135346262
30656131353733616562613239663965383864313263653063393635623838633538303433323437
61636162363663306166343464333534316131346231303663336365303363656635363066353131
38623263366136396466383538323637626236633163663033613934303766613931313135613132
39346237656635303166363031353866663833303537666330346130353563373763623530373464
65666338313039633732626564393161663335613264306637646332643133356135613366353264
62376466613037326463623131373937303039356138336337333163303636303335333736376563
63313730623664356165653861303139313039616231326136313634623136613365313466373561
35313466313539383838373838386131653638653430613863313430626139353465626636386539
37383034326561393666383566326364623337376432633864613630353662663665336232313064
31396531313937373336383438646234343661643534316332633163653532633565613136343235
31653739633035303430626364303961626433323835653638653839396231663662636666393563
30346162326561636365393831333435333362356432646161633463313963346537643631393736
66343966313361333934616534313037636238613830623938623563393230376635316631636534
62376136386137313737646135376463343831663162616566373764643930386539306231613964
65383633313037323234616364623139623834303466306666613334346131633531643932623362
31623337636434623531376437623033306235346366376436396336346634303138613262373464
30363735633663363364613139666164383436306666363362346633346663346366393634333335
61326234303734626465616530346339303536636665626436623237383434636362393034346562
64633765363531346138376166393030666433396339333662663036313031306434626236383664
63636633316138653433366337303033636330333761626162373435633062366639396362376537
30313436353964613838323332353137383433323265343831356334393238666438323735313630
66313534646636633866313533353533643531356266643433353137653130386165353936616438
31353331383461313130383035663837646439323366623935386236663262653165313432326639
65316339356661623436386537353335343332616362323463613966383736306638396630653437
62376232353763336365613438383936646265623261306338613663343864363839313663343030
37396537626435303036613531396239353439663930363263373632333536376364336436383961
31613237393430326663366531633633613362373265646437303530656564383830366164643465
31333162373037323836396234383265333832376461383530383139353562666635386661383262
31323162363834376432313766393965373763313664383966346464313865343261333030653033
37326564663836323963663735353432653938373632356564653830616562656333383563366432
32366464316265613565613830633264613634313134373530386562313163656434356164356139
62643065613638323735366332316366383236653762373436393631363039636333346431666137
33643239323062343537353061646138346661643262303363326137356461356439663166653739
39636534653935376433633761373630656566393535373962353762646165663566613235646134
32313838383538363532643965376464626361663431393165663238373762636337366434666437
32636234613264666633366663616639386236386333623766383735383431323964343965643362
39326266366162333266343133646335653837393962633731613230653665366462393931613462
33633966626132633832653634626633393238643238393064646233663064346333653164623336
34373634376335383639346338663830653061386161306134336530376161333637333733666533
63626464393435626361323333656639616431333638383163626662323733613564613430323532
31343262616133333965633462366636333762623764326231346437666634663339393563666664
65653536333834643937326464333464353135363738663031303162396535616139663535336535
65343062646465373831303235303933343030346562633561653534313263333033313531656430
64623833653832323134333138663966313939303739376131383133366131323530353961633765
63386436373262313334323932646232616435646665323736356433376332653530326230346331
63613163343365623937336564643431653963383333363664663934633962316663306537376236
62333531353833326232613565666563613864653364363333613737663965366133383231623839
65323161613533343130316635636630633931633936666662303330326262376233376562633865
3930
30663632373933656431386165366239613265616163656263356462626365636661333339646462
3063316332666465626234393330323737393635343564310a303036653366653333636663643765
32666661356637353863633934646536333037393735313139303433336639343466643538646236
6466633565363633620a343131356337336366636433666230306237633963616266363833343636
37623938303161323963356338313166643566636166333639303163653464353266306465336231
33373736663765663734326234653931366237336666336461363734363065356664653763363437
63383266376136653033373263393261353836643932353737626136356135373264616532323434
64386533326164346662636630373863336530396236613736626239656238363738643839613934
33393930353166643961393665326262313463656137343537393363336538313935306364616136
32643961633436303262326239393063373162343562383436336639383066343431383639366363
64363736393535336531326531353939393435356535643163356536356364396464633565616366
36303231613839316538383637386136356638663334316530366534343533393462396361376132
33366434306563316263643864656332303361346632613336313135393937346338373730336664
39623239616234373530373739336465616462653663303331616232616632316161646434366664
62653165323163323533653939353565613337653136646232333462613562326335363337656666
30396335323030336132373564643233313631653735393236383830353130393163376165313963
38383933383034303739363461306464626461613661666466346534326135376534366636303738
38613936633232343366623433376230313633616465636537393030306461613734633437663962
66636264306139323763323436636661306339653063333363386438313365656537306663376630
66346264353532366135616131613164363438663335386565316336303036316363313830366162
64326563613038616238626463356136396230633639316135323466393939653237373837636432
63363430656563373163666134633031636336626530636633663831306363336239383139653366
61653138653563633532616164663735663564353038623661393136353164373330323565633766
62646164353633343863643438633863666464643534643936643337393865306265646364653038
66386362656436313935336432643833656436373130613031353434633766376132366637616334
31633761396264366361346335346130313435636265666339346436633831643932336665396366
63653963353839383736306330376262386333643366353836623235353538353066656535663464
63623365346133646339656433353332656632363737336336626462386362386164383266316463
63366431613133313232666335666430643462363064316133373630333266626666623631326163
64646264323264393164323732353035646261633033633335643631343964643134353032653062
39303163626564326562626630376132666563316534623662366165333466396662616535653339
35623962333434303762333861613838643763636136383030303966353162373239323634313064
31306565613164346364653830373134343635316639633434656163313763646165396664356430
37306633633432326233316338306237343864623335623039336533633865356662663231653732
30616461303133623732393463313334373263613737646431653036653464316530643431353931
31626131376638383265636632616134666561653063623035623034633737356465343065643039
33623338646261396366356638316533306436633365336231386532663136363862336234303036
30326335646465366266326230363731616263643530326234623832356233303965373536626430
30646232653830633536353861626230326134353735333038313337323366376634313763313139
32316466393330376462663539323464366636316637643035333535656632303364323531303137
38393962353761623638306539653238306536386337303239613539393530646332313038343962
34613232616632326138343166613635343336346538633465323364323332313833636533643531
63633763373533376566633331383663626664386263656631333137613236303964353830613831
63363534633336376336333566343765316235343962663664626461656134323365646566643930
31336630666436353833633838643335393863623539323966313162323766356136646263646233
32353862613136643061316434333133303363316337356637623535343939386439306664306162
61626136633565383066636536373231343833343566306336303865363937646339633961313639
33653330313331373361363936373861313531613762636166623539303434303765656338313839
39376134656332323730633631373639363036626261653832366234326561386264386533336564
34313838313836653639316237663733396135623438313236306166363435333266613431653966
31663964663030316633373631626139333663646234633464313536373030313735373236613762
34373232303530313765623263356232346538646232303030333665633038666438333032636231
37653031666663393562333833663763356434353962346438346130633064393466613736343063
39336433616665323236366335363065396362313433396633396238656232313936373737333265
61663263303564643262353337393865313561306461353832303264336635613662383265316233
32633232353365386362303764313236653862633335333831636236363336636235376635643639
63373732633033626263366465393135306538643863343534386665663365336461663862633437
61383263356161383434316465666365333932323064616164376633363332353433623635646230
36336335353365323863636238346264343765333034333332383130313462356432616531346163
63663665366563663462616464653332636538336565363534303363343633333065393363316530
62363761343230656239303564373733383332613537383633306230313230393732316462383662
36663439316339356139383664336433333636323233643765353934343163336530333338346562
32383666333536656334663936653635623462303232636365363939393665313531636163316362
32613738663730336565333063363436666635323834633266656264326166656365666161616565
34383461616433656437613738633862303935363434393532643062393639383535303835346235
33393031653133353662313739386638626332373732653531313239636132643162343035646336
33396236656435333032656134613233383237313138306464653137626333346631303136343765
66313263353633306432386463316162636363333561623762653763373233343739663836633831
65663031346238346539666437653436393866646361383061383934653439653432323263613866
31356137303137306530633835663334333934333764343533346666663934313439303765306230
64396635383835393562363732366665383965663035666266313334326333653661643638316266
65623162393764623365623632633637366263346466336262353932656430313539616535626661
63366165353263353135666532386165366432343237353963343666333333643539323065663332
33613166633533663331343935313839303738653331353532633334353239363133303266613432
65646239366361643739643934633365326331313339666566393365363935666465333030616432
66363034653131656462376461306132616137633330333565313839396265666635316439343762
38646231336634356432316663376363323734366562643761396161386532306161343734343632
38663136393637626235663937373734363230373763356337636463623333386435323037396432
61643031383365356631616532316332316139396135356535353431336330336438373761376534
33333837343562383564303462616133653836323463343836613531616434316130343137373238
33313238633466383866616631346561396332356335396235393937376463353832643935313830
66386130613661373337623935336266643834336234313833626636346265323264363737633530
39613664373233333935656134363265616332613131303363363662633837333436383033366635
35383037643335373061376434616338623239353437366637366535663962616337626165653534
36383666613339396465663066366433663664343431373133643861653866336130393633323937
65333837383762653062346438363366393836326365353532653464396436303833323737323661
62393262313232366461373737613030306566386566373331353633323762626638626262393332
65376439366364326337323130636330633938323732643566643562373766653230333439343663
64323666306635653935646366323662653066653136393535346632333039333435313832633866
61656539343536313665663365626238323836626635363437366663316261353065373931666461
35376466643532656563386162643533653562646434333935343665316366376338366364366138
37373936333261303237333061626366633732353033643636336131643562633964663738646661
64363537636439353838616336363061623663386237656266616132623862633834343932663333
33323061366135316233353730326230613463633838363866306463616561316135303063656164
62333439636632636432356536376461363438303431393732386538646531326633376437663633
66646531313835393831376461396435623561613663383533366637626163303933303162323739
34653339643933343963636435623833663437653162333436633166353662366139623533376162
30306233643836613838373738336263623633393334616664613636393361313465373932343436
30326166313230383732613432663162346432313862613133643463636436313336363161366335
63653437386332336266373937663234393338393236343033336637343464306339393330663735
61313739343432343338306334306165623038346336646666653139303662366235383363313532
32386336343730363866373131303366613838623837326333303736383561613239623466326332
61376461363861616461653961336363643865636233653965353036373662393964313765623139
64393364333535316165636439613734643530666663663965343734613831636233303636636633
62323834633137346161666463393938356231633135383765363363613138373638303835363161
37646432373565653236303933316164623636626664633039363065373231343931626337653163
37663631623437613564613765333466306265653461353032626437346533373163653161613464
35346638303364386461336565646663363138613065303533376662653064383763663563383135
36363966323464346135326234393035343566623762356237623631333838633964633364626661
35323665613238356136623837346636643962336166646635376238363930373863313637346130
30643634343531346230393438363437346462646634336338363932633365646461383732393631
65623332356561366562353732376136386531343061306364333063373963353433663336393663
63303465343861353837636536356135306234643131343032623364663164306534633261613938
34303462363364386163363237623563656664386436623030313935633938663363623163316363
63363866383262353233363464326238393833323763333839633530323136613363326131626639
31333131313765633636633335386533393862363534313064616236663531643762316134393261
65343739316161666630393662353064636536323262643434616166366531383661396466356633
37663539363331356332616664316164373764633631386134363738346135623262626230316238
37323663396162653465306136653262616137366163346330623834616634353635643236326664
31333537373264623064643933373364653830666630343063646339373733316364643762366664
63646661633230626462636438323664656234323237626439626138643835313131393534346230
35623362616438393863656339316238613766316234343466633437396137643434383739633932
62653935613864363230323638666462386461343665336433353934396537336330343937363038
66356362346638386363373632643363666132343461383763653434616233663239656366633565
36386366643262633532646631356631313866303438383939373133393139353236363862633633
34373866316335376361303934613639613837353235663534316136313431333837326666616637
39383537353664336466306132396266623835346239353339373461343031636166616535363639
63623939313261336639386564336633663465306537346436386336633531653633366136633337
3839