fix: fix nomad configuration

Nathaniel Landau
2023-12-11 11:22:54 -05:00
parent eff9059bac
commit 846fb2bc31
2 changed files with 146 additions and 145 deletions

View File

@@ -57,7 +57,7 @@
  {% if 'linode' in group_names %}
  "retry_join" = [{% for h in groups['linode-cluster'] if hostvars[h].is_consul_server == true %}"{{ hostvars[h].linode_private_ip }}"{% if not loop.last %}, {% endif %}{% endfor %}]
  {% else %}
- "retry_join" = [{% for h in groups['lan'] if hostvars[h].is_consul_server == true %}"{{ hostvars[h].ansible_host }}"{% if not loop.last %}, {% endif %}{% endfor %}]
+ "retry_join" = ["{{ rpi1_ip_address }}", "{{ rpi2_ip_address }}", "{{ rpi3_ip_address }}"]
  {% if is_consul_server %}
  {% if 'linode' in group_names %}
  "join_wan" = [{% for h in groups['linode-cluster'] if hostvars[h].is_consul_server == true %}"{{ hostvars[h].ansible_host }}"{% if not loop.last %}, {% endif %}{% endfor %}]
@@ -98,9 +98,9 @@
  {% endif %}
  "acl" = {
+ enabled = false
  default_policy = "allow"
  enable_token_persistence = true
- enabled = false
  }
  # ----------------------------------------- Cluster Operations
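For reference, a minimal sketch of what the rewritten retry_join line and the reordered acl block render to, assuming illustrative inventory values (rpi1_ip_address = 10.0.30.11, rpi2_ip_address = 10.0.30.12, rpi3_ip_address = 10.0.30.13; these addresses are assumptions, not values from this repository):

# Rendered Consul fragment (illustrative IPs only)
"retry_join" = ["10.0.30.11", "10.0.30.12", "10.0.30.13"]
"acl" = {
  enabled = false
  default_policy = "allow"
  enable_token_persistence = true
}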

View File

@@ -5,11 +5,11 @@ datacenter = "{{ datacenter_name }}"
  # ----------------------------------------- Files and Logs
  data_dir = "{{ nomad_opt_dir_location }}"
- plugin_dir = "{{ nomad_opt_dir_location }}/plugins"
- log_level = "warn"
- log_file = "{{ nomad_opt_dir_location }}/logs/nomad.log"
- log_rotate_max_files = 5
  enable_syslog = false
+ log_file = "{{ nomad_opt_dir_location }}/logs/nomad.log"
+ log_level = "warn"
+ log_rotate_max_files = 5
+ plugin_dir = "{{ nomad_opt_dir_location }}/plugins"
  # ----------------------------------------- Networking
  bind_addr = "0.0.0.0" # the default
@@ -53,7 +53,7 @@ consul {
"traefik.http.routers.nomad-server.service=nomad-server", "traefik.http.routers.nomad-server.service=nomad-server",
"traefik.http.routers.nomad-server.rule=Host(`nomad.{{ homelab_domain_name }}`)", "traefik.http.routers.nomad-server.rule=Host(`nomad.{{ homelab_domain_name }}`)",
"traefik.http.routers.nomad-server.tls=true", "traefik.http.routers.nomad-server.tls=true",
"traefik.http.routers.nomad-server.middlewares=authelia@file,redirectScheme@file", "traefik.http.routers.nomad-server.middlewares=redirectScheme@file",
"traefik.http.services.nomad-server.loadbalancer.server.port=4646" "traefik.http.services.nomad-server.loadbalancer.server.port=4646"
] ]
{% endif %} {% endif %}
@@ -82,8 +82,9 @@ client {
  retry_interval = "15s"
  }
  {% else %}
+ servers = ["{{ rpi1_ip_address }}", "{{ rpi2_ip_address }}", "{{ rpi3_ip_address }}"]
  server_join {
- retry_join = [{% for h in groups['lan'] if hostvars[h].is_nomad_server == true %}"{{ hostvars[h].ansible_host }}"{% if not loop.last %}, {% endif %}{% endfor %}]
+ retry_join = ["{{ rpi1_ip_address }}", "{{ rpi2_ip_address }}", "{{ rpi3_ip_address }}"]
  retry_max = 3
  retry_interval = "15s"
  }
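For reference, a sketch of the Nomad client fragment that the {% else %} branch above renders to, using the same illustrative rpi*_ip_address values as the Consul example (assumed addresses, not taken from this repository):

# Rendered Nomad client fragment (illustrative IPs only)
servers = ["10.0.30.11", "10.0.30.12", "10.0.30.13"]
server_join {
  retry_join = ["10.0.30.11", "10.0.30.12", "10.0.30.13"]
  retry_max = 3
  retry_interval = "15s"
}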
@@ -149,7 +150,7 @@ server {
  raft_protocol = "3"
  server_join {
- retry_join = [{% for h in groups['lan'] if hostvars[h].is_nomad_server == true %}"{{ hostvars[h].ansible_host }}"{% if not loop.last %}, {% endif %}{% endfor %}]
+ retry_join = ["{{ rpi1_ip_address }}", "{{ rpi2_ip_address }}", "{{ rpi3_ip_address }}"]
  retry_max = 3
  retry_interval = "15s"
  }
@@ -157,12 +158,12 @@ server {
  autopilot {
  cleanup_dead_servers = true
+ disable_upgrade_migration = false
+ enable_custom_upgrades = false
+ enable_redundancy_zones = false
  last_contact_threshold = "200ms"
  max_trailing_logs = 250
  server_stabilization_time = "10s"
- enable_redundancy_zones = false
- disable_upgrade_migration = false
- enable_custom_upgrades = false
  }
  {% endif %}
@@ -175,11 +176,11 @@ client {
  # ----------------------------------------- Telemety
  telemetry = {
+ collection_interval = "10s"
+ datadog_address = "localhost:8125"
+ filter_default = false
  publish_allocation_metrics = true
  publish_node_metrics = true
- collection_interval = "10s"
- filter_default = false
- datadog_address = "localhost:8125"
  prefix_filter = [
  "+nomad.client.allocations.running",
  "+nomad.client.allocations.terminal",
@@ -194,7 +195,8 @@ telemetry = {
"+nomad.nomad.job_summary.running", "+nomad.nomad.job_summary.running",
"+nomad.nomad.job_summary.complete", "+nomad.nomad.job_summary.complete",
"+nomad.nomad.job_summary.lost", "+nomad.nomad.job_summary.lost",
"+nomad.nomad.job_summary.failed"] "+nomad.nomad.job_summary.failed"
]
} }
# ----------------------------------------- Plugins # ----------------------------------------- Plugins
@@ -208,10 +210,9 @@ plugin "docker" {
  config {
  allow_caps = ["all"]
  allow_privileged = true
- extra_labels = ["job_name"]
+ extra_labels = ["job_name", "job_id", "task_group_name", "task_name", "namespace", "node_name", "node_id"]
  volumes {
  enabled = true
  }
  }
  }