From cc4556cb7778fb32eb11b38cfae235bac4627839 Mon Sep 17 00:00:00 2001
From: Mathieu Garcia
Date: Sun, 5 Oct 2025 07:35:52 +0200
Subject: [PATCH 01/18] fix(ui): use ansible_host attributes for host ids

---
 ui/schemas/graphs.js          | 9 +++++----
 ui/schemas/infrastructures.js | 6 +++++-
 ui/schemas/inventory.js       | 6 +++---
 ui/schemas/variables.js       | 8 ++++----
 4 files changed, 17 insertions(+), 12 deletions(-)

diff --git a/ui/schemas/graphs.js b/ui/schemas/graphs.js
index 19fc159f..c30e83d2 100644
--- a/ui/schemas/graphs.js
+++ b/ui/schemas/graphs.js
@@ -8,6 +8,7 @@ NEWSCHEMA('Graphs', schema => {
 	const LEVELS = Object.freeze(['project', 'provider', 'region', 'location', 'instance']);

 	for (const { index_key } of dataset) {
+		if (!index_key) continue; // guard against missing data
 		const parts = index_key.split('.');
 		// e.g. ["instance001","frontends","region1","provider1","project1"]

@@ -62,15 +63,15 @@ NEWSCHEMA('Graphs', schema => {
 	// Flatten the tfstate resources → instances → index_key
 	const dataset = (infraResult?.items ?? [])
-		.flatMap(item => (item.tfstate?.resources ?? []))
-		.flatMap(resource => (resource?.instances ?? []))
-		.map(inst => ({ index_key: inst.index_key }));
+		.flatMap(item => (item.tfstate?.resources ?? []))
+		.filter(resource => resource.type === "ansible_host")
+		.flatMap(resource => (resource?.instances ?? []))
+		.map(inst => ({ index_key: inst.attributes.name }));

 	// Load software definitions (they are displayed as separate nodes)
 	const softResult = await DATA
 		.find('nosql/softwares')
 		.where('uid', $.user.id)
-		.error('@(Error)')
 		.promise($);

 	const softwareNodes = (softResult ?? []).map(s => ({

diff --git a/ui/schemas/infrastructures.js b/ui/schemas/infrastructures.js
index 8642aab6..3327c72c 100644
--- a/ui/schemas/infrastructures.js
+++ b/ui/schemas/infrastructures.js
@@ -49,11 +49,15 @@ NEWSCHEMA('Infrastructures', function (schema) {
 			if (!infra.tfstate?.resources) continue;
 			for (const resource of infra.tfstate.resources) {
 				if (!resource.instances) continue;
+				if (resource.type !== "ansible_host") continue;
 				for (const instance of resource.instances) {
-					instances.push({ id: instance.index_key, name: instance.index_key });
+					// instances.push({ id: instance.index_key, name: instance.index_key });
+					instances.push({ id: instance.attributes.name, name: instance.attributes.name });
 				}
 			}
 		}
+
+		console.log(instances);
 		$.callback(instances);
 	}
 });

diff --git a/ui/schemas/inventory.js b/ui/schemas/inventory.js
index db3d19b2..ee6ff867 100644
--- a/ui/schemas/inventory.js
+++ b/ui/schemas/inventory.js
@@ -64,7 +64,7 @@ NEWSCHEMA('Inventory', function (schema) {
 		const softwares = await DATA.find('nosql/catalogs')
 			.fields('name,version')
-			.error('@(Error)')
+			// .error('@(Error)')
 			.promise();

 		inventory.infrastructure.vars.softwares = softwares.reduce((acc, cur) => {
@@ -88,12 +88,12 @@ NEWSCHEMA('Inventory', function (schema) {
 		for (const item of result.items) {
 			if(!item.tfstate.resources) continue;
 			for (const resource of item.tfstate.resources) {
+				if(resource.type !== "ansible_host") continue;
 				for (const instance of resource.instances) {
-					dataset.push({ id: item.id, hostname: instance.index_key });
+					dataset.push({ id: item.id, hostname: instance.attributes.name });
 				}
 			}
 		}
-
 		$.callback(await buildInventory(dataset), null, 2);
 	}
 });

diff --git a/ui/schemas/variables.js b/ui/schemas/variables.js
index b6937730..471a051a 100644
--- a/ui/schemas/variables.js
+++ b/ui/schemas/variables.js
@@ -60,7 +60,7 @@ NEWSCHEMA('Variables', function (schema) {
 		const key2 = model.key2.replace(/\./g, '_');
 		const variables = await DATA.find('nosql/variables')
 			.where('key2', key2)
-			.in('type', ['project', 'provider', 'region', 'instance'])
+			.in('type', ['project', 'provider', 'location', 'region', 'instance'])
 			.promise($);

 		if (!variables?.length) {
@@ -204,7 +204,7 @@ NEWSCHEMA('Variables', function (schema) {
 			stored[model.subkey] = generatePassword(model.userpass, model.nosymbols, model.length);
 			await DATA.update('nosql/variables', { value: ENCRYPT(stored, CONF.auth_secret), dtupdated: NOW })
 				.where('id', result.id)
-				.error('@(Error)')
+				// .error('@(Error)')
 				.promise($);
 			$.callback(stored[model.subkey]);
 			return;
@@ -214,7 +214,7 @@ NEWSCHEMA('Variables', function (schema) {
 			stored[model.subkey] = generatePassword(model.userpass, model.nosymbols, model.length);
 			await DATA.update('nosql/variables', { value: ENCRYPT(stored, CONF.auth_secret), dtupdated: NOW })
 				.where('id', result.id)
-				.error('@(Error)')
+				// .error('@(Error)')
 				.promise($);
 			$.success(stored[model.subkey]);
 			return;
@@ -224,7 +224,7 @@ NEWSCHEMA('Variables', function (schema) {
 			delete stored[model.subkey];
 			await DATA.update('nosql/variables', { value: ENCRYPT(stored, CONF.auth_secret), dtupdated: NOW })
 				.where('id', result.id)
-				.error('@(Error)')
+				// .error('@(Error)')
 				.promise($);
 			$.success();
 			return;

From 70e67efc35ee83881143a43cf8208015b9dfc795 Mon Sep 17 00:00:00 2001
From: Mathieu Garcia
Date: Sun, 5 Oct 2025 07:36:27 +0200
Subject: [PATCH 02/18] fix(ui): disable instance and software validation

---
 ui/schemas/softwares.js | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/ui/schemas/softwares.js b/ui/schemas/softwares.js
index 7eb6d90f..7cbf9e16 100644
--- a/ui/schemas/softwares.js
+++ b/ui/schemas/softwares.js
@@ -124,8 +124,8 @@ NEWSCHEMA('Softwares', function (schema) {
 		input: '*instance:String, *software:UID, *size:String, *domain:String, domain_alias:String, *exposition:String',
 		action: async function ($, model) {
 			const rules = {
-				instance: { regex: REGEX_SOFTWARES.instance, comment: REGEX_SOFTWARES.instance.comment },
-				software: { regex: REGEX_SOFTWARES.software, comment: REGEX_SOFTWARES.software.comment },
+				// instance: { regex: REGEX_SOFTWARES.instance, comment: REGEX_SOFTWARES.instance.comment },
+				// software: { regex: REGEX_SOFTWARES.software, comment: REGEX_SOFTWARES.software.comment },
 				size: { regex: REGEX_SOFTWARES.size, comment: REGEX_SOFTWARES.size.comment },
 				domain: { regex: REGEX_SOFTWARES.domain, comment: REGEX_SOFTWARES.domain.comment },
 				domain_alias: { regex: REGEX_SOFTWARES.domain_alias, comment: REGEX_SOFTWARES.domain_alias.comment, optional: true },

From 3006540bca91c88ac68a50fab87592173bf2310b Mon Sep 17 00:00:00 2001
From: Mathieu Garcia
Date: Sun, 5 Oct 2025 07:36:52 +0200
Subject: [PATCH 03/18] fix(ui): include key param in variables_read request

---
 ui/public/forms/softwares.html | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ui/public/forms/softwares.html b/ui/public/forms/softwares.html
index dfe7ce1e..c1b5534a 100644
--- a/ui/public/forms/softwares.html
+++ b/ui/public/forms/softwares.html
@@ -112,7 +112,7 @@
 					SET('common.form2', 'formvariable');
 				}
 				else {
-					exports.tapi('variables_read/{0} ERROR'.format(vid), { type: type, format: 'yaml' }, function(response) {
+					exports.tapi('variables_read/{0} ERROR'.format(vid), { type: type, key: key, format: 'yaml' }, function(response) {
 						SET('formvariable @reset @hideloading', response);
 						SET('common.form2', 'formvariable');
 					});

From 0451408ea4360dd654ab178bc605c1be671c0b91 Mon Sep 17 00:00:00 2001
From: Mathieu Garcia
Date: Sun, 5 Oct 2025 07:37:20 +0200
Subject: [PATCH 04/18] feat(ui): enable first account creation in account schema

---
 ui/schemas/common.js | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)

diff --git a/ui/schemas/common.js b/ui/schemas/common.js
index bea1cfbd..44ab24cd 100644
--- a/ui/schemas/common.js
+++ b/ui/schemas/common.js
@@ -9,7 +9,7 @@ NEWSCHEMA('Account', function(schema) {
 	schema.action('create', {
 		name: 'Create user account',
 		input: '*email:Email, *password:String',
-		action: function($, model) {
+		action: async function($, model) {
 			if(!FUNC.regex(REGEX_USERS.email, model.email)) {
 				$.invalid('{0}'.format(REGEX_USERS.email.comment));
 				return;
@@ -19,7 +19,16 @@
 				return;
 			}

-			$.invalid('Registration is disabled, please try later');
+			const result = await DATA.find('nosql/users').promise($);
+			if(result.length == 0){
+				await ACTION('Users/create', { first_name: 'First', last_name: 'Admin', email: model.email, language: 'en', token: GUID(64), password: model.password, isdisabled: false, sa: true }).user({ id: 'bot', name: 'Bot', sa: true }).promise($);
+				$.invalid('The first account has been created, try to log in now');
+			}
+			else {
+				$.invalid('Registration is disabled, please try again later');
+			}
+
+
 		}
 	});

From 2d9a049c49a304fb1fce4ac6eb1a4bbc386da7bf Mon Sep 17 00:00:00 2001
From: Mathieu Garcia
Date: Sun, 5 Oct 2025 07:37:50 +0200
Subject: [PATCH 05/18] fix(lookup): simplify missing key error messages

---
 ansible/plugins/lookup/simple-stack-ui.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/ansible/plugins/lookup/simple-stack-ui.py b/ansible/plugins/lookup/simple-stack-ui.py
index d40755ea..401ebe73 100644
--- a/ansible/plugins/lookup/simple-stack-ui.py
+++ b/ansible/plugins/lookup/simple-stack-ui.py
@@ -91,11 +91,11 @@ def run(self, terms, variables=None, **kwargs):
             body = e.read().decode(errors='replace')

             if status == 460:
-                display.warning((f"{data["key"]}/{data["subkey"]} does not exist... Continue anyway"))
+                display.warning("key or subkey does not exist")
                 return [{}]

             if status == 461:
-                raise AnsibleError(f"{data["key"]}/{data["subkey"]} does not exist")
+                raise AnsibleError("key or subkey does not exist")

             raise AnsibleError(f"HTTP {status} for {api_url}: {body}")

From beceb14a58112425a6a5dea5bc5293531eb08b61 Mon Sep 17 00:00:00 2001
From: Mathieu Garcia
Date: Sun, 5 Oct 2025 15:05:05 +0200
Subject: [PATCH 06/18] refactor(nomad): replace fact_instance vars with software vars in constraints

Update Nomad job templates to use `software.constraints.location` and
`software.instance` instead of `fact_instance.location` and
`inventory_hostname`. Add conditional inclusion of the location constraint
when `software.constraints.location` is defined.
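For illustration, with the hypothetical values `software.instance: instance001`
and `software.constraints.location: paris`, a template now renders roughly as:

    constraint {
      attribute = "${meta.location}"
      set_contains = "paris"
    }

    constraint {
      attribute = "${meta.instance}"
      set_contains = "instance001"
    }

When `software.constraints.location` is undefined, only the instance
constraint is emitted.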
---
 ansible/playbooks/saas/roles/adguard/templates/nomad.hcl    | 6 ++++--
 ansible/playbooks/saas/roles/arangodb/templates/nomad.hcl   | 6 ++++--
 ansible/playbooks/saas/roles/caddy/templates/nomad.hcl      | 6 ++++--
 ansible/playbooks/saas/roles/dolibarr/templates/nomad.hcl   | 6 ++++--
 ansible/playbooks/saas/roles/forgejo/templates/nomad.hcl    | 6 ++++--
 ansible/playbooks/saas/roles/freshrss/templates/nomad.hcl   | 6 ++++--
 ansible/playbooks/saas/roles/grafana/templates/nomad.hcl    | 6 ++++--
 .../playbooks/saas/roles/homeassistant/templates/nomad.hcl  | 6 ++++--
 ansible/playbooks/saas/roles/kresus/templates/nomad.hcl     | 6 ++++--
 ansible/playbooks/saas/roles/loki/templates/nomad.hcl       | 6 ++++--
 ansible/playbooks/saas/roles/mariadb/templates/nomad.hcl    | 6 ++++--
 ansible/playbooks/saas/roles/mimir/templates/nomad.hcl      | 6 ++++--
 ansible/playbooks/saas/roles/minio/templates/nomad.hcl      | 6 ++++--
 ansible/playbooks/saas/roles/mosquitto/templates/nomad.hcl  | 6 ++++--
 ansible/playbooks/saas/roles/nextcloud/templates/nomad.hcl  | 6 ++++--
 ansible/playbooks/saas/roles/nginx/templates/nomad.hcl      | 6 ++++--
 ansible/playbooks/saas/roles/nomad/templates/backup.hcl.j2  | 6 ++++--
 ansible/playbooks/saas/roles/nomad/templates/restore.hcl.j2 | 6 ++++--
 ansible/playbooks/saas/roles/open-webui/templates/nomad.hcl | 6 ++++--
 ansible/playbooks/saas/roles/postgresql/templates/nomad.hcl | 6 ++++--
 ansible/playbooks/saas/roles/registry/templates/nomad.hcl   | 6 ++++--
 ansible/playbooks/saas/roles/rocketchat/templates/nomad.hcl | 6 ++++--
 ansible/playbooks/saas/roles/traefik/templates/nomad.hcl    | 6 ++++--
 ansible/playbooks/saas/roles/valkey/templates/nomad.hcl     | 6 ++++--
 ansible/playbooks/saas/roles/wordpress/templates/nomad.hcl  | 6 ++++--
 .../playbooks/saas/roles/zigbee2mqtt/templates/nomad.hcl    | 6 ++++--
 26 files changed, 104 insertions(+), 52 deletions(-)

diff --git a/ansible/playbooks/saas/roles/adguard/templates/nomad.hcl b/ansible/playbooks/saas/roles/adguard/templates/nomad.hcl
index 25f6db97..c3a315a5 100644
--- a/ansible/playbooks/saas/roles/adguard/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/adguard/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "{{ domain }}" {

diff --git a/ansible/playbooks/saas/roles/arangodb/templates/nomad.hcl b/ansible/playbooks/saas/roles/arangodb/templates/nomad.hcl
index 7d76e5b3..e6956e43 100644
--- a/ansible/playbooks/saas/roles/arangodb/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/arangodb/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "{{ domain }}" {

diff --git a/ansible/playbooks/saas/roles/caddy/templates/nomad.hcl b/ansible/playbooks/saas/roles/caddy/templates/nomad.hcl
index f1126c2a..c5982b04 100644
--- a/ansible/playbooks/saas/roles/caddy/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/caddy/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "{{ domain }}" {

diff --git a/ansible/playbooks/saas/roles/dolibarr/templates/nomad.hcl b/ansible/playbooks/saas/roles/dolibarr/templates/nomad.hcl
index 2dca396f..a0faff30 100644
--- a/ansible/playbooks/saas/roles/dolibarr/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/dolibarr/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "dolibarr" {

diff --git a/ansible/playbooks/saas/roles/forgejo/templates/nomad.hcl b/ansible/playbooks/saas/roles/forgejo/templates/nomad.hcl
index 07e9b902..1608d244 100644
--- a/ansible/playbooks/saas/roles/forgejo/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/forgejo/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "{{ domain }}" {

diff --git a/ansible/playbooks/saas/roles/freshrss/templates/nomad.hcl b/ansible/playbooks/saas/roles/freshrss/templates/nomad.hcl
index c6e5e350..5b924ce5 100644
--- a/ansible/playbooks/saas/roles/freshrss/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/freshrss/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "{{ domain }}" {

diff --git a/ansible/playbooks/saas/roles/grafana/templates/nomad.hcl b/ansible/playbooks/saas/roles/grafana/templates/nomad.hcl
index 9f4b3afe..a17e4205 100644
--- a/ansible/playbooks/saas/roles/grafana/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/grafana/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "{{ domain }}" {

diff --git a/ansible/playbooks/saas/roles/homeassistant/templates/nomad.hcl b/ansible/playbooks/saas/roles/homeassistant/templates/nomad.hcl
index 19c12261..ec5ba3a6 100644
--- a/ansible/playbooks/saas/roles/homeassistant/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/homeassistant/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "{{ domain }}" {

diff --git a/ansible/playbooks/saas/roles/kresus/templates/nomad.hcl b/ansible/playbooks/saas/roles/kresus/templates/nomad.hcl
index 8669493b..75da59ff 100644
--- a/ansible/playbooks/saas/roles/kresus/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/kresus/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "kresus" {

diff --git a/ansible/playbooks/saas/roles/loki/templates/nomad.hcl b/ansible/playbooks/saas/roles/loki/templates/nomad.hcl
index 4ddb08f0..ed695b61 100644
--- a/ansible/playbooks/saas/roles/loki/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/loki/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "{{ domain }}" {

diff --git a/ansible/playbooks/saas/roles/mariadb/templates/nomad.hcl b/ansible/playbooks/saas/roles/mariadb/templates/nomad.hcl
index b91af8a5..c21e91aa 100644
--- a/ansible/playbooks/saas/roles/mariadb/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/mariadb/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "{{ domain }}" {

diff --git a/ansible/playbooks/saas/roles/mimir/templates/nomad.hcl b/ansible/playbooks/saas/roles/mimir/templates/nomad.hcl
index 709b496f..66c84ec5 100644
--- a/ansible/playbooks/saas/roles/mimir/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/mimir/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "{{ domain }}-minio" {

diff --git a/ansible/playbooks/saas/roles/minio/templates/nomad.hcl b/ansible/playbooks/saas/roles/minio/templates/nomad.hcl
index d493634c..db983a14 100644
--- a/ansible/playbooks/saas/roles/minio/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/minio/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "{{ domain }}" {

diff --git a/ansible/playbooks/saas/roles/mosquitto/templates/nomad.hcl b/ansible/playbooks/saas/roles/mosquitto/templates/nomad.hcl
index fb6082b4..600ce577 100644
--- a/ansible/playbooks/saas/roles/mosquitto/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/mosquitto/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "{{ domain }}" {

diff --git a/ansible/playbooks/saas/roles/nextcloud/templates/nomad.hcl b/ansible/playbooks/saas/roles/nextcloud/templates/nomad.hcl
index 4117e0a6..5fd428f3 100644
--- a/ansible/playbooks/saas/roles/nextcloud/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/nextcloud/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "{{ domain }}" {

diff --git a/ansible/playbooks/saas/roles/nginx/templates/nomad.hcl b/ansible/playbooks/saas/roles/nginx/templates/nomad.hcl
index 034ff7f4..2e3166fb 100644
--- a/ansible/playbooks/saas/roles/nginx/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/nginx/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "nginx" {

diff --git a/ansible/playbooks/saas/roles/nomad/templates/backup.hcl.j2 b/ansible/playbooks/saas/roles/nomad/templates/backup.hcl.j2
index 7149bea7..d348356c 100644
--- a/ansible/playbooks/saas/roles/nomad/templates/backup.hcl.j2
+++ b/ansible/playbooks/saas/roles/nomad/templates/backup.hcl.j2
@@ -3,14 +3,16 @@ job "{{ nomad_job_name }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "batch"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

{% if periodic %}

diff --git a/ansible/playbooks/saas/roles/nomad/templates/restore.hcl.j2 b/ansible/playbooks/saas/roles/nomad/templates/restore.hcl.j2
index 32f367b4..fb7cae8b 100644
--- a/ansible/playbooks/saas/roles/nomad/templates/restore.hcl.j2
+++ b/ansible/playbooks/saas/roles/nomad/templates/restore.hcl.j2
@@ -3,14 +3,16 @@ job "{{ nomad_job_name }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "batch"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "{{ operation }}" {

diff --git a/ansible/playbooks/saas/roles/open-webui/templates/nomad.hcl b/ansible/playbooks/saas/roles/open-webui/templates/nomad.hcl
index 5292724e..bc1e1a92 100644
--- a/ansible/playbooks/saas/roles/open-webui/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/open-webui/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "open-webui" {

diff --git a/ansible/playbooks/saas/roles/postgresql/templates/nomad.hcl b/ansible/playbooks/saas/roles/postgresql/templates/nomad.hcl
index 130eac0b..b751328a 100644
--- a/ansible/playbooks/saas/roles/postgresql/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/postgresql/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "postgresql" {

diff --git a/ansible/playbooks/saas/roles/registry/templates/nomad.hcl b/ansible/playbooks/saas/roles/registry/templates/nomad.hcl
index 9bd3c512..cd2ec90f 100644
--- a/ansible/playbooks/saas/roles/registry/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/registry/templates/nomad.hcl
@@ -7,14 +7,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "registry" {

diff --git a/ansible/playbooks/saas/roles/rocketchat/templates/nomad.hcl b/ansible/playbooks/saas/roles/rocketchat/templates/nomad.hcl
index 84ced02e..2cc28193 100644
--- a/ansible/playbooks/saas/roles/rocketchat/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/rocketchat/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "{{ domain }}-rocketchat" {

diff --git a/ansible/playbooks/saas/roles/traefik/templates/nomad.hcl b/ansible/playbooks/saas/roles/traefik/templates/nomad.hcl
index 3fe3941e..40c87e45 100644
--- a/ansible/playbooks/saas/roles/traefik/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/traefik/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "traefik" {

diff --git a/ansible/playbooks/saas/roles/valkey/templates/nomad.hcl b/ansible/playbooks/saas/roles/valkey/templates/nomad.hcl
index 1d6525b9..55dc611d 100644
--- a/ansible/playbooks/saas/roles/valkey/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/valkey/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "{{ domain }}" {

diff --git a/ansible/playbooks/saas/roles/wordpress/templates/nomad.hcl b/ansible/playbooks/saas/roles/wordpress/templates/nomad.hcl
index 34c027d8..ab54a4e8 100644
--- a/ansible/playbooks/saas/roles/wordpress/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/wordpress/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "phpfpm" {

diff --git a/ansible/playbooks/saas/roles/zigbee2mqtt/templates/nomad.hcl b/ansible/playbooks/saas/roles/zigbee2mqtt/templates/nomad.hcl
index 5f3c4d47..255ef6d9 100644
--- a/ansible/playbooks/saas/roles/zigbee2mqtt/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/zigbee2mqtt/templates/nomad.hcl
@@ -3,14 +3,16 @@ job "{{ domain }}" {
   datacenters = ["{{ fact_instance.datacenter }}"]
   type = "service"

+{% if software.constraints.location %}
   constraint {
     attribute = "${meta.location}"
-    set_contains = "{{ fact_instance.location }}"
+    set_contains = "{{ software.constraints.location }}"
   }
+{% endif %}

   constraint {
     attribute = "${meta.instance}"
-    set_contains = "{{ inventory_hostname }}"
+    set_contains = "{{ software.instance }}"
   }

   group "{{ domain }}" {

From 1c4795a2f7124aa00374cf34cb3ea8dbc1541ab3 Mon Sep 17 00:00:00 2001
From: Mathieu Garcia
Date: Sun, 5 Oct 2025 15:06:51 +0200
Subject: [PATCH 07/18] chore(forgejo): remove debug task

---
 ansible/playbooks/saas/roles/forgejo/tasks/main.yml | 2 --
 1 file changed, 2 deletions(-)

diff --git a/ansible/playbooks/saas/roles/forgejo/tasks/main.yml b/ansible/playbooks/saas/roles/forgejo/tasks/main.yml
index f15216d9..3e40fe09 100644
--- a/ansible/playbooks/saas/roles/forgejo/tasks/main.yml
+++ b/ansible/playbooks/saas/roles/forgejo/tasks/main.yml
@@ -47,8 +47,6 @@
     priv: "{{ service_name[:32] }}.*:ALL"
     state: present

-- debug:
-    msg:
 - name: Copy default config file
   ansible.builtin.template:
     src: app.ini

From 177d2fb0d0c0bab3c0103fb3f6f0dd75ed969173 Mon Sep 17 00:00:00 2001
From: Mathieu Garcia
Date: Sun, 5 Oct 2025 15:07:28 +0200
Subject: [PATCH 08/18] feat(caddy): generate Caddyfile with Nomad template

---
 ansible/playbooks/saas/roles/caddy/tasks/main.yml      | 9 +--------
 ansible/playbooks/saas/roles/caddy/templates/nomad.hcl | 8 ++++++++
 2 files changed, 9 insertions(+), 8 deletions(-)

diff --git a/ansible/playbooks/saas/roles/caddy/tasks/main.yml b/ansible/playbooks/saas/roles/caddy/tasks/main.yml
index bd65ce9a..b45d0b3e 100644
--- a/ansible/playbooks/saas/roles/caddy/tasks/main.yml
+++ b/ansible/playbooks/saas/roles/caddy/tasks/main.yml
@@ -8,14 +8,7 @@
     mode: "0755"
   loop:
     - "{{ software_path }}/etc/caddy"
-
-- name: Copy default config file
-  ansible.builtin.template:
-    src: Caddyfile
-    dest: "{{ software_path }}/etc/caddy/Caddyfile"
-    owner: root
-    group: root
-    mode: "0644"
+  delegate_to: "{{ software.instance }}"

 - name: Copy nomad job to destination
   ansible.builtin.template:

diff --git a/ansible/playbooks/saas/roles/caddy/templates/nomad.hcl b/ansible/playbooks/saas/roles/caddy/templates/nomad.hcl
index c5982b04..edbe32a0 100644
--- a/ansible/playbooks/saas/roles/caddy/templates/nomad.hcl
+++ b/ansible/playbooks/saas/roles/caddy/templates/nomad.hcl
@@ -57,6 +57,14 @@ job "{{ domain }}" {
         ports = ["caddy", "metrics"]
       }

+      template {
+        change_mode = "noop"
+        destination = "{{ software_path }}/etc/caddy/Caddyfile"
+        data = <
Date: Sun, 5 Oct 2025 15:09:33 +0200
Subject: [PATCH 09/18] refactor(traefik): use software_path and delegate tasks to instance

Update tasks to reference {{ software_path }}/etc/traefik and run on the
target instance. Adjust the Nomad job template to use new volume mounts,
add configfile args, replace the traefik_ssl_ui port with traefik_ui, and
switch TLS certificate variables to {{ software.instance }}. Simplify
service tags and remove host tag.
--- .../playbooks/saas/roles/traefik/tasks/main.yml | 7 ++++--- .../saas/roles/traefik/templates/nomad.hcl | 17 ++++++++++------- .../saas/roles/traefik/templates/traefik.toml | 4 ++-- 3 files changed, 16 insertions(+), 12 deletions(-) diff --git a/ansible/playbooks/saas/roles/traefik/tasks/main.yml b/ansible/playbooks/saas/roles/traefik/tasks/main.yml index 5e759ee5..ecb898bf 100644 --- a/ansible/playbooks/saas/roles/traefik/tasks/main.yml +++ b/ansible/playbooks/saas/roles/traefik/tasks/main.yml @@ -89,20 +89,21 @@ group: root mode: '0755' loop: - - "{{ software_path }}" - - "{{ software_path }}/letsencrypt" + - "{{ software_path }}/etc/traefik/letsencrypt" - /var/log/traefik + delegate_to: "{{ software.instance }}" - name: Copy config files ansible.builtin.template: src: "{{ item }}" - dest: "{{ software_path }}/{{ item }}" + dest: "{{ software_path }}/etc/traefik/{{ item }}" owner: root group: root mode: '0600' loop: - traefik.toml - traefik_tls.toml + delegate_to: "{{ software.instance }}" - name: Copy nomad job to destination ansible.builtin.template: diff --git a/ansible/playbooks/saas/roles/traefik/templates/nomad.hcl b/ansible/playbooks/saas/roles/traefik/templates/nomad.hcl index 40c87e45..35f41b38 100644 --- a/ansible/playbooks/saas/roles/traefik/templates/nomad.hcl +++ b/ansible/playbooks/saas/roles/traefik/templates/nomad.hcl @@ -39,11 +39,10 @@ job "{{ domain }}" { service { name = "traefik" - port = "traefik_ssl_ui" + port = "traefik_ui" provider = "nomad" tags = [ - "fqdn:{{ domain }}", - "host:{{ inventory_hostname }}", + "fqdn:{{ domain }}" ] check { name = "traefik" @@ -66,11 +65,15 @@ job "{{ domain }}" { image = "traefik:{{ softwares.traefik.version }}" network_mode = "host" volumes = [ - "/data/{{ domain }}:/etc/traefik", - "/var/log/traefik:/var/log/traefik", - "/etc/ssl/simplestack:/etc/ssl/simplestack" + "{{ software_path }}/etc/traefik:/etc/traefik:rw", + "/var/log/traefik:/var/log/traefik:rw", + "/etc/ssl/simplestack:/etc/ssl/simplestack:ro" + ] + + args = [ + "--configfile", + "/etc/traefik/traefik.toml" ] - ports = ["traefik_ui", "traefik_ssl_ui"] } resources { diff --git a/ansible/playbooks/saas/roles/traefik/templates/traefik.toml b/ansible/playbooks/saas/roles/traefik/templates/traefik.toml index 903d7f3a..3030d1ea 100644 --- a/ansible/playbooks/saas/roles/traefik/templates/traefik.toml +++ b/ansible/playbooks/saas/roles/traefik/templates/traefik.toml @@ -47,8 +47,8 @@ token = "{{ lookup('simple-stack-ui', type='secret', key=inventory_hostname, subkey='nomad_traefik_token', missing='error') }}" [providers.nomad.endpoint.tls] ca = "/etc/ssl/simplestack/simplestack-ca.pem" - cert = "/etc/ssl/simplestack/{{ fact_instance.datacenter }}-server-nomad.pem" - key = "/etc/ssl/simplestack/{{ fact_instance.datacenter }}-server-nomad.key" + cert = "/etc/ssl/simplestack/{{ software.instance }}-dc1-client-nomad.pem" + key = "/etc/ssl/simplestack/{{ software.instance }}-dc1-client-nomad.key" [providers.file] filename = "/etc/traefik/traefik_tls.toml" From 39171a46f5ac2fb97a031e93eae23a67ef0bc9f9 Mon Sep 17 00:00:00 2001 From: Mathieu Garcia Date: Sun, 5 Oct 2025 15:09:50 +0200 Subject: [PATCH 10/18] feat(traefik): enable traefik API dashboard and expose API port --- ansible/playbooks/saas/roles/traefik/templates/nomad.hcl | 5 +++++ ansible/playbooks/saas/roles/traefik/templates/traefik.toml | 4 ++++ 2 files changed, 9 insertions(+) diff --git a/ansible/playbooks/saas/roles/traefik/templates/nomad.hcl b/ansible/playbooks/saas/roles/traefik/templates/nomad.hcl index 
35f41b38..b8065a6c 100644 --- a/ansible/playbooks/saas/roles/traefik/templates/nomad.hcl +++ b/ansible/playbooks/saas/roles/traefik/templates/nomad.hcl @@ -35,6 +35,10 @@ job "{{ domain }}" { to = 80 static = 80 } + port "traefik_api" { + to = 8080 + static = 8080 + } } service { @@ -69,6 +73,7 @@ job "{{ domain }}" { "/var/log/traefik:/var/log/traefik:rw", "/etc/ssl/simplestack:/etc/ssl/simplestack:ro" ] + ports = ["traefik_ui", "traefik_ssl_ui", "traefik_api"] args = [ "--configfile", diff --git a/ansible/playbooks/saas/roles/traefik/templates/traefik.toml b/ansible/playbooks/saas/roles/traefik/templates/traefik.toml index 3030d1ea..031c8bd1 100644 --- a/ansible/playbooks/saas/roles/traefik/templates/traefik.toml +++ b/ansible/playbooks/saas/roles/traefik/templates/traefik.toml @@ -2,6 +2,10 @@ checkNewVersion = false sendAnonymousUsage = false +[api] + dashboard = true + insecure = true + [entryPoints] [entryPoints.http] address = ":80" From 51557567fd3acfc7ceaee2bcbe0e139520a90610 Mon Sep 17 00:00:00 2001 From: Mathieu Garcia Date: Sun, 5 Oct 2025 15:10:06 +0200 Subject: [PATCH 11/18] refactor(ansible): move temporary build dir creation to pre_tasks --- ansible/playbooks/saas/image.yml | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/ansible/playbooks/saas/image.yml b/ansible/playbooks/saas/image.yml index bedbb887..9f819d3d 100644 --- a/ansible/playbooks/saas/image.yml +++ b/ansible/playbooks/saas/image.yml @@ -19,6 +19,17 @@ path: "/tmp/{{ catalog }}" pre_tasks: + - name: Create temporary build directory + ansible.builtin.file: + path: "{{ item }}" + recurse: true + state: directory + mode: '0755' + loop: + - /root/.docker + - "{{ build_work_dir }}/download" + - "{{ build_work_dir }}/{{ upstream_default_arch }}" + - name: Copy docker config file ansible.builtin.copy: content: | @@ -34,16 +45,6 @@ group: root mode: '0600' - - name: Create temporary build directory - ansible.builtin.file: - path: "{{ item }}" - recurse: true - state: directory - mode: '0755' - loop: - - "{{ build_work_dir }}/download" - - "{{ build_work_dir }}/{{ upstream_default_arch }}" - tasks: - name: Install dependencies ansible.builtin.include_role: From 51bbf338b5e0b2f64c0b2f159f01a7a12d35536e Mon Sep 17 00:00:00 2001 From: Mathieu Garcia Date: Sun, 5 Oct 2025 17:50:15 +0200 Subject: [PATCH 12/18] refactor(nomad): simplify role configuration and remove legacy settings --- ansible/playbooks/paas/nomad.yml | 4 +- .../paas/roles/certificate/defaults/main.yml | 4 +- .../paas/roles/certificate/tasks/ca.yml | 18 +- .../paas/roles/nomad/defaults/main.yml | 176 +++++++++--------- .../paas/roles/nomad/handlers/main.yml | 7 - .../paas/roles/nomad/tasks/01_nodes_roles.yml | 64 ++----- .../paas/roles/nomad/tasks/02_network.yml | 14 +- .../paas/roles/nomad/tasks/04_tls_certs.yml | 25 ++- .../roles/nomad/tasks/06_configuration.yml | 42 ++--- .../roles/nomad/tasks/07_autoeligibility.yml | 173 ++++++++--------- .../playbooks/paas/roles/nomad/tasks/main.yml | 4 - .../roles/nomad/templates/50-bridge.yaml.j2 | 9 - .../paas/roles/nomad/templates/client.hcl.j2 | 27 +-- .../paas/roles/nomad/templates/docker.hcl.j2 | 16 +- .../paas/roles/nomad/templates/nomad.hcl.j2 | 71 +++---- .../paas/roles/nomad/templates/server.hcl.j2 | 35 +++- .../playbooks/paas/roles/nomad/vars/main.yml | 47 +---- 17 files changed, 306 insertions(+), 430 deletions(-) delete mode 100644 ansible/playbooks/paas/roles/nomad/templates/50-bridge.yaml.j2 diff --git a/ansible/playbooks/paas/nomad.yml 
b/ansible/playbooks/paas/nomad.yml index 18299451..20760e0d 100644 --- a/ansible/playbooks/paas/nomad.yml +++ b/ansible/playbooks/paas/nomad.yml @@ -1,10 +1,10 @@ --- - name: Create CA TLS any_errors_fatal: true - hosts: "{{ hosts_limit | default('infrastructure') }}" + hosts: localhost gather_facts: false strategy: linear - become: true + become: false tasks: - name: "Create TLS CA" ansible.builtin.include_role: diff --git a/ansible/playbooks/paas/roles/certificate/defaults/main.yml b/ansible/playbooks/paas/roles/certificate/defaults/main.yml index 67b69d2d..a7d47e9e 100644 --- a/ansible/playbooks/paas/roles/certificate/defaults/main.yml +++ b/ansible/playbooks/paas/roles/certificate/defaults/main.yml @@ -2,8 +2,8 @@ # defaults file for certificate certificate_ca_host: localhost -certificate_ca_host_dir: "~/.simple-stack/tls/{{ inventory_hostname }}" +certificate_ca_host_dir: "~/.simple-stack/tls" certificate_ca_pubkey: simplestack-ca.pem certificate_ca_privatekey: simplestack-ca-key.pem certificate_host_certificate_dir: /etc/ssl/simplestack -certificate_common_name: simplestack +certificate_common_name: nomad diff --git a/ansible/playbooks/paas/roles/certificate/tasks/ca.yml b/ansible/playbooks/paas/roles/certificate/tasks/ca.yml index 576e363e..2c634e6a 100644 --- a/ansible/playbooks/paas/roles/certificate/tasks/ca.yml +++ b/ansible/playbooks/paas/roles/certificate/tasks/ca.yml @@ -15,7 +15,7 @@ become: false register: cert_ca_tls_priv_present -- name: "Certificate | Create private key for CA (simplestack-ca-key.pem)" +- name: "Certificate | Create private key for CA" community.crypto.openssl_privatekey: path: "{{ certificate_ca_host_dir }}/{{ certificate_ca_privatekey }}" delegate_to: "{{ certificate_ca_host }}" @@ -54,19 +54,3 @@ become: false when: not cert_ca_tls_priv_present.stat.exists -- name: "Certificate | Create TLS directory on target" - ansible.builtin.file: - path: "{{ certificate_host_certificate_dir }}" - state: directory - mode: '0755' - recurse: true - when: not ca_pubkey_present.stat.exists - -- name: "Certificate | Copy Public certs on nodes - {{ certificate_ca_pubkey }}" - ansible.builtin.copy: - src: "{{ certificate_ca_host_dir }}/{{ certificate_ca_pubkey }}" - dest: "{{ certificate_host_certificate_dir }}/{{ certificate_ca_pubkey }}" - owner: "root" - group: "root" - mode: "0640" - when: not ca_pubkey_present.stat.exists diff --git a/ansible/playbooks/paas/roles/nomad/defaults/main.yml b/ansible/playbooks/paas/roles/nomad/defaults/main.yml index ec6ba2e4..edf5496d 100644 --- a/ansible/playbooks/paas/roles/nomad/defaults/main.yml +++ b/ansible/playbooks/paas/roles/nomad/defaults/main.yml @@ -2,23 +2,29 @@ # defaults file for install # General -nomad_mode: "{% if (ansible_play_hosts | length) == 1 %}single{% elif (ansible_play_hosts | length) > 1 %}cluster{% endif %}" -nomad_node_role: "{% if nomad_mode == 'single' %}both{% elif nomad_mode == 'cluster' %}{{ nomad_node_role | default('both') }}{% endif %}" + +## single / cluster +nomad_mode: single + +## client / server / both +nomad_node_role: both nomad_deploy_cluster_name: "{{ nomad_cluster_name | default('default') }}" -nomad_firewall: true nomad_timezone: "Europe/Paris" -nomad_group: "simplestack" +nomad_group: simplestack # Configuration nomad_dc_name: "dc1" -nomad_node_name: "{{ inventory_hostname }}" +nomad_project: "{{ fact_instance.project }}" nomad_region: "{{ fact_instance.region }}" +nomad_location: "{{ fact_instance.region }}" + +nomad_node_name: "{{ inventory_hostname }}" -nomad_system_user: "{% if 
nomad_node_role == 'server' %}nomad{% elif nomad_node_role == 'client' or nomad_node_role == 'both' %}root{% endif %}" -nomad_system_group: "{% if nomad_node_role == 'server' %}nomad{% elif nomad_node_role == 'client' or nomad_node_role == 'both' %}root{% endif %}" +nomad_system_user: "{{ (nomad_node_role == 'server') | ternary('nomad', 'root') }}" +nomad_system_group: "{{ (nomad_node_role == 'server') | ternary('nomad', 'root') }}" nomad_config_dir: "/etc/nomad.d" nomad_config_file: "nomad.hcl" @@ -28,97 +34,93 @@ nomad_data_dir_server: "/opt/nomad/server" nomad_state_dir_client: "/opt/nomad/client" nomad_job_files_dir: "/var/tmp" -nomad_disable_anonymous_signature: "false" -nomad_disable_update_check: "false" +nomad_disable_anonymous_signature: false +nomad_disable_update_check: false nomad_leave_on_terminate: true nomad_leave_on_interrupt: true -nomad_client_auto_join: 'true' -nomad_server_auto_join: 'true' +nomad_client_auto_join: true +nomad_server_auto_join: true nomad_s3_storage_enabled: true # Network -nomad_http_scheme: "https" +nomad_http_scheme: https nomad_http_ip: "127.0.0.1" nomad_http_port: 4646 -nomad_cluster_bridge: "br0" -nomad_iface: "{{ ansible_default_ipv4.interface }}" +nomad_cluster_bridge: "ens3" +nomad_iface: "ens3" -nomad_list_node_ip: "\ - {% set nomad_list_node_ip = [] %}\ - {% for host in groups[consul_deploy_cluster_name] %}\ - {% if nomad_list_node_ip.append(hostvars[host]['ansible_' + hostvars[host].nomad_cluster_bridge].ipv4.address | default(nomad_cluster_bridge)) %}{% endif %}\ - {% endfor %}\ - {{ nomad_list_node_ip }}" +nomad_bind_address: "0.0.0.0" +nomad_advertise_address: "{{ hostvars[inventory_hostname]['ansible_' + nomad_iface]['ipv4']['address'] }}" -nomad_bind_address: "{{ hostvars[inventory_hostname]['ansible_' + nomad_iface | replace('-', '_')]['ipv4']['address'] }}" +nomad_ports: + http: "{{ nomad_ports_http | default('4646', true) }}" + rpc: "{{ nomad_ports_rpc | default('4647', true) }}" + serf: "{{ nomad_ports_serf | default('4648', true) }}" -nomad_bind_addr: "0.0.0.0" - -nomad_address_http: "0.0.0.0" -nomad_address_rpc: "{% if nomad_mode == 'single' %}0.0.0.0{% else %}{{ hostvars[inventory_hostname]['ansible_' + nomad_cluster_bridge | replace('-', '_')]['ipv4']['address'] }}{% endif %}" -nomad_address_serf: "{% if nomad_mode == 'single' %}0.0.0.0{% else %}{{ hostvars[inventory_hostname]['ansible_' + nomad_cluster_bridge | replace('-', '_')]['ipv4']['address'] }}{% endif %}" - -nomad_advertise_http: "{% if nomad_mode == 'single' %}127.0.0.1{% else %}{{ hostvars[inventory_hostname]['ansible_' + nomad_cluster_bridge | replace('-', '_')]['ipv4']['address'] }}{% endif %}" -nomad_advertise_rpc: "{% if nomad_mode == 'single' %}127.0.0.1{% else %}{{ hostvars[inventory_hostname]['ansible_' + nomad_cluster_bridge | replace('-', '_')]['ipv4']['address'] }}{% endif %}" -nomad_advertise_serf: "{% if nomad_mode == 'single' %}127.0.0.1{% else %}{{ hostvars[inventory_hostname]['ansible_' + nomad_cluster_bridge | replace('-', '_')]['ipv4']['address'] }}{% endif %}" - -nomad_port_http: 4646 -nomad_port_rpc: 4647 -nomad_port_serf: 4648 - -# Network -nomad_bridge: true - -nomad_bridge_list: - - { name: "internal", interface: "br-internal", ip_range: "172.18.0.1/16" } - - { name: "metrics", interface: "br-metrics", ip_range: "172.19.0.1/16" } # Log -nomad_debug: "false" -nomad_log_file: "/var/log/nomad/nomad.log" -nomad_log_level: "WARN" +nomad_debug: false +nomad_log_file: /var/log/nomad/nomad.log +nomad_log_level: WARN nomad_log_rotate_bytes: 0 
-nomad_log_rotate_duration: "24h" +nomad_log_rotate_duration: 24h nomad_log_rotate_max_files: 0 # Server -nomad_server_enabled: "true" +nomad_server_enabled: true -nomad_server_bootstrap_expect: "\ - {% set nomad_server_bootstrap_expect = [] %}\ +nomad_servers: "\ + {% set nomad_servers = [] %}\ {% if nomad_mode == 'single' %}\ - {% set _ = nomad_server_bootstrap_expect.append(1) %}\ + {% set _ = nomad_servers.append(inventory_hostname) %}\ {% else %}\ {% for host in groups[nomad_deploy_cluster_name] %}\ - {% if hostvars[host].consul_node_role in ['server', 'both'] %}\ - {% set _ = nomad_server_bootstrap_expect.append(1) %}\ + {% if hostvars[host].nomad_node_role in ['server', 'both'] %}\ + {% set _ = nomad_servers.append(host) %}\ {% endif %}\ {% endfor %}\ {% endif %}\ - {{ nomad_server_bootstrap_expect | length }}" + {{ nomad_servers }}" -nomad_server_rejoin_after_leave: "true" +nomad_servers_advertise_address: "\ + {% set nomad_servers_advertise_address = [] %}\ + {% if nomad_mode == 'single' %}\ + {% set _ = nomad_servers_advertise_address.append(hostvars[inventory_hostname].nomad_bind_address) %}\ + {% else %}\ + {% for host in groups[nomad_deploy_cluster_name] %}\ + {% if hostvars[host].nomad_node_role in ['server', 'both'] %}\ + {% set _ = nomad_servers_advertise_address.append(hostvars[host]['ansible_' + hostvars[host].nomad_iface]['ipv4']['address']) %}\ + {% endif %}\ + {% endfor %}\ + {% endif %}\ + {{ nomad_servers_advertise_address }}" + +nomad_server_retry_max: 0 +nomad_server_retry_join: false +nomad_serer_retry_interval: 30s +nomad_server_rejoin_after_leave: true nomad_server_enabled_schedulers: - service - batch - system +nomad_num_schedulers: "{{ ansible_processor_vcpus }}" -nomad_server_num_schedulers: 2 +nomad_server_num_schedulers: 1 nomad_server_event_buffer_size: 100 -nomad_server_node_gc_threshold: "24h" -nomad_server_eval_gc_threshold: "1h" -nomad_server_job_gc_threshold: "4h" -nomad_server_deployment_gc_threshold: "1h" - -nomad_server_heartbeat_grace: "10s" -nomad_server_min_heartbeat_ttl: "10s" -nomad_server_failover_heartbeat_ttl: "5m" +nomad_server_node_gc_threshold: 24h +nomad_server_eval_gc_threshold: 1h +nomad_server_job_gc_threshold: 4h +nomad_server_deployment_gc_threshold: 1h + +nomad_server_heartbeat_grace: 10s +nomad_server_min_heartbeat_ttl: 10s +nomad_server_failover_heartbeat_ttl: 5m nomad_server_max_heartbeats_per_second: 50.0 # nomad_server_encrypt: "" @@ -126,22 +128,22 @@ nomad_server_max_heartbeats_per_second: 50.0 nomad_server_raft_protocol: 3 # Client -nomad_client_enabled: "true" +nomad_client_enabled: true nomad_client_node_class: "{{ inventory_hostname }}-client" -nomad_client_node_pool: "default" +nomad_client_node_pool: default -nomad_client_no_host_uuid: "false" -nomad_client_max_kill_timeout: "30s" +nomad_client_no_host_uuid: false +nomad_client_max_kill_timeout: 30s -nomad_client_network_interface: docker0 +nomad_client_network_interface: "{{ nomad_iface }}" nomad_client_host_network_default: - name: "public" + name: public interface: "{{ ansible_default_ipv4.interface }}" nomad_client_host_network_cluster: - name: "cluster" + name: cluster interface: "{{ nomad_cluster_bridge }}" nomad_client_meta_list: {"arch": "{{ architecture_map[ansible_facts.architecture] }}", "location": "{{ fact_instance.location }}", "instance": "{{ inventory_hostname }}"} @@ -173,7 +175,7 @@ nomad_client_reserved_disk: 0 # TLS nomad_tls_ca_host: localhost -nomad_tls_ca_host_dir: "~/.simple-stack/tls/{{ inventory_hostname }}" +nomad_tls_ca_host_dir: 
"~/.simple-stack/tls" nomad_tls_ca_pubkey: "simplestack-ca.pem" nomad_tls_ca_privatekey: "simplestack-ca-key.pem" nomad_tls_ca_provider: "ownca" @@ -181,31 +183,33 @@ nomad_tls_host_certificate_dir: "/etc/ssl/simplestack" nomad_tls_common_name: "nomad" nomad_tls_check_delay: "+2w" -nomad_tls_http: "true" -nomad_tls_rpc: "true" - # TLS Server nomad_tls_cert_server: "{{ nomad_dc_name }}-server-nomad.pem" nomad_tls_privatekey_server: "{{ nomad_dc_name }}-server-nomad.key" -nomad_tls_common_name_server: "*.{{ nomad_dc_name }}.nomad" -nomad_tls_subject_alt_name_server: "DNS:localhost,IP:127.0.0.1,DNS:server.global.nomad,DNS:server.{{ nomad_region }}.nomad,DNS:server.{{ nomad_dc_name }}.nomad,DNS:*.{{ nomad_dc_name }}.nomad,IP:172.26.64.1,IP:172.17.0.1,IP:172.18.0.1" +nomad_tls_common_name_server: "*.{{ nomad_dc_name }}.{{ nomad_tls_common_name }}" +# nomad_tls_subject_alt_name_server: "DNS:localhost,IP:127.0.0.1,DNS:server.global.{{ certificate_subject_alt_name }},DNS:server.{{ nomad_region }}.{{ certificate_subject_alt_name }},DNS:server.{{ nomad_dc_name }}.{{ certificate_subject_alt_name }},DNS:*.{{ nomad_dc_name }}.{{ certificate_subject_alt_name }},IP:172.26.64.1,IP:172.17.0.1,IP:172.18.0.1" +# nomad_tls_subject_alt_name_server: "DNS:localhost,IP:127.0.0.1,DNS:server.global.nomad,DNS:server.{{ nomad_region }}.nomad,DNS:server.{{ nomad_dc_name }}.nomad,DNS:*.{{ nomad_dc_name }}.nomad,IP:172.26.64.1,IP:172.17.0.1,IP:172.18.0.1" +nomad_tls_subject_alt_name_server: "DNS:localhost,IP:127.0.0.1,IP:172.17.0.1,DNS:server.global.nomad,DNS:server.{{ nomad_region }}.nomad,DNS:server.{{ nomad_dc_name }}.nomad,DNS:*.{{ nomad_dc_name }}.nomad" # TLS client -nomad_tls_cert_client: "{{ nomad_dc_name }}-client-nomad.pem" -nomad_tls_privatekey_client: "{{ nomad_dc_name }}-client-nomad.key" +nomad_tls_cert_client: "{{ inventory_hostname }}-{{ nomad_dc_name }}-client-nomad.pem" +nomad_tls_privatekey_client: "{{ inventory_hostname }}-{{ nomad_dc_name }}-client-nomad.key" -nomad_tls_common_name_client: "*.{{ nomad_dc_name }}.nomad" -nomad_tls_subject_alt_name_client: "DNS:localhost,IP:127.0.0.1,DNS:client.global.nomad,DNS:client.{{ nomad_region }}.nomad,DNS:client.{{ nomad_dc_name }}.nomad,DNS:*.{{ nomad_dc_name }}.nomad,IP:172.26.64.1,IP:172.17.0.1,IP:172.18.0.1" +nomad_tls_common_name_client: "*.{{ nomad_dc_name }}.{{ nomad_tls_common_name }}" +# nomad_tls_subject_alt_name_client: "DNS:localhost,IP:127.0.0.1,DNS:client.global.{{ certificate_subject_alt_name }},DNS:client.{{ nomad_region }}.{{ nomad_tls_common_name }},DNS:client.{{ nomad_dc_name }}.{{ nomad_tls_common_name }},DNS:*.{{ nomad_dc_name }}.{{ nomad_tls_common_name }},IP:172.26.64.1,IP:172.17.0.1,IP:172.18.0.1" +# nomad_tls_subject_alt_name_client: "DNS:localhost,IP:127.0.0.1,DNS:client.global.nomad,DNS:client.{{ nomad_region }}.nomad,DNS:client.{{ nomad_dc_name }}.nomad,DNS:*.{{ nomad_dc_name }}.nomad,IP:172.26.64.1,IP:172.17.0.1,IP:172.18.0.1" +nomad_tls_subject_alt_name_client: "DNS:localhost,IP:127.0.0.1,IP:172.17.0.1,DNS:client.global.nomad,DNS:client.{{ nomad_region }}.nomad,DNS:client.{{ nomad_dc_name }}.nomad,DNS:*.{{ nomad_dc_name }}.nomad" nomad_tls_rpc_upgrade_mode: "false" nomad_tls_verify_server_hostname: "true" nomad_tls_verify_https_client: "false" # ACL -nomad_acl_enabled: "true" -nomad_acl_token_ttl: "30s" -nomad_acl_policy_ttl: "30s" +nomad_acl_enabled: true +nomad_acl_token_ttl: 30s +nomad_acl_policy_ttl: 30s +nomad_acl_replication_token: "" # Docker nomad_docker_client_dc_name: "dc1" @@ -279,13 +283,13 @@ 
nomad_telemetry_circonus_check_force_metric_activation: "false" # nomad_telemetry_circonus_broker_select_tag: "" # Autopilot -nomad_autopilot_cleanup_dead_servers: "true" -nomad_autopilot_last_contact_threshold: "200ms" +nomad_autopilot_cleanup_dead_servers: true +nomad_autopilot_last_contact_threshold: 200ms nomad_autopilot_max_trailing_logs: 250 -nomad_autopilot_server_stabilization_time: "10s" +nomad_autopilot_server_stabilization_time: 10s # UI -nomad_ui_enabled: "true" +nomad_ui_enabled: true nomad_ui_content_security_policy_connect_src: "*" nomad_ui_content_security_policy_default_src: "'none'" nomad_ui_content_security_policy_form_action: "'none'" diff --git a/ansible/playbooks/paas/roles/nomad/handlers/main.yml b/ansible/playbooks/paas/roles/nomad/handlers/main.yml index 9ea93323..7ce58f2c 100644 --- a/ansible/playbooks/paas/roles/nomad/handlers/main.yml +++ b/ansible/playbooks/paas/roles/nomad/handlers/main.yml @@ -5,13 +5,6 @@ ansible.builtin.apt: update_cache: true -- name: Netplan_apply - ansible.builtin.command: "netplan apply" - async: 45 - poll: 0 - register: netplan_apply - changed_when: netplan_apply.ansible_job_id != 0 - - name: Nomad_restart ansible.builtin.systemd_service: name: nomad diff --git a/ansible/playbooks/paas/roles/nomad/tasks/01_nodes_roles.yml b/ansible/playbooks/paas/roles/nomad/tasks/01_nodes_roles.yml index 9ec5412d..a69fc304 100644 --- a/ansible/playbooks/paas/roles/nomad/tasks/01_nodes_roles.yml +++ b/ansible/playbooks/paas/roles/nomad/tasks/01_nodes_roles.yml @@ -2,62 +2,30 @@ - name: Check number of host in play ansible.builtin.debug: msg: "Nomad will be deployed on {{ (ansible_play_hosts | length) }} host{% if (ansible_play_hosts | length) > 1 %}s{% endif %}" - verbosity: 1 - run_once: true -- name: "Nomad | Server Quorum" +- name: nomad_bind_address ansible.builtin.debug: - msg: "Server quorum: {{ nomad_servers_quorum }}" - verbosity: 1 - run_once: true - when: - - nomad_mode == 'cluster' - - nomad_roles_auto_assign + msg: "{{ nomad_bind_address }}" -- name: "Nomad | Clients available" +- name: nomad_advertise_address ansible.builtin.debug: - msg: "Clients available: {{ nomad_clients_available }}" - verbosity: 1 - run_once: true - when: - - nomad_mode == 'cluster' - - nomad_roles_auto_assign - -- name: "Nomad | Assign server role" - ansible.builtin.set_fact: - nomad_node_role: "server" - when: - - nomad_mode == 'cluster' - - inventory_hostname in groups[nomad_cluster_name][0:(nomad_servers_quorum | int )] - - nomad_roles_auto_assign + msg: "{{ nomad_advertise_address }}" -- name: "Nomad | Assign client role" - ansible.builtin.set_fact: - nomad_node_role: "client" - when: - - nomad_mode == 'cluster' - - inventory_hostname in groups[nomad_cluster_name][(nomad_servers_quorum | int ):] - - nomad_roles_auto_assign +- name: nomad_ports + ansible.builtin.debug: + msg: "{{ nomad_ports }}" -- name: "Nomad | Set role to nodes" - ansible.builtin.set_fact: - nomad_node_role: "{{ nomad_node_role }}" - when: - - nomad_mode == 'cluster' - - nomad_node_role is defined +- name: nomad_servers + ansible.builtin.debug: + msg: "{{ nomad_servers }}" -- name: "Nomad | Set role to node" - ansible.builtin.set_fact: - nomad_node_role: "both" - when: nomad_mode == 'single' +- name: nomad_client_meta_list + ansible.builtin.debug: + msg: "{{ nomad_client_meta_list }}" -- name: "Nomad | Insert Node role in local facts" - ansible.builtin.copy: - dest: /etc/ansible/facts.d/nomad_node_role.fact - content: "{{ nomad_node_role | to_nice_json }}" - mode: "0600" - when: - - 
-    - ansible_local.nomad_node_role.nomad_node_role is not defined

+- name: nomad_servers_advertise_address
+  ansible.builtin.debug:
+    msg: "{{ nomad_servers_advertise_address }}"

 - name: "Nomad | Roles Status"
   ansible.builtin.debug:
diff --git a/ansible/playbooks/paas/roles/nomad/tasks/02_network.yml b/ansible/playbooks/paas/roles/nomad/tasks/02_network.yml
index f6d53e9c..263a9373 100644
--- a/ansible/playbooks/paas/roles/nomad/tasks/02_network.yml
+++ b/ansible/playbooks/paas/roles/nomad/tasks/02_network.yml
@@ -35,9 +35,9 @@
     verbosity: 1
   when: not cni_install.stat.exists

-- name: "Nomad | CNI | Unarchive plugin CNI for {{ architecture | upper }}"
+- name: "Nomad | CNI | Unarchive plugin CNI"
   ansible.builtin.unarchive:
-    src: https://github.com/containernetworking/plugins/releases/download/{{ tag.json.tag_name }}/cni-plugins-linux-{{ architecture }}-{{ tag.json.tag_name }}.tgz
+    src: https://github.com/containernetworking/plugins/releases/download/{{ tag.json.tag_name }}/cni-plugins-linux-{{ nomad_architecture }}-{{ tag.json.tag_name }}.tgz
     dest: /opt/cni/bin
     remote_src: true
   when: not cni_install.stat.exists
@@ -56,13 +56,3 @@
     - { name: "net.bridge.bridge-nf-call-arptables", value: "1" }
     - { name: "net.bridge.bridge-nf-call-ip6tables", value: "1" }
     - { name: "net.bridge.bridge-nf-call-iptables", value: "1" }
-
-- name: "Nomad | Network | Conf Netplan for bridge(s)"
-  ansible.builtin.template:
-    src: 50-bridge.yaml.j2
-    dest: /etc/netplan/50-bridge.yaml
-    mode: "0600"
-  notify: Netplan_apply
-
-- name: "Nomad | Network | Flush handlers"
-  ansible.builtin.meta: flush_handlers
diff --git a/ansible/playbooks/paas/roles/nomad/tasks/04_tls_certs.yml b/ansible/playbooks/paas/roles/nomad/tasks/04_tls_certs.yml
index d231059b..54f88757 100644
--- a/ansible/playbooks/paas/roles/nomad/tasks/04_tls_certs.yml
+++ b/ansible/playbooks/paas/roles/nomad/tasks/04_tls_certs.yml
@@ -1,4 +1,18 @@
 ---
+- name: "Certificate | Create TLS directory on target"
+  ansible.builtin.file:
+    path: "{{ nomad_tls_host_certificate_dir }}"
+    state: directory
+    mode: '0755'
+
+- name: "Certificate | Copy Public certs on nodes - {{ nomad_tls_ca_pubkey }}"
+  ansible.builtin.copy:
+    src: "{{ nomad_tls_ca_host_dir }}/{{ nomad_tls_ca_pubkey }}"
+    dest: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_ca_pubkey }}"
+    owner: "root"
+    group: "root"
+    mode: "0640"
+
 - name: Nomad | Copy certificate on server (or both) nodes
   when: nomad_node_role in ['server', 'both']
   block:
@@ -20,7 +34,6 @@
         name: certificate
         tasks_from: client
       vars:
-        certificate_ca_host_dir: "{{ nomad_tls_ca_host_dir }}"
         certificate_ca_pubkey: "{{ nomad_tls_ca_pubkey }}"
         certificate_ca_privatekey: "{{ nomad_tls_ca_privatekey }}"
         certificate_ca_provider: "{{ nomad_tls_ca_provider }}"
@@ -29,6 +42,7 @@
         certificate_client_privatekey: "{{ nomad_tls_privatekey_server }}"
         certificate_common_name: "{{ nomad_tls_common_name_server }}"
         certificate_subject_alt_name: "{{ nomad_tls_subject_alt_name_server }}"
+      run_once: true
       when: not cert_tls_server_present.stat.exists or (cert_tls_server_present.stat.exists and not tls_check_server.valid_at.delay)

 - name: "Nomad | Copy cert private server key on nodes"
@@ -52,7 +66,7 @@
   block:
     - name: "Nomad | Check if TLS cert exists for Client"
       ansible.builtin.stat:
-        path: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_cert_client }}"
+        path: "{{ nomad_tls_ca_host_dir }}/{{ nomad_tls_cert_client }}"
      register: cert_tls_client_present

     - name: "Nomad | Get information on generated certificate for Clients"
@@ -68,7 +82,6 @@
         name: certificate
         tasks_from: client
       vars:
-        certificate_ca_host_dir: "{{ nomad_tls_ca_host_dir }}"
         certificate_ca_pubkey: "{{ nomad_tls_ca_pubkey }}"
         certificate_ca_privatekey: "{{ nomad_tls_ca_privatekey }}"
         certificate_ca_provider: "{{ nomad_tls_ca_provider }}"
@@ -77,11 +90,7 @@
         certificate_client_privatekey: "{{ nomad_tls_privatekey_client }}"
         certificate_common_name: "{{ nomad_tls_common_name_client }}"
         certificate_subject_alt_name: "{{ nomad_tls_subject_alt_name_client }}"
-      when:
-        - nomad_mode == 'cluster'
-        - ( groups[nomad_deploy_cluster_name] | map('extract', hostvars) | selectattr('nomad_node_role', 'equalto', 'client') | map(attribute='inventory_hostname') | length ) >= 1
-        - ( not ( groups[nomad_deploy_cluster_name] | map('extract', hostvars) | selectattr('nomad_node_role', 'equalto', 'client') | map(attribute='cert_tls_client_present.stat.exists') | list | first ) ) or
-          (( groups[nomad_deploy_cluster_name] | map('extract', hostvars) | selectattr('nomad_node_role', 'equalto', 'client') | map(attribute='cert_tls_client_present.stat.exists') | list | first ) and not (groups[nomad_deploy_cluster_name] | map('extract', hostvars) | selectattr('nomad_node_role', 'equalto', 'client') | map(attribute='tls_check_client.valid_at.delay') | list | first ) )
+      when: nomad_mode == 'cluster'

     - name: "Nomad | Copy cert client key on nodes"
diff --git a/ansible/playbooks/paas/roles/nomad/tasks/06_configuration.yml b/ansible/playbooks/paas/roles/nomad/tasks/06_configuration.yml
index bd6e5ed5..fc3057c1 100644
--- a/ansible/playbooks/paas/roles/nomad/tasks/06_configuration.yml
+++ b/ansible/playbooks/paas/roles/nomad/tasks/06_configuration.yml
@@ -15,26 +15,16 @@
   ansible.builtin.set_fact:
     nomad_encrypt_key: "{{ lookup('simple-stack-ui', type='secret', key=inventory_hostname, subkey='nomad_encrypt_key', missing='create', userpass=nomad_encrypt_key_out.stdout) }}"

-- name: "Nomad Install | Set Gossip Encryption Key init done local facts"
-  ansible.builtin.copy:
-    dest: /etc/ansible/facts.d/nomad_encrypt_key.fact
-    content: |
-      {
-        "nomad_encrypt_key": "init_done"
-      }
-    mode: "0600"
-  when: ansible_local.nomad_encrypt_key.nomad_encrypt_key is not defined
-
 - name: "Nomad Configuration | Add user nomad to docker group"
   ansible.builtin.user:
-    name: "nomad"
+    name: nomad
     groups: docker
     append: true
   when: nomad_node_role == 'client' or nomad_node_role == 'both'

 - name: "Nomad Configuration | Insert Nomad docker configuration"
   ansible.builtin.template:
-    src: "docker.hcl.j2"
+    src: docker.hcl.j2
     dest: "{{ nomad_config_dir }}/docker.hcl"
     owner: nomad
     group: nomad
@@ -43,7 +33,7 @@

 - name: "Nomad Install | Copy configurations files"
   ansible.builtin.template:
-    src: "nomad.hcl.j2"
+    src: nomad.hcl.j2
     dest: "{{ nomad_config_dir }}/nomad.hcl"
     owner: nomad
     group: nomad
@@ -51,23 +41,23 @@

 - name: "Nomad Install | Copy configurations files for servers"
   ansible.builtin.template:
-    src: "server.hcl.j2"
+    src: server.hcl.j2
     dest: "{{ nomad_config_dir }}/server.hcl"
     owner: nomad
     group: nomad
     mode: '0644'
   notify: Nomad_restart
-  when: nomad_node_role == 'server' or nomad_node_role == 'both'
+  when: nomad_node_role in ['server', 'both']

 - name: "Nomad Install | Copy configurations files for clients"
   ansible.builtin.template:
-    src: "client.hcl.j2"
+    src: client.hcl.j2
     dest: "{{ nomad_config_dir }}/client.hcl"
     owner: nomad
     group: nomad
     mode: '0644'
   notify: Nomad_restart
-  when: nomad_node_role == 'client' or nomad_node_role == 'both'
+  when: nomad_node_role in ['client', 'both']
Configuration | Flush handlers" ansible.builtin.meta: flush_handlers @@ -79,7 +69,7 @@ - name: Block block: - - name: "Nomad Install | Read Nomad management token from PasswordStore" + - name: "Nomad Install | Read Nomad management token from UI" ansible.builtin.set_fact: nomad_management_token: "{{ lookup('simple-stack-ui', type='secret', key=inventory_hostname, subkey='nomad_management_token', missing='error') }}" rescue: @@ -96,20 +86,10 @@ register: nomad_management_token_result run_once: true - - name: "Nomad Install | Set Nomad management token and insert in PasswordStore" + - name: "Nomad Install | Set Nomad management token and insert in UI" ansible.builtin.set_fact: nomad_management_token: "{{ lookup('simple-stack-ui', type='secret', key=inventory_hostname, subkey='nomad_management_token', missing='create', userpass=nomad_management_token_result.json.SecretID) }}" -- name: "Nomad Install | Set Nomad management token init done local facts" - ansible.builtin.copy: - dest: /etc/ansible/facts.d/nomad_management_token.fact - content: | - { - "nomad_management_token": "init_done" - } - mode: "0600" - when: ansible_local.nomad_management_token.nomad_management_token is not defined - - name: "Nomad Configuration | Enable MemoryOversubscription" ansible.builtin.uri: url: "{{ nomad_http_scheme }}://{{ nomad_http_ip }}:{{ nomad_http_port }}/v1/operator/scheduler/configuration" @@ -125,7 +105,7 @@ return_content: true status_code: - 200 - when: nomad_node_role == 'server' or nomad_node_role == 'both' + when: nomad_node_role in ['server', 'both'] register: nomad_memoryoversubscription ignore_errors: true @@ -139,7 +119,7 @@ - "plugin-s3-node.hcl" when: - nomad_s3_storage_enabled - - nomad_node_role == 'client' or nomad_node_role == 'both' + - nomad_node_role in ['client', 'both'] notify: Nomad_s3_jobs - name: "Nomad Configuration | Flush handlers" diff --git a/ansible/playbooks/paas/roles/nomad/tasks/07_autoeligibility.yml b/ansible/playbooks/paas/roles/nomad/tasks/07_autoeligibility.yml index 824b1b67..08c70aa2 100644 --- a/ansible/playbooks/paas/roles/nomad/tasks/07_autoeligibility.yml +++ b/ansible/playbooks/paas/roles/nomad/tasks/07_autoeligibility.yml @@ -1,103 +1,110 @@ --- -- name: "Nomad Policy | Get policies list" - ansible.builtin.uri: - url: "{{ nomad_http_scheme }}://{{ nomad_http_ip }}:{{ nomad_http_port }}/v1/acl/policies" - ca_path: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_ca_pubkey }}" - client_cert: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_cert_server }}" - client_key: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_privatekey_server }}" - method: GET - headers: - X-Nomad-Token: "{{ nomad_management_token | default(ansible_local.nomad_management_token.nomad_management_token) }}" - status_code: - - 200 - - 404 - return_content: true - register: nomad_policies_list_raw - -- name: "Nomad Policy | Set policies list fact" - ansible.builtin.set_fact: - nomad_policies_list: "{{ nomad_policies_list_raw.json | community.general.json_query('[*].Name') | string }}" - -- name: "Nomad Policy | Create policy for Nomad access autoeligibility" - ansible.builtin.uri: - url: "{{ nomad_http_scheme }}://{{ nomad_http_ip }}:{{ nomad_http_port }}/v1/acl/policy/autoeligibility" - ca_path: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_ca_pubkey }}" - client_cert: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_cert_server }}" - client_key: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_privatekey_server }}" - method: POST - headers: - X-Nomad-Token: "{{ 
-      X-Nomad-Token: "{{ nomad_management_token | default(ansible_local.nomad_management_token.nomad_management_token) }}"
-    body: |
-      {
-        "Name": "autoeligibility",
-        "Description": "Nomad policy for single mode install operations (drain)",
-        "Rules": "node {\n policy = \"write\"\n}\n\nagent {\n policy = \"write\"\n}"
-      }
-    body_format: json
-    status_code:
-      - 200
-      - 201
-  when: '"autoeligibility" not in nomad_policies_list'
-
-- name: "Nomad Policy | Warning policy already created"
-  ansible.builtin.debug:
-    msg: "Policy already created"
-    verbosity: 1
-  when: '"autoeligibility" in nomad_policies_list'
-
-- name: "Nomad Token | Get tokens list"
-  ansible.builtin.uri:
-    url: "{{ nomad_http_scheme }}://{{ nomad_http_ip }}:{{ nomad_http_port }}/v1/acl/tokens"
-    ca_path: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_ca_pubkey }}"
-    client_cert: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_cert_server }}"
-    client_key: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_privatekey_server }}"
-    method: GET
-    headers:
-      X-Nomad-Token: "{{ nomad_management_token | default(ansible_local.nomad_management_token.nomad_management_token) }}"
-    status_code:
-      - 200
-      - 404
-    return_content: true
-  register: nomad_tokens_list_raw
-
-- name: "Nomad Token | Set tokens list fact"
-  ansible.builtin.set_fact:
-    nomad_tokens_list: "{{ nomad_tokens_list_raw.json | community.general.json_query('[*].Name') | string }}"
+- name: Nomad | Configure autoeligibility policy and token on server (or both) nodes
+  when: nomad_node_role in ['server', 'both']
+  block:
+    - name: "Nomad Policy | Get policies list"
+      ansible.builtin.uri:
+        url: "{{ nomad_http_scheme }}://{{ nomad_http_ip }}:{{ nomad_http_port }}/v1/acl/policies"
+        ca_path: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_ca_pubkey }}"
+        client_cert: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_cert_server }}"
+        client_key: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_privatekey_server }}"
+        method: GET
+        headers:
+          X-Nomad-Token: "{{ lookup('simple-stack-ui', type='secret', key=inventory_hostname, subkey='nomad_management_token', missing='error') }}"
+        status_code:
+          - 200
+          - 404
+        return_content: true
+      register: nomad_policies_list_raw

-- name: "Nomad Token | Debug nomad_tokens_list"
-  ansible.builtin.debug:
-    msg: "{{ nomad_tokens_list }}"
-    verbosity: 1
+    - name: Debug nomad_policies_list_raw (for auto eligibility)
+      ansible.builtin.debug:
+        msg: "{{ nomad_policies_list_raw.json }}"

-- name: Block
-  block:
-    - name: "Nomad Install | Read Nomad nomad autoeligibility token"
+    - name: "Nomad Policy | Set policies list fact"
       ansible.builtin.set_fact:
-        nomad_autoeligibility_token: "{{ lookup('simple-stack-ui', type='secret', key=inventory_hostname, subkey='nomad_autoeligibility_token', missing='error') }}"
+        nomad_policies_list: "{{ nomad_policies_list_raw.json | community.general.json_query('[*].Name') | string }}"

-  rescue:
-    - name: "Nomad Token | Create token for Nomad access autoeligibility"
+    - name: "Nomad Policy | Create policy for Nomad access autoeligibility"
       ansible.builtin.uri:
-        url: "{{ nomad_http_scheme }}://{{ nomad_http_ip }}:{{ nomad_http_port }}/v1/acl/token"
+        url: "{{ nomad_http_scheme }}://{{ nomad_http_ip }}:{{ nomad_http_port }}/v1/acl/policy/autoeligibility"
         ca_path: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_ca_pubkey }}"
         client_cert: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_cert_server }}"
         client_key: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_privatekey_server }}"
-        method: PUT
+        method: POST
         headers:
-          X-Nomad-Token: "{{ nomad_management_token | default(ansible_local.nomad_management_token.nomad_management_token) }}"
+          X-Nomad-Token: "{{ lookup('simple-stack-ui', type='secret', key=inventory_hostname, subkey='nomad_management_token', missing='error') }}"
         body: |
           {
             "Name": "autoeligibility",
-            "Type": "client",
-            "Policies": ["autoeligibility"],
-            "Global": false
+            "Description": "Nomad policy for single mode install operations (drain)",
+            "Rules": "node {\n policy = \"write\"\n}\n\nagent {\n policy = \"write\"\n}"
           }
         body_format: json
         status_code:
           - 200
-      register: nomad_new_token_name
+          - 201
+      when: '"autoeligibility" not in nomad_policies_list'

-    - name: "Nomad Install | Set Nomad Autoeligibility token and insert in PasswordStore"
+    - name: "Nomad Policy | Warning policy already created"
+      ansible.builtin.debug:
+        msg: "Policy already created"
+        verbosity: 1
+      when: '"autoeligibility" in nomad_policies_list'
+
+    - name: "Nomad Token | Get tokens list"
+      ansible.builtin.uri:
+        url: "{{ nomad_http_scheme }}://{{ nomad_http_ip }}:{{ nomad_http_port }}/v1/acl/tokens"
+        ca_path: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_ca_pubkey }}"
+        client_cert: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_cert_server }}"
+        client_key: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_privatekey_server }}"
+        method: GET
+        headers:
+          X-Nomad-Token: "{{ lookup('simple-stack-ui', type='secret', key=inventory_hostname, subkey='nomad_management_token', missing='error') }}"
+        status_code:
+          - 200
+          - 404
+        return_content: true
+      register: nomad_tokens_list_raw
+
+    - name: "Nomad Token | Set tokens list fact"
       ansible.builtin.set_fact:
-        nomad_autoeligibility_token: "{{ lookup('simple-stack-ui', type='secret', key=inventory_hostname, subkey='nomad_autoeligibility_token', missing='create', userpass=nomad_new_token_name.json.SecretID) }}"
+        nomad_tokens_list: "{{ nomad_tokens_list_raw.json | community.general.json_query('[*].Name') | string }}"
+
+    - name: "Nomad Token | Debug nomad_tokens_list"
+      ansible.builtin.debug:
+        msg: "{{ nomad_tokens_list }}"
+        verbosity: 1
+
+    - name: Block
+      block:
+        - name: "Nomad Install | Read Nomad autoeligibility token"
+          ansible.builtin.set_fact:
+            nomad_autoeligibility_token: "{{ lookup('simple-stack-ui', type='secret', key=inventory_hostname, subkey='nomad_autoeligibility_token', missing='error') }}"
+
+      rescue:
+        - name: "Nomad Token | Create token for Nomad access autoeligibility"
+          ansible.builtin.uri:
+            url: "{{ nomad_http_scheme }}://{{ nomad_http_ip }}:{{ nomad_http_port }}/v1/acl/token"
+            ca_path: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_ca_pubkey }}"
+            client_cert: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_cert_server }}"
+            client_key: "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_privatekey_server }}"
+            method: PUT
+            headers:
+              X-Nomad-Token: "{{ lookup('simple-stack-ui', type='secret', key=inventory_hostname, subkey='nomad_management_token', missing='error') }}"
+            body: |
+              {
+                "Name": "autoeligibility",
+                "Type": "client",
+                "Policies": ["autoeligibility"],
+                "Global": false
+              }
+            body_format: json
+            status_code:
+              - 200
+          register: nomad_new_token_name
+
+        - name: "Nomad Install | Set Nomad Autoeligibility token and insert in UI"
+          ansible.builtin.set_fact:
+            nomad_autoeligibility_token: "{{ lookup('simple-stack-ui', type='secret', key=inventory_hostname, subkey='nomad_autoeligibility_token', missing='create', userpass=nomad_new_token_name.json.SecretID) }}"
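A note on the pattern above: the block/rescue pair implements a read-or-create
flow for secrets. The lookup with missing='error' makes the task fail whenever
the secret does not exist yet, and the rescue branch then creates and stores
it. The same idea in isolation, as a minimal sketch (my_secret and
generated_value are placeholder names, not variables used by this role):

    - name: Read secret, creating it on first run
      block:
        - name: Read existing value
          ansible.builtin.set_fact:
            my_secret: "{{ lookup('simple-stack-ui', type='secret', key=inventory_hostname, subkey='my_secret', missing='error') }}"
      rescue:
        - name: Create and persist a new value
          ansible.builtin.set_fact:
            my_secret: "{{ lookup('simple-stack-ui', type='secret', key=inventory_hostname, subkey='my_secret', missing='create', userpass=generated_value) }}"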
diff --git a/ansible/playbooks/paas/roles/nomad/tasks/main.yml b/ansible/playbooks/paas/roles/nomad/tasks/main.yml
index 77f05d64..93f96067 100644
--- a/ansible/playbooks/paas/roles/nomad/tasks/main.yml
+++ b/ansible/playbooks/paas/roles/nomad/tasks/main.yml
@@ -19,11 +19,7 @@

 - name: "Nomad | Install Nomad Auto Eligibility Node"
   ansible.builtin.include_tasks: "07_autoeligibility.yml"
-  when: nomad_mode == 'single'

 - name: "Nomad | Change SystemD configuration"
   ansible.builtin.include_tasks: "08_systemd_tuning.yml"
-
-- name: "Nomad | Firewall configuration"
-  ansible.builtin.include_tasks: "09_firewall.yml"
-  when: nomad_firewall
diff --git a/ansible/playbooks/paas/roles/nomad/templates/50-bridge.yaml.j2 b/ansible/playbooks/paas/roles/nomad/templates/50-bridge.yaml.j2
deleted file mode 100644
index 711ab642..00000000
--- a/ansible/playbooks/paas/roles/nomad/templates/50-bridge.yaml.j2
+++ /dev/null
@@ -1,9 +0,0 @@
-network:
-  version: 2
-  bridges:
-{% for item in nomad_bridge_list %}
-    {{ item.interface }}:
-      addresses: [{{ item.ip_range }}]
-      dhcp4: no
-      dhcp6: no
-{% endfor %}
diff --git a/ansible/playbooks/paas/roles/nomad/templates/client.hcl.j2 b/ansible/playbooks/paas/roles/nomad/templates/client.hcl.j2
index 5eb0041d..184a5b9d 100644
--- a/ansible/playbooks/paas/roles/nomad/templates/client.hcl.j2
+++ b/ansible/playbooks/paas/roles/nomad/templates/client.hcl.j2
@@ -1,14 +1,19 @@
 client {
-  enabled = {{ nomad_client_enabled }}
-
+  enabled = {{ nomad_client_enabled | bool | lower }}
   state_dir = "{{ nomad_state_dir_client }}"
   node_class = "{{ nomad_client_node_class }}"
   node_pool = "{{ nomad_client_node_pool }}"
-  no_host_uuid = {{ nomad_client_no_host_uuid }}
+  no_host_uuid = {{ nomad_client_no_host_uuid | bool | lower }}
+
+  servers = [
+    {%- set comma = joiner(",") -%}
+    {%- for server in nomad_servers_advertise_address -%}
+      {{ comma() }}"{{ server }}:{{ nomad_ports.rpc }}"
+    {%- endfor -%} ]

 {% if nomad_client_network_interface is defined %}
   network_interface = "{{ nomad_client_network_interface }}"
 {% endif %}
@@ -22,6 +27,7 @@
       reserved_ports = "{{ nomad_client_host_network_default.reserved_ports }}"
 {% endif %}
   }
+
 {% if nomad_mode == "cluster" %}
   host_network "{{ nomad_client_host_network_cluster.name }}" {
     interface = "{{ nomad_client_host_network_cluster.interface }}"
@@ -33,21 +39,6 @@
 {% endif %}
   }
 {% endif %}
-{% if nomad_bridge %}
-{% for item in nomad_bridge_list %}
-  host_network "{{ item.name }}" {
-{% if nomad_bridge_list is defined and item.interface is defined %}
-    interface = "{{ item.interface }}"
-{% endif %}
-{% if nomad_bridge_list is defined and item.cidr is defined %}
-    cidr = "{{ item.cidr }}"
-{% endif %}
-{% if nomad_bridge_list is defined and item.reserved_ports is defined %}
-    reserved_ports = "{{ item.reserved_ports }}"
-{% endif %}
-  }
-{% endfor %}
-{% endif %}

 {% if nomad_client_meta_list%}
   meta = {
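Worth calling out on the template changes above: the `| bool | lower` chain
normalizes the many truthy spellings Ansible accepts ("true", "yes", True) to
a boolean, and then renders it as the lowercase literal HCL expects. A rough
illustration with assumed values:

    # inputs (illustrative)            # rendered HCL
    nomad_client_enabled: "yes"        # enabled = true
    nomad_client_no_host_uuid: false   # no_host_uuid = false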
diff --git a/ansible/playbooks/paas/roles/nomad/templates/docker.hcl.j2 b/ansible/playbooks/paas/roles/nomad/templates/docker.hcl.j2
index 467a8256..14043718 100644
--- a/ansible/playbooks/paas/roles/nomad/templates/docker.hcl.j2
+++ b/ansible/playbooks/paas/roles/nomad/templates/docker.hcl.j2
@@ -26,15 +26,15 @@
   allow_caps = [{% for item in nomad_docker_client_allow_caps %}"{{ item }}"{% if not loop.last %}, {% endif %}{% endfor %}]

   gc {
-    image = {{ nomad_docker_gc_image }}
-    image_delay = "{{ nomad_docker_gc_image_delay }}"
-    container = {{ nomad_docker_gc_container }}
+    image       = {{ nomad_docker_gc_image }}
+    image_delay = "{{ nomad_docker_gc_image_delay }}"
+    container   = {{ nomad_docker_gc_container }}

-    dangling_containers {
-      enabled = {{ nomad_docker_gc_dangling_containers_enabled }}
-      dry_run = {{ nomad_docker_gc_dangling_containers_dry_run }}
-      period = "{{ nomad_docker_gc_dangling_containers_period }}"
-      creation_grace = "{{ nomad_docker_gc_dangling_containers_creation_grace }}"
+    dangling_containers {
+      enabled        = {{ nomad_docker_gc_dangling_containers_enabled }}
+      dry_run        = {{ nomad_docker_gc_dangling_containers_dry_run }}
+      period         = "{{ nomad_docker_gc_dangling_containers_period }}"
+      creation_grace = "{{ nomad_docker_gc_dangling_containers_creation_grace }}"
     }
   }
 }
diff --git a/ansible/playbooks/paas/roles/nomad/templates/nomad.hcl.j2 b/ansible/playbooks/paas/roles/nomad/templates/nomad.hcl.j2
index 2d493d4b..7d027483 100644
--- a/ansible/playbooks/paas/roles/nomad/templates/nomad.hcl.j2
+++ b/ansible/playbooks/paas/roles/nomad/templates/nomad.hcl.j2
@@ -2,76 +2,50 @@
 name = "{{ nomad_node_name }}"
 region = "{{ nomad_region }}"
 datacenter = "{{ nomad_dc_name }}"

-disable_anonymous_signature = {{ nomad_disable_anonymous_signature }}
-disable_update_check = {{ nomad_disable_update_check }}
+disable_anonymous_signature = {{ nomad_disable_anonymous_signature | bool | lower }}
+disable_update_check = {{ nomad_disable_update_check | bool | lower }}

 data_dir = "{{ nomad_data_dir }}"

-{% if nomad_mode == 'single'%}
-bind_addr = "{{ nomad_bind_addr }}"
-{% else %}
-addresses {
-  http = "{{ nomad_address_http }}"
-  rpc = "{{ nomad_address_rpc }}"
-  serf = "{{ nomad_address_serf }}"
-}
-{% endif %}
+bind_addr = "{{ nomad_bind_address }}"

 advertise {
-  http = "{{ nomad_advertise_http }}"
-  rpc = "{{ nomad_advertise_rpc }}"
-  serf = "{{ nomad_advertise_serf }}"
+  http = "{{ nomad_advertise_address }}:{{ nomad_ports.http }}"
+  rpc = "{{ nomad_advertise_address }}:{{ nomad_ports.rpc }}"
+  serf = "{{ nomad_advertise_address }}:{{ nomad_ports.serf }}"
 }

 ports {
-  http = {{ nomad_port_http }}
-  rpc = {{ nomad_port_rpc }}
-  serf = {{ nomad_port_serf }}
+  http = {{ nomad_ports.http }}
+  rpc = {{ nomad_ports.rpc }}
+  serf = {{ nomad_ports.serf }}
 }

-enable_debug = {{ nomad_debug }}
+enable_debug = {{ nomad_debug | bool | lower }}
 log_file = "{{ nomad_log_file }}"
 log_level = "{{ nomad_log_level }}"
 log_rotate_bytes = {{ nomad_log_rotate_bytes }}
 log_rotate_duration = "{{ nomad_log_rotate_duration }}"
 log_rotate_max_files = {{ nomad_log_rotate_max_files }}
-leave_on_terminate = {{ nomad_leave_on_terminate | lower }}
-leave_on_interrupt = {{ nomad_leave_on_interrupt | lower }}
-
-
-{% if nomad_node_role == 'server' or nomad_node_role == 'both' %}
-
-tls {
-  http = {{ nomad_tls_http }}
-  rpc = {{ nomad_tls_rpc }}
-  ca_file = "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_ca_pubkey }}"
-  cert_file = "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_cert_server }}"
-  key_file = "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_privatekey_server }}"
-  rpc_upgrade_mode = {{ nomad_tls_rpc_upgrade_mode }}
-  verify_server_hostname = "{{ nomad_tls_verify_server_hostname }}"
-  verify_https_client = "{{ nomad_tls_verify_https_client }}"
-}
-
-{% elif nomad_node_role == 'client' %}
+leave_on_terminate = {{ nomad_leave_on_terminate | bool | lower }}
+leave_on_interrupt = {{ nomad_leave_on_interrupt | bool | lower }}

 tls {
-  http = {{ nomad_tls_http }}
-  rpc = {{ nomad_tls_rpc }}
+  http = true
+  rpc = true
   ca_file = "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_ca_pubkey }}"
-  cert_file = "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_cert_client }}"
-  key_file = "{{ nomad_tls_host_certificate_dir }}/{{ nomad_tls_privatekey_client }}"
+  cert_file = "{{ nomad_tls_host_certificate_dir }}/{{ (nomad_node_role == 'client') | ternary(nomad_tls_cert_client, nomad_tls_cert_server) }}"
+  key_file = "{{ nomad_tls_host_certificate_dir }}/{{ (nomad_node_role == 'client') | ternary(nomad_tls_privatekey_client, nomad_tls_privatekey_server) }}"
   rpc_upgrade_mode = {{ nomad_tls_rpc_upgrade_mode }}
   verify_server_hostname = "{{ nomad_tls_verify_server_hostname }}"
   verify_https_client = "{{ nomad_tls_verify_https_client }}"
 }

-{% endif %}
-
 acl {
-  enabled = {{ nomad_acl_enabled }}
+  enabled = {{ nomad_acl_enabled | bool | lower }}
   token_ttl = "{{ nomad_acl_token_ttl }}"
   policy_ttl = "{{ nomad_acl_policy_ttl }}"
-  replication_token = ""
+  replication_token = "{{ nomad_acl_replication_token }}"
 }

 telemetry {
@@ -104,14 +78,19 @@
 }

 autopilot {
-  cleanup_dead_servers = {{ nomad_autopilot_cleanup_dead_servers }}
+  cleanup_dead_servers = {{ nomad_autopilot_cleanup_dead_servers | bool | lower }}
   last_contact_threshold = "{{ nomad_autopilot_last_contact_threshold }}"
   max_trailing_logs = {{ nomad_autopilot_max_trailing_logs }}
   server_stabilization_time = "{{ nomad_autopilot_server_stabilization_time }}"
 }

+limits {
+  http_max_conns_per_client = 300
+  rpc_max_conns_per_client = 300
+}
+
 ui {
-  enabled = {{ nomad_ui_enabled }}
+  enabled = {{ nomad_ui_enabled | bool | lower }}

   content_security_policy {
     connect_src = ["{{ nomad_ui_content_security_policy_connect_src }}"]
diff --git a/ansible/playbooks/paas/roles/nomad/templates/server.hcl.j2 b/ansible/playbooks/paas/roles/nomad/templates/server.hcl.j2
index 486e6a01..e9edfca8 100644
--- a/ansible/playbooks/paas/roles/nomad/templates/server.hcl.j2
+++ b/ansible/playbooks/paas/roles/nomad/templates/server.hcl.j2
@@ -1,17 +1,34 @@
 server {
-  enabled = {{ nomad_server_enabled }}
-
-  bootstrap_expect = {{ nomad_server_bootstrap_expect }}
-
+  enabled = {{ nomad_server_enabled | bool | lower }}
+  bootstrap_expect = {{ nomad_servers | length }}
   data_dir = "{{ nomad_data_dir_server }}"

-  rejoin_after_leave = {{ nomad_server_rejoin_after_leave }}
-
-  # enabled_schedulers = [{% for item in nomad_server_enabled_schedulers %}"{{ item }}"{% if not loop.last %},{% endif %}{% endfor %}]
-
-  # num_schedulers = {{ nomad_server_num_schedulers }}
+  {% if nomad_server_retry_join | bool -%}
+  retry_join = [
+    {%- set comma = joiner(",") -%}
+    {% for server in nomad_servers_advertise_address -%}
+      {{ comma() }}"{{ server }}"
+    {%- endfor -%} ]
+  retry_max = {{ nomad_server_retry_max }}
+  retry_interval = "{{ nomad_server_retry_interval }}"
+  {% else -%}
+  start_join = [
+    {%- set comma = joiner(",") -%}
+    {% for server in nomad_servers_advertise_address -%}
+      {{ comma() }}"{{ server }}"
+    {%- endfor -%} ]
+  {%- endif %}
+
+  rejoin_after_leave = {{ nomad_server_rejoin_after_leave | bool | lower }}
+
+  enabled_schedulers = [
+    {%- set comma = joiner(",") -%}
+    {% for scheduler in nomad_server_enabled_schedulers -%}
+      {{ comma() }}"{{ scheduler }}"
+    {%- endfor -%} ]
+  num_schedulers = {{ nomad_server_num_schedulers }}

   heartbeat_grace = "{{ nomad_server_heartbeat_grace }}"
   min_heartbeat_ttl = "{{ nomad_server_min_heartbeat_ttl }}"
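For orientation, here is a sketch of what the joiner loops in client.hcl.j2
and server.hcl.j2 would render to under illustrative values (the addresses
and ports below are examples, not role defaults):

    nomad_servers_advertise_address:
      - "10.0.0.1"
      - "10.0.0.2"
      - "10.0.0.3"
    nomad_ports:
      http: 4646
      rpc: 4647
      serf: 4648
    # client.hcl: servers    = ["10.0.0.1:4647","10.0.0.2:4647","10.0.0.3:4647"]
    # server.hcl: retry_join = ["10.0.0.1","10.0.0.2","10.0.0.3"]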
diff --git a/ansible/playbooks/paas/roles/nomad/vars/main.yml b/ansible/playbooks/paas/roles/nomad/vars/main.yml
index ee5acfd6..524bbba6 100644
--- a/ansible/playbooks/paas/roles/nomad/vars/main.yml
+++ b/ansible/playbooks/paas/roles/nomad/vars/main.yml
@@ -2,44 +2,11 @@
 # vars file for install

 # Architecture replacement
-architecture_map:
-  i386: '386'
-  x86_64: 'amd64'
-  aarch64: 'arm64'
-  armv7l: 'armv7'
-  armv6l: 'armv6'
+nomad_architecture_map:
+  i386: "386"
+  x86_64: amd64
+  aarch64: arm64
+  armv7l: armv7
+  armv6l: armv6

-architecture: "{{ architecture_map[ansible_architecture] | default(ansible_architecture) }}"
-
-nomad_ufw_rules:
-  client:
-    - { proto: "tcp", port: "4646", direction: "in" }
-    - { proto: "tcp", port: "4646", direction: "out" }
-    - { proto: "tcp", port: "4647", direction: "in" }
-    - { proto: "tcp", port: "4647", direction: "out" }
-    - { proto: "tcp", port: "20000:32000", direction: "in" }
-    - { proto: "tcp", port: "20000:32000", direction: "out" }
-    - { proto: "udp", port: "20000:32000", direction: "in" }
-    - { proto: "udp", port: "20000:32000", direction: "out" }
-  server:
-    - { proto: "tcp", port: "4646", direction: "in" }
-    - { proto: "tcp", port: "4646", direction: "out" }
-    - { proto: "tcp", port: "4647", direction: "in" }
-    - { proto: "tcp", port: "4647", direction: "out" }
-    - { proto: "tcp", port: "4648", direction: "in" }
-    - { proto: "tcp", port: "4648", direction: "out" }
-    - { proto: "udp", port: "4648", direction: "in" }
-    - { proto: "udp", port: "4648", direction: "out" }
-  both:
-    - { proto: "tcp", port: "4646", direction: "in" }
-    - { proto: "tcp", port: "4646", direction: "out" }
-    - { proto: "tcp", port: "4647", direction: "in" }
-    - { proto: "tcp", port: "4647", direction: "out" }
-    - { proto: "tcp", port: "4648", direction: "in" }
-    - { proto: "tcp", port: "4648", direction: "out" }
-    - { proto: "udp", port: "4648", direction: "in" }
-    - { proto: "udp", port: "4648", direction: "out" }
-    - { proto: "tcp", port: "20000:32000", direction: "in" }
-    - { proto: "tcp", port: "20000:32000", direction: "out" }
-    - { proto: "udp", port: "20000:32000", direction: "in" }
-    - { proto: "udp", port: "20000:32000", direction: "out" }
+nomad_architecture: "{{ nomad_architecture_map[ansible_architecture] | default(ansible_architecture) }}"

From 7cdb719ebae0a985aa4588d641f28ea0353eea18 Mon Sep 17 00:00:00 2001
From: Mathieu Garcia
Date: Sun, 5 Oct 2025 17:50:29 +0200
Subject: [PATCH 13/18] style(ansible): use fully qualified modules and quote mode

---
 ansible/playbooks/paas/timesyncd.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/ansible/playbooks/paas/timesyncd.yml b/ansible/playbooks/paas/timesyncd.yml
index 27494e08..e96b42aa 100644
--- a/ansible/playbooks/paas/timesyncd.yml
+++ b/ansible/playbooks/paas/timesyncd.yml
@@ -6,13 +6,13 @@
   become: true
   pre_tasks:
     - name: Create ansible facts.d directory
-      become: yes
-      file:
+      become: true
+      ansible.builtin.file:
        path: /etc/systemd/timesyncd.conf.d
         state: directory
         owner: "root"
         group: "root"
-        mode: 0755
+        mode: '0755'

     - name: Update timesyncd.conf
       ansible.builtin.copy:
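The quoting of mode above is more than style: YAML parses an unquoted 0755 as
the octal integer 493, and a bare 755 would be read as the decimal 755, a
different permission set entirely; ansible-lint flags such values as
risky-octal. Quoting keeps the intent unambiguous, e.g.:

    - name: Directory with an explicit, unambiguous mode (sketch)
      ansible.builtin.file:
        path: /tmp/example   # illustrative path
        state: directory
        mode: '0755'         # string, not an octal literal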
From eb3b9c60a275e5ee77d0bbb982188ddf7cf363ee Mon Sep 17 00:00:00 2001
From: Mathieu Garcia
Date: Sun, 5 Oct 2025 17:51:01 +0200
Subject: [PATCH 14/18] style(ansible): fully qualify modules and drop unused packages

---
 ansible/playbooks/paas/main.yml | 12 +++---------
 1 file changed, 3 insertions(+), 9 deletions(-)

diff --git a/ansible/playbooks/paas/main.yml b/ansible/playbooks/paas/main.yml
index b62bf7ae..85c0b7f3 100644
--- a/ansible/playbooks/paas/main.yml
+++ b/ansible/playbooks/paas/main.yml
@@ -6,13 +6,13 @@
   become: true
   pre_tasks:
     - name: Create ansible facts.d directory
-      become: yes
-      file:
+      become: true
+      ansible.builtin.file:
        path: /etc/ansible/facts.d
         state: directory
         owner: "root"
         group: "root"
-        mode: 0755
+        mode: '0755'

     - name: Get ipinfo.io
       ansible.builtin.uri:
@@ -32,13 +32,8 @@
       ansible.builtin.apt:
         pkg:
           - python3-debian
-          - python3-passlib
-          - libxml2-utils
-          - ruby-rubygems
-          - binutils
           - unzip
           - make
-          - python3-pymysql
           - jq
         state: present
@@ -49,4 +44,3 @@
       retries: 10
   roles:
     - unattended-upgrades
-    - restic

From aefb3f2eb0cadfcbe8bb72806f032dbb6a52ba1d Mon Sep 17 00:00:00 2001
From: Mathieu Garcia
Date: Sun, 5 Oct 2025 18:08:19 +0200
Subject: [PATCH 15/18] feat(ansible): add systemd-resolved configuration playbook

Add a new Ansible playbook `systemd-resolved.yml` to configure the
`systemd-resolved` service. The playbook creates the resolved configuration
directory, copies a basic resolved.conf with DNSStubListener enabled, and
ensures the service is restarted after changes. This provides standardized
DNS handling across the infrastructure.
---
 ansible/playbooks/paas/systemd-resolved.yml | 28 +++++++++++++++++++++
 1 file changed, 28 insertions(+)
 create mode 100644 ansible/playbooks/paas/systemd-resolved.yml

diff --git a/ansible/playbooks/paas/systemd-resolved.yml b/ansible/playbooks/paas/systemd-resolved.yml
new file mode 100644
index 00000000..f07ac0bb
--- /dev/null
+++ b/ansible/playbooks/paas/systemd-resolved.yml
@@ -0,0 +1,28 @@
+---
+- name: Configure systemd-resolved
+  any_errors_fatal: true
+  hosts: "{{ hosts_limit | default('infrastructure') }}"
+  gather_facts: true
+  become: true
+  tasks:
+    - name: Systemd-resolved | Create resolved directory
+      ansible.builtin.file:
+        path: /etc/systemd/resolved.conf.d
+        state: directory
+        mode: '0755'
+
+    - name: Systemd-resolved | Copy systemd resolved config
+      ansible.builtin.copy:
+        content: |
+          [Resolve]
+          DNSStubListener=yes
+        dest: /etc/systemd/resolved.conf.d/systemd-resolved.conf
+        mode: '0644'
+      notify: Restart systemd-resolved
+
+  handlers:
+    - name: Restart systemd-resolved
+      ansible.builtin.systemd:
+        state: restarted
+        daemon_reload: true
+        name: systemd-resolved

From b5ce62d9900ee6fb3dfd14328619601647c8c8d8 Mon Sep 17 00:00:00 2001
From: Mathieu Garcia
Date: Sun, 5 Oct 2025 23:34:08 +0200
Subject: [PATCH 16/18] feat(nomad): update debug tasks and timezone configuration

---
 .../{01_nodes_roles.yml => 01_debug.yml}      | 36 +++++++++----------
 .../playbooks/paas/roles/nomad/tasks/main.yml |  9 +++--
 2 files changed, 23 insertions(+), 22 deletions(-)
 rename ansible/playbooks/paas/roles/nomad/tasks/{01_nodes_roles.yml => 01_debug.yml} (51%)

diff --git a/ansible/playbooks/paas/roles/nomad/tasks/01_nodes_roles.yml b/ansible/playbooks/paas/roles/nomad/tasks/01_debug.yml
similarity index 51%
rename from ansible/playbooks/paas/roles/nomad/tasks/01_nodes_roles.yml
rename to ansible/playbooks/paas/roles/nomad/tasks/01_debug.yml
index a69fc304..b3a46a7a 100644
--- a/ansible/playbooks/paas/roles/nomad/tasks/01_nodes_roles.yml
+++ b/ansible/playbooks/paas/roles/nomad/tasks/01_debug.yml
@@ -1,39 +1,35 @@
 ---
-- name: Check number of host in play
+- name: "Nomad | debug | Check number of hosts"
   ansible.builtin.debug:
     msg: "Nomad will be deployed on {{ (ansible_play_hosts | length) }} host{% if (ansible_play_hosts | length) > 1 %}s{% endif %}"
+    verbosity: 1

-- name: nomad_bind_address
+- name: "Nomad | debug | nomad_node_role"
   ansible.builtin.debug:
-    msg: "{{ nomad_bind_address }}"
+    msg: "{{ nomad_node_role }}"
+    verbosity: 1

-- name: nomad_advertise_address
+- name: "Nomad | debug | nomad_bind_address"
   ansible.builtin.debug:
-    msg: "{{ nomad_advertise_address }}"
+    msg: "{{ nomad_bind_address }}"
+    verbosity: 1

-- name: nomad_ports
+- name: "Nomad | debug | nomad_advertise_address"
   ansible.builtin.debug:
-    msg: "{{ nomad_ports }}"
+    msg: "{{ nomad_advertise_address }}"
+    verbosity: 1

-- name: nomad_servers
+- name: "Nomad | debug | nomad_servers"
   ansible.builtin.debug:
     msg: "{{ nomad_servers }}"
+    verbosity: 1

-- name: nomad_client_meta_list
+- name: "Nomad | debug | nomad_client_meta_list"
   ansible.builtin.debug:
     msg: "{{ nomad_client_meta_list }}"
+    verbosity: 1

-- name: nomad_servers_advertise_address
+- name: "Nomad | debug | nomad_servers_advertise_address"
   ansible.builtin.debug:
     msg: "{{ nomad_servers_advertise_address }}"
-
-- name: "Nomad | Roles Status"
-  ansible.builtin.debug:
-    msg: |
-      {{ inventory_hostname }} - Nomad role: {{ nomad_node_role }}
     verbosity: 1
-
-- name: "Nomad | Commons tasks | Set-Timezone"
-  community.general.timezone:
-    name: "{{ nomad_timezone }}"
-    hwclock: local
diff --git a/ansible/playbooks/paas/roles/nomad/tasks/main.yml b/ansible/playbooks/paas/roles/nomad/tasks/main.yml
index 93f96067..75a72cd8 100644
--- a/ansible/playbooks/paas/roles/nomad/tasks/main.yml
+++ b/ansible/playbooks/paas/roles/nomad/tasks/main.yml
@@ -1,6 +1,11 @@
 ---
-- name: "Nomad | Commons tasks"
-  ansible.builtin.include_tasks: "01_nodes_roles.yml"
+- name: "Nomad | Debug"
+  ansible.builtin.include_tasks: "01_debug.yml"
+
+- name: "Nomad | Set timezone"
+  community.general.timezone:
+    name: "{{ nomad_timezone }}"
+    hwclock: local

 - name: "Nomad | Install CNI"
   ansible.builtin.include_tasks: "02_network.yml"

From fde5100105f293dc8065220575337804b720942c Mon Sep 17 00:00:00 2001
From: Mathieu Garcia
Date: Sun, 5 Oct 2025 23:34:31 +0200
Subject: [PATCH 17/18] fix(nomad): use ansible interface IP for nomad server address

---
 ansible/playbooks/paas/roles/nomad/defaults/main.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ansible/playbooks/paas/roles/nomad/defaults/main.yml b/ansible/playbooks/paas/roles/nomad/defaults/main.yml
index edf5496d..b4945a05 100644
--- a/ansible/playbooks/paas/roles/nomad/defaults/main.yml
+++ b/ansible/playbooks/paas/roles/nomad/defaults/main.yml
@@ -89,7 +89,7 @@ nomad_servers: "\
 nomad_servers_advertise_address: "\
   {% set nomad_servers_advertise_address = [] %}\
   {% if nomad_mode == 'single' %}\
-    {% set _ = nomad_servers_advertise_address.append(hostvars[inventory_hostname].nomad_bind_address) %}\
+    {% set _ = nomad_servers_advertise_address.append(hostvars[inventory_hostname]['ansible_' + nomad_iface]['ipv4']['address']) %}\
  {% else %}\
     {% for host in groups[nomad_deploy_cluster_name] %}\
       {% if hostvars[host].nomad_node_role in ['server', 'both'] %}\

From d488efcd435a749de5e66df84366716c85ed4968 Mon Sep 17 00:00:00 2001
From: Mathieu Garcia
Date: Sun, 5 Oct 2025 23:34:54 +0200
Subject: [PATCH 18/18] feat(nomad): ensure latest nomad binary is installed

---
 ansible/playbooks/paas/roles/nomad/tasks/05_install.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/ansible/playbooks/paas/roles/nomad/tasks/05_install.yml b/ansible/playbooks/paas/roles/nomad/tasks/05_install.yml
index dc2cda04..8970d792 100644
--- a/ansible/playbooks/paas/roles/nomad/tasks/05_install.yml
+++ b/ansible/playbooks/paas/roles/nomad/tasks/05_install.yml
@@ -1,6 +1,9 @@
+---
 - name: "Nomad Install | Install binary"
   ansible.builtin.apt:
     name: nomad
+    state: latest
+    allow_change_held_packages: true
     update_cache: true
   when: nomad_version is not defined
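One operational caveat on this last patch: with allow_change_held_packages
enabled, `state: latest` will upgrade nomad even on hosts where the package
was put on hold. Defining nomad_version skips this task entirely (see the
`when` guard), and a hold still protects against upgrades from ad-hoc apt
runs outside this role. A sketch of pinning via dpkg selections:

    - name: Hold the nomad package at its current version
      ansible.builtin.dpkg_selections:
        name: nomad
        selection: hold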