diff --git a/.github/workflows/os_builder.yaml b/.github/workflows/os_builder.yaml index 85cb94fd..df040c96 100644 --- a/.github/workflows/os_builder.yaml +++ b/.github/workflows/os_builder.yaml @@ -20,11 +20,11 @@ jobs: sudo apt-get update sudo apt-get install -y ansible cd os_builders - ansible-playbook -i inventory/localhost.yml playbooks/prep_builder.yml + ansible-playbook prep_builder.yml - name: Validate packer files run: | - cd os_builders/packfiles && + cd os_builders && packer init . && packer validate -syntax-only . @@ -45,11 +45,12 @@ jobs: pip3 install --upgrade pip pip3 install ansible python-debian - name: Run pre-prep playbook - # Patch the inventory to run on this machine run: | - . venv/bin/activate - cd os_builders && sed -i 's/all/default/g' inventory/localhost.yml - ansible-playbook -i inventory/localhost.yml playbooks/prepare_user_image.yml --extra-vars provision_this_machine=True + . venv/bin/activate + cd os_builders + ansible-playbook vm_baseline.yml + ansible-playbook image_fixes.yml + ansible-playbook tidy_image.yml test_image_build_rocky: strategy: @@ -62,8 +63,11 @@ jobs: - name: Install Ansible run: | dnf install epel-release -y + # These have to be separate steps as ansible is provided by epel-release dnf install ansible -y - name: Run pre-prep playbook run: | - cd os_builders && sed -i 's/all/default/g' inventory/localhost.yml - ansible-playbook -i inventory/localhost.yml playbooks/prepare_user_image.yml --extra-vars provision_this_machine=True + cd os_builders + ansible-playbook vm_baseline.yml + ansible-playbook image_fixes.yml + ansible-playbook tidy_image.yml diff --git a/os_builders/README.md b/os_builders/README.md index 6d4940cd..af831ee8 100644 --- a/os_builders/README.md +++ b/os_builders/README.md @@ -1,131 +1,157 @@ -Image Building -============== +# Image Building -This directory contains the Ansible playbooks and Packer templates for building the VM images. -It sets a baseline for all VMs to comply with our security policies, in an OS agnostic way. +## Contents: -Pipeline --------- +- [Pipeline](#pipeline) +- [Setting Up the Environment](#setting-up-the-environment) +- [Building Images for Release](#building-images-for-release) +- [Testing Changes to Images (Troubleshoot or Bug Fixing)](#testing-changes-to-images-troubleshoot-or-bug-fixing) +- [Project Layout](#project-layout) -The pipeline consists of the following steps: - -- Packer builds the VM image, with a packer user and password set to `packer` -- The VM is booted and Ansible is run to configure the VM using the `image_prep.yml` playbook -- The image is tidied, the packer user is deleted and the VM shuts down. -- Packer converts this to a qcow2 image ready for upload. - -*Note: This script does not upload or test the image, this is done separately currently* - -Building Locally -================= - -The easiest way is to run the Ansible playbooks which will prepare the current machine, and -also handle the multi-stage builds. - -Preparing a builder -------------------- - -To build locally, you need to have the following installed: -- ansible - -First, install required ansible collections: -``` -ansible-galaxy install -r requirements.yml -``` - -Then run the following command to install Qemu and setup the user's groups. You will need to log out and back in again for the groups to take effect. 
- - -``` -ansible-playbook -i inventory/localhost playbooks/prep_builder.yml --ask-become-pass -``` - -Running the build --------------------- - -Images can be built with the following command -``` -ansible-playbook -i inventory/localhost playbooks/builder.yml -```` - -This will build all images (it implies the `all` tag). Individual tags can be selected as follows - -``` -ansible-playbook -i inventory/localhost playbooks/builder.yml -t -```` - -The following tags are available: -- `all` - Build all images (default) -- `ubuntu` - Build all Ubuntu variants -- `ubuntu_2204` - Build Ubuntu 22.04 - - -Running builds on a VM ----------------------- - -By default we show a VNC window for the packer build, this is useful for debugging but will not work on a headless VM. - -To run a headless build, you need to set the `headless` variable to true. This can be done by passing the variable on the command line: +## Pipeline -``` -ansible-playbook -i inventory/localhost playbooks/builder.yml --extra-vars packer_headless=true -``` - - -Development and Testing -======================== - -Packer uses a multi stage build. Stage 1 will build the VM image using auto-install. Stage 2 will provision the image with any customisations we want. - -This allows us to rapidly iterate on our customisations without having to wait for the OS install to complete. - -Directory Layout ----------------- - -- `packfiles/` - Packer templates for building the VM images -- `packfiles/ubuntu_sources.pkr.hcl` - Contains image definitions for Ubuntu -- `packfiles/rocky_sources.pkr.hcl` - Contains image definitions for Rocky (TODO) -- `packfiles/build.pkr.hcl` - Contains the build steps for the VM image. This uses a two stage build described below - -CI/CD Files: - -- `packfiles/headless.pkrvars.hcl` - Contains the variables for doing a headless build - - -Testing New OS Variants --------------------------- -It's recommended you run this locally, so that you can see the VNC window and debug any issues: - -- Ensure the builder is configured, as above -- Add your new build to the sources file, you need to add a base step and a provisioning step. See the Ubuntu file for an example. -- Run your new/modified stage 1 build through the auto-install step: `cd packerfiles && packer build --only=stage1**` - -For example: -`cd packerfiles && packer build --only=stage1*ubuntu_2204 .` - -- Test the provisioning step on your new image: -`cd packerfiles && packer build --only=stage2**` - - -Prototyping new Ansible changes on a VM ----------------------------------------- -It's recommended you use an existing VM for this testing, as it will be quicker than running an OS install and uploading : - -- Add your hosts to a testing inventory file, e.g. `cat inventory/testing.yml`: - -``` -all: - hosts: - host-172-16-255-255.nubes.stfc.ac.uk: - ansible_user: ubuntu # or rocky -``` - -**Ensure you are on a VM!** - -The `provision_this_machine` variable acts as a guard from trashing your own machine. 
+The pipeline consists of the following steps:
-- Run the playbook
-```
-ansible-playbook -i inventory/testing.yml playbooks/provision_image.yml --extra-vars provision_this_machine=true
+- Packer pulls the latest generic image from the OS mirror into OpenStack
+- Packer generates an SSH key and uploads it to OpenStack
+- The VM is booted with the public key, using the application credential user
+- Ansible is run to configure the VM using the [vm_baseline](vm_baseline.yml), [image_fixes](image_fixes.yml) and [tidy_image](tidy_image.yml) playbooks
+- Packer snapshots the VM and deletes the VM, SSH key and generic image
+
+## Setting up the environment
+
+1. Install pip, Ansible, Packer and the OpenStack CLI
+   ```shell
+   # Ubuntu
+   sudo apt install python3-pip python3-venv -y
+
+   # Rocky
+   sudo dnf install python3-pip python3-venv -y
+
+   python3 -m venv image_builder
+   source image_builder/bin/activate
+   pip install -r requirements.txt
+
+   ansible-playbook prep_builder.yml
+   ```
+2. Create an application credential (admin is only required to make images public)
+   ```shell
+   # Either place the app creds in the config directory
+
+   mkdir -p ~/.config/openstack
+   mv clouds.yaml ~/.config/openstack/clouds.yaml
+
+   # Or
+
+   # Export credentials
+   export OS_AUTH_TYPE=v3applicationcredential
+   export OS_AUTH_URL=https://<keystone-host>:5000/v3
+   export OS_APPLICATION_CREDENTIAL_ID=<application-credential-id>
+   export OS_APPLICATION_CREDENTIAL_SECRET=<application-credential-secret>
+   ```
+3. Clone the repository
+   ```shell
+   git clone https://github.com/stfc/cloud-image-builders.git
+   cd cloud-image-builders/os_builders
+   ```
+
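+Optionally, you can confirm the credential works and look up the external network ID
+that `build.pkr.hcl` expects (used in the next section):
+```shell
+# Should return a token if the application credential is valid
+openstack token issue
+# The ID of the external network is the value for `networks` in build.pkr.hcl
+openstack network list --external
+```
+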
+## Building Images for Release
+
+1. Activate the virtual environment if it is not already active
+   ```shell
+   source image_builder/bin/activate # As made in the set up steps
+   ```
+
+2. Set the OpenStack external network ID in `build.pkr.hcl`
+   ```shell
+   # Contents of: build.pkr.hcl
+   18 networks = [""] # OpenStack External Network ID
+   ```
+3. Run Packer
+   ```shell
+   packer build .
+   # Or to build only certain images, e.g.
+   packer build -only openstack.ubuntu-noble,openstack.rocky-9 .
+   ```
+4. Rename the current images to warehoused and the new images to the current names
+   ```shell
+   # REQUIRES ADMIN
+   # For each image you are releasing
+   current_image_name=""
+   new_image_id=""
+   timestamp=$(date +%F)
+   openstack image set --deactivate --name "warehoused-${current_image_name}-${timestamp}" $current_image_name
+   openstack image set --public --name "${current_image_name}" $new_image_id
+
+   # For example, this may look like the below
+   current_image_name="ubuntu-noble-24.04-nogui"
+   new_image_id="ubuntu-noble-24.04-nogui-2025-11-20-abcde"
+   timestamp=$(date +%F)
+   openstack image set --deactivate --name "warehoused-${current_image_name}-${timestamp}" $current_image_name
+   openstack image set --public --name "${current_image_name}" $new_image_id
+
+   # ubuntu-noble-24.04-nogui is:
+   # - deactivated
+   # - renamed to warehoused-ubuntu-noble-24.04-nogui-2025-11-20
+   # ubuntu-noble-24.04-nogui-2025-11-20-abcde is:
+   # - set to public
+   # - renamed to ubuntu-noble-24.04-nogui
+   ```
+
+## Testing Changes to Images (Troubleshoot or Bug Fixing)
+
+1. Activate the virtual environment if it is not already active
+   ```shell
+   source image_builder/bin/activate # As made in the set up steps
+   ```
+2. Generate a temporary SSH key to use on the VMs and add it to OpenStack
+   ```shell
+   passphrase=$(pwgen 10 1)
+   fed_id=<your-fedid>
+   ssh-keygen -t rsa -f image_testing_key -N $passphrase
+   openstack keypair create --public-key=./image_testing_key.pub "image-testing-key-${fed_id}"
+   ```
+3. Create a VM using the current latest image for the OS you are fixing
+   ```shell
+   openstack server create --image <current-image-name> --key-name "image-testing-key-${fed_id}" \
+   --flavor l3.nano --network Internal --wait <vm-name>
+   ```
+4. Edit `inventory.yml` and add your host's IP
+   ```shell
+   # Get IP of VM
+   openstack server show <vm-name> -f json | jq .addresses.Internal | jq first
+
+   # Contents of: inventory.yml
+   5 ansible_host: "172.16.255.255" # Your host's IP
+   ```
+5. Run the baseline against the VM
+   ```shell
+   ansible-playbook -i inventory.yml vm_baseline.yml
+   ```
+6. Run any other playbooks you want to test against the VM
+   ```shell
+   ansible-playbook -i inventory.yml image_fixes.yml
+   ansible-playbook -i inventory.yml tidy_image.yml
+   ansible-playbook -i inventory.yml <custom-playbook>.yml
+   ```
+
+7. Repeat steps 5 and 6 as you change the playbooks, then commit and open a PR for any changes that work.
+
+## Project Layout
+```shell
+os_builders
+├── README.md
+├── build.pkr.hcl          # Packer build file
+├── galaxy.yml             # Ansible Galaxy collection metadata
+├── inventory.yml          # Inventory used when testing changes on an existing VM
+├── prep_builder.yml       # Playbook to install Packer
+├── vm_baseline.yml        # Playbook to configure the images
+├── image_fixes.yml        # Playbook to apply fixes to the images
+├── quattor.yml            # Playbook to install quattor onto the image
+├── tidy_image.yml         # Playbook to tidy the image before snapshotting
+├── requirements.txt       # Pins the Ansible and OpenStack CLI versions
+└── roles                  # Roles to configure the image
+    ├── container_registry/
+    ├── nubes_bootcontext/
+    ├── prep_builder/
+    ├── tidy_image/
+    └── vm_baseline/
 ```
-
diff --git a/os_builders/ansible.cfg b/os_builders/ansible.cfg
deleted file mode 100644
index 07e16a59..00000000
--- a/os_builders/ansible.cfg
+++ /dev/null
@@ -1,2 +0,0 @@
-[defaults]
-roles_path = roles
\ No newline at end of file
diff --git a/os_builders/build.pkr.hcl b/os_builders/build.pkr.hcl
new file mode 100644
index 00000000..bd43902e
--- /dev/null
+++ b/os_builders/build.pkr.hcl
@@ -0,0 +1,132 @@
+packer {
+  required_plugins {
+    ansible = {
+      version = " >= 1.0.4"
+      source = "github.com/hashicorp/ansible"
+    }
+    openstack = {
+      version = " >= 1.1.2"
+      source = "github.com/hashicorp/openstack"
+    }
+  }
+}
+
+locals {
+  date_suffix = "${formatdate("YYYY-MM-DD", timestamp())}"
+  metadata = {
+    "hw_machine_type" : "q35",
+    "hw_disk_bus" : "scsi",
+    "hw_firmware_type" : "uefi",
+    "hw_qemu_guest_agent" : "yes",
+    "hw_scsi_model" : "virtio-scsi",
+    "hw_vif_multiqueue_enabled" : "true",
+    "os_require_quiesce" : "yes"
+  }
+  aq_metadata = {
+    "AQ_ARCHETYPE": "cloud",
+    "AQ_DOMAIN": "prod_cloud",
+    "aq_managed": "true",
+    "AQ_OS": "rocky",
+    "AQ_OSNAME": "rocky",
+    "AQ_PERSONALITY": "nubesvms",
+  }
+}
+
+source "openstack" "builder" {
+  domain_name = "Default"
+  flavor = "l3.nano"
+  security_groups = ["default"]
+  networks = [""] # OpenStack External Network ID
+  image_visibility = "private"
+  ssh_timeout = "20m"
+}
+
+build {
+  source "openstack.builder" {
+    name = "ubuntu-jammy"
+    image_name = "ubuntu-jammy-22.04-nogui-${ local.date_suffix }"
+    ssh_username = "ubuntu"
+    external_source_image_url = "https://cloud-images.ubuntu.com/jammy/current/jammy-server-cloudimg-amd64.img"
+    metadata = local.metadata
+  }
+  source "openstack.builder" {
+    name = "ubuntu-noble"
+    image_name = "ubuntu-noble-24.04-nogui-${ local.date_suffix }"
+    ssh_username = "ubuntu"
+    external_source_image_url = "https://cloud-images.ubuntu.com/noble/current/noble-server-cloudimg-amd64.img"
+    metadata = local.metadata
+  }
+  source "openstack.builder" {
+    name = "ubuntu-azimuth"
+    external_source_image_url =
"https://object.arcus.openstack.hpc.cam.ac.uk/swift/v1/AUTH_f0dc9cb312144d0aa44037c9149d2513/azimuth-images/ubuntu-jammy-desktop-250701-1116.qcow2" + image_name = "test-azimuth" + ssh_username = "ubuntu" + metadata = local.metadata + } + source "openstack.builder" { + name = "rocky-8" + image_name = "rocky-8-nogui-${ local.date_suffix }" + ssh_username = "rocky" + external_source_image_url = "https://www.mirrorservice.org/sites/download.rockylinux.org/pub/rocky/8/images/x86_64/Rocky-8-GenericCloud-Base.latest.x86_64.qcow2" + metadata = local.metadata + } + source "openstack.builder" { + name = "rocky-9" + image_name = "rocky-9-nogui-${ local.date_suffix }" + ssh_username = "rocky" + external_source_image_url = "https://www.mirrorservice.org/sites/download.rockylinux.org/pub/rocky/9/images/x86_64/Rocky-9-GenericCloud-Base.latest.x86_64.qcow2" + metadata = local.metadata + } + source "openstack.builder" { + name = "rocky-8-aq" + image_name = "rocky-8-aq-${ local.date_suffix }" + ssh_username = "rocky" + external_source_image_url = "https://www.mirrorservice.org/sites/download.rockylinux.org/pub/rocky/8/images/x86_64/Rocky-8-GenericCloud-Base.latest.x86_64.qcow2" + metadata = merge(local.metadata, local.aq_metadata, {"AQ_OSVERSION": "8x-x86_64"}) + } + source "openstack.builder" { + name = "rocky-9-aq" + image_name = "rocky-9-aq-${ local.date_suffix }" + ssh_username = "rocky" + external_source_image_url = "https://www.mirrorservice.org/sites/download.rockylinux.org/pub/rocky/9/images/x86_64/Rocky-9-GenericCloud-Base.latest.x86_64.qcow2" + metadata = merge(local.metadata, local.aq_metadata, {"AQ_OSVERSION": "9x-x86_64"}) + } + + sources = ["openstack.builder"] + + provisioner "ansible" { + user = "${build.User}" + playbook_file = "vm_baseline.yml" + extra_arguments = [ + # Workaround https://github.com/hashicorp/packer/issues/12416 + "--scp-extra-args", "'-O'", + ] + } + + provisioner "ansible" { + user = "${build.User}" + playbook_file = "image_fixes.yml" + extra_arguments = [ + # Workaround https://github.com/hashicorp/packer/issues/12416 + "--scp-extra-args", "'-O'", + ] + } + + provisioner "ansible" { + user = "${build.User}" + playbook_file = "tidy_image.yml" + extra_arguments = [ + # Workaround https://github.com/hashicorp/packer/issues/12416 + "--scp-extra-args", "'-O'", + ] + } + provisioner "ansible" { + only = ["openstack.rocky-8-aq", "openstack.rocky-9-aq"] + user = "${build.User}" + playbook_file = "quattor.yml" + extra_arguments = [ + # Workaround https://github.com/hashicorp/packer/issues/12416 + "--scp-extra-args", "'-O'", + ] + } +} diff --git a/os_builders/image_fixes.yml b/os_builders/image_fixes.yml new file mode 100644 index 00000000..03bb500d --- /dev/null +++ b/os_builders/image_fixes.yml @@ -0,0 +1,8 @@ +- name: Apply fixes to STFC Cloud user image + hosts: all + pre_tasks: + - name: User warning + ansible.builtin.debug: + msg: "[Warning] Do not run on non-cloud machine" + roles: + - role: image_fixes \ No newline at end of file diff --git a/os_builders/inventory.yml b/os_builders/inventory.yml new file mode 100644 index 00000000..224e5cab --- /dev/null +++ b/os_builders/inventory.yml @@ -0,0 +1,6 @@ +--- +all: + hosts: + test-vm: + ansible_host: "172.16.255.255" + ansible_user: "ubuntu" # or rocky diff --git a/os_builders/inventory/localhost.yml b/os_builders/inventory/localhost.yml deleted file mode 100644 index fa71c5ce..00000000 --- a/os_builders/inventory/localhost.yml +++ /dev/null @@ -1,8 +0,0 @@ -all: - hosts: - localhost: - ansible_connection: local -default: - 
hosts: - localhost: - ansible_connection: local diff --git a/os_builders/packfiles/build.pkr.hcl b/os_builders/packfiles/build.pkr.hcl deleted file mode 100644 index 819d2529..00000000 --- a/os_builders/packfiles/build.pkr.hcl +++ /dev/null @@ -1,78 +0,0 @@ -packer { - required_plugins { - ansible = { - version = " >= 1.0.4" - source = "github.com/hashicorp/ansible" - } - openstack = { - version = " >= 1.1.2" - source = "github.com/hashicorp/openstack" - } - } -} - -source "openstack" "builder" { - domain_name = "Default" - flavor = "l3.nano" - security_groups = ["default"] - networks = ["5be315b7-7ebd-4254-97fe-18c1df501538"] - image_visibility = "private" - ssh_timeout = "20m" - metadata = { - "hw_machine_type" : "q35", - "hw_disk_bus" : "scsi", - "hw_firmware_type" : "uefi", - "hw_qemu_guest_agent" : "yes", - "hw_scsi_model" : "virtio-scsi", - "hw_vif_multiqueue_enabled" : "true", - "os_require_quiesce" : "yes" - } -} - -build { - source "openstack.builder" { - name = "ubuntu-jammy" - image_name = "ubuntu-jammy-22.04-nogui-baseline" - ssh_username = "ubuntu" - external_source_image_url = "https://cloud-images.ubuntu.com/jammy/current/jammy-server-cloudimg-amd64.img" - } - source "openstack.builder" { - name = "ubuntu-noble" - image_name = "ubuntu-noble-24.04-nogui-baseline" - ssh_username = "ubuntu" - external_source_image_url = "https://cloud-images.ubuntu.com/noble/current/noble-server-cloudimg-amd64.img" - } - source "openstack.builder" { - name = "ubuntu-azimuth" - external_source_image_url = "https://object.arcus.openstack.hpc.cam.ac.uk/swift/v1/AUTH_f0dc9cb312144d0aa44037c9149d2513/azimuth-images/ubuntu-jammy-desktop-250701-1116.qcow2" - image_name = "test-azimuth" - ssh_username = "ubuntu" - } - source "openstack.builder" { - name = "rocky-8" - image_name = "rocky-8-nogui-baseline" - ssh_username = "rocky" - external_source_image_url = "https://www.mirrorservice.org/sites/download.rockylinux.org/pub/rocky/8/images/x86_64/Rocky-8-GenericCloud-Base.latest.x86_64.qcow2" - } - source "openstack.builder" { - name = "rocky-9" - image_name = "rocky-9-nogui-baseline" - ssh_username = "rocky" - external_source_image_url = "https://www.mirrorservice.org/sites/download.rockylinux.org/pub/rocky/9/images/x86_64/Rocky-9-GenericCloud-Base.latest.x86_64.qcow2" - } - - sources = ["openstack.builder"] - - provisioner "ansible" { - user = "${build.User}" - playbook_file = "${path.root}/../playbooks/prepare_user_image.yml" - extra_arguments = [ - # Include safety checks - "--extra-vars", "provision_this_machine=true, tidy_image=True", - # Workaround https://github.com/hashicorp/packer/issues/12416 - "--scp-extra-args", "'-O'", - #"--ssh-extra-args", "-o IdentitiesOnly=yes -o HostKeyAlgorithms=+ssh-rsa -o PubkeyAcceptedAlgorithms=+ssh-rsa" - ] - } -} - diff --git a/os_builders/packfiles/ubuntu/http-server/meta-data b/os_builders/packfiles/ubuntu/http-server/meta-data deleted file mode 100644 index e69de29b..00000000 diff --git a/os_builders/packfiles/ubuntu/http-server/user-data b/os_builders/packfiles/ubuntu/http-server/user-data deleted file mode 100644 index eabe3c61..00000000 --- a/os_builders/packfiles/ubuntu/http-server/user-data +++ /dev/null @@ -1,26 +0,0 @@ -#cloud-config -autoinstall: - version: 1 - early-commands: - # workaround to stop ssh for packer as it thinks it timed out - - sudo systemctl stop ssh - locale: en_GB.UTF-8 - keyboard: - layout: gb - storage: - layout: - name: direct - ssh: - install-server: yes - allow-pw: yes - # Defer updates until ansible runs - updates: security - 
user-data: - users: - - name: packer - # openssl passwd -6 -stdin <<< packer - passwd: "$6$M.lbMd/giDrHLKPD$G9p0JcIWtYDFcXG497OQxvc5aWBaN5o0KdLhlCqrsDyXUhPhcDNsLETFMpeS8JpjyEZOW1qaq6sJq4mS4iO4z." - groups: [adm, cdrom, dip, plugdev, lxd, sudo] - lock-passwd: false - sudo: ALL=(ALL) NOPASSWD:ALL - shell: /bin/bash diff --git a/os_builders/packfiles/ubuntu/http-server/vendor-data b/os_builders/packfiles/ubuntu/http-server/vendor-data deleted file mode 100644 index e69de29b..00000000 diff --git a/os_builders/playbooks/builder.yml b/os_builders/playbooks/builder.yml deleted file mode 100644 index e020853d..00000000 --- a/os_builders/playbooks/builder.yml +++ /dev/null @@ -1,11 +0,0 @@ -- name: Build images - hosts: localhost - gather_facts: false - - roles: - - role: ../roles/run_packer - build_variant: "ubuntu_2204" - tags: - - all - - ubuntu - - ubuntu_2204 diff --git a/os_builders/playbooks/prep_builder.yml b/os_builders/playbooks/prep_builder.yml deleted file mode 100644 index 6f1dfdc2..00000000 --- a/os_builders/playbooks/prep_builder.yml +++ /dev/null @@ -1,14 +0,0 @@ -- name: Prep builder - hosts: localhost - gather_facts: true # Required to add user to libvirt group - - roles: - - role: ../roles/prep_builder - become: true - - post_tasks: - - name: Run Packer init - command: - cmd: packer init . - chdir: "{{ playbook_dir }}/../packfiles" - tags: packer diff --git a/os_builders/playbooks/prepare_user_image.yml b/os_builders/playbooks/prepare_user_image.yml deleted file mode 100644 index 88473761..00000000 --- a/os_builders/playbooks/prepare_user_image.yml +++ /dev/null @@ -1,15 +0,0 @@ -- name: Prep STFC Cloud User Image - hosts: all - become: true - - pre_tasks: - - name: User warning - ansible.builtin.debug: - msg: "[Warning] Do not run on non-cloud machine" - - roles: - - role: ../roles/vm_baseline - - role: ../roles/container_registry - - role: ../roles/nubes_bootcontext - - role: ../roles/tidy_image - when: "{{ tidy_image|default(false)|bool == True }}" diff --git a/os_builders/prep_builder.yml b/os_builders/prep_builder.yml new file mode 100644 index 00000000..229dbb53 --- /dev/null +++ b/os_builders/prep_builder.yml @@ -0,0 +1,10 @@ +--- +- name: Prep builder + hosts: localhost + roles: + - role: prep_builder + + post_tasks: + - name: Initialise Packer + command: + cmd: packer init . 
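+
+# Note: the prep_builder role uses become for its package and repository tasks, so
+# if passwordless sudo is not configured the playbook will likely need the become
+# password, e.g. `ansible-playbook prep_builder.yml --ask-become-pass`.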
diff --git a/os_builders/quattor.yml b/os_builders/quattor.yml new file mode 100644 index 00000000..72866cae --- /dev/null +++ b/os_builders/quattor.yml @@ -0,0 +1,4 @@ +- name: Install Quattor Onto STFC Cloud User Image + hosts: all + roles: + - role: quattor diff --git a/os_builders/requirements.txt b/os_builders/requirements.txt new file mode 100644 index 00000000..75ed9c84 --- /dev/null +++ b/os_builders/requirements.txt @@ -0,0 +1,5 @@ +# Newer Ansible versions do not support the Python 3.6 interpreter used by Rocky 8 +ansible==9.13.0 +ansible-core==2.16.14 +# The most compatible OpenStack CLI version with OpenStack Yoga +python-openstackclient==5.8.0 diff --git a/os_builders/requirements.yml b/os_builders/requirements.yml deleted file mode 100644 index f924ac50..00000000 --- a/os_builders/requirements.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -collections: - - name: community.general \ No newline at end of file diff --git a/os_builders/roles/vm_baseline/files/authorized_keys b/os_builders/roles/common/admin_keys/files/authorized_keys similarity index 100% rename from os_builders/roles/vm_baseline/files/authorized_keys rename to os_builders/roles/common/admin_keys/files/authorized_keys diff --git a/os_builders/roles/vm_baseline/files/update_keys.sh b/os_builders/roles/common/admin_keys/files/update_keys.sh similarity index 70% rename from os_builders/roles/vm_baseline/files/update_keys.sh rename to os_builders/roles/common/admin_keys/files/update_keys.sh index b38f7b37..0f6a88d1 100644 --- a/os_builders/roles/vm_baseline/files/update_keys.sh +++ b/os_builders/roles/common/admin_keys/files/update_keys.sh @@ -9,7 +9,7 @@ fi if lsattr $KEYSPATH/.ssh/authorized_keys | grep "\-i\-" then - echo "file $KEYSPATH/.ssh/authorized_keys is immutable, cannot write admin key list to it. Generated from \`update_keys.sh\`." #| mail -s "Error during VM authorized_keys update" cloud-support@stfc.ac.uk + echo "file $KEYSPATH/.ssh/authorized_keys is immutable, cannot write admin key list to it. Generated from \`update_keys.sh\`." else wget http://openstack.nubes.rl.ac.uk:9999/admin_key_list if [ -f admin_key_list ] @@ -17,11 +17,11 @@ else mv admin_key_list $KEYSPATH/.ssh/authorized_keys if [ ! -s $KEYSPATH/.ssh/authorized_keys ] then - echo "file $KEYSPATH/.ssh/admin_key_list is empty. The download from service node failed. Generated from \`update_keys.sh\`." #| mail -s "Error during VM authorized_keys update" cloud-support@stfc.ac.uk + echo "file $KEYSPATH/.ssh/admin_key_list is empty. The download from service node failed. Generated from \`update_keys.sh\`." else - grep "Alexander Dibbo" $KEYSPATH/.ssh/authorized_keys || { echo "file $KEYSPATH/.ssh/admin_key_list does not contain the correct keys. Generated from \`update_keys.sh\`."; } #| mail -s "Error during VM authorized_keys update" cloud-support@stfc.ac.uk ; } + grep "Alexander Dibbo" $KEYSPATH/.ssh/authorized_keys || { echo "file $KEYSPATH/.ssh/admin_key_list does not contain the correct keys. Generated from \`update_keys.sh\`."; } fi else - echo "file $KEYSPATH/.ssh/admin_key_list does not exist. The download from service node failed. Generated from \`update_keys.sh\`." #| mail -s "Error during VM authorized_keys update" cloud-support@stfc.ac.uk + echo "file $KEYSPATH/.ssh/admin_key_list does not exist. The download from service node failed. Generated from \`update_keys.sh\`." 
fi fi diff --git a/os_builders/roles/vm_baseline/tasks/ssh.yml b/os_builders/roles/common/admin_keys/tasks/main.yml similarity index 83% rename from os_builders/roles/vm_baseline/tasks/ssh.yml rename to os_builders/roles/common/admin_keys/tasks/main.yml index 25270900..7bbf0432 100644 --- a/os_builders/roles/vm_baseline/tasks/ssh.yml +++ b/os_builders/roles/common/admin_keys/tasks/main.yml @@ -1,7 +1,8 @@ - name: Install openssh-server - package: + ansible.builtin.package: name: openssh-server state: present + become: true - name: Enable root login for authorized admins block: @@ -10,55 +11,58 @@ when: not lookup('env', 'ROOT_PASSWORD') - name: Enable root account - user: + ansible.builtin.user: name: cloud state: present shell: /bin/bash # If a password is not set in the environment, disable password based login password: "{{ lookup('env', 'ROOT_PASSWORD', default='!') }}" + become: true - name: Ensure cloud sudoers.d directory exists - file: + ansible.builtin.file: path: "/etc/sudoers.d" state: directory owner: root group: root mode: 0440 + become: true - name: Add cloud user to sudoers - copy: + ansible.builtin.copy: dest: /etc/sudoers.d/cloud-team-user content: "cloud ALL=(ALL) NOPASSWD:ALL" mode: 0440 + become: true - name: Ensure cloud home directory exists - file: + ansible.builtin.file: path: "/home/cloud/.ssh" state: directory owner: cloud group: cloud mode: 0700 + become: true - name: Copy authorized cloud admin keys into cloud's authorized_keys - copy: + ansible.builtin.copy: src: "authorized_keys" dest: "/home/cloud/.ssh/authorized_keys" owner: cloud group: cloud mode: 0600 + become: true - name: Copy authorized key update script in - copy: + ansible.builtin.copy: src: "update_keys.sh" dest: "/usr/local/sbin/update_keys.sh" owner: root group: root mode: 0700 - -# ensure that cron is installed -- include_tasks: cron.yml + become: true - name: Update admin keys periodically ansible.builtin.cron: @@ -66,3 +70,4 @@ minute: 0 hour: 7 job: "/usr/local/sbin/update_keys.sh" + become: true diff --git a/os_builders/roles/container_registry/files/docker.json b/os_builders/roles/common/container_registry/files/docker.json similarity index 100% rename from os_builders/roles/container_registry/files/docker.json rename to os_builders/roles/common/container_registry/files/docker.json diff --git a/os_builders/roles/container_registry/tasks/main.yml b/os_builders/roles/common/container_registry/tasks/main.yml similarity index 100% rename from os_builders/roles/container_registry/tasks/main.yml rename to os_builders/roles/common/container_registry/tasks/main.yml diff --git a/os_builders/roles/vm_baseline/tasks/grub-cmdline.yml b/os_builders/roles/common/grub/tasks/main.yml similarity index 93% rename from os_builders/roles/vm_baseline/tasks/grub-cmdline.yml rename to os_builders/roles/common/grub/tasks/main.yml index 167ca70a..8bd5e833 100644 --- a/os_builders/roles/vm_baseline/tasks/grub-cmdline.yml +++ b/os_builders/roles/common/grub/tasks/main.yml @@ -31,12 +31,12 @@ become: true shell: "grub2-mkconfig -o /etc/grub2.cfg" when: - - ansible_distribution == "Rocky" + - ansible_os_family == "RedHat" - in_container.stdout != "0::/" - name: run updated grub config for ubuntu become: true shell: "update-grub" when: - - ansible_distribution == "Ubuntu" + - ansible_os_family == "Debian" - in_container.stdout != "0::/" diff --git a/os_builders/roles/nubes_bootcontext/files/motd b/os_builders/roles/common/nubes_bootcontext/files/motd similarity index 100% rename from 
os_builders/roles/nubes_bootcontext/files/motd rename to os_builders/roles/common/nubes_bootcontext/files/motd diff --git a/os_builders/roles/nubes_bootcontext/files/nubes-boot.service b/os_builders/roles/common/nubes_bootcontext/files/nubes-boot.service similarity index 100% rename from os_builders/roles/nubes_bootcontext/files/nubes-boot.service rename to os_builders/roles/common/nubes_bootcontext/files/nubes-boot.service diff --git a/os_builders/roles/nubes_bootcontext/files/nubes-bootcontext.sh b/os_builders/roles/common/nubes_bootcontext/files/nubes-bootcontext.sh similarity index 100% rename from os_builders/roles/nubes_bootcontext/files/nubes-bootcontext.sh rename to os_builders/roles/common/nubes_bootcontext/files/nubes-bootcontext.sh diff --git a/os_builders/roles/nubes_bootcontext/files/update_cloud_users.sh b/os_builders/roles/common/nubes_bootcontext/files/update_cloud_users.sh similarity index 100% rename from os_builders/roles/nubes_bootcontext/files/update_cloud_users.sh rename to os_builders/roles/common/nubes_bootcontext/files/update_cloud_users.sh diff --git a/os_builders/roles/nubes_bootcontext/tasks/main.yml b/os_builders/roles/common/nubes_bootcontext/tasks/main.yml similarity index 95% rename from os_builders/roles/nubes_bootcontext/tasks/main.yml rename to os_builders/roles/common/nubes_bootcontext/tasks/main.yml index a1a92920..dcda9e4c 100644 --- a/os_builders/roles/nubes_bootcontext/tasks/main.yml +++ b/os_builders/roles/common/nubes_bootcontext/tasks/main.yml @@ -7,7 +7,7 @@ name: dnsutils state: present update_cache: true - when: ansible_distribution == "Rocky" + when: ansible_os_family == "RedHat" - name: Copy in nubes-bootcontext script copy: diff --git a/os_builders/roles/nubes_bootcontext/tasks/update_cloud_users.yml b/os_builders/roles/common/nubes_bootcontext/tasks/update_cloud_users.yml similarity index 100% rename from os_builders/roles/nubes_bootcontext/tasks/update_cloud_users.yml rename to os_builders/roles/common/nubes_bootcontext/tasks/update_cloud_users.yml diff --git a/os_builders/roles/vm_baseline/tasks/cron.yml b/os_builders/roles/common/packages/tasks/cron.yml similarity index 50% rename from os_builders/roles/vm_baseline/tasks/cron.yml rename to os_builders/roles/common/packages/tasks/cron.yml index 36adc9dd..b8e269d6 100644 --- a/os_builders/roles/vm_baseline/tasks/cron.yml +++ b/os_builders/roles/common/packages/tasks/cron.yml @@ -1,13 +1,15 @@ - name: Install cron on Ubuntu - apt: + ansible.builtin.apt: name: "cron" state: present update_cache: yes - when: ansible_distribution == "Ubuntu" + become: true + when: ansible_os_family == "Debian" - name: Install cron on RL - yum: + ansible.builtin.yum: name: "cronie" state: present - when: ansible_distribution == "Rocky" + become: true + when: ansible_os_family == "RedHat" diff --git a/os_builders/roles/common/packages/tasks/main.yml b/os_builders/roles/common/packages/tasks/main.yml new file mode 100644 index 00000000..e9eff4f1 --- /dev/null +++ b/os_builders/roles/common/packages/tasks/main.yml @@ -0,0 +1,6 @@ +--- +# Include packages that must be installed for security compliance +- include_tasks: system_updates.yml +- include_tasks: cron.yml +- include_tasks: qemu-guest-agent.yml +- include_tasks: ukescienceca.yml diff --git a/os_builders/roles/vm_baseline/tasks/qemu-guest-agent.yml b/os_builders/roles/common/packages/tasks/qemu-guest-agent.yml similarity index 65% rename from os_builders/roles/vm_baseline/tasks/qemu-guest-agent.yml rename to 
os_builders/roles/common/packages/tasks/qemu-guest-agent.yml index 194b329c..cc1463c2 100644 --- a/os_builders/roles/vm_baseline/tasks/qemu-guest-agent.yml +++ b/os_builders/roles/common/packages/tasks/qemu-guest-agent.yml @@ -1,17 +1,20 @@ - name: Install Qemu Guest Agent on Ubuntu - apt: + ansible.builtin.apt: name: qemu-guest-agent state: present update_cache: yes - when: ansible_distribution == "Ubuntu" + become: true + when: ansible_os_family == "Debian" - name: Install Qemu Guest Agent on RL - yum: + ansible.builtin.yum: name: "qemu-guest-agent" state: present - when: ansible_distribution == "Rocky" + become: true + when: ansible_os_family == "RedHat" - name: Enable Qemu Guest Agent ansible.builtin.systemd_service: name: qemu-guest-agent.service enabled: true + become: true diff --git a/os_builders/roles/vm_baseline/tasks/update.yml b/os_builders/roles/common/packages/tasks/system_updates.yml similarity index 79% rename from os_builders/roles/vm_baseline/tasks/update.yml rename to os_builders/roles/common/packages/tasks/system_updates.yml index d297c49c..9cd07510 100644 --- a/os_builders/roles/vm_baseline/tasks/update.yml +++ b/os_builders/roles/common/packages/tasks/system_updates.yml @@ -4,11 +4,12 @@ when: ansible_distribution != "Rocky" and ansible_distribution != "Ubuntu" - name: Install system updates for RL systems - yum: + ansible.builtin.yum: name: "*" state: latest update_cache: yes - when: ansible_distribution == "Rocky" + become: true + when: ansible_os_family == "RedHat" register: result until: result is not failed retries: 5 @@ -26,11 +27,3 @@ until: result is not failed retries: 5 delay: 30 - -- name: get IP - shell: hostname -I - register: my_hostname - -- name: print IP - debug: - msg: "{{ my_hostname }}" \ No newline at end of file diff --git a/os_builders/roles/common/packages/tasks/ukescienceca.yml b/os_builders/roles/common/packages/tasks/ukescienceca.yml new file mode 100644 index 00000000..06649b9c --- /dev/null +++ b/os_builders/roles/common/packages/tasks/ukescienceca.yml @@ -0,0 +1,55 @@ +- name: Install UK eScience CA on Ubuntu + block: + - name: Install required package + ansible.builtin.apt: + name: python3-debian + update_cache: true + state: present + become: true + + - name: Add EU Grid PMA repo + ansible.builtin.deb822_repository: + name: eu-grid-pma + uris: http://repository.egi.eu/sw/production/cas/1/current + signed_by: "https://dist.eugridpma.info/distribution/igtf/current/GPG-KEY-EUGridPMA-RPM-4" + suites: egi-igtf + components: core + state: present + become: true + + - name: Install UK eScience Root + ansible.builtin.apt: + name: ca-ukescienceroot-2007 + state: present + update_cache: yes + become: true + + - name: Install UK eScience CA + ansible.builtin.apt: + name: ca-ukescienceca-2b + state: present + update_cache: yes + become: true + when: ansible_os_family == "Debian" + +- name: Install UK eScience Root CA on RL + block: + - name: add CA repo + ansible.builtin.yum_repository: + name: eScienceCAs + file: eScienceCAs + description: "Repository to install the UK eScience (and other IGTF) CA certs from" + baseurl: https://repository.egi.eu/sw/production/cas/1/current + enabled: true + gpgcheck: true + gpgkey: "https://dist.eugridpma.info/distribution/igtf/current/GPG-KEY-EUGridPMA-RPM-4" + become: true + + - name: Install UK eScience Packages + ansible.builtin.yum: + name: + - ca_UKeScienceRoot-2007 + - ca_UKeScienceCA-2B + state: present + become: true + when: ansible_os_family == "RedHat" diff --git 
a/os_builders/roles/vm_baseline/files/etc/pakiti2/pakiti2-client.conf b/os_builders/roles/common/pakiti/files/etc/pakiti2/pakiti2-client.conf similarity index 100% rename from os_builders/roles/vm_baseline/files/etc/pakiti2/pakiti2-client.conf rename to os_builders/roles/common/pakiti/files/etc/pakiti2/pakiti2-client.conf diff --git a/os_builders/roles/vm_baseline/files/etc/rsyslog.conf b/os_builders/roles/common/pakiti/files/etc/rsyslog.conf similarity index 100% rename from os_builders/roles/vm_baseline/files/etc/rsyslog.conf rename to os_builders/roles/common/pakiti/files/etc/rsyslog.conf diff --git a/os_builders/roles/vm_baseline/files/pakiti-client-2.1.4-3.RAL.noarch.rpm b/os_builders/roles/common/pakiti/files/pakiti-client-2.1.4-3.RAL.noarch.rpm similarity index 100% rename from os_builders/roles/vm_baseline/files/pakiti-client-2.1.4-3.RAL.noarch.rpm rename to os_builders/roles/common/pakiti/files/pakiti-client-2.1.4-3.RAL.noarch.rpm diff --git a/os_builders/roles/vm_baseline/tasks/pakiti.yml b/os_builders/roles/common/pakiti/tasks/main.yml similarity index 91% rename from os_builders/roles/vm_baseline/tasks/pakiti.yml rename to os_builders/roles/common/pakiti/tasks/main.yml index 05538b8f..cb530016 100644 --- a/os_builders/roles/vm_baseline/tasks/pakiti.yml +++ b/os_builders/roles/common/pakiti/tasks/main.yml @@ -35,14 +35,14 @@ - name: Update apt-cache apt: update_cache: yes - when: ansible_distribution == "Ubuntu" and 'pakiti-client' not in ansible_facts.packages + when: ansible_os_family == "Debian" and 'pakiti-client' not in ansible_facts.packages - name: Install Pakiti on RL yum: name: "/tmp/pakiti-client-2.1.4-3.RAL.noarch.rpm" state: present disable_gpg_check: true - when: ansible_distribution == "Rocky" + when: ansible_os_family == "RedHat" - name: Ensure pakiti config directory exists file: diff --git a/os_builders/roles/vm_baseline/files/rsyslog.d/20-ufw.conf b/os_builders/roles/common/rsyslog/files/rsyslog.d/20-ufw.conf similarity index 100% rename from os_builders/roles/vm_baseline/files/rsyslog.d/20-ufw.conf rename to os_builders/roles/common/rsyslog/files/rsyslog.d/20-ufw.conf diff --git a/os_builders/roles/vm_baseline/files/rsyslog.d/21-cloudinit.conf b/os_builders/roles/common/rsyslog/files/rsyslog.d/21-cloudinit.conf similarity index 100% rename from os_builders/roles/vm_baseline/files/rsyslog.d/21-cloudinit.conf rename to os_builders/roles/common/rsyslog/files/rsyslog.d/21-cloudinit.conf diff --git a/os_builders/roles/vm_baseline/tasks/rsyslog.yml b/os_builders/roles/common/rsyslog/tasks/main.yml similarity index 72% rename from os_builders/roles/vm_baseline/tasks/rsyslog.yml rename to os_builders/roles/common/rsyslog/tasks/main.yml index 383845c9..1c2e5265 100644 --- a/os_builders/roles/vm_baseline/tasks/rsyslog.yml +++ b/os_builders/roles/common/rsyslog/tasks/main.yml @@ -1,20 +1,23 @@ - name: Ensure rsyslog is installed - package: + ansible.builtin.package: name: rsyslog state: present + become: true - name: Copy rsyslog.conf - copy: + ansible.builtin.copy: src: "etc/rsyslog.conf" dest: "/etc/rsyslog.conf" owner: root group: root mode: 0644 + become: true - name: Copy rsyslog.d - copy: + ansible.builtin.copy: src: "rsyslog.d" dest: "/etc/rsyslog.d" owner: root group: root mode: 0755 + become: true diff --git a/os_builders/roles/vm_baseline/files/wazuh/enrich-wazuh-agent.py b/os_builders/roles/common/wazuh/files/enrich-wazuh-agent.py similarity index 100% rename from os_builders/roles/vm_baseline/files/wazuh/enrich-wazuh-agent.py rename to 
os_builders/roles/common/wazuh/files/enrich-wazuh-agent.py diff --git a/os_builders/roles/vm_baseline/files/wazuh/local_internal_options.conf b/os_builders/roles/common/wazuh/files/local_internal_options.conf similarity index 100% rename from os_builders/roles/vm_baseline/files/wazuh/local_internal_options.conf rename to os_builders/roles/common/wazuh/files/local_internal_options.conf diff --git a/os_builders/roles/vm_baseline/files/wazuh/ossec.template b/os_builders/roles/common/wazuh/files/ossec.template similarity index 100% rename from os_builders/roles/vm_baseline/files/wazuh/ossec.template rename to os_builders/roles/common/wazuh/files/ossec.template diff --git a/os_builders/roles/vm_baseline/files/wazuh/preexec.conf b/os_builders/roles/common/wazuh/files/preexec.conf similarity index 100% rename from os_builders/roles/vm_baseline/files/wazuh/preexec.conf rename to os_builders/roles/common/wazuh/files/preexec.conf diff --git a/os_builders/roles/vm_baseline/tasks/wazuh/install_wazuh_rocky.yml b/os_builders/roles/common/wazuh/tasks/install_wazuh_rocky.yml similarity index 100% rename from os_builders/roles/vm_baseline/tasks/wazuh/install_wazuh_rocky.yml rename to os_builders/roles/common/wazuh/tasks/install_wazuh_rocky.yml diff --git a/os_builders/roles/vm_baseline/tasks/wazuh/install_wazuh_ubuntu.yml b/os_builders/roles/common/wazuh/tasks/install_wazuh_ubuntu.yml similarity index 100% rename from os_builders/roles/vm_baseline/tasks/wazuh/install_wazuh_ubuntu.yml rename to os_builders/roles/common/wazuh/tasks/install_wazuh_ubuntu.yml diff --git a/os_builders/roles/vm_baseline/tasks/wazuh.yml b/os_builders/roles/common/wazuh/tasks/main.yml similarity index 93% rename from os_builders/roles/vm_baseline/tasks/wazuh.yml rename to os_builders/roles/common/wazuh/tasks/main.yml index 1817d3e5..40960256 100644 --- a/os_builders/roles/vm_baseline/tasks/wazuh.yml +++ b/os_builders/roles/common/wazuh/tasks/main.yml @@ -2,22 +2,22 @@ ansible.builtin.apt: name: python3-pip state: present - when: ansible_distribution == "Ubuntu" + when: ansible_os_family == "Debian" - name: Install wazuh on Ubuntu ansible.builtin.include_tasks: wazuh/install_wazuh_ubuntu.yml - when: ansible_distribution == "Ubuntu" + when: ansible_os_family == "Debian" - name: Install python3 on Rocky8 for Wazuh ansible.builtin.package: name: python3 state: present update_cache: true - when: ansible_distribution == "Rocky" and ansible_distribution_major_version == "8" + when: ansible_os_family == "RedHat" and ansible_distribution_major_version == "8" - name: Install wazuh on Rocky ansible.builtin.include_tasks: wazuh/install_wazuh_rocky.yml - when: ansible_distribution == "Rocky" + when: ansible_os_family == "RedHat" - name: configure wazuh-agent block: diff --git a/os_builders/roles/image_fixes/tasks/interfaces.yml b/os_builders/roles/image_fixes/tasks/interfaces.yml new file mode 100644 index 00000000..7df245ba --- /dev/null +++ b/os_builders/roles/image_fixes/tasks/interfaces.yml @@ -0,0 +1,12 @@ +- name: Check if we are in a container from the GitHub workflows + become: true + ansible.builtin.command: "cat /proc/self/cgroup" + register: in_container + +- name: Remove ens3 interface as we use eth0 + become: true + ansible.builtin.command: "nmcli connection delete ens3" + when: + - ansible_distribution == "Rocky" + - ansible_distribution_major_version == "8" + - in_container.stdout != "0::/" \ No newline at end of file diff --git a/os_builders/roles/image_fixes/tasks/main.yml b/os_builders/roles/image_fixes/tasks/main.yml new file 
mode 100644 index 00000000..0bc4d055 --- /dev/null +++ b/os_builders/roles/image_fixes/tasks/main.yml @@ -0,0 +1,2 @@ +- include_tasks: selinux.yml +- include_tasks: interfaces.yml \ No newline at end of file diff --git a/os_builders/roles/vm_baseline/tasks/disable_selinux.yml b/os_builders/roles/image_fixes/tasks/selinux.yml similarity index 84% rename from os_builders/roles/vm_baseline/tasks/disable_selinux.yml rename to os_builders/roles/image_fixes/tasks/selinux.yml index 23573ddf..0d0f532c 100644 --- a/os_builders/roles/vm_baseline/tasks/disable_selinux.yml +++ b/os_builders/roles/image_fixes/tasks/selinux.yml @@ -5,4 +5,4 @@ line: SELINUX=permissive state: present create: true - when: ansible_distribution == "Rocky" \ No newline at end of file + when: ansible_os_family == "RedHat" \ No newline at end of file diff --git a/os_builders/roles/nubes_bootcontext.yml b/os_builders/roles/nubes_bootcontext.yml deleted file mode 100644 index 35579c75..00000000 --- a/os_builders/roles/nubes_bootcontext.yml +++ /dev/null @@ -1,5 +0,0 @@ -- name: Install Nubes Boot Context - hosts: default - become: true - roles: - - role: nubes_boot_context diff --git a/os_builders/roles/prep_builder/tasks/main.yml b/os_builders/roles/prep_builder/tasks/main.yml index b2197040..cc399e0f 100644 --- a/os_builders/roles/prep_builder/tasks/main.yml +++ b/os_builders/roles/prep_builder/tasks/main.yml @@ -1,83 +1,31 @@ +--- - name: Install required packages - apt: - name: "{{ item }}" + ansible.builtin.apt: + name: + - gpg + - git state: present update_cache: yes - loop: - - gpg - - make - - git - - qemu-kvm - - libvirt-daemon-system - - libvirt-clients - - libguestfs-tools - - bridge-utils - - virtinst + become: true - name: Add Hashicorp apt key - apt_key: + ansible.builtin.apt_key: url: https://apt.releases.hashicorp.com/gpg state: present + become: true - name: Add Hashicorp apt repository - apt_repository: + ansible.builtin.apt_repository: repo: deb [arch=amd64] https://apt.releases.hashicorp.com {{ ansible_distribution_release }} main state: present filename: hashicorp update_cache: yes + become: true - name: Install Packer - apt: + ansible.builtin.apt: # K8s-image-builder requires a specific version of packer name: packer=1.9.5-1 state: present allow_downgrade: yes - -- name: Add user to required groups - user: - name: "{{ ansible_env.USER }}" - groups: "{{ item }}" - append: yes - loop: - - kvm - - libvirt - -- name: Enable libvirt service - service: - daemon_reload: yes - name: libvirtd - enabled: yes - state: started - -- name: Enable KVM module - community.general.modprobe: - name: kvm - state: present - -- name: Make Ubuntu vmlinuz readable by non-root users - # Workaround on literally only Ubuntu (no other OS family including debian) - # because keeping symbols of a public apt package, which we could just download - # and extract, root-only apparently makes sense to someone. 
- file: - path: /boot/vmlinuz-{{ ansible_kernel }} - mode: 0644 - state: file - -- name: Ensure that Ubuntu preserves perms of new kernel on upgrade - copy: - dest: /etc/kernel/postinst.d/vmlinuz-perms - content: | - #!/bin/sh - # https://bugs.launchpad.net/ubuntu/+source/linux/+bug/759725 - - set -e - version="$1" - if [ -z "$version" ]; then - exit 0 - fi - exec dpkg-statoverride --force-statoverride-add --update --add root root 0644 "/boot/vmlinuz-${version}" - mode: 0755 - owner: root - group: root - backup: yes - force: yes + become: true diff --git a/os_builders/roles/prep_vm_post_reboot/tasks/cleanout_rc_directories.yml b/os_builders/roles/prep_vm_post_reboot/tasks/cleanout_rc_directories.yml deleted file mode 100644 index 3d564ac1..00000000 --- a/os_builders/roles/prep_vm_post_reboot/tasks/cleanout_rc_directories.yml +++ /dev/null @@ -1,14 +0,0 @@ -- name: Cleanout rc.loacl - file: - path: "/etc/rc.d/rc.local" - state: absent - -- name: Cleanout /etc/nubes-bootcontext.sh - file: - path: "/etc/nubes-bootcontext.sh" - state: absent - -- name: Cleanout /etc/nubes-bootcontext.sh - file: - path: "/etc/rc*/S99-nubes-boot*" - state: absent diff --git a/os_builders/roles/prep_vm_post_reboot/tasks/cleanout_tmp.yml b/os_builders/roles/prep_vm_post_reboot/tasks/cleanout_tmp.yml deleted file mode 100644 index 53718004..00000000 --- a/os_builders/roles/prep_vm_post_reboot/tasks/cleanout_tmp.yml +++ /dev/null @@ -1,9 +0,0 @@ -- name: Cleanout /tmp/* - file: - path: "/tmp/*" - state: absent - -- name: Cleanout /var/tmp/* - file: - path: "/var/tmp/*" - state: absent diff --git a/os_builders/roles/prep_vm_post_reboot/tasks/cleanup_network_conf.yml b/os_builders/roles/prep_vm_post_reboot/tasks/cleanup_network_conf.yml deleted file mode 100644 index 8c9d7fca..00000000 --- a/os_builders/roles/prep_vm_post_reboot/tasks/cleanup_network_conf.yml +++ /dev/null @@ -1,14 +0,0 @@ -- name: Cleanout /etc/sysconfig/network-scripts/ifcfg-e* - file: - path: "/etc/sysconfig/network-scripts/ifcfg-e*" - state: absent - -- name: Cleanout /etc/udev/rules.d/70* - file: - path: "/etc/udev/rules.d/70*" - state: absent - -- name: Cleanout /etc/sysconfig/network - file: - path: "/etc/sysconfig/network" - state: absent diff --git a/os_builders/roles/prep_vm_post_reboot/tasks/cleanup_old_kernels.yml b/os_builders/roles/prep_vm_post_reboot/tasks/cleanup_old_kernels.yml deleted file mode 100644 index 244b5eee..00000000 --- a/os_builders/roles/prep_vm_post_reboot/tasks/cleanup_old_kernels.yml +++ /dev/null @@ -1,15 +0,0 @@ -- name: Cleanup old kernels Ubuntu - block: - - name: cleanup old kernels - shell: "dpkg --list | egrep -i 'linux-image|linux-headers|linux-modules' | cut -d ' ' -f 3 | grep -v $(uname -r) | grep -v 'linux-headers-generic' | grep -v 'linux-headers-virtual' | grep -v 'linux-image-virtual' | xargs apt-get remove -y" - become: true - when: ( ansible_facts.packages['linux-image'] | length > 1) - when: ansible_distribution == "Ubuntu" and "linux-image" in ansible_facts.packages - -- name: Cleanup old kernels Rocky - block: - - name: cleanup old kernels - command: - cmd: "dnf remove --oldinstallonly kernel -y" - when: ( ansible_facts.packages['kernel'] | length > 1) - when: ansible_distribution == "Rocky" and "kernel" in ansible_facts.packages diff --git a/os_builders/roles/prep_vm_post_reboot/tasks/cleanup_packages.yml b/os_builders/roles/prep_vm_post_reboot/tasks/cleanup_packages.yml deleted file mode 100644 index 75c7cb30..00000000 --- a/os_builders/roles/prep_vm_post_reboot/tasks/cleanup_packages.yml +++ 
/dev/null @@ -1,18 +0,0 @@ -- name: Cleanout *.rpm - file: - path: "*.rpm" - state: absent - -- name: Cleanout *.deb - file: - path: "*.deb" - state: absent - -- name: clean yum/dnf - command: yum clean all - when: ansible_distribution == "Rocky" - -- name: clean apt cache - ansible.builtin.apt: - clean: yes - when: ansible_distribution == "Ubuntu" diff --git a/os_builders/roles/prep_vm_post_reboot/tasks/cleanup_quattor.yml b/os_builders/roles/prep_vm_post_reboot/tasks/cleanup_quattor.yml deleted file mode 100644 index c4393074..00000000 --- a/os_builders/roles/prep_vm_post_reboot/tasks/cleanup_quattor.yml +++ /dev/null @@ -1,25 +0,0 @@ -- name: Cleanout Quattor - block: - - name: Cleanout Quattor - stat: - path: /etc/ccm.conf - register: ccm_conf_exists - - - name: Cleanout ccm config file - file: - path: "/etc/ccm.conf" - state: absent - when: ccm_conf_exists - - - name: Cleanout quattor profile - file: - path: "/var/lib/profile*" - state: absent - when: ccm_conf_exists - - - name: Stop quattor listener - become: true - ansible.builtin.command: - cmd: "systemctl stop ncm-cdispd.service" - when: ccm_conf_exists - when: ansible_distribution == "Rocky" diff --git a/os_builders/roles/prep_vm_post_reboot/tasks/cleanup_sudoers.yml b/os_builders/roles/prep_vm_post_reboot/tasks/cleanup_sudoers.yml deleted file mode 100644 index fd07a375..00000000 --- a/os_builders/roles/prep_vm_post_reboot/tasks/cleanup_sudoers.yml +++ /dev/null @@ -1,4 +0,0 @@ -- name: Cleanout /etc/sudoers.d/cloud - file: - path: "/etc/sudoers.d/cloud" - state: absent diff --git a/os_builders/roles/prep_vm_post_reboot/tasks/cleanup_users.yml b/os_builders/roles/prep_vm_post_reboot/tasks/cleanup_users.yml deleted file mode 100644 index 526e0b13..00000000 --- a/os_builders/roles/prep_vm_post_reboot/tasks/cleanup_users.yml +++ /dev/null @@ -1,12 +0,0 @@ -- name: Cleanup users - ansible.builtin.user: - name: "{{ item }}" - remove: true - state: absent - loop: - - "nagios" - -- name: mark next boot as first boot - file: - path: /var/lock/firstboot - state: touch diff --git a/os_builders/roles/prep_vm_post_reboot/tasks/clear_audit_log.yml b/os_builders/roles/prep_vm_post_reboot/tasks/clear_audit_log.yml deleted file mode 100644 index d7e6a025..00000000 --- a/os_builders/roles/prep_vm_post_reboot/tasks/clear_audit_log.yml +++ /dev/null @@ -1,14 +0,0 @@ -- name: Clear EL audits - block: - - name: Clear Audit log - shell: "/bin/cat /dev/null > /var/log/audit/audit.log" - when: ansible_distribution == "Rocky" - -- name: Clear Audit log - shell: "/bin/cat /dev/null > /var/log/wtmp" - -- name: Clear Ubuntu audits - block: - - name: Clear Audit log - shell: "/bin/cat /dev/null > /var/log/auth.log" - when: ansible_distribution == "Ubuntu" diff --git a/os_builders/roles/prep_vm_post_reboot/tasks/get_package_facts.yml b/os_builders/roles/prep_vm_post_reboot/tasks/get_package_facts.yml deleted file mode 100644 index ac5ec81c..00000000 --- a/os_builders/roles/prep_vm_post_reboot/tasks/get_package_facts.yml +++ /dev/null @@ -1,3 +0,0 @@ -- name: Gather the package facts - ansible.builtin.package_facts: - manager: auto diff --git a/os_builders/roles/prep_vm_post_reboot/tasks/logrotate.yml b/os_builders/roles/prep_vm_post_reboot/tasks/logrotate.yml deleted file mode 100644 index f368224a..00000000 --- a/os_builders/roles/prep_vm_post_reboot/tasks/logrotate.yml +++ /dev/null @@ -1,20 +0,0 @@ -- name: Remove duplicate /etc/logrotate.d/btmp - file: - path: "/etc/logrotate.d/btmp" - state: absent - when: ansible_distribution == "Rocky" - -- name: 
Remove duplicate /etc/logrotate.d/wtmp - file: - path: "/etc/logrotate.d/wtmp" - state: absent - when: ansible_distribution == "Rocky" - - -- name: Clear Audit log - shell: "sudo logrotate -f /etc/logrotate.conf" - -- name: Cleanout /var/log/*.gz - file: - path: "/var/log/*.gz" - state: absent diff --git a/os_builders/roles/prep_vm_post_reboot/tasks/main.yml b/os_builders/roles/prep_vm_post_reboot/tasks/main.yml deleted file mode 100644 index 3aaebd1b..00000000 --- a/os_builders/roles/prep_vm_post_reboot/tasks/main.yml +++ /dev/null @@ -1,25 +0,0 @@ -# Cleanup machine - -- include_tasks: get_package_facts.yml -- include_tasks: run_quattor.yml - when: ansible_distribution == "Rocky" -- include_tasks: get_package_facts.yml -- include_tasks: cleanout_tmp.yml -- include_tasks: cleanout_rc_directories.yml -- include_tasks: cleanup_network_conf.yml -- include_tasks: run_update_keys.yml -- include_tasks: set_locale.yml -- include_tasks: wazuh.yml -- include_tasks: cleanup_quattor.yml - when: ansible_distribution == "Rocky" -- include_tasks: run_pakiti.yml -- include_tasks: cleanup_users.yml -- include_tasks: cleanup_old_kernels.yml -- include_tasks: remove_host_ssh_keys.yml -- include_tasks: cleanup_packages.yml - -# Cleanup history of build -- include_tasks: logrotate.yml -- include_tasks: clear_audit_log.yml -- include_tasks: remove_shell_history.yml -- include_tasks: cleanup_sudoers.yml diff --git a/os_builders/roles/prep_vm_post_reboot/tasks/remove_host_ssh_keys.yml b/os_builders/roles/prep_vm_post_reboot/tasks/remove_host_ssh_keys.yml deleted file mode 100644 index d242c817..00000000 --- a/os_builders/roles/prep_vm_post_reboot/tasks/remove_host_ssh_keys.yml +++ /dev/null @@ -1,4 +0,0 @@ -- name: Cleanout Host SSH Keys - file: - path: "/etc/ssh/*key*" - state: absent diff --git a/os_builders/roles/prep_vm_post_reboot/tasks/remove_shell_history.yml b/os_builders/roles/prep_vm_post_reboot/tasks/remove_shell_history.yml deleted file mode 100644 index bffc66bb..00000000 --- a/os_builders/roles/prep_vm_post_reboot/tasks/remove_shell_history.yml +++ /dev/null @@ -1,9 +0,0 @@ -- name: Cleanout Host SSH Keys - file: - path: "/home/*/.bash_history" - state: absent - -- name: Cleanout Host SSH Keys - file: - path: "/root/.bash_history" - state: absent diff --git a/os_builders/roles/prep_vm_post_reboot/tasks/run_pakiti.yml b/os_builders/roles/prep_vm_post_reboot/tasks/run_pakiti.yml deleted file mode 100644 index 67558898..00000000 --- a/os_builders/roles/prep_vm_post_reboot/tasks/run_pakiti.yml +++ /dev/null @@ -1,3 +0,0 @@ -- name: Run pakiti - command: - cmd: "pakiti2-client" diff --git a/os_builders/roles/prep_vm_post_reboot/tasks/run_quattor.yml b/os_builders/roles/prep_vm_post_reboot/tasks/run_quattor.yml deleted file mode 100644 index 933b8dde..00000000 --- a/os_builders/roles/prep_vm_post_reboot/tasks/run_quattor.yml +++ /dev/null @@ -1,4 +0,0 @@ -- name: run quattor - shell: "sudo quattor-fetch && sudo quattor-configure --all --verbose" - ignore_errors: true - when: ansible_distribution == "Rocky" diff --git a/os_builders/roles/prep_vm_post_reboot/tasks/run_update_keys.yml b/os_builders/roles/prep_vm_post_reboot/tasks/run_update_keys.yml deleted file mode 100644 index 1b53df0d..00000000 --- a/os_builders/roles/prep_vm_post_reboot/tasks/run_update_keys.yml +++ /dev/null @@ -1,3 +0,0 @@ -- name: Update admin keys - command: - cmd: "/usr/local/sbin/update_keys.sh" diff --git a/os_builders/roles/prep_vm_post_reboot/tasks/set_locale.yml b/os_builders/roles/prep_vm_post_reboot/tasks/set_locale.yml 
deleted file mode 100644
index a82ed42a..00000000
--- a/os_builders/roles/prep_vm_post_reboot/tasks/set_locale.yml
+++ /dev/null
@@ -1,53 +0,0 @@
-- name: set locale to set
-  set_fact:
-    config_system_locale: "en_GB.UTF-8"
-    config_system_language: "en_GB.UTF-8"
-
-- name: check if locale exists
-  shell: "locale -a | grep -i {{ config_system_locale | regex_replace('-', '') | quote }}"
-  register: found_locale
-  changed_when: no
-  failed_when: no
-
-- name: create locale
-  command: "localedef -i {{ config_system_locale | regex_replace('(.*)\\..*', '\\1') | quote }} -f {{ config_system_locale | regex_replace('.*\\.(.*)', '\\1') | quote }} {{ config_system_locale | quote }}"
-  when: not ansible_check_mode and found_locale.rc != 0
-
-- name: check if language exists
-  shell: "locale -a | grep -i {{ config_system_language | regex_replace('-', '') | quote }}"
-  register: found_language
-  changed_when: no
-  failed_when: no
-
-- name: create language
-  command: "localedef -i {{ config_system_language | regex_replace('(.*)\\..*', '\\1') | quote }} -f {{ config_system_language | regex_replace('.*\\.(.*)', '\\1') | quote }} {{ config_system_language | quote }}"
-  when: not ansible_check_mode and found_language.rc != 0
-
-- name: Get current locale and language configuration
-  command: localectl status
-  register: locale_status
-  changed_when: false
-
-- name: Parse 'LANG' from current locale and language configuration
-  set_fact:
-    locale_lang: "{{ locale_status.stdout | regex_search('LANG=([^\n]+)', '\\1') | first }}"
-
-- name: Parse 'LANGUAGE' from current locale and language configuration
-  set_fact:
-    locale_language: "{{ locale_status.stdout | regex_search('LANGUAGE=([^\n]+)', '\\1') | default([locale_lang], true) | first }}"
-
-- name: Configure locale to '{{ config_system_locale }}' and language to '{{ config_system_language }}'
-  command: localectl set-locale LANG={{ config_system_locale }} LANGUAGE={{ config_system_language }}
-
-- name: Set locale keymap
-  command: localectl set-keymap gb
-  when: ansible_distribution == "Rocky"
-
-- name: Set locale keymap
-  command: loadkeys uk
-  when: ansible_distribution == "Ubuntu"
-
-
-# - name: Set locale x11 keymap
-#   command: localeclt set-x11-keymap gb
-#   when: ansible_distribution == "Rocky"
diff --git a/os_builders/roles/prep_vm_post_reboot/tasks/wazuh.yml b/os_builders/roles/prep_vm_post_reboot/tasks/wazuh.yml
deleted file mode 100644
index 44eacd49..00000000
--- a/os_builders/roles/prep_vm_post_reboot/tasks/wazuh.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-- name: Set wazuh package name variable
-  set_fact:
-    wazuh_package_name: "wazuh-agent"
-
-- name: Stop wazuh-agent service
-  become: true
-  ansible.builtin.command:
-    cmd: "systemctl stop wazuh-agent.service"
-  when: wazuh_package_name in ansible_facts.packages
-
-- name: Clean Wazuh agent history
-  file:
-    path: "/var/ossec/etc/client.keys"
-    state: absent
diff --git a/os_builders/roles/quattor/defaults/main.yaml b/os_builders/roles/quattor/defaults/main.yaml
new file mode 100644
index 00000000..f1306fad
--- /dev/null
+++ b/os_builders/roles/quattor/defaults/main.yaml
@@ -0,0 +1 @@
+quattor_version: 24.10.0
diff --git a/os_builders/roles/quattor/files/99-set-aquilon-profile.cfg b/os_builders/roles/quattor/files/99-set-aquilon-profile.cfg
new file mode 100644
index 00000000..c58f05fe
--- /dev/null
+++ b/os_builders/roles/quattor/files/99-set-aquilon-profile.cfg
@@ -0,0 +1,20 @@
+## template: jinja
+#cloud-config
+write_files:
+- path: /etc/ccm.conf
+  content: |
+    ca_dir /etc/grid-security/certificates/
/etc/grid-security/certificates/ + cache_root /var/lib/ccm + debug 0 + force 0 + get_timeout 30 + lock_retries 3 + lock_wait 30 + profile https://aquilon.gridpp.rl.ac.uk/profiles/host-{{ ds.ec2_metadata.local_ipv4 | replace(".","-") }}.nubes.stfc.ac.uk.json + profile_failover http://aquilon.gridpp.rl.ac.uk/profiles/host-{{ ds.ec2_metadata.local_ipv4 | replace(".","-") }}.nubes.stfc.ac.uk.json + retrieve_retries 3 + retrieve_wait 30 + world_readable 0 + owner: 'root:root' + permissions: '0644' + defer: true \ No newline at end of file diff --git a/os_builders/roles/quattor/tasks/main.yaml b/os_builders/roles/quattor/tasks/main.yaml new file mode 100644 index 00000000..3548bf7b --- /dev/null +++ b/os_builders/roles/quattor/tasks/main.yaml @@ -0,0 +1,77 @@ +- name: Add EPEL repository + ansible.builtin.yum_repository: + name: epel-{{ ansible_facts.distribution_major_version }}-x86_64 + description: "EPEL {{ ansible_facts.distribution_major_version }} Repository" + baseurl: "http://mirrors.gridpp.rl.ac.uk/current/epel-{{ ansible_facts.distribution_major_version }}-x86_64/RPMS.base/" + enabled: true + gpgcheck: false + state: present + become: true + +- name: Add Powertools repository + ansible.builtin.yum_repository: + name: rocky-{{ ansible_facts.distribution_major_version }}x-x86_64-powertools + description: Rocky Mirror powertools + baseurl: "http://mirrors.gridpp.rl.ac.uk/current/rocky-{{ ansible_facts.distribution_major_version }}-x86_64/RPMS.powertools/" + enabled: true + gpgcheck: true + state: present + module_hotfixes: true + become: true + +- name: Add Rocky mirror repositories crb + ansible.builtin.yum_repository: + name: rocky-{{ ansible_facts.distribution_major_version }}x-x86_64-crb + description: Rocky Mirror crb + baseurl: "http://mirrors.gridpp.rl.ac.uk/current/rocky-{{ ansible_facts.distribution_major_version }}-x86_64/RPMS.crb/" + enabled: true + gpgcheck: true + state: present + module_hotfixes: true + become: true + when: ansible_facts.distribution_major_version == '9' + +- name: Add Rocky mirror repositories + ansible.builtin.yum_repository: + name: rocky-{{ ansible_facts.distribution_major_version }}x-x86_64-{{ item }} + description: Rocky Mirror {{ item }} + baseurl: "http://mirrors.gridpp.rl.ac.uk/current/rocky-{{ ansible_facts.distribution_major_version }}-x86_64/RPMS.{{ item }}/" + enabled: true + gpgcheck: true + state: present + module_hotfixes: true + with_items: + - appstream + - extras + - os + become: true + + +- name: Add Quattor repositories + ansible.builtin.yum_repository: + name: "{{ item }}" + description: "{{ item }} Repository" + baseurl: "http://mirrors.gridpp.rl.ac.uk/live/quattor-noarch/RPMS.{{ item }}/" + enabled: true + gpgcheck: false + state: present + become: true + loop: + - externals-el{{ ansible_facts.distribution_major_version }} + - quattor-{{ quattor_version }}-el{{ ansible_facts.distribution_major_version }} + +- name: Install packages + ansible.builtin.yum: + name: + - ncm-ncd + - ncm-spma + become: true + +- name: Create cloud-init config to run the script + ansible.builtin.copy: + src: 99-set-aquilon-profile.cfg + dest: /etc/cloud/cloud.cfg.d/ + owner: root + group: root + mode: "0644" + become: true diff --git a/os_builders/roles/run_packer/defaults/main.yml b/os_builders/roles/run_packer/defaults/main.yml deleted file mode 100644 index 0131d77f..00000000 --- a/os_builders/roles/run_packer/defaults/main.yml +++ /dev/null @@ -1,15 +0,0 @@ -# Packer -auto_install_name: "stage-1" -auto_install_output_path: "packfiles/base_output" - 
-provisioner_name: "stage-2"
-provisioner_output_path: "packfiles/output"
-
-# Build with VNC on by default unless the user opts out
-packer_headless: "false"
-
-# Sysprep options
-# ssh-userdir is disabled, to preserve the root SSH authorized_keys file
-sysprep_options: >-
-  --operations defaults,user-account,-ssh-userdir
-  --remove-user-accounts packer
diff --git a/os_builders/roles/run_packer/tasks/main.yml b/os_builders/roles/run_packer/tasks/main.yml
deleted file mode 100644
index e6bb0101..00000000
--- a/os_builders/roles/run_packer/tasks/main.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-- include_tasks: packer.yml
-- include_tasks: sysprep.yml
diff --git a/os_builders/roles/run_packer/tasks/packer.yml b/os_builders/roles/run_packer/tasks/packer.yml
deleted file mode 100644
index 2d281116..00000000
--- a/os_builders/roles/run_packer/tasks/packer.yml
+++ /dev/null
@@ -1,40 +0,0 @@
-- name: Print build variant
-  debug:
-    msg: "Building: {{ build_variant }}"
-
-- name: Check if auto installed image exists
-  find:
-    path: "{{ playbook_dir }}/{{ auto_install_output_path }}"
-    patterns: "*{{ build_variant }}*"
-  register: auto_install_image
-
-- name: Check autoinstall image, build if it does not exist locally
-  when: not auto_install_image.matched
-  block:
-    - name: Print autoinstall command for debugging
-      debug:
-        msg: "Running: cd packfiles && packer build --only='{{ auto_install_name }}*{{ build_variant }}*' ."
-
-    - name: Running packer auto install build for {{ build_variant }}. This may take a while...
-      shell:
-        chdir: "{{ playbook_dir }}/../packfiles"
-        cmd: packer build --only='{{ auto_install_name }}*{{ build_variant }}*' .
-      environment:
-        PKR_VAR_headless: "{{ packer_headless }}"
-      register: packer_output
-
-    - name: Auto install Output for {{ build_variant }}
-      debug:
-        msg: "{{ packer_output.stdout_lines }}"
-
-- name: (Re-)Provision image for {{ build_variant }}
-  shell:
-    chdir: "{{ playbook_dir }}/../packfiles"
-    cmd: packer build --force --only='{{ provisioner_name }}*{{ build_variant }}*' .
-  environment:
-    PKR_VAR_headless: "{{ packer_headless }}"
-  register: packer_output
-
-- name: Provisioner Output for {{ build_variant }}
-  debug:
-    msg: "{{ packer_output.stdout_lines }}"
diff --git a/os_builders/roles/run_packer/tasks/sysprep.yml b/os_builders/roles/run_packer/tasks/sysprep.yml
deleted file mode 100644
index 05bf20d7..00000000
--- a/os_builders/roles/run_packer/tasks/sysprep.yml
+++ /dev/null
@@ -1,10 +0,0 @@
-- name: Find output image
-  find:
-    path: "{{ playbook_dir }}/../{{ provisioner_output_path }}"
-    patterns: "*{{ build_variant }}*"
-  register: provisioner_output
-
-- name: Run virt-sysprep on each image
-  shell:
-    cmd: "virt-sysprep --format qcow2 -a {{ item }} {{ sysprep_options }}"
-  with_items: "{{ provisioner_output.files | map(attribute='path') | list }}"
diff --git a/os_builders/roles/tidy_image/tasks/cleanup_old_kernels.yml b/os_builders/roles/tidy_image/tasks/cleanup_old_kernels.yml
index 244b5eee..87e1af24 100644
--- a/os_builders/roles/tidy_image/tasks/cleanup_old_kernels.yml
+++ b/os_builders/roles/tidy_image/tasks/cleanup_old_kernels.yml
@@ -4,7 +4,7 @@
       shell: "dpkg --list | egrep -i 'linux-image|linux-headers|linux-modules' | cut -d ' ' -f 3 | grep -v $(uname -r) | grep -v 'linux-headers-generic' | grep -v 'linux-headers-virtual' | grep -v 'linux-image-virtual' | xargs apt-get remove -y"
       become: true
       when: ( ansible_facts.packages['linux-image'] | length > 1)
-  when: ansible_distribution == "Ubuntu" and "linux-image" in ansible_facts.packages
+  when: ansible_os_family == "Debian" and "linux-image" in ansible_facts.packages
 
 - name: Cleanup old kernels Rocky
   block:
@@ -12,4 +12,4 @@
       command:
         cmd: "dnf remove --oldinstallonly kernel -y"
       when: ( ansible_facts.packages['kernel'] | length > 1)
-  when: ansible_distribution == "Rocky" and "kernel" in ansible_facts.packages
+  when: ansible_os_family == "RedHat" and "kernel" in ansible_facts.packages
diff --git a/os_builders/roles/tidy_image/tasks/cleanup_packages.yml b/os_builders/roles/tidy_image/tasks/cleanup_packages.yml
index 75c7cb30..66d4f922 100644
--- a/os_builders/roles/tidy_image/tasks/cleanup_packages.yml
+++ b/os_builders/roles/tidy_image/tasks/cleanup_packages.yml
@@ -10,9 +10,9 @@
 
 - name: clean yum/dnf
   command: yum clean all
-  when: ansible_distribution == "Rocky"
+  when: ansible_os_family == "RedHat"
 
 - name: clean apt cache
   ansible.builtin.apt:
     clean: yes
-  when: ansible_distribution == "Ubuntu"
+  when: ansible_os_family == "Debian"
diff --git a/os_builders/roles/tidy_image/tasks/cleanup_quattor.yml b/os_builders/roles/tidy_image/tasks/cleanup_quattor.yml
deleted file mode 100644
index 41d712c1..00000000
--- a/os_builders/roles/tidy_image/tasks/cleanup_quattor.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-- name: Cleanout Quattor
-  block:
-    - name: Cleanout Quattor
-      stat:
-        path: /etc/ccm.conf
-      register: ccm_conf_exists
-
-    - name: Cleanout ccm config file
-      file:
-        path: "/etc/ccm.conf"
-        state: absent
-      when: ccm_conf_exists.stat.exists
-
-    - name: Cleanout quattor profile
-      file:
-        path: "/var/lib/profile*"
-        state: absent
-      when: ccm_conf_exists.stat.exists
-
-    - name: Stop quattor listener
-      become: true
-      ansible.builtin.command:
-        cmd: "systemctl stop ncm-cdispd.service"
-      when: ccm_conf_exists.stat.exists
-  when: ansible_distribution == "Rocky"
diff --git a/os_builders/roles/tidy_image/tasks/clear_audit_log.yml b/os_builders/roles/tidy_image/tasks/clear_audit_log.yml
index d7e6a025..b38455c6 100644
--- a/os_builders/roles/tidy_image/tasks/clear_audit_log.yml
+++ b/os_builders/roles/tidy_image/tasks/clear_audit_log.yml
@@ -2,7 +2,7 @@
   block:
     - name: Clear Audit log
      shell: "/bin/cat /dev/null > /var/log/audit/audit.log"
-  when: ansible_distribution == "Rocky"
+  when: ansible_os_family == "RedHat"
 
 - name: Clear Audit log
   shell: "/bin/cat /dev/null > /var/log/wtmp"
@@ -11,4 +11,4 @@
   block:
     - name: Clear Audit log
      shell: "/bin/cat /dev/null > /var/log/auth.log"
-  when: ansible_distribution == "Ubuntu"
+  when: ansible_os_family == "Debian"
diff --git a/os_builders/roles/tidy_image/tasks/logrotate.yml b/os_builders/roles/tidy_image/tasks/logrotate.yml
index f368224a..6391adcb 100644
--- a/os_builders/roles/tidy_image/tasks/logrotate.yml
+++ b/os_builders/roles/tidy_image/tasks/logrotate.yml
@@ -2,13 +2,13 @@
   file:
     path: "/etc/logrotate.d/btmp"
     state: absent
-  when: ansible_distribution == "Rocky"
+  when: ansible_os_family == "RedHat"
 
 - name: Remove duplicate /etc/logrotate.d/wtmp
   file:
     path: "/etc/logrotate.d/wtmp"
     state: absent
-  when: ansible_distribution == "Rocky"
+  when: ansible_os_family == "RedHat"
 
 
 - name: Clear Audit log
diff --git a/os_builders/roles/tidy_image/tasks/main.yml b/os_builders/roles/tidy_image/tasks/main.yml
index 6f7fbf86..f72e2107 100644
--- a/os_builders/roles/tidy_image/tasks/main.yml
+++ b/os_builders/roles/tidy_image/tasks/main.yml
@@ -2,17 +2,12 @@
 
 - include_tasks: reboot.yml
 - include_tasks: get_package_facts.yml
-- include_tasks: run_quattor.yml
-  when: ansible_distribution == "Rocky"
 - include_tasks: get_package_facts.yml
 - include_tasks: cleanout_tmp.yml
 - include_tasks: cleanout_rc_directories.yml
 - include_tasks: cleanup_network_conf.yml
 - include_tasks: run_update_keys.yml
 - include_tasks: set_locale.yml
-- include_tasks: wazuh.yml
-- include_tasks: cleanup_quattor.yml
-  when: ansible_distribution == "Rocky"
 - include_tasks: run_pakiti.yml
 - include_tasks: cleanup_users.yml
 - include_tasks: cleanup_old_kernels.yml
diff --git a/os_builders/roles/tidy_image/tasks/run_quattor.yml b/os_builders/roles/tidy_image/tasks/run_quattor.yml
deleted file mode 100644
index 933b8dde..00000000
--- a/os_builders/roles/tidy_image/tasks/run_quattor.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-- name: run quattor
-  shell: "sudo quattor-fetch && sudo quattor-configure --all --verbose"
-  ignore_errors: true
-  when: ansible_distribution == "Rocky"
diff --git a/os_builders/roles/tidy_image/tasks/set_locale.yml b/os_builders/roles/tidy_image/tasks/set_locale.yml
index adf2cd45..fd0a0674 100644
--- a/os_builders/roles/tidy_image/tasks/set_locale.yml
+++ b/os_builders/roles/tidy_image/tasks/set_locale.yml
@@ -46,13 +46,13 @@
 
 - name: Set locale keymap
   command: localectl set-keymap gb
-  when: ansible_distribution == "Rocky"
+  when: ansible_os_family == "RedHat"
 
 # - name: Set locale keymap
 #   command: loadkeys uk
-#   when: ansible_distribution == "Ubuntu"
+#   when: ansible_os_family == "Debian"
 
 
 # - name: Set locale x11 keymap
 #   command: localeclt set-x11-keymap gb
-#   when: ansible_distribution == "Rocky"
+#   when: ansible_os_family == "RedHat"
diff --git a/os_builders/roles/tidy_image/tasks/wazuh.yml b/os_builders/roles/tidy_image/tasks/wazuh.yml
deleted file mode 100644
index 2bac3024..00000000
--- a/os_builders/roles/tidy_image/tasks/wazuh.yml
+++ /dev/null
@@ -1,15 +0,0 @@
-- name: Set wazuh package name variable
-  set_fact:
-    wazuh_package_name: "wazuh-agent"
-
-- name: Stop wazuh-agent service
-  become: true
-  ansible.builtin.command:
-    cmd: "systemctl stop wazuh-agent.service"
-  when: wazuh_package_name in ansible_facts.packages
-  ignore_errors: True
-
-- name: Clean Wazuh agent history
-  file:
-    path: "/var/ossec/etc/client.keys"
-    state: absent
diff --git a/os_builders/roles/vm_baseline/tasks/get-package-facts.yml b/os_builders/roles/vm_baseline/tasks/get-package-facts.yml
deleted file mode 100644
index ac5ec81c..00000000
--- a/os_builders/roles/vm_baseline/tasks/get-package-facts.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-- name: Gather the package facts
-  ansible.builtin.package_facts:
-    manager: auto
diff --git a/os_builders/roles/vm_baseline/tasks/main.yml b/os_builders/roles/vm_baseline/tasks/main.yml
deleted file mode 100644
index 36056ebf..00000000
--- a/os_builders/roles/vm_baseline/tasks/main.yml
+++ /dev/null
@@ -1,17 +0,0 @@
-- include_tasks: update.yml
-- include_tasks: get-package-facts.yml
-
-- include_tasks: cron.yml
-- include_tasks: grub-cmdline.yml
-
-- include_tasks: ssh.yml
-- include_tasks: rsyslog.yml
-- include_tasks: openscap.yml
-- include_tasks: wazuh.yml
-
-# Include Pakiti last, so it will report on the state of a complete system
-- include_tasks: ukescienceca.yml
-- include_tasks: pakiti.yml
-- include_tasks: qemu-guest-agent.yml
-
-- include_tasks: disable_selinux.yml
diff --git a/os_builders/roles/vm_baseline/tasks/openscap.yml b/os_builders/roles/vm_baseline/tasks/openscap.yml
deleted file mode 100644
index 5ca7ac6b..00000000
--- a/os_builders/roles/vm_baseline/tasks/openscap.yml
+++ /dev/null
@@ -1,31 +0,0 @@
-- name: Install openscap
-  package:
-    name: libopenscap8
-    state: present
-  when: ansible_distribution == 'Ubuntu' and ansible_distribution_version != "24.04"
-  register: result
-  retries: 5
-  delay: 30
-  until: result is not failed
-
-- name: Install openscap
-  package:
-    name:
-      - openscap-scanner
-      - openscap-utils
-    state: present
-  when: ansible_distribution == 'Ubuntu' and ansible_distribution_version == "24.04"
-  register: result
-  retries: 5
-  delay: 30
-  until: result is not failed
-
-- name: Install openscap
-  package:
-    name: openscap
-    state: present
-  when: ansible_distribution == "Rocky"
-  register: result
-  retries: 5
-  delay: 30
-  until: result is not failed
\ No newline at end of file
diff --git a/os_builders/roles/vm_baseline/tasks/ukescienceca.yml b/os_builders/roles/vm_baseline/tasks/ukescienceca.yml
deleted file mode 100644
index 0eba0c67..00000000
--- a/os_builders/roles/vm_baseline/tasks/ukescienceca.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-- name: Install UK eScience CA
-  ansible.builtin.include_tasks: ukscienceca/install_ukscienceca_ubuntu.yml
-  when: ansible_distribution == "Ubuntu"
-
-- name: Install UK eScience Root CA on RL
-  ansible.builtin.include_tasks: ukscienceca/install_ukscienceca_rocky.yml
-  when: ansible_distribution == "Rocky"
diff --git a/os_builders/roles/vm_baseline/tasks/ukscienceca/install_ukscienceca_rocky.yml b/os_builders/roles/vm_baseline/tasks/ukscienceca/install_ukscienceca_rocky.yml
deleted file mode 100644
index 39fbf365..00000000
--- a/os_builders/roles/vm_baseline/tasks/ukscienceca/install_ukscienceca_rocky.yml
+++ /dev/null
@@ -1,16 +0,0 @@
-- name: add CA repo
-  yum_repository:
-    name: eScienceCAs
-    file: eScienceCAs
-    description: "Repository to install the UK eScience (and other IGTF) CA certs from"
-    baseurl: https://repository.egi.eu/sw/production/cas/1/current
-    enabled: true
-    gpgcheck: true
-    gpgkey: "https://dist.eugridpma.info/distribution/igtf/current/GPG-KEY-EUGridPMA-RPM-4"
-
-- name: Install UK eScience Packages
-  yum:
-    name:
-      - ca_UKeScienceRoot-2007
-      - ca_UKeScienceCA-2B
-    state: present
\ No newline at end of file
diff --git a/os_builders/roles/vm_baseline/tasks/ukscienceca/install_ukscienceca_ubuntu.yml b/os_builders/roles/vm_baseline/tasks/ukscienceca/install_ukscienceca_ubuntu.yml
deleted file mode 100644
index 0575c7fc..00000000
--- a/os_builders/roles/vm_baseline/tasks/ukscienceca/install_ukscienceca_ubuntu.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-- name: Install required package
-  ansible.builtin.apt:
-    name: python3-debian
-    update_cache: true
-    state: present
-
-- name: Add EU Grid PMA repo
-  ansible.builtin.deb822_repository:
-    name: eu-grid-pma
-    uris: http://repository.egi.eu/sw/production/cas/1/current
-    signed_by: "https://dist.eugridpma.info/distribution/igtf/current/GPG-KEY-EUGridPMA-RPM-4"
-    suites: egi-igtf
-    components: core
-    state: present
-
-- name: Install UK eScience Root
-  ansible.builtin.apt:
-    name: ca-ukescienceroot-2007
-    state: present
-    update_cache: yes
-
-- name: Install UK eScience CA
-  ansible.builtin.apt:
-    name: ca-ukescienceca-2b
-    state: present
-    update_cache: yes
\ No newline at end of file
diff --git a/os_builders/tidy_image.yml b/os_builders/tidy_image.yml
new file mode 100644
index 00000000..024c4a6e
--- /dev/null
+++ b/os_builders/tidy_image.yml
@@ -0,0 +1,10 @@
+---
+- name: Tidy STFC Cloud user image
+  hosts: all
+  pre_tasks:
+    - name: User warning
+      ansible.builtin.debug:
+        msg: "[Warning] Do not run on non-cloud machine"
+
+  roles:
+    - role: tidy_image
diff --git a/os_builders/vm_baseline.yml b/os_builders/vm_baseline.yml
new file mode 100644
index 00000000..b20d8e30
--- /dev/null
+++ b/os_builders/vm_baseline.yml
@@ -0,0 +1,16 @@
+---
+- name: Prepare STFC Cloud user image with our security compliance baseline
+  hosts: all
+  pre_tasks:
+    - name: User warning
+      ansible.builtin.debug:
+        msg: "[Warning] Do not run on non-cloud machine"
+
+  roles:
+    - role: common/container_registry
+    - role: common/nubes_bootcontext
+    - role: common/grub
+    - role: common/admin_keys
+    - role: common/wazuh
+    - role: common/pakiti
+    - role: common/rsyslog